From 9b8caf6c4176fc1ee61fbd902a17861593d1280d Mon Sep 17 00:00:00 2001 From: Kevin Stillhammer Date: Sat, 11 Apr 2026 19:14:05 +0200 Subject: [PATCH] Add manifest-file input (#352) --- .github/workflows/test.yml | 26 + README.md | 42 +- __tests__/download/custom-manifest.ndjson | 1 + __tests__/download/download-version.test.ts | 509 ++ __tests__/download/manifest.test.ts | 196 + action.yml | 6 +- dist/ruff-action/index.cjs | 4981 ++++--------------- package-lock.json | 3 +- package.json | 3 +- src/download/download-version.ts | 248 +- src/download/manifest.ts | 243 + src/download/variant-selection.ts | 39 + src/ruff-action.ts | 17 +- src/utils/constants.ts | 6 + src/utils/fetch.ts | 21 + src/utils/inputs.ts | 1 + 16 files changed, 2129 insertions(+), 4213 deletions(-) create mode 100644 __tests__/download/custom-manifest.ndjson create mode 100644 __tests__/download/download-version.test.ts create mode 100644 __tests__/download/manifest.test.ts create mode 100644 src/download/manifest.ts create mode 100644 src/download/variant-selection.ts create mode 100644 src/utils/fetch.ts diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 7686ec3..f17db8c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -431,6 +431,31 @@ jobs: env: RUFF_VERSION: ${{ steps.ruff-action.outputs.ruff-version }} + test-custom-manifest-file: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + - name: Create test src + run: | + mkdir -p "${{ runner.temp }}/ruff-manifest-test" + printf 'print("hello")\n' > "${{ runner.temp }}/ruff-manifest-test/hello.py" + - name: Install from custom manifest file + id: ruff-action + uses: ./ + with: + src: ${{ runner.temp }}/ruff-manifest-test + manifest-file: "https://raw.githubusercontent.com/astral-sh/ruff-action/${{ github.ref }}/__tests__/download/custom-manifest.ndjson" + - name: Correct version gets installed + 
run: | + if [ "$RUFF_VERSION" != "0.15.10" ]; then + echo "Wrong ruff version: $RUFF_VERSION" + exit 1 + fi + env: + RUFF_VERSION: ${{ steps.ruff-action.outputs.ruff-version }} + all-tests-passed: runs-on: ubuntu-latest needs: @@ -456,6 +481,7 @@ jobs: - test-failure - test-multiple-src - test-parent-directory-pyproject + - test-custom-manifest-file if: always() steps: - name: All tests passed diff --git a/README.md b/README.md index bbd0bd2..0d14262 100644 --- a/README.md +++ b/README.md @@ -20,20 +20,25 @@ anything `ruff` can (ex, fix). - [Install a specific version](#install-a-specific-version) - [Install a version by supplying a semver range or pep440 specifier](#install-a-version-by-supplying-a-semver-range-or-pep440-specifier) - [Install a version from a specified version file](#install-a-version-from-a-specified-version-file) + - [Install using a custom manifest URL](#install-using-a-custom-manifest-url) - [Validate checksum](#validate-checksum) - [GitHub authentication token](#github-authentication-token) - [Outputs](#outputs) ## Usage -| Input | Description | Default | -|----------------|--------------------------------------------------------------------------------------------------------------------------------------------|--------------------| -| `version` | The version of Ruff to install. See [Install specific versions](#install-specific-versions) | `latest` | -| `version-file` | The file to read the version from. See [Install a version from a specified version file](#install-a-version-from-a-specified-version-file) | None | -| `args` | The arguments to pass to the `ruff` command. See [Configuring Ruff] | `check` | -| `src` | The directory or single files to run `ruff` on. | [github.workspace] | -| `checksum` | The sha256 checksum of the downloaded executable. | None | -| `github-token` | The GitHub token to use for authentication. 
| `GITHUB_TOKEN` | +| Input | Description | Default | +|-----------------|--------------------------------------------------------------------------------------------------------------------------------------------|--------------------| +| `version` | The version of Ruff to install. See [Install specific versions](#install-specific-versions) | `latest` | +| `version-file` | The file to read the version from. See [Install a version from a specified version file](#install-a-version-from-a-specified-version-file) | None | +| `manifest-file` | URL to a custom Ruff manifest in the `astral-sh/versions` format. | None | +| `args` | The arguments to pass to the `ruff` command. See [Configuring Ruff] | `check` | +| `src` | The directory or single files to run `ruff` on. | [github.workspace] | +| `checksum` | The sha256 checksum of the downloaded artifact. | None | +| `github-token` | The GitHub token to use when downloading Ruff release artifacts from GitHub. | `GITHUB_TOKEN` | + +By default, Ruff version metadata is resolved from the +[`astral-sh/versions` Ruff manifest](https://github.com/astral-sh/versions/blob/main/v1/ruff.ndjson). ### Basic @@ -155,6 +160,19 @@ Currently `pyproject.toml` and `requirements.txt` are supported. version-file: "my-path/to/pyproject.toml-or-requirements.txt" ``` +#### Install using a custom manifest URL + +You can override the default `astral-sh/versions` manifest with `manifest-file`. +This affects both version resolution and artifact selection. + +```yaml +- name: Install Ruff from a custom manifest + uses: astral-sh/ruff-action@v3 + with: + version: "latest" + manifest-file: "https://example.com/ruff.ndjson" +``` + ### Validate checksum You can specify a checksum to validate the downloaded executable. Checksums up to the default version @@ -171,9 +189,11 @@ are automatically verified by this action. The sha256 hashes can be found on the ### GitHub authentication token -This action uses the GitHub API to fetch the ruff release artifacts. 
To avoid hitting the GitHub API -rate limit too quickly, an authentication token can be provided via the `github-token` input. By -default, the `GITHUB_TOKEN` secret is used, which is automatically provided by GitHub Actions. +By default, this action resolves available Ruff versions from +[`astral-sh/versions`](https://github.com/astral-sh/versions) and downloads release artifacts from `https://releases.astral.sh`. If this fails, this action falls back to downloading from the GitHub releases page of the ruff repository. + +You can provide a token via `github-token` to authenticate those downloads. By default, the +`GITHUB_TOKEN` secret is used, which is automatically provided by GitHub Actions. If the default [permissions for the GitHub token](https://docs.github.com/en/actions/security-for-github-actions/security-guides/automatic-token-authentication#permissions-for-the-github_token) diff --git a/__tests__/download/custom-manifest.ndjson b/__tests__/download/custom-manifest.ndjson new file mode 100644 index 0000000..abb2f05 --- /dev/null +++ b/__tests__/download/custom-manifest.ndjson @@ -0,0 +1 @@ +{"version":"0.15.10","artifacts":[{"platform":"x86_64-unknown-linux-gnu","variant":"default","url":"https://github.com/astral-sh/ruff/releases/download/0.15.10/ruff-x86_64-unknown-linux-gnu.tar.gz","archive_format":"tar.gz","sha256":"e3e9e5c791542f00d95edc74a506e1ac24efc0af9574de01ab338187bf1ff9f6"}]} diff --git a/__tests__/download/download-version.test.ts b/__tests__/download/download-version.test.ts new file mode 100644 index 0000000..950224a --- /dev/null +++ b/__tests__/download/download-version.test.ts @@ -0,0 +1,509 @@ +import { beforeEach, describe, expect, it, jest } from "@jest/globals"; +import * as semver from "semver"; + +const mockInfo = jest.fn(); +const mockWarning = jest.fn(); + +jest.unstable_mockModule("@actions/core", () => ({ + debug: jest.fn(), + info: mockInfo, + warning: mockWarning, +})); + +const mockDownloadTool = jest.fn(); +const 
mockExtractTar = jest.fn(); +const mockExtractZip = jest.fn(); +const mockCacheDir = jest.fn(); + +jest.unstable_mockModule("@actions/tool-cache", () => ({ + cacheDir: mockCacheDir, + downloadTool: mockDownloadTool, + evaluateVersions: (versions: string[], range: string) => + semver.maxSatisfying(versions, range) ?? "", + extractTar: mockExtractTar, + extractZip: mockExtractZip, + find: () => "", + findAllVersions: () => [], + isExplicitVersion: (version: string) => semver.valid(version) !== null, +})); + +const mockGetLatestVersion = jest.fn(); +const mockGetAllVersions = jest.fn(); +const mockGetArtifact = jest.fn(); + +jest.unstable_mockModule("../../src/download/manifest", () => ({ + getAllVersions: mockGetAllVersions, + getArtifact: mockGetArtifact, + getLatestVersion: mockGetLatestVersion, +})); + +const mockValidateChecksum = jest.fn(); + +jest.unstable_mockModule("../../src/download/checksum/checksum", () => ({ + validateChecksum: mockValidateChecksum, +})); + +const mockCopyFile = jest.fn(); +const mockReaddir = jest.fn(); + +jest.unstable_mockModule("node:fs", () => ({ + promises: { + copyFile: mockCopyFile, + readdir: mockReaddir, + }, +})); + +const { downloadVersion, resolveVersion, rewriteToMirror } = await import( + "../../src/download/download-version" +); + +describe("download-version", () => { + beforeEach(() => { + mockInfo.mockReset(); + mockWarning.mockReset(); + mockDownloadTool.mockReset(); + mockExtractTar.mockReset(); + mockExtractZip.mockReset(); + mockCacheDir.mockReset(); + mockGetLatestVersion.mockReset(); + mockGetAllVersions.mockReset(); + mockGetArtifact.mockReset(); + mockValidateChecksum.mockReset(); + mockCopyFile.mockReset(); + mockReaddir.mockReset(); + + mockDownloadTool.mockResolvedValue("/tmp/downloaded"); + mockExtractTar.mockResolvedValue("/tmp/extracted"); + mockExtractZip.mockResolvedValue("/tmp/extracted"); + mockCacheDir.mockResolvedValue("/tmp/cached"); + mockReaddir.mockResolvedValue(["ruff"]); + }); + + 
describe("resolveVersion", () => { + it("uses the default manifest to resolve latest", async () => { + mockGetLatestVersion.mockResolvedValue("0.15.8"); + + const version = await resolveVersion("latest", undefined); + + expect(version).toBe("0.15.8"); + expect(mockGetLatestVersion).toHaveBeenCalledTimes(1); + expect(mockGetLatestVersion).toHaveBeenCalledWith(undefined); + }); + + it("uses the default manifest to resolve available versions", async () => { + mockGetAllVersions.mockResolvedValue(["0.15.8", "0.15.7"]); + + const version = await resolveVersion("0.15.x", undefined); + + expect(version).toBe("0.15.8"); + expect(mockGetAllVersions).toHaveBeenCalledTimes(1); + expect(mockGetAllVersions).toHaveBeenCalledWith(undefined); + }); + + it("uses manifest-file when provided", async () => { + mockGetAllVersions.mockResolvedValue(["0.15.8", "0.15.7"]); + + const version = await resolveVersion( + "0.15.x", + "https://example.com/custom.ndjson", + ); + + expect(version).toBe("0.15.8"); + expect(mockGetAllVersions).toHaveBeenCalledWith( + "https://example.com/custom.ndjson", + ); + }); + }); + + describe("downloadVersion", () => { + it("fails when manifest lookup fails", async () => { + mockGetArtifact.mockRejectedValue(new Error("manifest unavailable")); + + await expect( + downloadVersion( + "unknown-linux-gnu", + "x86_64", + "0.15.8", + undefined, + "token", + ), + ).rejects.toThrow("manifest unavailable"); + + expect(mockDownloadTool).not.toHaveBeenCalled(); + expect(mockValidateChecksum).not.toHaveBeenCalled(); + }); + + it("fails when no matching artifact exists in the default manifest", async () => { + mockGetArtifact.mockResolvedValue(undefined); + + await expect( + downloadVersion( + "unknown-linux-gnu", + "x86_64", + "0.15.8", + undefined, + "token", + ), + ).rejects.toThrow( + "Could not find artifact for version 0.15.8, arch x86_64, platform unknown-linux-gnu in https://raw.githubusercontent.com/astral-sh/versions/main/v1/ruff.ndjson .", + ); + + 
expect(mockDownloadTool).not.toHaveBeenCalled(); + expect(mockValidateChecksum).not.toHaveBeenCalled(); + }); + + it("uses built-in checksums for default manifest downloads", async () => { + mockGetArtifact.mockResolvedValue({ + archiveFormat: "tar.gz", + checksum: "manifest-checksum-that-should-be-ignored", + downloadUrl: "https://example.com/ruff.tar.gz", + }); + + await downloadVersion( + "unknown-linux-gnu", + "x86_64", + "0.15.8", + undefined, + "token", + ); + + expect(mockValidateChecksum).toHaveBeenCalledWith( + undefined, + "/tmp/downloaded", + "x86_64", + "unknown-linux-gnu", + "0.15.8", + ); + }); + + it("rewrites GitHub Releases URLs to the Astral mirror", async () => { + mockGetArtifact.mockResolvedValue({ + archiveFormat: "tar.gz", + checksum: "abc123", + downloadUrl: + "https://github.com/astral-sh/ruff/releases/download/0.15.8/ruff-x86_64-unknown-linux-gnu.tar.gz", + }); + + await downloadVersion( + "unknown-linux-gnu", + "x86_64", + "0.15.8", + undefined, + "token", + ); + + expect(mockDownloadTool).toHaveBeenCalledWith( + "https://releases.astral.sh/github/ruff/releases/download/0.15.8/ruff-x86_64-unknown-linux-gnu.tar.gz", + undefined, + undefined, + ); + }); + + it("does not rewrite non-GitHub URLs", async () => { + mockGetArtifact.mockResolvedValue({ + archiveFormat: "tar.gz", + checksum: "abc123", + downloadUrl: "https://example.com/ruff.tar.gz", + }); + + await downloadVersion( + "unknown-linux-gnu", + "x86_64", + "0.15.8", + undefined, + "token", + ); + + expect(mockDownloadTool).toHaveBeenCalledWith( + "https://example.com/ruff.tar.gz", + undefined, + undefined, + ); + }); + + it("falls back to GitHub Releases when the mirror download fails", async () => { + mockGetArtifact.mockResolvedValue({ + archiveFormat: "tar.gz", + checksum: "abc123", + downloadUrl: + "https://github.com/astral-sh/ruff/releases/download/0.15.8/ruff-x86_64-unknown-linux-gnu.tar.gz", + }); + + mockDownloadTool + .mockRejectedValueOnce(new Error("mirror unavailable")) + 
.mockResolvedValueOnce("/tmp/downloaded"); + + await downloadVersion( + "unknown-linux-gnu", + "x86_64", + "0.15.8", + undefined, + "token", + ); + + expect(mockDownloadTool).toHaveBeenCalledTimes(2); + expect(mockDownloadTool).toHaveBeenNthCalledWith( + 1, + "https://releases.astral.sh/github/ruff/releases/download/0.15.8/ruff-x86_64-unknown-linux-gnu.tar.gz", + undefined, + undefined, + ); + expect(mockDownloadTool).toHaveBeenNthCalledWith( + 2, + "https://github.com/astral-sh/ruff/releases/download/0.15.8/ruff-x86_64-unknown-linux-gnu.tar.gz", + undefined, + "token", + ); + expect(mockWarning).toHaveBeenCalledWith( + "Failed to download from mirror, falling back to GitHub Releases: mirror unavailable", + ); + }); + + it("falls back to the canonical old GitHub Releases URL", async () => { + mockGetArtifact.mockResolvedValue({ + archiveFormat: "tar.gz", + checksum: "abc123", + downloadUrl: + "https://github.com/astral-sh/ruff/releases/download/0.4.7/ruff-x86_64-unknown-linux-gnu.tar.gz", + }); + + mockDownloadTool + .mockRejectedValueOnce(new Error("mirror unavailable")) + .mockResolvedValueOnce("/tmp/downloaded"); + + await downloadVersion( + "unknown-linux-gnu", + "x86_64", + "0.4.7", + undefined, + "token", + ); + + expect(mockDownloadTool).toHaveBeenNthCalledWith( + 2, + "https://github.com/astral-sh/ruff/releases/download/v0.4.7/ruff-0.4.7-x86_64-unknown-linux-gnu.tar.gz", + undefined, + "token", + ); + }); + + it("does not fall back when checksum validation fails", async () => { + mockGetArtifact.mockResolvedValue({ + archiveFormat: "tar.gz", + checksum: "abc123", + downloadUrl: + "https://github.com/astral-sh/ruff/releases/download/0.15.8/ruff-x86_64-unknown-linux-gnu.tar.gz", + }); + mockValidateChecksum.mockRejectedValue(new Error("bad checksum")); + + await expect( + downloadVersion( + "unknown-linux-gnu", + "x86_64", + "0.15.8", + undefined, + "token", + ), + ).rejects.toThrow("bad checksum"); + + expect(mockDownloadTool).toHaveBeenCalledTimes(1); + 
expect(mockWarning).not.toHaveBeenCalled(); + }); + + it("does not fall back when extraction fails", async () => { + mockGetArtifact.mockResolvedValue({ + archiveFormat: "tar.gz", + checksum: "abc123", + downloadUrl: + "https://github.com/astral-sh/ruff/releases/download/0.15.8/ruff-x86_64-unknown-linux-gnu.tar.gz", + }); + mockExtractTar.mockRejectedValue(new Error("extract failed")); + + await expect( + downloadVersion( + "unknown-linux-gnu", + "x86_64", + "0.15.8", + undefined, + "token", + ), + ).rejects.toThrow("extract failed"); + + expect(mockDownloadTool).toHaveBeenCalledTimes(1); + expect(mockWarning).not.toHaveBeenCalled(); + }); + + it("does not fall back for non-GitHub URLs", async () => { + mockGetArtifact.mockResolvedValue({ + archiveFormat: "tar.gz", + checksum: "abc123", + downloadUrl: "https://example.com/ruff.tar.gz", + }); + + mockDownloadTool.mockRejectedValue(new Error("download failed")); + + await expect( + downloadVersion( + "unknown-linux-gnu", + "x86_64", + "0.15.8", + undefined, + "token", + ), + ).rejects.toThrow("download failed"); + + expect(mockDownloadTool).toHaveBeenCalledTimes(1); + }); + + it("uses manifest-file checksum metadata when checksum input is unset", async () => { + mockGetArtifact.mockResolvedValue({ + archiveFormat: "tar.gz", + checksum: "manifest-checksum", + downloadUrl: "https://example.com/custom-ruff.tar.gz", + }); + + await downloadVersion( + "unknown-linux-gnu", + "x86_64", + "0.15.8", + "", + "token", + "https://example.com/custom.ndjson", + ); + + expect(mockValidateChecksum).toHaveBeenCalledWith( + "manifest-checksum", + "/tmp/downloaded", + "x86_64", + "unknown-linux-gnu", + "0.15.8", + ); + }); + + it("prefers checksum input over manifest-file checksum metadata", async () => { + mockGetArtifact.mockResolvedValue({ + archiveFormat: "tar.gz", + checksum: "manifest-checksum", + downloadUrl: "https://example.com/custom-ruff.tar.gz", + }); + + await downloadVersion( + "unknown-linux-gnu", + "x86_64", + "0.15.8", 
+ "user-checksum", + "token", + "https://example.com/custom.ndjson", + ); + + expect(mockValidateChecksum).toHaveBeenCalledWith( + "user-checksum", + "/tmp/downloaded", + "x86_64", + "unknown-linux-gnu", + "0.15.8", + ); + }); + + it("preserves tar extraction behavior for newer versions", async () => { + mockGetArtifact.mockResolvedValue({ + archiveFormat: "tar.gz", + checksum: "abc123", + downloadUrl: "https://example.com/ruff.tar.gz", + }); + + await downloadVersion( + "unknown-linux-gnu", + "x86_64", + "0.15.8", + "user-checksum", + "token", + ); + + expect(mockExtractTar).toHaveBeenCalledWith("/tmp/downloaded"); + expect(mockCacheDir).toHaveBeenCalledWith( + "/tmp/extracted/ruff-x86_64-unknown-linux-gnu", + "ruff", + "0.15.8", + "x86_64", + ); + }); + + it("preserves tar extraction behavior for older versions", async () => { + mockGetArtifact.mockResolvedValue({ + archiveFormat: "tar.gz", + checksum: "abc123", + downloadUrl: "https://example.com/ruff.tar.gz", + }); + + await downloadVersion( + "unknown-linux-gnu", + "x86_64", + "0.4.10", + undefined, + "token", + ); + + expect(mockCacheDir).toHaveBeenCalledWith( + "/tmp/extracted", + "ruff", + "0.4.10", + "x86_64", + ); + }); + + it("preserves zip extraction behavior on Windows", async () => { + mockGetArtifact.mockResolvedValue({ + archiveFormat: "zip", + checksum: "abc123", + downloadUrl: "https://example.com/ruff.zip", + }); + + await downloadVersion( + "pc-windows-msvc", + "x86_64", + "0.15.8", + undefined, + "token", + ); + + expect(mockCopyFile).toHaveBeenCalledWith( + "/tmp/downloaded", + "/tmp/downloaded.zip", + ); + expect(mockExtractZip).toHaveBeenCalledWith("/tmp/downloaded.zip"); + expect(mockCacheDir).toHaveBeenCalledWith( + "/tmp/extracted", + "ruff", + "0.15.8", + "x86_64", + ); + }); + }); + + describe("rewriteToMirror", () => { + it("rewrites a GitHub Releases URL to the Astral mirror", () => { + expect( + rewriteToMirror( + 
"https://github.com/astral-sh/ruff/releases/download/0.15.8/ruff-x86_64-unknown-linux-gnu.tar.gz", + ), + ).toBe( + "https://releases.astral.sh/github/ruff/releases/download/0.15.8/ruff-x86_64-unknown-linux-gnu.tar.gz", + ); + }); + + it("returns undefined for non-GitHub URLs", () => { + expect( + rewriteToMirror("https://example.com/ruff.tar.gz"), + ).toBeUndefined(); + }); + + it("returns undefined for a different GitHub repo", () => { + expect( + rewriteToMirror( + "https://github.com/other/repo/releases/download/v1.0/file.tar.gz", + ), + ).toBeUndefined(); + }); + }); +}); diff --git a/__tests__/download/manifest.test.ts b/__tests__/download/manifest.test.ts new file mode 100644 index 0000000..c9128d0 --- /dev/null +++ b/__tests__/download/manifest.test.ts @@ -0,0 +1,196 @@ +import { beforeEach, describe, expect, it, jest } from "@jest/globals"; + +const mockFetch = jest.fn(); + +jest.unstable_mockModule("@actions/core", () => ({ + debug: jest.fn(), + info: jest.fn(), +})); + +jest.unstable_mockModule("../../src/utils/fetch", () => ({ + fetch: mockFetch, +})); + +const { + clearManifestCache, + fetchManifest, + getAllVersions, + getArtifact, + getLatestVersion, + parseManifest, +} = await import("../../src/download/manifest"); + +const sampleManifestResponse = `{"version":"0.15.8","artifacts":[{"platform":"aarch64-apple-darwin","variant":"default","url":"https://github.com/astral-sh/ruff/releases/download/0.15.8/ruff-aarch64-apple-darwin.tar.gz","archive_format":"tar.gz","sha256":"fcf0a9ea6599c6ae28a4c854ac6da76f2c889354d7c36ce136ef071f7ab9721f"},{"platform":"x86_64-pc-windows-msvc","variant":"default","url":"https://github.com/astral-sh/ruff/releases/download/0.15.8/ruff-x86_64-pc-windows-msvc.zip","archive_format":"zip","sha256":"eb02fd95d8e0eed462b4a67ecdd320d865b38c560bffcda9a0b87ec944bdf036"}]} 
+{"version":"0.15.7","artifacts":[{"platform":"aarch64-apple-darwin","variant":"default","url":"https://github.com/astral-sh/ruff/releases/download/0.15.7/ruff-aarch64-apple-darwin.tar.gz","archive_format":"tar.gz","sha256":"606b3c6949d971709f2526fa0d9f0fd23ccf60e09f117999b406b424af18a6a6"}]}`; + +const multiVariantManifestResponse = `{"version":"0.15.8","artifacts":[{"platform":"aarch64-apple-darwin","variant":"python-managed","url":"https://github.com/astral-sh/ruff/releases/download/0.15.8/ruff-aarch64-apple-darwin-managed.tar.gz","archive_format":"tar.gz","sha256":"managed-checksum"},{"platform":"aarch64-apple-darwin","variant":"default","url":"https://github.com/astral-sh/ruff/releases/download/0.15.8/ruff-aarch64-apple-darwin.zip","archive_format":"zip","sha256":"default-checksum"}]}`; + +const oldVersionManifestResponse = `{"version":"v0.4.7","artifacts":[{"platform":"0.4.7-aarch64-apple-darwin","variant":"default","url":"https://github.com/astral-sh/ruff/releases/download/v0.4.7/ruff-0.4.7-aarch64-apple-darwin.tar.gz","archive_format":"tar.gz","sha256":"old-checksum"}]}`; + +function createMockResponse( + ok: boolean, + status: number, + statusText: string, + data: string, +) { + return { + ok, + status, + statusText, + text: async () => data, + }; +} + +describe("manifest", () => { + beforeEach(() => { + clearManifestCache(); + mockFetch.mockReset(); + }); + + describe("fetchManifest", () => { + it("fetches and parses manifest data", async () => { + mockFetch.mockResolvedValue( + createMockResponse(true, 200, "OK", sampleManifestResponse), + ); + + const versions = await fetchManifest(); + + expect(versions).toHaveLength(2); + expect(versions[0]?.version).toBe("0.15.8"); + expect(versions[1]?.version).toBe("0.15.7"); + }); + + it("throws on a failed fetch", async () => { + mockFetch.mockResolvedValue( + createMockResponse(false, 500, "Internal Server Error", ""), + ); + + await expect(fetchManifest()).rejects.toThrow( + "Failed to fetch manifest data: 500 
Internal Server Error", + ); + }); + + it("caches results per URL", async () => { + mockFetch.mockResolvedValue( + createMockResponse(true, 200, "OK", sampleManifestResponse), + ); + + await fetchManifest("https://example.com/custom.ndjson"); + await fetchManifest("https://example.com/custom.ndjson"); + + expect(mockFetch).toHaveBeenCalledTimes(1); + }); + }); + + describe("getAllVersions", () => { + it("returns all version strings", async () => { + mockFetch.mockResolvedValue( + createMockResponse(true, 200, "OK", sampleManifestResponse), + ); + + const versions = await getAllVersions( + "https://example.com/custom.ndjson", + ); + + expect(versions).toEqual(["0.15.8", "0.15.7"]); + }); + }); + + describe("getLatestVersion", () => { + it("returns the first version string", async () => { + mockFetch.mockResolvedValue( + createMockResponse(true, 200, "OK", sampleManifestResponse), + ); + + await expect( + getLatestVersion("https://example.com/custom.ndjson"), + ).resolves.toBe("0.15.8"); + }); + }); + + describe("getArtifact", () => { + beforeEach(() => { + mockFetch.mockResolvedValue( + createMockResponse(true, 200, "OK", sampleManifestResponse), + ); + }); + + it("finds an artifact by version and platform", async () => { + const artifact = await getArtifact("0.15.8", "aarch64", "apple-darwin"); + + expect(artifact).toEqual({ + archiveFormat: "tar.gz", + checksum: + "fcf0a9ea6599c6ae28a4c854ac6da76f2c889354d7c36ce136ef071f7ab9721f", + downloadUrl: + "https://github.com/astral-sh/ruff/releases/download/0.15.8/ruff-aarch64-apple-darwin.tar.gz", + }); + }); + + it("finds a windows artifact", async () => { + const artifact = await getArtifact("0.15.8", "x86_64", "pc-windows-msvc"); + + expect(artifact).toEqual({ + archiveFormat: "zip", + checksum: + "eb02fd95d8e0eed462b4a67ecdd320d865b38c560bffcda9a0b87ec944bdf036", + downloadUrl: + "https://github.com/astral-sh/ruff/releases/download/0.15.8/ruff-x86_64-pc-windows-msvc.zip", + }); + }); + + it("prefers the default 
variant when multiple artifacts share a platform", async () => { + mockFetch.mockResolvedValue( + createMockResponse(true, 200, "OK", multiVariantManifestResponse), + ); + + const artifact = await getArtifact("0.15.8", "aarch64", "apple-darwin"); + + expect(artifact).toEqual({ + archiveFormat: "zip", + checksum: "default-checksum", + downloadUrl: + "https://github.com/astral-sh/ruff/releases/download/0.15.8/ruff-aarch64-apple-darwin.zip", + }); + }); + + it("finds an old artifact when the manifest version has a v prefix", async () => { + mockFetch.mockResolvedValue( + createMockResponse(true, 200, "OK", oldVersionManifestResponse), + ); + + const artifact = await getArtifact("0.4.7", "aarch64", "apple-darwin"); + + expect(artifact).toEqual({ + archiveFormat: "tar.gz", + checksum: "old-checksum", + downloadUrl: + "https://github.com/astral-sh/ruff/releases/download/v0.4.7/ruff-0.4.7-aarch64-apple-darwin.tar.gz", + }); + }); + + it("returns undefined for an unknown version", async () => { + const artifact = await getArtifact("0.0.1", "aarch64", "apple-darwin"); + + expect(artifact).toBeUndefined(); + }); + + it("returns undefined for an unknown platform", async () => { + const artifact = await getArtifact( + "0.15.8", + "aarch64", + "unknown-linux-musl", + ); + + expect(artifact).toBeUndefined(); + }); + }); + + describe("parseManifest", () => { + it("throws for malformed manifest data", () => { + expect(() => parseManifest('{"version":"0.1.0"', "test-source")).toThrow( + "Failed to parse manifest data from test-source", + ); + }); + }); +}); diff --git a/action.yml b/action.yml index a72f645..4e47051 100644 --- a/action.yml +++ b/action.yml @@ -20,10 +20,12 @@ inputs: checksum: description: "The checksum of the ruff version to install" required: false + manifest-file: + description: "URL to a custom manifest file in the astral-sh/versions format." 
+ required: false github-token: description: - "Used to increase the rate limit when retrieving versions and downloading - ruff." + "Used for authenticated downloads of Ruff release artifacts from GitHub." required: false default: ${{ github.token }} outputs: diff --git a/dist/ruff-action/index.cjs b/dist/ruff-action/index.cjs index 790eebc..b9a859c 100644 --- a/dist/ruff-action/index.cjs +++ b/dist/ruff-action/index.cjs @@ -1215,31 +1215,31 @@ var require_util = __commonJS({ function isBuffer(buffer) { return buffer instanceof Uint8Array || Buffer.isBuffer(buffer); } - function validateHandler(handler2, method, upgrade) { - if (!handler2 || typeof handler2 !== "object") { + function validateHandler(handler, method, upgrade) { + if (!handler || typeof handler !== "object") { throw new InvalidArgumentError("handler must be an object"); } - if (typeof handler2.onConnect !== "function") { + if (typeof handler.onConnect !== "function") { throw new InvalidArgumentError("invalid onConnect method"); } - if (typeof handler2.onError !== "function") { + if (typeof handler.onError !== "function") { throw new InvalidArgumentError("invalid onError method"); } - if (typeof handler2.onBodySent !== "function" && handler2.onBodySent !== void 0) { + if (typeof handler.onBodySent !== "function" && handler.onBodySent !== void 0) { throw new InvalidArgumentError("invalid onBodySent method"); } if (upgrade || method === "CONNECT") { - if (typeof handler2.onUpgrade !== "function") { + if (typeof handler.onUpgrade !== "function") { throw new InvalidArgumentError("invalid onUpgrade method"); } } else { - if (typeof handler2.onHeaders !== "function") { + if (typeof handler.onHeaders !== "function") { throw new InvalidArgumentError("invalid onHeaders method"); } - if (typeof handler2.onData !== "function") { + if (typeof handler.onData !== "function") { throw new InvalidArgumentError("invalid onData method"); } - if (typeof handler2.onComplete !== "function") { + if (typeof 
handler.onComplete !== "function") { throw new InvalidArgumentError("invalid onComplete method"); } } @@ -1266,14 +1266,14 @@ var require_util = __commonJS({ }; } function ReadableStreamFrom(iterable) { - let iterator2; + let iterator; return new ReadableStream( { async start() { - iterator2 = iterable[Symbol.asyncIterator](); + iterator = iterable[Symbol.asyncIterator](); }, async pull(controller) { - const { done, value } = await iterator2.next(); + const { done, value } = await iterator.next(); if (done) { queueMicrotask(() => { controller.close(); @@ -1288,7 +1288,7 @@ var require_util = __commonJS({ return controller.desiredSize > 0; }, async cancel(reason) { - await iterator2.return(); + await iterator.return(); }, type: "bytes" } @@ -1373,10 +1373,10 @@ var require_util = __commonJS({ } obj[kListeners] = null; } - function errorRequest(client, request2, err) { + function errorRequest(client, request, err) { try { - request2.onError(err); - assert(request2.aborted); + request.onError(err); + assert(request.aborted); } catch (err2) { client.emit("error", err2); } @@ -1682,7 +1682,7 @@ var require_request = __commonJS({ throwOnError, expectContinue, servername - }, handler2) { + }, handler) { if (typeof path9 !== "string") { throw new InvalidArgumentError("path must be a string"); } else if (path9[0] !== "/" && !(path9.startsWith("http://") || path9.startsWith("https://")) && method !== "CONNECT") { @@ -1787,9 +1787,9 @@ var require_request = __commonJS({ } else if (headers != null) { throw new InvalidArgumentError("headers must be an object or an array"); } - validateHandler(handler2, method, upgrade); + validateHandler(handler, method, upgrade); this.servername = servername || getServerName(this.host); - this[kHandler] = handler2; + this[kHandler] = handler; if (channels.create.hasSubscribers) { channels.create.publish({ request: this }); } @@ -1894,7 +1894,7 @@ var require_request = __commonJS({ return this; } }; - function processHeader(request2, key, val) 
{ + function processHeader(request, key, val) { if (val && (typeof val === "object" && !Array.isArray(val))) { throw new InvalidArgumentError(`invalid ${key} header`); } else if (val === void 0) { @@ -1934,24 +1934,24 @@ var require_request = __commonJS({ val = `${val}`; } if (headerName === "host") { - if (request2.host !== null) { + if (request.host !== null) { throw new InvalidArgumentError("duplicate host header"); } if (typeof val !== "string") { throw new InvalidArgumentError("invalid host header"); } - request2.host = val; + request.host = val; } else if (headerName === "content-length") { - if (request2.contentLength !== null) { + if (request.contentLength !== null) { throw new InvalidArgumentError("duplicate content-length header"); } - request2.contentLength = parseInt(val, 10); - if (!Number.isFinite(request2.contentLength)) { + request.contentLength = parseInt(val, 10); + if (!Number.isFinite(request.contentLength)) { throw new InvalidArgumentError("invalid content-length header"); } - } else if (request2.contentType === null && headerName === "content-type") { - request2.contentType = val; - request2.headers.push(key, val); + } else if (request.contentType === null && headerName === "content-type") { + request.contentType = val; + request.headers.push(key, val); } else if (headerName === "transfer-encoding" || headerName === "keep-alive" || headerName === "upgrade") { throw new InvalidArgumentError(`invalid ${headerName} header`); } else if (headerName === "connection") { @@ -1960,12 +1960,12 @@ var require_request = __commonJS({ throw new InvalidArgumentError("invalid connection header"); } if (value === "close") { - request2.reset = true; + request.reset = true; } } else if (headerName === "expect") { throw new NotSupportedError("expect header not supported"); } else { - request2.headers.push(key, val); + request.headers.push(key, val); } } module2.exports = Request; @@ -2148,20 +2148,20 @@ var require_dispatcher_base = __commonJS({ 
queueMicrotask(onDestroyed); }); } - [kInterceptedDispatch](opts, handler2) { + [kInterceptedDispatch](opts, handler) { if (!this[kInterceptors] || this[kInterceptors].length === 0) { this[kInterceptedDispatch] = this[kDispatch]; - return this[kDispatch](opts, handler2); + return this[kDispatch](opts, handler); } let dispatch = this[kDispatch].bind(this); for (let i = this[kInterceptors].length - 1; i >= 0; i--) { dispatch = this[kInterceptors][i](dispatch); } this[kInterceptedDispatch] = dispatch; - return dispatch(opts, handler2); + return dispatch(opts, handler); } - dispatch(opts, handler2) { - if (!handler2 || typeof handler2 !== "object") { + dispatch(opts, handler) { + if (!handler || typeof handler !== "object") { throw new InvalidArgumentError("handler must be an object"); } try { @@ -2174,12 +2174,12 @@ var require_dispatcher_base = __commonJS({ if (this[kClosed]) { throw new ClientClosedError(); } - return this[kInterceptedDispatch](opts, handler2); + return this[kInterceptedDispatch](opts, handler); } catch (err) { - if (typeof handler2.onError !== "function") { + if (typeof handler.onError !== "function") { throw new InvalidArgumentError("invalid onError method"); } - handler2.onError(err); + handler.onError(err); return false; } } @@ -2428,7 +2428,7 @@ var require_connect = __commonJS({ var util2 = require_util(); var { InvalidArgumentError, ConnectTimeoutError } = require_errors(); var timers = require_timers(); - function noop2() { + function noop() { } var tls; var SessionCache; @@ -2553,7 +2553,7 @@ var require_connect = __commonJS({ } var setupConnectTimeout = process.platform === "win32" ? 
(socketWeakRef, opts) => { if (!opts.timeout) { - return noop2; + return noop; } let s1 = null; let s2 = null; @@ -2569,7 +2569,7 @@ var require_connect = __commonJS({ }; } : (socketWeakRef, opts) => { if (!opts.timeout) { - return noop2; + return noop; } let s1 = null; const fastTimer = timers.setFastTimeout(() => { @@ -4039,11 +4039,11 @@ var require_util2 = __commonJS({ function normalizeBinaryStringToUtf8(value) { return Buffer.from(value, "binary").toString("utf8"); } - function requestCurrentURL(request2) { - return request2.urlList[request2.urlList.length - 1]; + function requestCurrentURL(request) { + return request.urlList[request.urlList.length - 1]; } - function requestBadPort(request2) { - const url = requestCurrentURL(request2); + function requestBadPort(request) { + const url = requestCurrentURL(request); if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) { return "blocked"; } @@ -4067,7 +4067,7 @@ var require_util2 = __commonJS({ function isValidHeaderValue(potentialValue) { return (potentialValue[0] === " " || potentialValue[0] === " " || potentialValue[potentialValue.length - 1] === " " || potentialValue[potentialValue.length - 1] === " " || potentialValue.includes("\n") || potentialValue.includes("\r") || potentialValue.includes("\0")) === false; } - function setRequestReferrerPolicyOnRedirect(request2, actualResponse) { + function setRequestReferrerPolicyOnRedirect(request, actualResponse) { const { headersList } = actualResponse; const policyHeader = (headersList.get("referrer-policy", true) ?? 
"").split(","); let policy = ""; @@ -4081,7 +4081,7 @@ var require_util2 = __commonJS({ } } if (policy !== "") { - request2.referrerPolicy = policy; + request.referrerPolicy = policy; } } function crossOriginResourcePolicyCheck() { @@ -4098,33 +4098,33 @@ var require_util2 = __commonJS({ header = httpRequest.mode; httpRequest.headersList.set("sec-fetch-mode", header, true); } - function appendRequestOriginHeader(request2) { - let serializedOrigin = request2.origin; + function appendRequestOriginHeader(request) { + let serializedOrigin = request.origin; if (serializedOrigin === "client" || serializedOrigin === void 0) { return; } - if (request2.responseTainting === "cors" || request2.mode === "websocket") { - request2.headersList.append("origin", serializedOrigin, true); - } else if (request2.method !== "GET" && request2.method !== "HEAD") { - switch (request2.referrerPolicy) { + if (request.responseTainting === "cors" || request.mode === "websocket") { + request.headersList.append("origin", serializedOrigin, true); + } else if (request.method !== "GET" && request.method !== "HEAD") { + switch (request.referrerPolicy) { case "no-referrer": serializedOrigin = null; break; case "no-referrer-when-downgrade": case "strict-origin": case "strict-origin-when-cross-origin": - if (request2.origin && urlHasHttpsScheme(request2.origin) && !urlHasHttpsScheme(requestCurrentURL(request2))) { + if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) { serializedOrigin = null; } break; case "same-origin": - if (!sameOrigin(request2, requestCurrentURL(request2))) { + if (!sameOrigin(request, requestCurrentURL(request))) { serializedOrigin = null; } break; default: } - request2.headersList.append("origin", serializedOrigin, true); + request.headersList.append("origin", serializedOrigin, true); } } function coarsenTime(timestamp, crossOriginIsolatedCapability) { @@ -4178,26 +4178,26 @@ var require_util2 = __commonJS({ referrerPolicy: 
policyContainer.referrerPolicy }; } - function determineRequestsReferrer(request2) { - const policy = request2.referrerPolicy; + function determineRequestsReferrer(request) { + const policy = request.referrerPolicy; assert(policy); let referrerSource = null; - if (request2.referrer === "client") { + if (request.referrer === "client") { const globalOrigin = getGlobalOrigin(); if (!globalOrigin || globalOrigin.origin === "null") { return "no-referrer"; } referrerSource = new URL(globalOrigin); - } else if (request2.referrer instanceof URL) { - referrerSource = request2.referrer; + } else if (request.referrer instanceof URL) { + referrerSource = request.referrer; } let referrerURL = stripURLForReferrer(referrerSource); const referrerOrigin = stripURLForReferrer(referrerSource, true); if (referrerURL.toString().length > 4096) { referrerURL = referrerOrigin; } - const areSameOrigin = sameOrigin(request2, referrerURL); - const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(request2.url); + const areSameOrigin = sameOrigin(request, referrerURL); + const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(request.url); switch (policy) { case "origin": return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true); @@ -4208,7 +4208,7 @@ var require_util2 = __commonJS({ case "origin-when-cross-origin": return areSameOrigin ? 
referrerURL : referrerOrigin; case "strict-origin-when-cross-origin": { - const currentURL = requestCurrentURL(request2); + const currentURL = requestCurrentURL(request); if (sameOrigin(referrerURL, currentURL)) { return referrerURL; } @@ -4369,7 +4369,7 @@ var require_util2 = __commonJS({ } return true; } - function tryUpgradeRequestToAPotentiallyTrustworthyURL(request2) { + function tryUpgradeRequestToAPotentiallyTrustworthyURL(request) { } function sameOrigin(A, B) { if (A.origin === B.origin && A.origin === "null") { @@ -5360,7 +5360,7 @@ var require_body = __commonJS({ random = (max) => Math.floor(Math.random(max)); } var textEncoder = new TextEncoder(); - function noop2() { + function noop() { } var hasFinalizationRegistry = globalThis.FinalizationRegistry && process.version.indexOf("v18") !== 0; var streamRegistry; @@ -5368,7 +5368,7 @@ var require_body = __commonJS({ streamRegistry = new FinalizationRegistry((weakRef) => { const stream2 = weakRef.deref(); if (stream2 && !stream2.locked && !isDisturbed(stream2) && !isErrored(stream2)) { - stream2.cancel("Response object has been garbage collected").catch(noop2); + stream2.cancel("Response object has been garbage collected").catch(noop); } }); } @@ -5477,13 +5477,13 @@ Content-Type: ${value.type || "application/octet-stream"}\r length = Buffer.byteLength(source); } if (action != null) { - let iterator2; + let iterator; stream2 = new ReadableStream({ async start() { - iterator2 = action(object)[Symbol.asyncIterator](); + iterator = action(object)[Symbol.asyncIterator](); }, async pull(controller) { - const { value, done } = await iterator2.next(); + const { value, done } = await iterator.next(); if (done) { queueMicrotask(() => { controller.close(); @@ -5500,7 +5500,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r return controller.desiredSize > 0; }, async cancel(reason) { - await iterator2.return(); + await iterator.return(); }, type: "bytes" }); @@ -5899,11 +5899,11 @@ var require_client_h1 
= __commonJS({ if (socket.destroyed) { return -1; } - const request2 = client[kQueue][client[kRunningIdx]]; - if (!request2) { + const request = client[kQueue][client[kRunningIdx]]; + if (!request) { return -1; } - request2.onResponseStarted(); + request.onResponseStarted(); } onHeaderField(buf) { const len = this.headers.length; @@ -5948,9 +5948,9 @@ var require_client_h1 = __commonJS({ assert(!socket.destroyed); assert(!this.paused); assert((headers.length & 1) === 0); - const request2 = client[kQueue][client[kRunningIdx]]; - assert(request2); - assert(request2.upgrade || request2.method === "CONNECT"); + const request = client[kQueue][client[kRunningIdx]]; + assert(request); + assert(request.upgrade || request.method === "CONNECT"); this.statusCode = null; this.statusText = ""; this.shouldKeepAlive = null; @@ -5967,7 +5967,7 @@ var require_client_h1 = __commonJS({ client[kQueue][client[kRunningIdx]++] = null; client.emit("disconnect", client[kUrl], [client], new InformationalError("upgrade")); try { - request2.onUpgrade(statusCode, headers, socket); + request.onUpgrade(statusCode, headers, socket); } catch (err) { util2.destroy(socket, err); } @@ -5978,8 +5978,8 @@ var require_client_h1 = __commonJS({ if (socket.destroyed) { return -1; } - const request2 = client[kQueue][client[kRunningIdx]]; - if (!request2) { + const request = client[kQueue][client[kRunningIdx]]; + if (!request) { return -1; } assert(!this.upgrade); @@ -5988,23 +5988,23 @@ var require_client_h1 = __commonJS({ util2.destroy(socket, new SocketError("bad response", util2.getSocketInfo(socket))); return -1; } - if (upgrade && !request2.upgrade) { + if (upgrade && !request.upgrade) { util2.destroy(socket, new SocketError("bad upgrade", util2.getSocketInfo(socket))); return -1; } assert(this.timeoutType === TIMEOUT_HEADERS); this.statusCode = statusCode; this.shouldKeepAlive = shouldKeepAlive || // Override llhttp value which does not allow keepAlive for HEAD. 
- request2.method === "HEAD" && !socket[kReset] && this.connection.toLowerCase() === "keep-alive"; + request.method === "HEAD" && !socket[kReset] && this.connection.toLowerCase() === "keep-alive"; if (this.statusCode >= 200) { - const bodyTimeout = request2.bodyTimeout != null ? request2.bodyTimeout : client[kBodyTimeout]; + const bodyTimeout = request.bodyTimeout != null ? request.bodyTimeout : client[kBodyTimeout]; this.setTimeout(bodyTimeout, TIMEOUT_BODY); } else if (this.timeout) { if (this.timeout.refresh) { this.timeout.refresh(); } } - if (request2.method === "CONNECT") { + if (request.method === "CONNECT") { assert(client[kRunning] === 1); this.upgrade = true; return 2; @@ -6035,11 +6035,11 @@ var require_client_h1 = __commonJS({ } else { socket[kReset] = true; } - const pause = request2.onHeaders(statusCode, headers, this.resume, statusText) === false; - if (request2.aborted) { + const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false; + if (request.aborted) { return -1; } - if (request2.method === "HEAD") { + if (request.method === "HEAD") { return 1; } if (statusCode < 200) { @@ -6056,8 +6056,8 @@ var require_client_h1 = __commonJS({ if (socket.destroyed) { return -1; } - const request2 = client[kQueue][client[kRunningIdx]]; - assert(request2); + const request = client[kQueue][client[kRunningIdx]]; + assert(request); assert(this.timeoutType === TIMEOUT_BODY); if (this.timeout) { if (this.timeout.refresh) { @@ -6070,7 +6070,7 @@ var require_client_h1 = __commonJS({ return -1; } this.bytesRead += buf.length; - if (request2.onData(buf) === false) { + if (request.onData(buf) === false) { return constants3.ERROR.PAUSED; } } @@ -6084,8 +6084,8 @@ var require_client_h1 = __commonJS({ } assert(statusCode >= 100); assert((this.headers.length & 1) === 0); - const request2 = client[kQueue][client[kRunningIdx]]; - assert(request2); + const request = client[kQueue][client[kRunningIdx]]; + assert(request); this.statusCode = null; 
this.statusText = ""; this.bytesRead = 0; @@ -6097,11 +6097,11 @@ var require_client_h1 = __commonJS({ if (statusCode < 200) { return; } - if (request2.method !== "HEAD" && contentLength && bytesRead !== parseInt(contentLength, 10)) { + if (request.method !== "HEAD" && contentLength && bytesRead !== parseInt(contentLength, 10)) { util2.destroy(socket, new ResponseContentLengthMismatchError()); return -1; } - request2.onComplete(headers); + request.onComplete(headers); client[kQueue][client[kRunningIdx]++] = null; if (socket[kWriting]) { assert(client[kRunning] === 0); @@ -6188,13 +6188,13 @@ var require_client_h1 = __commonJS({ assert(client2[kPending] === 0); const requests = client2[kQueue].splice(client2[kRunningIdx]); for (let i = 0; i < requests.length; i++) { - const request2 = requests[i]; - util2.errorRequest(client2, request2, err); + const request = requests[i]; + util2.errorRequest(client2, request, err); } } else if (client2[kRunning] > 0 && err.code !== "UND_ERR_INFO") { - const request2 = client2[kQueue][client2[kRunningIdx]]; + const request = client2[kQueue][client2[kRunningIdx]]; client2[kQueue][client2[kRunningIdx]++] = null; - util2.errorRequest(client2, request2, err); + util2.errorRequest(client2, request, err); } client2[kPendingIdx] = client2[kRunningIdx]; assert(client2[kRunning] === 0); @@ -6224,18 +6224,18 @@ var require_client_h1 = __commonJS({ get destroyed() { return socket.destroyed; }, - busy(request2) { + busy(request) { if (socket[kWriting] || socket[kReset] || socket[kBlocking]) { return true; } - if (request2) { - if (client[kRunning] > 0 && !request2.idempotent) { + if (request) { + if (client[kRunning] > 0 && !request.idempotent) { return true; } - if (client[kRunning] > 0 && (request2.upgrade || request2.method === "CONNECT")) { + if (client[kRunning] > 0 && (request.upgrade || request.method === "CONNECT")) { return true; } - if (client[kRunning] > 0 && util2.bodyLength(request2.body) !== 0 && (util2.isStream(request2.body) || 
util2.isAsyncIterable(request2.body) || util2.isFormDataLike(request2.body))) { + if (client[kRunning] > 0 && util2.bodyLength(request.body) !== 0 && (util2.isStream(request.body) || util2.isAsyncIterable(request.body) || util2.isFormDataLike(request.body))) { return true; } } @@ -6261,8 +6261,8 @@ var require_client_h1 = __commonJS({ } } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) { if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) { - const request2 = client[kQueue][client[kRunningIdx]]; - const headersTimeout = request2.headersTimeout != null ? request2.headersTimeout : client[kHeadersTimeout]; + const request = client[kQueue][client[kRunningIdx]]; + const headersTimeout = request.headersTimeout != null ? request.headersTimeout : client[kHeadersTimeout]; socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS); } } @@ -6271,21 +6271,21 @@ var require_client_h1 = __commonJS({ function shouldSendContentLength(method) { return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } - function writeH1(client, request2) { - const { method, path: path9, host, upgrade, blocking, reset } = request2; - let { body, headers, contentLength } = request2; + function writeH1(client, request) { + const { method, path: path9, host, upgrade, blocking, reset } = request; + let { body, headers, contentLength } = request; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH" || method === "QUERY" || method === "PROPFIND" || method === "PROPPATCH"; if (util2.isFormDataLike(body)) { if (!extractBody) { extractBody = require_body().extractBody; } const [bodyStream, contentType] = extractBody(body); - if (request2.contentType == null) { + if (request.contentType == null) { headers.push("content-type", contentType); } body = bodyStream.stream; contentLength = bodyStream.length; - } else if (util2.isBlobLike(body) && request2.contentType == null && body.type) { + } else if 
(util2.isBlobLike(body) && request.contentType == null && body.type) { headers.push("content-type", body.type); } if (body && typeof body.read === "function") { @@ -6294,33 +6294,33 @@ var require_client_h1 = __commonJS({ const bodyLength = util2.bodyLength(body); contentLength = bodyLength ?? contentLength; if (contentLength === null) { - contentLength = request2.contentLength; + contentLength = request.contentLength; } if (contentLength === 0 && !expectsPayload) { contentLength = null; } - if (shouldSendContentLength(method) && contentLength > 0 && request2.contentLength !== null && request2.contentLength !== contentLength) { + if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) { if (client[kStrictContentLength]) { - util2.errorRequest(client, request2, new RequestContentLengthMismatchError()); + util2.errorRequest(client, request, new RequestContentLengthMismatchError()); return false; } process.emitWarning(new RequestContentLengthMismatchError()); } const socket = client[kSocket]; const abort = (err) => { - if (request2.aborted || request2.completed) { + if (request.aborted || request.completed) { return; } - util2.errorRequest(client, request2, err || new RequestAbortedError()); + util2.errorRequest(client, request, err || new RequestAbortedError()); util2.destroy(body); util2.destroy(socket, new InformationalError("aborted")); }; try { - request2.onConnect(abort); + request.onConnect(abort); } catch (err) { - util2.errorRequest(client, request2, err); + util2.errorRequest(client, request, err); } - if (request2.aborted) { + if (request.aborted) { return false; } if (method === "HEAD") { @@ -6371,31 +6371,31 @@ upgrade: ${upgrade}\r } } if (channels.sendHeaders.hasSubscribers) { - channels.sendHeaders.publish({ request: request2, headers: header, socket }); + channels.sendHeaders.publish({ request, headers: header, socket }); } if (!body || bodyLength === 0) { - 
writeBuffer(abort, null, client, request2, socket, contentLength, header, expectsPayload); + writeBuffer(abort, null, client, request, socket, contentLength, header, expectsPayload); } else if (util2.isBuffer(body)) { - writeBuffer(abort, body, client, request2, socket, contentLength, header, expectsPayload); + writeBuffer(abort, body, client, request, socket, contentLength, header, expectsPayload); } else if (util2.isBlobLike(body)) { if (typeof body.stream === "function") { - writeIterable(abort, body.stream(), client, request2, socket, contentLength, header, expectsPayload); + writeIterable(abort, body.stream(), client, request, socket, contentLength, header, expectsPayload); } else { - writeBlob(abort, body, client, request2, socket, contentLength, header, expectsPayload); + writeBlob(abort, body, client, request, socket, contentLength, header, expectsPayload); } } else if (util2.isStream(body)) { - writeStream(abort, body, client, request2, socket, contentLength, header, expectsPayload); + writeStream(abort, body, client, request, socket, contentLength, header, expectsPayload); } else if (util2.isIterable(body)) { - writeIterable(abort, body, client, request2, socket, contentLength, header, expectsPayload); + writeIterable(abort, body, client, request, socket, contentLength, header, expectsPayload); } else { assert(false); } return true; } - function writeStream(abort, body, client, request2, socket, contentLength, header, expectsPayload) { + function writeStream(abort, body, client, request, socket, contentLength, header, expectsPayload) { assert(contentLength !== 0 || client[kRunning] === 0, "stream body cannot be pipelined"); let finished = false; - const writer = new AsyncWriter({ abort, socket, request: request2, contentLength, client, expectsPayload, header }); + const writer = new AsyncWriter({ abort, socket, request, contentLength, client, expectsPayload, header }); const onData = function(chunk) { if (finished) { return; @@ -6461,7 +6461,7 @@ upgrade: 
${upgrade}\r setImmediate(onClose); } } - function writeBuffer(abort, body, client, request2, socket, contentLength, header, expectsPayload) { + function writeBuffer(abort, body, client, request, socket, contentLength, header, expectsPayload) { try { if (!body) { if (contentLength === 0) { @@ -6481,18 +6481,18 @@ upgrade: ${upgrade}\r `, "latin1"); socket.write(body); socket.uncork(); - request2.onBodySent(body); - if (!expectsPayload && request2.reset !== false) { + request.onBodySent(body); + if (!expectsPayload && request.reset !== false) { socket[kReset] = true; } } - request2.onRequestSent(); + request.onRequestSent(); client[kResume](); } catch (err) { abort(err); } } - async function writeBlob(abort, body, client, request2, socket, contentLength, header, expectsPayload) { + async function writeBlob(abort, body, client, request, socket, contentLength, header, expectsPayload) { assert(contentLength === body.size, "blob body must have content length"); try { if (contentLength != null && contentLength !== body.size) { @@ -6505,9 +6505,9 @@ upgrade: ${upgrade}\r `, "latin1"); socket.write(buffer); socket.uncork(); - request2.onBodySent(buffer); - request2.onRequestSent(); - if (!expectsPayload && request2.reset !== false) { + request.onBodySent(buffer); + request.onRequestSent(); + if (!expectsPayload && request.reset !== false) { socket[kReset] = true; } client[kResume](); @@ -6515,7 +6515,7 @@ upgrade: ${upgrade}\r abort(err); } } - async function writeIterable(abort, body, client, request2, socket, contentLength, header, expectsPayload) { + async function writeIterable(abort, body, client, request, socket, contentLength, header, expectsPayload) { assert(contentLength !== 0 || client[kRunning] === 0, "iterator body cannot be pipelined"); let callback = null; function onDrain() { @@ -6534,7 +6534,7 @@ upgrade: ${upgrade}\r } }); socket.on("close", onDrain).on("drain", onDrain); - const writer = new AsyncWriter({ abort, socket, request: request2, contentLength, 
client, expectsPayload, header }); + const writer = new AsyncWriter({ abort, socket, request, contentLength, client, expectsPayload, header }); try { for await (const chunk of body) { if (socket[kError]) { @@ -6552,9 +6552,9 @@ upgrade: ${upgrade}\r } } var AsyncWriter = class { - constructor({ abort, socket, request: request2, contentLength, client, expectsPayload, header }) { + constructor({ abort, socket, request, contentLength, client, expectsPayload, header }) { this.socket = socket; - this.request = request2; + this.request = request; this.contentLength = contentLength; this.client = client; this.bytesWritten = 0; @@ -6564,7 +6564,7 @@ upgrade: ${upgrade}\r socket[kWriting] = true; } write(chunk) { - const { socket, request: request2, contentLength, client, bytesWritten, expectsPayload, header } = this; + const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this; if (socket[kError]) { throw socket[kError]; } @@ -6583,7 +6583,7 @@ upgrade: ${upgrade}\r } socket.cork(); if (bytesWritten === 0) { - if (!expectsPayload && request2.reset !== false) { + if (!expectsPayload && request.reset !== false) { socket[kReset] = true; } if (contentLength === null) { @@ -6603,7 +6603,7 @@ ${len.toString(16)}\r this.bytesWritten += len; const ret = socket.write(chunk); socket.uncork(); - request2.onBodySent(chunk); + request.onBodySent(chunk); if (!ret) { if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { if (socket[kParser].timeout.refresh) { @@ -6614,8 +6614,8 @@ ${len.toString(16)}\r return ret; } end() { - const { socket, contentLength, client, bytesWritten, expectsPayload, header, request: request2 } = this; - request2.onRequestSent(); + const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this; + request.onRequestSent(); socket[kWriting] = false; if (socket[kError]) { throw socket[kError]; @@ -6755,8 +6755,8 @@ var require_client_h2 = __commonJS({ 
assert(client2[kPending] === 0); const requests = client2[kQueue].splice(client2[kRunningIdx]); for (let i = 0; i < requests.length; i++) { - const request2 = requests[i]; - util2.errorRequest(client2, request2, err); + const request = requests[i]; + util2.errorRequest(client2, request, err); } } }); @@ -6850,9 +6850,9 @@ var require_client_h2 = __commonJS({ } util2.destroy(this[kSocket], err); if (client[kRunningIdx] < client[kQueue].length) { - const request2 = client[kQueue][client[kRunningIdx]]; + const request = client[kQueue][client[kRunningIdx]]; client[kQueue][client[kRunningIdx]++] = null; - util2.errorRequest(client, request2, err); + util2.errorRequest(client, request, err); client[kPendingIdx] = client[kRunningIdx]; } assert(client[kRunning] === 0); @@ -6862,12 +6862,12 @@ var require_client_h2 = __commonJS({ function shouldSendContentLength(method) { return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } - function writeH2(client, request2) { + function writeH2(client, request) { const session = client[kHTTP2Session]; - const { method, path: path9, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; - let { body } = request2; + const { method, path: path9, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; + let { body } = request; if (upgrade) { - util2.errorRequest(client, request2, new Error("Upgrade not supported for H2")); + util2.errorRequest(client, request, new Error("Upgrade not supported for H2")); return false; } const headers = {}; @@ -6891,11 +6891,11 @@ var require_client_h2 = __commonJS({ headers[HTTP2_HEADER_AUTHORITY] = host || `${hostname}${port ? 
`:${port}` : ""}`; headers[HTTP2_HEADER_METHOD] = method; const abort = (err) => { - if (request2.aborted || request2.completed) { + if (request.aborted || request.completed) { return; } err = err || new RequestAbortedError(); - util2.errorRequest(client, request2, err); + util2.errorRequest(client, request, err); if (stream2 != null) { util2.destroy(stream2, err); } @@ -6904,23 +6904,23 @@ var require_client_h2 = __commonJS({ client[kResume](); }; try { - request2.onConnect(abort); + request.onConnect(abort); } catch (err) { - util2.errorRequest(client, request2, err); + util2.errorRequest(client, request, err); } - if (request2.aborted) { + if (request.aborted) { return false; } if (method === "CONNECT") { session.ref(); stream2 = session.request(headers, { endStream: false, signal }); if (stream2.id && !stream2.pending) { - request2.onUpgrade(null, null, stream2); + request.onUpgrade(null, null, stream2); ++session[kOpenStreams]; client[kQueue][client[kRunningIdx]++] = null; } else { stream2.once("ready", () => { - request2.onUpgrade(null, null, stream2); + request.onUpgrade(null, null, stream2); ++session[kOpenStreams]; client[kQueue][client[kRunningIdx]++] = null; }); @@ -6946,14 +6946,14 @@ var require_client_h2 = __commonJS({ contentLength = bodyStream.length; } if (contentLength == null) { - contentLength = request2.contentLength; + contentLength = request.contentLength; } if (contentLength === 0 || !expectsPayload) { contentLength = null; } - if (shouldSendContentLength(method) && contentLength > 0 && request2.contentLength != null && request2.contentLength !== contentLength) { + if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) { if (client[kStrictContentLength]) { - util2.errorRequest(client, request2, new RequestContentLengthMismatchError()); + util2.errorRequest(client, request, new RequestContentLengthMismatchError()); return false; } process.emitWarning(new 
RequestContentLengthMismatchError()); @@ -6978,25 +6978,25 @@ var require_client_h2 = __commonJS({ ++session[kOpenStreams]; stream2.once("response", (headers2) => { const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers2; - request2.onResponseStarted(); - if (request2.aborted) { + request.onResponseStarted(); + if (request.aborted) { const err = new RequestAbortedError(); - util2.errorRequest(client, request2, err); + util2.errorRequest(client, request, err); util2.destroy(stream2, err); return; } - if (request2.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream2.resume.bind(stream2), "") === false) { + if (request.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream2.resume.bind(stream2), "") === false) { stream2.pause(); } stream2.on("data", (chunk) => { - if (request2.onData(chunk) === false) { + if (request.onData(chunk) === false) { stream2.pause(); } }); }); stream2.once("end", () => { if (stream2.state?.state == null || stream2.state.state < 6) { - request2.onComplete([]); + request.onComplete([]); } if (session[kOpenStreams] === 0) { session.unref(); @@ -7026,7 +7026,7 @@ var require_client_h2 = __commonJS({ stream2, null, client, - request2, + request, client[kSocket], contentLength, expectsPayload @@ -7037,7 +7037,7 @@ var require_client_h2 = __commonJS({ stream2, body, client, - request2, + request, client[kSocket], contentLength, expectsPayload @@ -7049,7 +7049,7 @@ var require_client_h2 = __commonJS({ stream2, body.stream(), client, - request2, + request, client[kSocket], contentLength, expectsPayload @@ -7060,7 +7060,7 @@ var require_client_h2 = __commonJS({ stream2, body, client, - request2, + request, client[kSocket], contentLength, expectsPayload @@ -7074,7 +7074,7 @@ var require_client_h2 = __commonJS({ stream2, body, client, - request2, + request, contentLength ); } else if (util2.isIterable(body)) { @@ -7083,7 +7083,7 @@ var require_client_h2 = __commonJS({ stream2, body, client, - request2, + request, 
client[kSocket], contentLength, expectsPayload @@ -7093,7 +7093,7 @@ var require_client_h2 = __commonJS({ } } } - function writeBuffer(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) { + function writeBuffer(abort, h2stream, body, client, request, socket, contentLength, expectsPayload) { try { if (body != null && util2.isBuffer(body)) { assert(contentLength === body.byteLength, "buffer body must have content length"); @@ -7101,18 +7101,18 @@ var require_client_h2 = __commonJS({ h2stream.write(body); h2stream.uncork(); h2stream.end(); - request2.onBodySent(body); + request.onBodySent(body); } if (!expectsPayload) { socket[kReset] = true; } - request2.onRequestSent(); + request.onRequestSent(); client[kResume](); } catch (error2) { abort(error2); } } - function writeStream(abort, socket, expectsPayload, h2stream, body, client, request2, contentLength) { + function writeStream(abort, socket, expectsPayload, h2stream, body, client, request, contentLength) { assert(contentLength !== 0 || client[kRunning] === 0, "stream body cannot be pipelined"); const pipe = pipeline2( body, @@ -7123,7 +7123,7 @@ var require_client_h2 = __commonJS({ abort(err); } else { util2.removeAllListeners(pipe); - request2.onRequestSent(); + request.onRequestSent(); if (!expectsPayload) { socket[kReset] = true; } @@ -7133,10 +7133,10 @@ var require_client_h2 = __commonJS({ ); util2.addListener(pipe, "data", onPipeData); function onPipeData(chunk) { - request2.onBodySent(chunk); + request.onBodySent(chunk); } } - async function writeBlob(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) { + async function writeBlob(abort, h2stream, body, client, request, socket, contentLength, expectsPayload) { assert(contentLength === body.size, "blob body must have content length"); try { if (contentLength != null && contentLength !== body.size) { @@ -7147,8 +7147,8 @@ var require_client_h2 = __commonJS({ h2stream.write(buffer); h2stream.uncork(); 
h2stream.end(); - request2.onBodySent(buffer); - request2.onRequestSent(); + request.onBodySent(buffer); + request.onRequestSent(); if (!expectsPayload) { socket[kReset] = true; } @@ -7157,7 +7157,7 @@ var require_client_h2 = __commonJS({ abort(err); } } - async function writeIterable(abort, h2stream, body, client, request2, socket, contentLength, expectsPayload) { + async function writeIterable(abort, h2stream, body, client, request, socket, contentLength, expectsPayload) { assert(contentLength !== 0 || client[kRunning] === 0, "iterator body cannot be pipelined"); let callback = null; function onDrain() { @@ -7182,13 +7182,13 @@ var require_client_h2 = __commonJS({ throw socket[kError]; } const res = h2stream.write(chunk); - request2.onBodySent(chunk); + request.onBodySent(chunk); if (!res) { await waitForDrain(); } } h2stream.end(); - request2.onRequestSent(); + request.onRequestSent(); if (!expectsPayload) { socket[kReset] = true; } @@ -7226,17 +7226,17 @@ var require_redirect_handler = __commonJS({ } }; var RedirectHandler = class { - constructor(dispatch, maxRedirections, opts, handler2) { + constructor(dispatch, maxRedirections, opts, handler) { if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { throw new InvalidArgumentError("maxRedirections must be a positive number"); } - util2.validateHandler(handler2, opts.method, opts.upgrade); + util2.validateHandler(handler, opts.method, opts.upgrade); this.dispatch = dispatch; this.location = null; this.abort = null; this.opts = { ...opts, maxRedirections: 0 }; this.maxRedirections = maxRedirections; - this.handler = handler2; + this.handler = handler; this.history = []; this.redirectionLimitReached = false; if (util2.isStream(this.opts.body)) { @@ -7369,12 +7369,12 @@ var require_redirect_interceptor = __commonJS({ var RedirectHandler = require_redirect_handler(); function createRedirectInterceptor({ maxRedirections: defaultMaxRedirections }) { return (dispatch) => { - 
return function Intercept(opts, handler2) { + return function Intercept(opts, handler) { const { maxRedirections = defaultMaxRedirections } = opts; if (!maxRedirections) { - return dispatch(opts, handler2); + return dispatch(opts, handler); } - const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler2); + const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler); opts = { ...opts, maxRedirections: 0 }; return dispatch(opts, redirectHandler); }; @@ -7447,7 +7447,7 @@ var require_client = __commonJS({ var connectH2 = require_client_h2(); var deprecatedInterceptorWarned = false; var kClosedResolve = /* @__PURE__ */ Symbol("kClosedResolve"); - var noop2 = () => { + var noop = () => { }; function getPipelining(client) { return client[kPipelining] ?? client[kHTTPContext]?.defaultPipelining ?? 1; @@ -7632,12 +7632,12 @@ var require_client = __commonJS({ connect(this); this.once("connect", cb); } - [kDispatch](opts, handler2) { + [kDispatch](opts, handler) { const origin = opts.origin || this[kUrl].origin; - const request2 = new Request(origin, opts, handler2); - this[kQueue].push(request2); + const request = new Request(origin, opts, handler); + this[kQueue].push(request); if (this[kResuming]) { - } else if (util2.bodyLength(request2.body) == null && util2.isIterable(request2.body)) { + } else if (util2.bodyLength(request.body) == null && util2.isIterable(request.body)) { this[kResuming] = 1; queueMicrotask(() => resume(this)); } else { @@ -7661,8 +7661,8 @@ var require_client = __commonJS({ return new Promise((resolve4) => { const requests = this[kQueue].splice(this[kPendingIdx]); for (let i = 0; i < requests.length; i++) { - const request2 = requests[i]; - util2.errorRequest(this, request2, err); + const request = requests[i]; + util2.errorRequest(this, request, err); } const callback = () => { if (this[kClosedResolve]) { @@ -7687,8 +7687,8 @@ var require_client = __commonJS({ assert(client[kPendingIdx] === 
client[kRunningIdx]); const requests = client[kQueue].splice(client[kRunningIdx]); for (let i = 0; i < requests.length; i++) { - const request2 = requests[i]; - util2.errorRequest(client, request2, err); + const request = requests[i]; + util2.errorRequest(client, request, err); } assert(client[kSize] === 0); } @@ -7737,14 +7737,14 @@ var require_client = __commonJS({ }); }); if (client.destroyed) { - util2.destroy(socket.on("error", noop2), new ClientDestroyedError()); + util2.destroy(socket.on("error", noop), new ClientDestroyedError()); return; } assert(socket); try { client[kHTTPContext] = socket.alpnProtocol === "h2" ? await connectH2(client, socket) : await connectH1(client, socket); } catch (err) { - socket.destroy().on("error", noop2); + socket.destroy().on("error", noop); throw err; } client[kConnecting] = false; @@ -7791,8 +7791,8 @@ var require_client = __commonJS({ if (err.code === "ERR_TLS_CERT_ALTNAME_INVALID") { assert(client[kRunning] === 0); while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) { - const request2 = client[kQueue][client[kPendingIdx]++]; - util2.errorRequest(client, request2, err); + const request = client[kQueue][client[kPendingIdx]++]; + util2.errorRequest(client, request, err); } } else { onError(client, err); @@ -7849,12 +7849,12 @@ var require_client = __commonJS({ if (client[kRunning] >= (getPipelining(client) || 1)) { return; } - const request2 = client[kQueue][client[kPendingIdx]]; - if (client[kUrl].protocol === "https:" && client[kServerName] !== request2.servername) { + const request = client[kQueue][client[kPendingIdx]]; + if (client[kUrl].protocol === "https:" && client[kServerName] !== request.servername) { if (client[kRunning] > 0) { return; } - client[kServerName] = request2.servername; + client[kServerName] = request.servername; client[kHTTPContext]?.destroy(new InformationalError("servername changed"), () => { client[kHTTPContext] = null; resume(client); @@ -7870,10 
+7870,10 @@ var require_client = __commonJS({ if (client[kHTTPContext].destroyed) { return; } - if (client[kHTTPContext].busy(request2)) { + if (client[kHTTPContext].busy(request)) { return; } - if (!request2.aborted && client[kHTTPContext].write(request2)) { + if (!request.aborted && client[kHTTPContext].write(request)) { client[kPendingIdx]++; } else { client[kQueue].splice(client[kPendingIdx], 1); @@ -8083,13 +8083,13 @@ var require_pool_base = __commonJS({ } await Promise.all(this[kClients].map((c) => c.destroy(err))); } - [kDispatch](opts, handler2) { + [kDispatch](opts, handler) { const dispatcher = this[kGetDispatcher](); if (!dispatcher) { this[kNeedDrain] = true; - this[kQueue].push({ opts, handler: handler2 }); + this[kQueue].push({ opts, handler }); this[kQueued]++; - } else if (!dispatcher.dispatch(opts, handler2)) { + } else if (!dispatcher.dispatch(opts, handler)) { dispatcher[kNeedDrain] = true; this[kNeedDrain] = !this[kGetDispatcher](); } @@ -8425,7 +8425,7 @@ var require_agent = __commonJS({ } return ret; } - [kDispatch](opts, handler2) { + [kDispatch](opts, handler) { let key; if (opts.origin && (typeof opts.origin === "string" || opts.origin instanceof URL)) { key = String(opts.origin); @@ -8437,7 +8437,7 @@ var require_agent = __commonJS({ dispatcher = this[kFactory](opts.origin, this[kOptions]).on("drain", this[kOnDrain]).on("connect", this[kOnConnect]).on("disconnect", this[kOnDisconnect]).on("connectionError", this[kOnConnectionError]); this[kClients].set(key, dispatcher); } - return dispatcher.dispatch(opts, handler2); + return dispatcher.dispatch(opts, handler); } async [kClose]() { const closePromises = []; @@ -8485,7 +8485,7 @@ var require_proxy_agent = __commonJS({ function defaultFactory(origin, opts) { return new Pool(origin, opts); } - var noop2 = () => { + var noop = () => { }; function defaultAgentFactory(origin, opts) { if (opts.connections === 1) { @@ -8507,12 +8507,12 @@ var require_proxy_agent = __commonJS({ this.#client = new 
Client(proxyUrl, { connect }); } } - [kDispatch](opts, handler2) { - const onHeaders = handler2.onHeaders; - handler2.onHeaders = function(statusCode, data, resume) { + [kDispatch](opts, handler) { + const onHeaders = handler.onHeaders; + handler.onHeaders = function(statusCode, data, resume) { if (statusCode === 407) { - if (typeof handler2.onError === "function") { - handler2.onError(new InvalidArgumentError("Proxy Authentication Required (407)")); + if (typeof handler.onError === "function") { + handler.onError(new InvalidArgumentError("Proxy Authentication Required (407)")); } return; } @@ -8529,7 +8529,7 @@ var require_proxy_agent = __commonJS({ headers.host = host; } opts.headers = { ...this[kProxyHeaders], ...headers }; - return this.#client[kDispatch](opts, handler2); + return this.#client[kDispatch](opts, handler); } async [kClose]() { return this.#client.close(); @@ -8538,7 +8538,7 @@ var require_proxy_agent = __commonJS({ return this.#client.destroy(err); } }; - var ProxyAgent2 = class extends DispatcherBase { + var ProxyAgent3 = class extends DispatcherBase { constructor(opts) { super(); if (!opts || typeof opts === "object" && !(opts instanceof URL2) && !opts.uri) { @@ -8602,7 +8602,7 @@ var require_proxy_agent = __commonJS({ servername: this[kProxyTls]?.servername || proxyHostname }); if (statusCode !== 200) { - socket.on("error", noop2).destroy(); + socket.on("error", noop).destroy(); callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`)); } if (opts2.protocol !== "https:") { @@ -8626,7 +8626,7 @@ var require_proxy_agent = __commonJS({ } }); } - dispatch(opts, handler2) { + dispatch(opts, handler) { const headers = buildHeaders(opts.headers); throwIfProxyAuthIsSent(headers); if (headers && !("host" in headers) && !("Host" in headers)) { @@ -8638,7 +8638,7 @@ var require_proxy_agent = __commonJS({ ...opts, headers }, - handler2 + handler ); } /** @@ -8679,7 +8679,7 @@ var require_proxy_agent = __commonJS({ 
throw new InvalidArgumentError("Proxy-Authorization should be sent in ProxyAgent constructor"); } } - module2.exports = ProxyAgent2; + module2.exports = ProxyAgent3; } }); @@ -8689,7 +8689,7 @@ var require_env_http_proxy_agent = __commonJS({ "use strict"; var DispatcherBase = require_dispatcher_base(); var { kClose, kDestroy, kClosed, kDestroyed, kDispatch, kNoProxyAgent, kHttpProxyAgent, kHttpsProxyAgent } = require_symbols(); - var ProxyAgent2 = require_proxy_agent(); + var ProxyAgent3 = require_proxy_agent(); var Agent3 = require_agent(); var DEFAULT_PORTS = { "http:": 80, @@ -8713,22 +8713,22 @@ var require_env_http_proxy_agent = __commonJS({ this[kNoProxyAgent] = new Agent3(agentOpts); const HTTP_PROXY = httpProxy ?? process.env.http_proxy ?? process.env.HTTP_PROXY; if (HTTP_PROXY) { - this[kHttpProxyAgent] = new ProxyAgent2({ ...agentOpts, uri: HTTP_PROXY }); + this[kHttpProxyAgent] = new ProxyAgent3({ ...agentOpts, uri: HTTP_PROXY }); } else { this[kHttpProxyAgent] = this[kNoProxyAgent]; } const HTTPS_PROXY = httpsProxy ?? process.env.https_proxy ?? 
process.env.HTTPS_PROXY; if (HTTPS_PROXY) { - this[kHttpsProxyAgent] = new ProxyAgent2({ ...agentOpts, uri: HTTPS_PROXY }); + this[kHttpsProxyAgent] = new ProxyAgent3({ ...agentOpts, uri: HTTPS_PROXY }); } else { this[kHttpsProxyAgent] = this[kHttpProxyAgent]; } this.#parseNoProxy(); } - [kDispatch](opts, handler2) { + [kDispatch](opts, handler) { const url = new URL(opts.origin); const agent = this.#getProxyAgentForUrl(url); - return agent.dispatch(opts, handler2); + return agent.dispatch(opts, handler); } async [kClose]() { await this[kNoProxyAgent].close(); @@ -9131,13 +9131,13 @@ var require_retry_agent = __commonJS({ this.#agent = agent; this.#options = options; } - dispatch(opts, handler2) { + dispatch(opts, handler) { const retry = new RetryHandler({ ...opts, retryOptions: this.#options }, { dispatch: this.#agent.dispatch.bind(this.#agent), - handler: handler2 + handler }); return this.#agent.dispatch(opts, retry); } @@ -9167,7 +9167,7 @@ var require_readable = __commonJS({ var kAbort = /* @__PURE__ */ Symbol("kAbort"); var kContentType = /* @__PURE__ */ Symbol("kContentType"); var kContentLength = /* @__PURE__ */ Symbol("kContentLength"); - var noop2 = () => { + var noop = () => { }; var BodyReadable = class extends Readable { constructor({ @@ -9299,7 +9299,7 @@ var require_readable = __commonJS({ } else { resolve4(null); } - }).on("error", noop2).on("data", function(chunk) { + }).on("error", noop).on("data", function(chunk) { limit -= chunk.length; if (limit <= 0) { this.destroy(); @@ -9669,10 +9669,10 @@ var require_api_request = __commonJS({ } } }; - function request2(opts, callback) { + function request(opts, callback) { if (callback === void 0) { return new Promise((resolve4, reject) => { - request2.call(this, opts, (err, data) => { + request.call(this, opts, (err, data) => { return err ? 
reject(err) : resolve4(data); }); }); @@ -9687,7 +9687,7 @@ var require_api_request = __commonJS({ queueMicrotask(() => callback(err, { opaque })); } } - module2.exports = request2; + module2.exports = request; module2.exports.RequestHandler = RequestHandler; } }); @@ -9968,11 +9968,11 @@ var require_api_pipeline = __commonJS({ } }; var PipelineHandler = class extends AsyncResource { - constructor(opts, handler2) { + constructor(opts, handler) { if (!opts || typeof opts !== "object") { throw new InvalidArgumentError("invalid opts"); } - if (typeof handler2 !== "function") { + if (typeof handler !== "function") { throw new InvalidArgumentError("invalid handler"); } const { signal, method, opaque, onInfo, responseHeaders } = opts; @@ -9988,7 +9988,7 @@ var require_api_pipeline = __commonJS({ super("UNDICI_PIPELINE"); this.opaque = opaque || null; this.responseHeaders = responseHeaders || null; - this.handler = handler2; + this.handler = handler; this.abort = null; this.context = null; this.onInfo = onInfo || null; @@ -10043,7 +10043,7 @@ var require_api_pipeline = __commonJS({ this.context = context; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context } = this; + const { opaque, handler, context } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? util2.parseRawHeaders(rawHeaders) : util2.parseHeaders(rawHeaders); @@ -10056,7 +10056,7 @@ var require_api_pipeline = __commonJS({ try { this.handler = null; const headers = this.responseHeaders === "raw" ? 
util2.parseRawHeaders(rawHeaders) : util2.parseHeaders(rawHeaders); - body = this.runInAsyncScope(handler2, null, { + body = this.runInAsyncScope(handler, null, { statusCode, headers, opaque, @@ -10103,9 +10103,9 @@ var require_api_pipeline = __commonJS({ util2.destroy(ret, err); } }; - function pipeline2(opts, handler2) { + function pipeline2(opts, handler) { try { - const pipelineHandler = new PipelineHandler(opts, handler2); + const pipelineHandler = new PipelineHandler(opts, handler); this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler); return pipelineHandler.ret; } catch (err) { @@ -10463,7 +10463,7 @@ var require_mock_utils = __commonJS({ const headersMatch = matchHeaders(mockDispatch2, headers); return pathMatch && methodMatch && bodyMatch && headersMatch; } - function getResponseData2(data) { + function getResponseData(data) { if (Buffer.isBuffer(data)) { return data; } else if (data instanceof Uint8Array) { @@ -10553,7 +10553,7 @@ var require_mock_utils = __commonJS({ } return Buffer.concat(buffers).toString("utf8"); } - function mockDispatch(opts, handler2) { + function mockDispatch(opts, handler) { const key = buildKey(opts); const mockDispatch2 = getMockDispatch(this[kDispatches], key); mockDispatch2.timesInvoked++; @@ -10566,7 +10566,7 @@ var require_mock_utils = __commonJS({ mockDispatch2.pending = timesInvoked < times; if (error2 !== null) { deleteMockDispatch(this[kDispatches], key); - handler2.onError(error2); + handler.onError(error2); return true; } if (typeof delay === "number" && delay > 0) { @@ -10583,13 +10583,13 @@ var require_mock_utils = __commonJS({ body.then((newData) => handleReply(mockDispatches, newData)); return; } - const responseData = getResponseData2(body); + const responseData = getResponseData(body); const responseHeaders = generateKeyValues(headers); const responseTrailers = generateKeyValues(trailers); - handler2.onConnect?.((err) => handler2.onError(err), null); - handler2.onHeaders?.(statusCode, 
responseHeaders, resume, getStatusText(statusCode)); - handler2.onData?.(Buffer.from(responseData)); - handler2.onComplete?.(responseTrailers); + handler.onConnect?.((err) => handler.onError(err), null); + handler.onHeaders?.(statusCode, responseHeaders, resume, getStatusText(statusCode)); + handler.onData?.(Buffer.from(responseData)); + handler.onComplete?.(responseTrailers); deleteMockDispatch(mockDispatches, key); } function resume() { @@ -10600,10 +10600,10 @@ var require_mock_utils = __commonJS({ const agent = this[kMockAgent]; const origin = this[kOrigin]; const originalDispatch = this[kOriginalDispatch]; - return function dispatch(opts, handler2) { + return function dispatch(opts, handler) { if (agent.isMockActive) { try { - mockDispatch.call(this, opts, handler2); + mockDispatch.call(this, opts, handler); } catch (error2) { if (error2 instanceof MockNotMatchedError) { const netConnect = agent[kGetNetConnect](); @@ -10611,7 +10611,7 @@ var require_mock_utils = __commonJS({ throw new MockNotMatchedError(`${error2.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`); } if (checkNetConnect(netConnect, origin)) { - originalDispatch.call(this, opts, handler2); + originalDispatch.call(this, opts, handler); } else { throw new MockNotMatchedError(`${error2.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`); } @@ -10620,7 +10620,7 @@ var require_mock_utils = __commonJS({ } } } else { - originalDispatch.call(this, opts, handler2); + originalDispatch.call(this, opts, handler); } }; } @@ -10640,7 +10640,7 @@ var require_mock_utils = __commonJS({ } } module2.exports = { - getResponseData: getResponseData2, + getResponseData, getMockDispatch, addMockDispatch, deleteMockDispatch, @@ -10663,7 +10663,7 @@ var require_mock_utils = __commonJS({ var require_mock_interceptor = __commonJS({ "node_modules/undici/lib/mock/mock-interceptor.js"(exports2, module2) { "use strict"; - var { 
getResponseData: getResponseData2, buildKey, addMockDispatch } = require_mock_utils(); + var { getResponseData, buildKey, addMockDispatch } = require_mock_utils(); var { kDispatches, kDispatchKey, @@ -10735,7 +10735,7 @@ var require_mock_interceptor = __commonJS({ this[kContentLength] = false; } createMockScopeDispatchData({ statusCode, data, responseOptions }) { - const responseData = getResponseData2(data); + const responseData = getResponseData(data); const contentLength = this[kContentLength] ? { "content-length": responseData.length } : {}; const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers }; const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers }; @@ -11044,9 +11044,9 @@ var require_mock_agent = __commonJS({ } return dispatcher; } - dispatch(opts, handler2) { + dispatch(opts, handler) { this.get(opts.origin); - return this[kAgent].dispatch(opts, handler2); + return this[kAgent].dispatch(opts, handler); } async close() { await this[kAgent].close(); @@ -11166,11 +11166,11 @@ var require_decorator_handler = __commonJS({ "use strict"; module2.exports = class DecoratorHandler { #handler; - constructor(handler2) { - if (typeof handler2 !== "object" || handler2 === null) { + constructor(handler) { + if (typeof handler !== "object" || handler === null) { throw new TypeError("handler must be an object"); } - this.#handler = handler2; + this.#handler = handler; } onConnect(...args2) { return this.#handler.onConnect?.(...args2); @@ -11208,16 +11208,16 @@ var require_redirect = __commonJS({ module2.exports = (opts) => { const globalMaxRedirections = opts?.maxRedirections; return (dispatch) => { - return function redirectInterceptor(opts2, handler2) { + return function redirectInterceptor(opts2, handler) { const { maxRedirections = globalMaxRedirections, ...baseOpts } = opts2; if (!maxRedirections) { - return dispatch(opts2, handler2); + return dispatch(opts2, handler); } const redirectHandler = new 
RedirectHandler( dispatch, maxRedirections, opts2, - handler2 + handler ); return dispatch(baseOpts, redirectHandler); }; @@ -11233,13 +11233,13 @@ var require_retry = __commonJS({ var RetryHandler = require_retry_handler(); module2.exports = (globalOpts) => { return (dispatch) => { - return function retryInterceptor(opts, handler2) { + return function retryInterceptor(opts, handler) { return dispatch( opts, new RetryHandler( { ...opts, retryOptions: { ...globalOpts, ...opts.retryOptions } }, { - handler: handler2, + handler, dispatch } ) @@ -11265,13 +11265,13 @@ var require_dump = __commonJS({ #size = 0; #reason = null; #handler = null; - constructor({ maxSize }, handler2) { - super(handler2); + constructor({ maxSize }, handler) { + super(handler); if (maxSize != null && (!Number.isFinite(maxSize) || maxSize < 1)) { throw new InvalidArgumentError("maxSize must be a number greater than 0"); } this.#maxSize = maxSize ?? this.#maxSize; - this.#handler = handler2; + this.#handler = handler; } onConnect(abort) { this.#abort = abort; @@ -11334,11 +11334,11 @@ var require_dump = __commonJS({ maxSize: 1024 * 1024 }) { return (dispatch) => { - return function Intercept(opts, handler2) { + return function Intercept(opts, handler) { const { dumpMaxSize = defaultMaxSize } = opts; const dumpHandler = new DumpHandler( { maxSize: dumpMaxSize }, - handler2 + handler ); return dispatch(opts, dumpHandler); }; @@ -11529,10 +11529,10 @@ var require_dns = __commonJS({ #dispatch = null; #handler = null; #origin = null; - constructor(state, { origin, handler: handler2, dispatch }, opts) { - super(handler2); + constructor(state, { origin, handler, dispatch }, opts) { + super(handler); this.#origin = origin; - this.#handler = handler2; + this.#handler = handler; this.#opts = { ...opts }; this.#state = state; this.#dispatch = dispatch; @@ -11605,14 +11605,14 @@ var require_dns = __commonJS({ }; const instance = new DNSInstance(opts); return (dispatch) => { - return function 
dnsInterceptor(origDispatchOpts, handler2) { + return function dnsInterceptor(origDispatchOpts, handler) { const origin = origDispatchOpts.origin.constructor === URL ? origDispatchOpts.origin : new URL(origDispatchOpts.origin); if (isIP(origin.hostname) !== 0) { - return dispatch(origDispatchOpts, handler2); + return dispatch(origDispatchOpts, handler); } instance.runLookup(origin, origDispatchOpts, (err, newOrigin) => { if (err) { - return handler2.onError(err); + return handler.onError(err); } let dispatchOpts = null; dispatchOpts = { @@ -11627,7 +11627,7 @@ var require_dns = __commonJS({ }; dispatch( dispatchOpts, - instance.getHandler({ origin, dispatch, handler: handler2 }, origDispatchOpts) + instance.getHandler({ origin, dispatch, handler }, origDispatchOpts) ); }); return true; @@ -11836,12 +11836,12 @@ var require_headers = __commonJS({ if (size === 0) { return array; } - const iterator2 = this[kHeadersMap][Symbol.iterator](); - const firstValue = iterator2.next().value; + const iterator = this[kHeadersMap][Symbol.iterator](); + const firstValue = iterator.next().value; array[0] = [firstValue[0], firstValue[1].value]; assert(firstValue[1].value !== null); for (let i = 1, j = 0, right = 0, left = 0, pivot = 0, x, value; i < size; ++i) { - value = iterator2.next().value; + value = iterator.next().value; x = array[i] = [value[0], value[1].value]; assert(x[1] !== null); left = 0; @@ -11862,7 +11862,7 @@ var require_headers = __commonJS({ array[left] = x; } } - if (!iterator2.next().done) { + if (!iterator.next().done) { throw new TypeError("Unreachable"); } return array; @@ -12049,15 +12049,15 @@ var require_headers = __commonJS({ }); webidl.converters.HeadersInit = function(V, prefix, argument) { if (webidl.util.Type(V) === "Object") { - const iterator2 = Reflect.get(V, Symbol.iterator); - if (!util2.types.isProxy(V) && iterator2 === Headers2.prototype.entries) { + const iterator = Reflect.get(V, Symbol.iterator); + if (!util2.types.isProxy(V) && iterator === 
Headers2.prototype.entries) { try { return getHeadersList(V).entriesList; } catch { } } - if (typeof iterator2 === "function") { - return webidl.converters["sequence>"](V, prefix, argument, iterator2.bind(V)); + if (typeof iterator === "function") { + return webidl.converters["sequence>"](V, prefix, argument, iterator.bind(V)); } return webidl.converters["record"](V, prefix, argument); } @@ -12594,7 +12594,7 @@ var require_request2 = __commonJS({ webidl.argumentLengthCheck(arguments, 1, prefix); input = webidl.converters.RequestInfo(input, prefix, "input"); init = webidl.converters.RequestInit(init, prefix, "init"); - let request2 = null; + let request = null; let fallbackMode = null; const baseUrl = environmentSettingsObject.settingsObject.baseUrl; let signal = null; @@ -12611,18 +12611,18 @@ var require_request2 = __commonJS({ "Request cannot be constructed from a URL that includes credentials: " + input ); } - request2 = makeRequest({ urlList: [parsedURL] }); + request = makeRequest({ urlList: [parsedURL] }); fallbackMode = "cors"; } else { this[kDispatcher] = init.dispatcher || input[kDispatcher]; assert(input instanceof _Request); - request2 = input[kState]; + request = input[kState]; signal = input[kSignal]; } const origin = environmentSettingsObject.settingsObject.origin; let window = "client"; - if (request2.window?.constructor?.name === "EnvironmentSettingsObject" && sameOrigin(request2.window, origin)) { - window = request2.window; + if (request.window?.constructor?.name === "EnvironmentSettingsObject" && sameOrigin(request.window, origin)) { + window = request.window; } if (init.window != null) { throw new TypeError(`'window' option '${window}' must be null`); @@ -12630,66 +12630,66 @@ var require_request2 = __commonJS({ if ("window" in init) { window = "no-window"; } - request2 = makeRequest({ + request = makeRequest({ // URL request’s URL. 
// undici implementation note: this is set as the first item in request's urlList in makeRequest // method request’s method. - method: request2.method, + method: request.method, // header list A copy of request’s header list. // undici implementation note: headersList is cloned in makeRequest - headersList: request2.headersList, + headersList: request.headersList, // unsafe-request flag Set. - unsafeRequest: request2.unsafeRequest, + unsafeRequest: request.unsafeRequest, // client This’s relevant settings object. client: environmentSettingsObject.settingsObject, // window window. window, // priority request’s priority. - priority: request2.priority, + priority: request.priority, // origin request’s origin. The propagation of the origin is only significant for navigation requests // being handled by a service worker. In this scenario a request can have an origin that is different // from the current client. - origin: request2.origin, + origin: request.origin, // referrer request’s referrer. - referrer: request2.referrer, + referrer: request.referrer, // referrer policy request’s referrer policy. - referrerPolicy: request2.referrerPolicy, + referrerPolicy: request.referrerPolicy, // mode request’s mode. - mode: request2.mode, + mode: request.mode, // credentials mode request’s credentials mode. - credentials: request2.credentials, + credentials: request.credentials, // cache mode request’s cache mode. - cache: request2.cache, + cache: request.cache, // redirect mode request’s redirect mode. - redirect: request2.redirect, + redirect: request.redirect, // integrity metadata request’s integrity metadata. - integrity: request2.integrity, + integrity: request.integrity, // keepalive request’s keepalive. - keepalive: request2.keepalive, + keepalive: request.keepalive, // reload-navigation flag request’s reload-navigation flag. 
- reloadNavigation: request2.reloadNavigation, + reloadNavigation: request.reloadNavigation, // history-navigation flag request’s history-navigation flag. - historyNavigation: request2.historyNavigation, + historyNavigation: request.historyNavigation, // URL list A clone of request’s URL list. - urlList: [...request2.urlList] + urlList: [...request.urlList] }); const initHasKey = Object.keys(init).length !== 0; if (initHasKey) { - if (request2.mode === "navigate") { - request2.mode = "same-origin"; + if (request.mode === "navigate") { + request.mode = "same-origin"; } - request2.reloadNavigation = false; - request2.historyNavigation = false; - request2.origin = "client"; - request2.referrer = "client"; - request2.referrerPolicy = ""; - request2.url = request2.urlList[request2.urlList.length - 1]; - request2.urlList = [request2.url]; + request.reloadNavigation = false; + request.historyNavigation = false; + request.origin = "client"; + request.referrer = "client"; + request.referrerPolicy = ""; + request.url = request.urlList[request.urlList.length - 1]; + request.urlList = [request.url]; } if (init.referrer !== void 0) { const referrer = init.referrer; if (referrer === "") { - request2.referrer = "no-referrer"; + request.referrer = "no-referrer"; } else { let parsedReferrer; try { @@ -12698,14 +12698,14 @@ var require_request2 = __commonJS({ throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err }); } if (parsedReferrer.protocol === "about:" && parsedReferrer.hostname === "client" || origin && !sameOrigin(parsedReferrer, environmentSettingsObject.settingsObject.baseUrl)) { - request2.referrer = "client"; + request.referrer = "client"; } else { - request2.referrer = parsedReferrer; + request.referrer = parsedReferrer; } } } if (init.referrerPolicy !== void 0) { - request2.referrerPolicy = init.referrerPolicy; + request.referrerPolicy = init.referrerPolicy; } let mode; if (init.mode !== void 0) { @@ -12720,33 +12720,33 @@ var require_request2 
= __commonJS({ }); } if (mode != null) { - request2.mode = mode; + request.mode = mode; } if (init.credentials !== void 0) { - request2.credentials = init.credentials; + request.credentials = init.credentials; } if (init.cache !== void 0) { - request2.cache = init.cache; + request.cache = init.cache; } - if (request2.cache === "only-if-cached" && request2.mode !== "same-origin") { + if (request.cache === "only-if-cached" && request.mode !== "same-origin") { throw new TypeError( "'only-if-cached' can be set only with 'same-origin' mode" ); } if (init.redirect !== void 0) { - request2.redirect = init.redirect; + request.redirect = init.redirect; } if (init.integrity != null) { - request2.integrity = String(init.integrity); + request.integrity = String(init.integrity); } if (init.keepalive !== void 0) { - request2.keepalive = Boolean(init.keepalive); + request.keepalive = Boolean(init.keepalive); } if (init.method !== void 0) { let method = init.method; const mayBeNormalized = normalizedMethodRecords[method]; if (mayBeNormalized !== void 0) { - request2.method = mayBeNormalized; + request.method = mayBeNormalized; } else { if (!isValidHTTPToken(method)) { throw new TypeError(`'${method}' is not a valid HTTP method.`); @@ -12756,9 +12756,9 @@ var require_request2 = __commonJS({ throw new TypeError(`'${method}' HTTP method is unsupported.`); } method = normalizedMethodRecordsBase[upperCase] ?? method; - request2.method = method; + request.method = method; } - if (!patchMethodWarning && request2.method === "patch") { + if (!patchMethodWarning && request.method === "patch") { process.emitWarning("Using `patch` is highly likely to result in a `405 Method Not Allowed`. 
`PATCH` is much more likely to succeed.", { code: "UNDICI-FETCH-patch" }); @@ -12768,7 +12768,7 @@ var require_request2 = __commonJS({ if (init.signal !== void 0) { signal = init.signal; } - this[kState] = request2; + this[kState] = request; const ac = new AbortController(); this[kSignal] = ac.signal; if (signal != null) { @@ -12796,12 +12796,12 @@ var require_request2 = __commonJS({ } } this[kHeaders] = new Headers2(kConstruct); - setHeadersList(this[kHeaders], request2.headersList); + setHeadersList(this[kHeaders], request.headersList); setHeadersGuard(this[kHeaders], "request"); if (mode === "no-cors") { - if (!corsSafeListedMethodsSet.has(request2.method)) { + if (!corsSafeListedMethodsSet.has(request.method)) { throw new TypeError( - `'${request2.method} is unsupported in no-cors mode.` + `'${request.method} is unsupported in no-cors mode.` ); } setHeadersGuard(this[kHeaders], "request-no-cors"); @@ -12820,14 +12820,14 @@ var require_request2 = __commonJS({ } } const inputBody = input instanceof _Request ? 
input[kState].body : null; - if ((init.body != null || inputBody != null) && (request2.method === "GET" || request2.method === "HEAD")) { + if ((init.body != null || inputBody != null) && (request.method === "GET" || request.method === "HEAD")) { throw new TypeError("Request with GET/HEAD method cannot have body."); } let initBody = null; if (init.body != null) { const [extractedBody, contentType] = extractBody( init.body, - request2.keepalive + request.keepalive ); initBody = extractedBody; if (contentType && !getHeadersList(this[kHeaders]).contains("content-type", true)) { @@ -12839,12 +12839,12 @@ var require_request2 = __commonJS({ if (initBody != null && init.duplex == null) { throw new TypeError("RequestInit: duplex option is required when sending a body."); } - if (request2.mode !== "same-origin" && request2.mode !== "cors") { + if (request.mode !== "same-origin" && request.mode !== "cors") { throw new TypeError( 'If request is made from ReadableStream, mode should be "same-origin" or "cors"' ); } - request2.useCORSPreflightFlag = true; + request.useCORSPreflightFlag = true; } let finalBody = inputOrInitBody; if (initBody == null && inputBody != null) { @@ -13073,21 +13073,21 @@ var require_request2 = __commonJS({ headersList: init.headersList ? 
new HeadersList(init.headersList) : new HeadersList() }; } - function cloneRequest(request2) { - const newRequest = makeRequest({ ...request2, body: null }); - if (request2.body != null) { - newRequest.body = cloneBody(newRequest, request2.body); + function cloneRequest(request) { + const newRequest = makeRequest({ ...request, body: null }); + if (request.body != null) { + newRequest.body = cloneBody(newRequest, request.body); } return newRequest; } function fromInnerRequest(innerRequest, signal, guard) { - const request2 = new Request(kConstruct); - request2[kState] = innerRequest; - request2[kSignal] = signal; - request2[kHeaders] = new Headers2(kConstruct); - setHeadersList(request2[kHeaders], innerRequest.headersList); - setHeadersGuard(request2[kHeaders], guard); - return request2; + const request = new Request(kConstruct); + request[kState] = innerRequest; + request[kSignal] = signal; + request[kHeaders] = new Headers2(kConstruct); + setHeadersList(request[kHeaders], innerRequest.headersList); + setHeadersGuard(request[kHeaders], guard); + return request; } Object.defineProperties(Request.prototype, { method: kEnumerableProperty, @@ -13319,7 +13319,7 @@ var require_fetch = __commonJS({ function handleFetchDone(response) { finalizeAndReportTiming(response, "fetch"); } - function fetch(input, init = void 0) { + function fetch2(input, init = void 0) { webidl.argumentLengthCheck(arguments, 1, "globalThis.fetch"); let p = createDeferredPromise(); let requestObject; @@ -13329,14 +13329,14 @@ var require_fetch = __commonJS({ p.reject(e); return p.promise; } - const request2 = requestObject[kState]; + const request = requestObject[kState]; if (requestObject.signal.aborted) { - abortFetch(p, request2, null, requestObject.signal.reason); + abortFetch(p, request, null, requestObject.signal.reason); return p.promise; } - const globalObject = request2.client.globalObject; + const globalObject = request.client.globalObject; if (globalObject?.constructor?.name === 
"ServiceWorkerGlobalScope") { - request2.serviceWorkers = "none"; + request.serviceWorkers = "none"; } let responseObject = null; let locallyAborted = false; @@ -13348,7 +13348,7 @@ var require_fetch = __commonJS({ assert(controller != null); controller.abort(requestObject.signal.reason); const realResponse = responseObject?.deref(); - abortFetch(p, request2, realResponse, requestObject.signal.reason); + abortFetch(p, request, realResponse, requestObject.signal.reason); } ); const processResponse = (response) => { @@ -13356,7 +13356,7 @@ var require_fetch = __commonJS({ return; } if (response.aborted) { - abortFetch(p, request2, responseObject, controller.serializedAbortReason); + abortFetch(p, request, responseObject, controller.serializedAbortReason); return; } if (response.type === "error") { @@ -13368,7 +13368,7 @@ var require_fetch = __commonJS({ p = null; }; controller = fetching({ - request: request2, + request, processResponseEndOfBody: handleFetchDone, processResponse, dispatcher: requestObject[kDispatcher] @@ -13409,12 +13409,12 @@ var require_fetch = __commonJS({ ); } var markResourceTiming = performance.markResourceTiming; - function abortFetch(p, request2, responseObject, error2) { + function abortFetch(p, request, responseObject, error2) { if (p) { p.reject(error2); } - if (request2.body != null && isReadable(request2.body?.stream)) { - request2.body.stream.cancel(error2).catch((err) => { + if (request.body != null && isReadable(request.body?.stream)) { + request.body.stream.cancel(error2).catch((err) => { if (err.code === "ERR_INVALID_STATE") { return; } @@ -13435,7 +13435,7 @@ var require_fetch = __commonJS({ } } function fetching({ - request: request2, + request, processRequestBodyChunkLength, processRequestEndOfBody, processResponse, @@ -13448,9 +13448,9 @@ var require_fetch = __commonJS({ assert(dispatcher); let taskDestination = null; let crossOriginIsolatedCapability = false; - if (request2.client != null) { - taskDestination = 
request2.client.globalObject; - crossOriginIsolatedCapability = request2.client.crossOriginIsolatedCapability; + if (request.client != null) { + taskDestination = request.client.globalObject; + crossOriginIsolatedCapability = request.client.crossOriginIsolatedCapability; } const currentTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability); const timingInfo = createOpaqueTimingInfo({ @@ -13458,7 +13458,7 @@ var require_fetch = __commonJS({ }); const fetchParams = { controller: new Fetch(dispatcher), - request: request2, + request, timingInfo, processRequestBodyChunkLength, processRequestEndOfBody, @@ -13468,32 +13468,32 @@ var require_fetch = __commonJS({ taskDestination, crossOriginIsolatedCapability }; - assert(!request2.body || request2.body.stream); - if (request2.window === "client") { - request2.window = request2.client?.globalObject?.constructor?.name === "Window" ? request2.client : "no-window"; + assert(!request.body || request.body.stream); + if (request.window === "client") { + request.window = request.client?.globalObject?.constructor?.name === "Window" ? 
request.client : "no-window"; } - if (request2.origin === "client") { - request2.origin = request2.client.origin; + if (request.origin === "client") { + request.origin = request.client.origin; } - if (request2.policyContainer === "client") { - if (request2.client != null) { - request2.policyContainer = clonePolicyContainer( - request2.client.policyContainer + if (request.policyContainer === "client") { + if (request.client != null) { + request.policyContainer = clonePolicyContainer( + request.client.policyContainer ); } else { - request2.policyContainer = makePolicyContainer(); + request.policyContainer = makePolicyContainer(); } } - if (!request2.headersList.contains("accept", true)) { + if (!request.headersList.contains("accept", true)) { const value = "*/*"; - request2.headersList.append("accept", value, true); + request.headersList.append("accept", value, true); } - if (!request2.headersList.contains("accept-language", true)) { - request2.headersList.append("accept-language", "*", true); + if (!request.headersList.contains("accept-language", true)) { + request.headersList.append("accept-language", "*", true); } - if (request2.priority === null) { + if (request.priority === null) { } - if (subresourceSet.has(request2.destination)) { + if (subresourceSet.has(request.destination)) { } mainFetch(fetchParams).catch((err) => { fetchParams.controller.terminate(err); @@ -13501,50 +13501,50 @@ var require_fetch = __commonJS({ return fetchParams.controller; } async function mainFetch(fetchParams, recursive = false) { - const request2 = fetchParams.request; + const request = fetchParams.request; let response = null; - if (request2.localURLsOnly && !urlIsLocal(requestCurrentURL(request2))) { + if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) { response = makeNetworkError("local URLs only"); } - tryUpgradeRequestToAPotentiallyTrustworthyURL(request2); - if (requestBadPort(request2) === "blocked") { + 
tryUpgradeRequestToAPotentiallyTrustworthyURL(request); + if (requestBadPort(request) === "blocked") { response = makeNetworkError("bad port"); } - if (request2.referrerPolicy === "") { - request2.referrerPolicy = request2.policyContainer.referrerPolicy; + if (request.referrerPolicy === "") { + request.referrerPolicy = request.policyContainer.referrerPolicy; } - if (request2.referrer !== "no-referrer") { - request2.referrer = determineRequestsReferrer(request2); + if (request.referrer !== "no-referrer") { + request.referrer = determineRequestsReferrer(request); } if (response === null) { response = await (async () => { - const currentURL = requestCurrentURL(request2); + const currentURL = requestCurrentURL(request); if ( // - request’s current URL’s origin is same origin with request’s origin, // and request’s response tainting is "basic" - sameOrigin(currentURL, request2.url) && request2.responseTainting === "basic" || // request’s current URL’s scheme is "data" + sameOrigin(currentURL, request.url) && request.responseTainting === "basic" || // request’s current URL’s scheme is "data" currentURL.protocol === "data:" || // - request’s mode is "navigate" or "websocket" - (request2.mode === "navigate" || request2.mode === "websocket") + (request.mode === "navigate" || request.mode === "websocket") ) { - request2.responseTainting = "basic"; + request.responseTainting = "basic"; return await schemeFetch(fetchParams); } - if (request2.mode === "same-origin") { + if (request.mode === "same-origin") { return makeNetworkError('request mode cannot be "same-origin"'); } - if (request2.mode === "no-cors") { - if (request2.redirect !== "follow") { + if (request.mode === "no-cors") { + if (request.redirect !== "follow") { return makeNetworkError( 'redirect mode cannot be "follow" for "no-cors" request' ); } - request2.responseTainting = "opaque"; + request.responseTainting = "opaque"; return await schemeFetch(fetchParams); } - if 
(!urlIsHttpHttpsScheme(requestCurrentURL(request2))) { + if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) { return makeNetworkError("URL scheme must be a HTTP(S) scheme"); } - request2.responseTainting = "cors"; + request.responseTainting = "cors"; return await httpFetch(fetchParams); })(); } @@ -13552,13 +13552,13 @@ var require_fetch = __commonJS({ return response; } if (response.status !== 0 && !response.internalResponse) { - if (request2.responseTainting === "cors") { + if (request.responseTainting === "cors") { } - if (request2.responseTainting === "basic") { + if (request.responseTainting === "basic") { response = filterResponse(response, "basic"); - } else if (request2.responseTainting === "cors") { + } else if (request.responseTainting === "cors") { response = filterResponse(response, "cors"); - } else if (request2.responseTainting === "opaque") { + } else if (request.responseTainting === "opaque") { response = filterResponse(response, "opaque"); } else { assert(false); @@ -13566,26 +13566,26 @@ var require_fetch = __commonJS({ } let internalResponse = response.status === 0 ? 
response : response.internalResponse; if (internalResponse.urlList.length === 0) { - internalResponse.urlList.push(...request2.urlList); + internalResponse.urlList.push(...request.urlList); } - if (!request2.timingAllowFailed) { + if (!request.timingAllowFailed) { response.timingAllowPassed = true; } - if (response.type === "opaque" && internalResponse.status === 206 && internalResponse.rangeRequested && !request2.headers.contains("range", true)) { + if (response.type === "opaque" && internalResponse.status === 206 && internalResponse.rangeRequested && !request.headers.contains("range", true)) { response = internalResponse = makeNetworkError(); } - if (response.status !== 0 && (request2.method === "HEAD" || request2.method === "CONNECT" || nullBodyStatus.includes(internalResponse.status))) { + if (response.status !== 0 && (request.method === "HEAD" || request.method === "CONNECT" || nullBodyStatus.includes(internalResponse.status))) { internalResponse.body = null; fetchParams.controller.dump = true; } - if (request2.integrity) { + if (request.integrity) { const processBodyError = (reason) => fetchFinale(fetchParams, makeNetworkError(reason)); - if (request2.responseTainting === "opaque" || response.body == null) { + if (request.responseTainting === "opaque" || response.body == null) { processBodyError(response.error); return; } const processBody = (bytes) => { - if (!bytesMatch(bytes, request2.integrity)) { + if (!bytesMatch(bytes, request.integrity)) { processBodyError("integrity mismatch"); return; } @@ -13601,8 +13601,8 @@ var require_fetch = __commonJS({ if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) { return Promise.resolve(makeAppropriateNetworkError(fetchParams)); } - const { request: request2 } = fetchParams; - const { protocol: scheme } = requestCurrentURL(request2); + const { request } = fetchParams; + const { protocol: scheme } = requestCurrentURL(request); switch (scheme) { case "about:": { return 
Promise.resolve(makeNetworkError("about scheme is not supported")); @@ -13611,19 +13611,19 @@ var require_fetch = __commonJS({ if (!resolveObjectURL) { resolveObjectURL = require("node:buffer").resolveObjectURL; } - const blobURLEntry = requestCurrentURL(request2); + const blobURLEntry = requestCurrentURL(request); if (blobURLEntry.search.length !== 0) { return Promise.resolve(makeNetworkError("NetworkError when attempting to fetch resource.")); } const blob = resolveObjectURL(blobURLEntry.toString()); - if (request2.method !== "GET" || !isBlobLike(blob)) { + if (request.method !== "GET" || !isBlobLike(blob)) { return Promise.resolve(makeNetworkError("invalid method")); } const response = makeResponse(); const fullLength = blob.size; const serializedFullLength = isomorphicEncode(`${fullLength}`); const type = blob.type; - if (!request2.headersList.contains("range", true)) { + if (!request.headersList.contains("range", true)) { const bodyWithType = extractBody(blob); response.statusText = "OK"; response.body = bodyWithType[0]; @@ -13631,7 +13631,7 @@ var require_fetch = __commonJS({ response.headersList.set("content-type", type, true); } else { response.rangeRequested = true; - const rangeHeader = request2.headersList.get("range", true); + const rangeHeader = request.headersList.get("range", true); const rangeValue = simpleRangeHeaderValue(rangeHeader, true); if (rangeValue === "failure") { return Promise.resolve(makeNetworkError("failed to fetch the data URL")); @@ -13662,7 +13662,7 @@ var require_fetch = __commonJS({ return Promise.resolve(response); } case "data:": { - const currentURL = requestCurrentURL(request2); + const currentURL = requestCurrentURL(request); const dataURLStruct = dataURLProcessor(currentURL); if (dataURLStruct === "failure") { return Promise.resolve(makeNetworkError("failed to fetch the data URL")); @@ -13751,41 +13751,41 @@ var require_fetch = __commonJS({ } } async function httpFetch(fetchParams) { - const request2 = fetchParams.request; 
+ const request = fetchParams.request; let response = null; let actualResponse = null; const timingInfo = fetchParams.timingInfo; - if (request2.serviceWorkers === "all") { + if (request.serviceWorkers === "all") { } if (response === null) { - if (request2.redirect === "follow") { - request2.serviceWorkers = "none"; + if (request.redirect === "follow") { + request.serviceWorkers = "none"; } actualResponse = response = await httpNetworkOrCacheFetch(fetchParams); - if (request2.responseTainting === "cors" && corsCheck(request2, response) === "failure") { + if (request.responseTainting === "cors" && corsCheck(request, response) === "failure") { return makeNetworkError("cors failure"); } - if (TAOCheck(request2, response) === "failure") { - request2.timingAllowFailed = true; + if (TAOCheck(request, response) === "failure") { + request.timingAllowFailed = true; } } - if ((request2.responseTainting === "opaque" || response.type === "opaque") && crossOriginResourcePolicyCheck( - request2.origin, - request2.client, - request2.destination, + if ((request.responseTainting === "opaque" || response.type === "opaque") && crossOriginResourcePolicyCheck( + request.origin, + request.client, + request.destination, actualResponse ) === "blocked") { return makeNetworkError("blocked"); } if (redirectStatusSet.has(actualResponse.status)) { - if (request2.redirect !== "manual") { + if (request.redirect !== "manual") { fetchParams.controller.connection.destroy(void 0, false); } - if (request2.redirect === "error") { + if (request.redirect === "error") { response = makeNetworkError("unexpected redirect"); - } else if (request2.redirect === "manual") { + } else if (request.redirect === "manual") { response = actualResponse; - } else if (request2.redirect === "follow") { + } else if (request.redirect === "follow") { response = await httpRedirectFetch(fetchParams, response); } else { assert(false); @@ -13795,13 +13795,13 @@ var require_fetch = __commonJS({ return response; } function 
httpRedirectFetch(fetchParams, response) { - const request2 = fetchParams.request; + const request = fetchParams.request; const actualResponse = response.internalResponse ? response.internalResponse : response; let locationURL; try { locationURL = responseLocationURL( actualResponse, - requestCurrentURL(request2).hash + requestCurrentURL(request).hash ); if (locationURL == null) { return response; @@ -13812,63 +13812,63 @@ var require_fetch = __commonJS({ if (!urlIsHttpHttpsScheme(locationURL)) { return Promise.resolve(makeNetworkError("URL scheme must be a HTTP(S) scheme")); } - if (request2.redirectCount === 20) { + if (request.redirectCount === 20) { return Promise.resolve(makeNetworkError("redirect count exceeded")); } - request2.redirectCount += 1; - if (request2.mode === "cors" && (locationURL.username || locationURL.password) && !sameOrigin(request2, locationURL)) { + request.redirectCount += 1; + if (request.mode === "cors" && (locationURL.username || locationURL.password) && !sameOrigin(request, locationURL)) { return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"')); } - if (request2.responseTainting === "cors" && (locationURL.username || locationURL.password)) { + if (request.responseTainting === "cors" && (locationURL.username || locationURL.password)) { return Promise.resolve(makeNetworkError( 'URL cannot contain credentials for request mode "cors"' )); } - if (actualResponse.status !== 303 && request2.body != null && request2.body.source == null) { + if (actualResponse.status !== 303 && request.body != null && request.body.source == null) { return Promise.resolve(makeNetworkError()); } - if ([301, 302].includes(actualResponse.status) && request2.method === "POST" || actualResponse.status === 303 && !GET_OR_HEAD.includes(request2.method)) { - request2.method = "GET"; - request2.body = null; + if ([301, 302].includes(actualResponse.status) && request.method === "POST" || actualResponse.status === 303 && 
!GET_OR_HEAD.includes(request.method)) { + request.method = "GET"; + request.body = null; for (const headerName of requestBodyHeader) { - request2.headersList.delete(headerName); + request.headersList.delete(headerName); } } - if (!sameOrigin(requestCurrentURL(request2), locationURL)) { - request2.headersList.delete("authorization", true); - request2.headersList.delete("proxy-authorization", true); - request2.headersList.delete("cookie", true); - request2.headersList.delete("host", true); + if (!sameOrigin(requestCurrentURL(request), locationURL)) { + request.headersList.delete("authorization", true); + request.headersList.delete("proxy-authorization", true); + request.headersList.delete("cookie", true); + request.headersList.delete("host", true); } - if (request2.body != null) { - assert(request2.body.source != null); - request2.body = safelyExtractBody(request2.body.source)[0]; + if (request.body != null) { + assert(request.body.source != null); + request.body = safelyExtractBody(request.body.source)[0]; } const timingInfo = fetchParams.timingInfo; timingInfo.redirectEndTime = timingInfo.postRedirectStartTime = coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability); if (timingInfo.redirectStartTime === 0) { timingInfo.redirectStartTime = timingInfo.startTime; } - request2.urlList.push(locationURL); - setRequestReferrerPolicyOnRedirect(request2, actualResponse); + request.urlList.push(locationURL); + setRequestReferrerPolicyOnRedirect(request, actualResponse); return mainFetch(fetchParams, true); } async function httpNetworkOrCacheFetch(fetchParams, isAuthenticationFetch = false, isNewConnectionFetch = false) { - const request2 = fetchParams.request; + const request = fetchParams.request; let httpFetchParams = null; let httpRequest = null; let response = null; const httpCache = null; const revalidatingFlag = false; - if (request2.window === "no-window" && request2.redirect === "error") { + if (request.window === "no-window" && request.redirect === 
"error") { httpFetchParams = fetchParams; - httpRequest = request2; + httpRequest = request; } else { - httpRequest = cloneRequest(request2); + httpRequest = cloneRequest(request); httpFetchParams = { ...fetchParams }; httpFetchParams.request = httpRequest; } - const includeCredentials = request2.credentials === "include" || request2.credentials === "same-origin" && request2.responseTainting === "basic"; + const includeCredentials = request.credentials === "include" || request.credentials === "same-origin" && request.responseTainting === "basic"; const contentLength = httpRequest.body ? httpRequest.body.length : null; let contentLengthHeaderValue = null; if (httpRequest.body == null && ["POST", "PUT"].includes(httpRequest.method)) { @@ -13945,7 +13945,7 @@ var require_fetch = __commonJS({ } response.requestIncludesCredentials = includeCredentials; if (response.status === 407) { - if (request2.window === "no-window") { + if (request.window === "no-window") { return makeNetworkError(); } if (isCancelled(fetchParams)) { @@ -13957,7 +13957,7 @@ var require_fetch = __commonJS({ // response’s status is 421 response.status === 421 && // isNewConnectionFetch is false !isNewConnectionFetch && // request’s body is null, or request’s body is non-null and request’s body’s source is non-null - (request2.body == null || request2.body.source != null) + (request.body == null || request.body.source != null) ) { if (isCancelled(fetchParams)) { return makeAppropriateNetworkError(fetchParams); @@ -13987,21 +13987,21 @@ var require_fetch = __commonJS({ } } }; - const request2 = fetchParams.request; + const request = fetchParams.request; let response = null; const timingInfo = fetchParams.timingInfo; const httpCache = null; if (httpCache == null) { - request2.cache = "no-store"; + request.cache = "no-store"; } const newConnection = forceNewConnection ? 
"yes" : "no"; - if (request2.mode === "websocket") { + if (request.mode === "websocket") { } else { } let requestBody = null; - if (request2.body == null && fetchParams.processRequestEndOfBody) { + if (request.body == null && fetchParams.processRequestEndOfBody) { queueMicrotask(() => fetchParams.processRequestEndOfBody()); - } else if (request2.body != null) { + } else if (request.body != null) { const processBodyChunk = async function* (bytes) { if (isCancelled(fetchParams)) { return; @@ -14029,7 +14029,7 @@ var require_fetch = __commonJS({ }; requestBody = (async function* () { try { - for await (const bytes of request2.body.stream) { + for await (const bytes of request.body.stream) { yield* processBodyChunk(bytes); } processEndOfBody(); @@ -14043,8 +14043,8 @@ var require_fetch = __commonJS({ if (socket) { response = makeResponse({ status, statusText, headersList, socket }); } else { - const iterator2 = body[Symbol.asyncIterator](); - fetchParams.controller.next = () => iterator2.next(); + const iterator = body[Symbol.asyncIterator](); + fetchParams.controller.next = () => iterator.next(); response = makeResponse({ status, statusText, headersList }); } } catch (err) { @@ -14139,17 +14139,17 @@ var require_fetch = __commonJS({ } return response; function dispatch({ body }) { - const url = requestCurrentURL(request2); + const url = requestCurrentURL(request); const agent = fetchParams.controller.dispatcher; return new Promise((resolve4, reject) => agent.dispatch( { path: url.pathname + url.search, origin: url.origin, - method: request2.method, - body: agent.isMockActive ? request2.body && (request2.body.source || request2.body.stream) : body, - headers: request2.headersList.entries, + method: request.method, + body: agent.isMockActive ? request.body && (request.body.source || request.body.stream) : body, + headers: request.headersList.entries, maxRedirections: 0, - upgrade: request2.mode === "websocket" ? 
"websocket" : void 0 + upgrade: request.mode === "websocket" ? "websocket" : void 0 }, { body: null, @@ -14180,8 +14180,8 @@ var require_fetch = __commonJS({ location = headersList.get("location", true); this.body = new Readable({ read: resume }); const decoders = []; - const willFollow = location && request2.redirect === "follow" && redirectStatusSet.has(status); - if (request2.method !== "HEAD" && request2.method !== "CONNECT" && !nullBodyStatus.includes(status) && !willFollow) { + const willFollow = location && request.redirect === "follow" && redirectStatusSet.has(status); + if (request.method !== "HEAD" && request.method !== "CONNECT" && !nullBodyStatus.includes(status) && !willFollow) { const contentEncoding = headersList.get("content-encoding", true); const codings = contentEncoding ? contentEncoding.toLowerCase().split(",") : []; const maxContentEncodings = 5; @@ -14276,7 +14276,7 @@ var require_fetch = __commonJS({ } } module2.exports = { - fetch, + fetch: fetch2, Fetch, fetching, finalizeAndReportTiming @@ -15164,31 +15164,31 @@ var require_cache = __commonJS({ webidl.util.markAsUncloneable(this); this.#relevantRequestResponseList = arguments[1]; } - async match(request2, options = {}) { + async match(request, options = {}) { webidl.brandCheck(this, _Cache); const prefix = "Cache.match"; webidl.argumentLengthCheck(arguments, 1, prefix); - request2 = webidl.converters.RequestInfo(request2, prefix, "request"); + request = webidl.converters.RequestInfo(request, prefix, "request"); options = webidl.converters.CacheQueryOptions(options, prefix, "options"); - const p = this.#internalMatchAll(request2, options, 1); + const p = this.#internalMatchAll(request, options, 1); if (p.length === 0) { return; } return p[0]; } - async matchAll(request2 = void 0, options = {}) { + async matchAll(request = void 0, options = {}) { webidl.brandCheck(this, _Cache); const prefix = "Cache.matchAll"; - if (request2 !== void 0) request2 = webidl.converters.RequestInfo(request2, 
prefix, "request"); + if (request !== void 0) request = webidl.converters.RequestInfo(request, prefix, "request"); options = webidl.converters.CacheQueryOptions(options, prefix, "options"); - return this.#internalMatchAll(request2, options); + return this.#internalMatchAll(request, options); } - async add(request2) { + async add(request) { webidl.brandCheck(this, _Cache); const prefix = "Cache.add"; webidl.argumentLengthCheck(arguments, 1, prefix); - request2 = webidl.converters.RequestInfo(request2, prefix, "request"); - const requests = [request2]; + request = webidl.converters.RequestInfo(request, prefix, "request"); + const requests = [request]; const responseArrayPromise = this.addAll(requests); return await responseArrayPromise; } @@ -15198,19 +15198,19 @@ var require_cache = __commonJS({ webidl.argumentLengthCheck(arguments, 1, prefix); const responsePromises = []; const requestList = []; - for (let request2 of requests) { - if (request2 === void 0) { + for (let request of requests) { + if (request === void 0) { throw webidl.errors.conversionFailed({ prefix, argument: "Argument 1", types: ["undefined is not allowed"] }); } - request2 = webidl.converters.RequestInfo(request2); - if (typeof request2 === "string") { + request = webidl.converters.RequestInfo(request); + if (typeof request === "string") { continue; } - const r = request2[kState]; + const r = request[kState]; if (!urlIsHttpHttpsScheme(r.url) || r.method !== "GET") { throw webidl.errors.exception({ header: prefix, @@ -15219,8 +15219,8 @@ var require_cache = __commonJS({ } } const fetchControllers = []; - for (const request2 of requests) { - const r = new Request(request2)[kState]; + for (const request of requests) { + const r = new Request(request)[kState]; if (!urlIsHttpHttpsScheme(r.url)) { throw webidl.errors.exception({ header: prefix, @@ -15297,17 +15297,17 @@ var require_cache = __commonJS({ }); return cacheJobPromise.promise; } - async put(request2, response) { + async put(request, response) 
{ webidl.brandCheck(this, _Cache); const prefix = "Cache.put"; webidl.argumentLengthCheck(arguments, 2, prefix); - request2 = webidl.converters.RequestInfo(request2, prefix, "request"); + request = webidl.converters.RequestInfo(request, prefix, "request"); response = webidl.converters.Response(response, prefix, "response"); let innerRequest = null; - if (request2 instanceof Request) { - innerRequest = request2[kState]; + if (request instanceof Request) { + innerRequest = request[kState]; } else { - innerRequest = new Request(request2)[kState]; + innerRequest = new Request(request)[kState]; } if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== "GET") { throw webidl.errors.exception({ @@ -15378,21 +15378,21 @@ var require_cache = __commonJS({ }); return cacheJobPromise.promise; } - async delete(request2, options = {}) { + async delete(request, options = {}) { webidl.brandCheck(this, _Cache); const prefix = "Cache.delete"; webidl.argumentLengthCheck(arguments, 1, prefix); - request2 = webidl.converters.RequestInfo(request2, prefix, "request"); + request = webidl.converters.RequestInfo(request, prefix, "request"); options = webidl.converters.CacheQueryOptions(options, prefix, "options"); let r = null; - if (request2 instanceof Request) { - r = request2[kState]; + if (request instanceof Request) { + r = request[kState]; if (r.method !== "GET" && !options.ignoreMethod) { return false; } } else { - assert(typeof request2 === "string"); - r = new Request(request2)[kState]; + assert(typeof request === "string"); + r = new Request(request)[kState]; } const operations = []; const operation = { @@ -15424,25 +15424,25 @@ var require_cache = __commonJS({ * @param {import('../../types/cache').CacheQueryOptions} options * @returns {Promise} */ - async keys(request2 = void 0, options = {}) { + async keys(request = void 0, options = {}) { webidl.brandCheck(this, _Cache); const prefix = "Cache.keys"; - if (request2 !== void 0) request2 = 
webidl.converters.RequestInfo(request2, prefix, "request"); + if (request !== void 0) request = webidl.converters.RequestInfo(request, prefix, "request"); options = webidl.converters.CacheQueryOptions(options, prefix, "options"); let r = null; - if (request2 !== void 0) { - if (request2 instanceof Request) { - r = request2[kState]; + if (request !== void 0) { + if (request instanceof Request) { + r = request[kState]; if (r.method !== "GET" && !options.ignoreMethod) { return []; } - } else if (typeof request2 === "string") { - r = new Request(request2)[kState]; + } else if (typeof request === "string") { + r = new Request(request)[kState]; } } const promise = createDeferredPromise(); const requests = []; - if (request2 === void 0) { + if (request === void 0) { for (const requestResponse of this.#relevantRequestResponseList) { requests.push(requestResponse[0]); } @@ -15454,9 +15454,9 @@ var require_cache = __commonJS({ } queueMicrotask(() => { const requestList = []; - for (const request3 of requests) { + for (const request2 of requests) { const requestObject = fromInnerRequest( - request3, + request2, new AbortController().signal, "immutable" ); @@ -15574,9 +15574,9 @@ var require_cache = __commonJS({ * @param {import('../../types/cache').CacheQueryOptions | undefined} options * @returns {boolean} */ - #requestMatchesCachedItem(requestQuery, request2, response = null, options) { + #requestMatchesCachedItem(requestQuery, request, response = null, options) { const queryURL = new URL(requestQuery.url); - const cachedURL = new URL(request2.url); + const cachedURL = new URL(request.url); if (options?.ignoreSearch) { cachedURL.search = ""; queryURL.search = ""; @@ -15592,7 +15592,7 @@ var require_cache = __commonJS({ if (fieldValue === "*") { return false; } - const requestValue = request2.headersList.get(fieldValue); + const requestValue = request.headersList.get(fieldValue); const queryValue = requestQuery.headersList.get(fieldValue); if (requestValue !== queryValue) { 
return false; @@ -15600,20 +15600,20 @@ var require_cache = __commonJS({ } return true; } - #internalMatchAll(request2, options, maxResponses = Infinity) { + #internalMatchAll(request, options, maxResponses = Infinity) { let r = null; - if (request2 !== void 0) { - if (request2 instanceof Request) { - r = request2[kState]; + if (request !== void 0) { + if (request instanceof Request) { + r = request[kState]; if (r.method !== "GET" && !options.ignoreMethod) { return []; } - } else if (typeof request2 === "string") { - r = new Request(request2)[kState]; + } else if (typeof request === "string") { + r = new Request(request)[kState]; } } const responses = []; - if (request2 === void 0) { + if (request === void 0) { for (const requestResponse of this.#relevantRequestResponseList) { responses.push(requestResponse[1]); } @@ -15702,21 +15702,21 @@ var require_cachestorage = __commonJS({ } webidl.util.markAsUncloneable(this); } - async match(request2, options = {}) { + async match(request, options = {}) { webidl.brandCheck(this, _CacheStorage); webidl.argumentLengthCheck(arguments, 1, "CacheStorage.match"); - request2 = webidl.converters.RequestInfo(request2); + request = webidl.converters.RequestInfo(request); options = webidl.converters.MultiCacheQueryOptions(options); if (options.cacheName != null) { if (this.#caches.has(options.cacheName)) { const cacheList = this.#caches.get(options.cacheName); const cache = new Cache(kConstruct, cacheList); - return await cache.match(request2, options); + return await cache.match(request, options); } } else { for (const cacheList of this.#caches.values()) { const cache = new Cache(kConstruct, cacheList); - const response = await cache.match(request2, options); + const response = await cache.match(request, options); if (response !== void 0) { return response; } @@ -16862,7 +16862,7 @@ var require_connection = __commonJS({ function establishWebSocketConnection(url, protocols, client, ws, onEstablish, options) { const requestURL = url; 
requestURL.protocol = url.protocol === "ws:" ? "http:" : "https:"; - const request2 = makeRequest({ + const request = makeRequest({ urlList: [requestURL], client, serviceWorkers: "none", @@ -16874,18 +16874,18 @@ var require_connection = __commonJS({ }); if (options.headers) { const headersList = getHeadersList(new Headers2(options.headers)); - request2.headersList = headersList; + request.headersList = headersList; } const keyValue = crypto4.randomBytes(16).toString("base64"); - request2.headersList.append("sec-websocket-key", keyValue); - request2.headersList.append("sec-websocket-version", "13"); + request.headersList.append("sec-websocket-key", keyValue); + request.headersList.append("sec-websocket-version", "13"); for (const protocol of protocols) { - request2.headersList.append("sec-websocket-protocol", protocol); + request.headersList.append("sec-websocket-protocol", protocol); } const permessageDeflate = "permessage-deflate; client_max_window_bits"; - request2.headersList.append("sec-websocket-extensions", permessageDeflate); + request.headersList.append("sec-websocket-extensions", permessageDeflate); const controller = fetching({ - request: request2, + request, useParallelQueue: true, dispatcher: options.dispatcher, processResponse(response) { @@ -16922,7 +16922,7 @@ var require_connection = __commonJS({ } const secProtocol = response.headersList.get("Sec-WebSocket-Protocol"); if (secProtocol !== null) { - const requestProtocols = getDecodeSplit("sec-websocket-protocol", request2.headersList); + const requestProtocols = getDecodeSplit("sec-websocket-protocol", request.headersList); if (!requestProtocols.includes(secProtocol)) { failWebsocketConnection(ws, "Protocol was not set in the opening handshake."); return; @@ -18450,7 +18450,7 @@ var require_undici = __commonJS({ var Pool = require_pool(); var BalancedPool = require_balanced_pool(); var Agent3 = require_agent(); - var ProxyAgent2 = require_proxy_agent(); + var ProxyAgent3 = require_proxy_agent(); 
var EnvHttpProxyAgent = require_env_http_proxy_agent(); var RetryAgent = require_retry_agent(); var errors = require_errors(); @@ -18473,7 +18473,7 @@ var require_undici = __commonJS({ module2.exports.Pool = Pool; module2.exports.BalancedPool = BalancedPool; module2.exports.Agent = Agent3; - module2.exports.ProxyAgent = ProxyAgent2; + module2.exports.ProxyAgent = ProxyAgent3; module2.exports.EnvHttpProxyAgent = EnvHttpProxyAgent; module2.exports.RetryAgent = RetryAgent; module2.exports.RetryHandler = RetryHandler; @@ -18493,9 +18493,9 @@ var require_undici = __commonJS({ headerNameToString: util2.headerNameToString }; function makeDispatcher(fn) { - return (url, opts, handler2) => { + return (url, opts, handler) => { if (typeof opts === "function") { - handler2 = opts; + handler = opts; opts = null; } if (!url || typeof url !== "string" && typeof url !== "object" && !(url instanceof URL)) { @@ -18528,13 +18528,13 @@ var require_undici = __commonJS({ origin: url.origin, path: url.search ? `${url.pathname}${url.search}` : url.pathname, method: opts.method || (opts.body ? "PUT" : "GET") - }, handler2); + }, handler); }; } module2.exports.setGlobalDispatcher = setGlobalDispatcher; module2.exports.getGlobalDispatcher = getGlobalDispatcher; var fetchImpl = require_fetch().fetch; - module2.exports.fetch = async function fetch(init, options = void 0) { + module2.exports.fetch = async function fetch2(init, options = void 0) { try { return await fetchImpl(init, options); } catch (err) { @@ -18720,8 +18720,8 @@ var require_semver = __commonJS({ } } var i; - exports2.parse = parse3; - function parse3(version2, options) { + exports2.parse = parse2; + function parse2(version2, options) { if (!options || typeof options !== "object") { options = { loose: !!options, @@ -18749,12 +18749,12 @@ var require_semver = __commonJS({ } exports2.valid = valid2; function valid2(version2, options) { - var v = parse3(version2, options); + var v = parse2(version2, options); return v ? 
v.version : null; } exports2.clean = clean2; function clean2(version2, options) { - var s = parse3(version2.trim().replace(/^[=v]+/, ""), options); + var s = parse2(version2.trim().replace(/^[=v]+/, ""), options); return s ? s.version : null; } exports2.SemVer = SemVer; @@ -18990,8 +18990,8 @@ var require_semver = __commonJS({ if (eq(version1, version2)) { return null; } else { - var v1 = parse3(version1); - var v2 = parse3(version2); + var v1 = parse2(version1); + var v2 = parse2(version2); var prefix = ""; if (v1.prerelease.length || v2.prerelease.length) { prefix = "pre"; @@ -19697,7 +19697,7 @@ var require_semver = __commonJS({ } exports2.prerelease = prerelease; function prerelease(version2, options) { - var parsed = parse3(version2, options); + var parsed = parse2(version2, options); return parsed && parsed.prerelease.length ? parsed.prerelease : null; } exports2.intersects = intersects; @@ -19734,7 +19734,7 @@ var require_semver = __commonJS({ if (match === null) { return null; } - return parse3(match[2] + "." + (match[3] || "0") + "." + (match[4] || "0"), options); + return parse2(match[2] + "." + (match[3] || "0") + "." 
+ (match[4] || "0"), options); } } }); @@ -20197,7 +20197,7 @@ var require_parse2 = __commonJS({ "node_modules/@actions/tool-cache/node_modules/semver/functions/parse.js"(exports2, module2) { "use strict"; var SemVer = require_semver2(); - var parse3 = (version2, options, throwErrors = false) => { + var parse2 = (version2, options, throwErrors = false) => { if (version2 instanceof SemVer) { return version2; } @@ -20210,7 +20210,7 @@ var require_parse2 = __commonJS({ throw er; } }; - module2.exports = parse3; + module2.exports = parse2; } }); @@ -20218,9 +20218,9 @@ var require_parse2 = __commonJS({ var require_valid = __commonJS({ "node_modules/@actions/tool-cache/node_modules/semver/functions/valid.js"(exports2, module2) { "use strict"; - var parse3 = require_parse2(); + var parse2 = require_parse2(); var valid2 = (version2, options) => { - const v = parse3(version2, options); + const v = parse2(version2, options); return v ? v.version : null; }; module2.exports = valid2; @@ -20231,9 +20231,9 @@ var require_valid = __commonJS({ var require_clean = __commonJS({ "node_modules/@actions/tool-cache/node_modules/semver/functions/clean.js"(exports2, module2) { "use strict"; - var parse3 = require_parse2(); + var parse2 = require_parse2(); var clean2 = (version2, options) => { - const s = parse3(version2.trim().replace(/^[=v]+/, ""), options); + const s = parse2(version2.trim().replace(/^[=v]+/, ""), options); return s ? 
s.version : null; }; module2.exports = clean2; @@ -20268,10 +20268,10 @@ var require_inc = __commonJS({ var require_diff = __commonJS({ "node_modules/@actions/tool-cache/node_modules/semver/functions/diff.js"(exports2, module2) { "use strict"; - var parse3 = require_parse2(); + var parse2 = require_parse2(); var diff = (version1, version2) => { - const v1 = parse3(version1, null, true); - const v2 = parse3(version2, null, true); + const v1 = parse2(version1, null, true); + const v2 = parse2(version2, null, true); const comparison = v1.compare(v2); if (comparison === 0) { return null; @@ -20342,9 +20342,9 @@ var require_patch = __commonJS({ var require_prerelease = __commonJS({ "node_modules/@actions/tool-cache/node_modules/semver/functions/prerelease.js"(exports2, module2) { "use strict"; - var parse3 = require_parse2(); + var parse2 = require_parse2(); var prerelease = (version2, options) => { - const parsed = parse3(version2, options); + const parsed = parse2(version2, options); return parsed && parsed.prerelease.length ? parsed.prerelease : null; }; module2.exports = prerelease; @@ -20530,7 +20530,7 @@ var require_coerce = __commonJS({ "node_modules/@actions/tool-cache/node_modules/semver/functions/coerce.js"(exports2, module2) { "use strict"; var SemVer = require_semver2(); - var parse3 = require_parse2(); + var parse2 = require_parse2(); var { safeRe: re, t } = require_re(); var coerce = (version2, options) => { if (version2 instanceof SemVer) { @@ -20565,7 +20565,7 @@ var require_coerce = __commonJS({ const patch = match[4] || "0"; const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : ""; const build = options.includePrerelease && match[6] ? 
`+${match[6]}` : ""; - return parse3(`${major}.${minor}.${patch}${prerelease}${build}`, options); + return parse2(`${major}.${minor}.${patch}${prerelease}${build}`, options); }; module2.exports = coerce; } @@ -21582,7 +21582,7 @@ var require_semver3 = __commonJS({ var constants3 = require_constants6(); var SemVer = require_semver2(); var identifiers = require_identifiers(); - var parse3 = require_parse2(); + var parse2 = require_parse2(); var valid2 = require_valid(); var clean2 = require_clean(); var inc = require_inc(); @@ -21620,7 +21620,7 @@ var require_semver3 = __commonJS({ var simplifyRange = require_simplify(); var subset = require_subset(); module2.exports = { - parse: parse3, + parse: parse2, valid: valid2, clean: clean2, inc, @@ -21669,102 +21669,6 @@ var require_semver3 = __commonJS({ } }); -// node_modules/fast-content-type-parse/index.js -var require_fast_content_type_parse = __commonJS({ - "node_modules/fast-content-type-parse/index.js"(exports2, module2) { - "use strict"; - var NullObject = function NullObject2() { - }; - NullObject.prototype = /* @__PURE__ */ Object.create(null); - var paramRE = /; *([!#$%&'*+.^\w`|~-]+)=("(?:[\v\u0020\u0021\u0023-\u005b\u005d-\u007e\u0080-\u00ff]|\\[\v\u0020-\u00ff])*"|[!#$%&'*+.^\w`|~-]+) */gu; - var quotedPairRE = /\\([\v\u0020-\u00ff])/gu; - var mediaTypeRE = /^[!#$%&'*+.^\w|~-]+\/[!#$%&'*+.^\w|~-]+$/u; - var defaultContentType = { type: "", parameters: new NullObject() }; - Object.freeze(defaultContentType.parameters); - Object.freeze(defaultContentType); - function parse3(header) { - if (typeof header !== "string") { - throw new TypeError("argument header is required and must be a string"); - } - let index = header.indexOf(";"); - const type = index !== -1 ? 
header.slice(0, index).trim() : header.trim(); - if (mediaTypeRE.test(type) === false) { - throw new TypeError("invalid media type"); - } - const result = { - type: type.toLowerCase(), - parameters: new NullObject() - }; - if (index === -1) { - return result; - } - let key; - let match; - let value; - paramRE.lastIndex = index; - while (match = paramRE.exec(header)) { - if (match.index !== index) { - throw new TypeError("invalid parameter format"); - } - index += match[0].length; - key = match[1].toLowerCase(); - value = match[2]; - if (value[0] === '"') { - value = value.slice(1, value.length - 1); - quotedPairRE.test(value) && (value = value.replace(quotedPairRE, "$1")); - } - result.parameters[key] = value; - } - if (index !== header.length) { - throw new TypeError("invalid parameter format"); - } - return result; - } - function safeParse2(header) { - if (typeof header !== "string") { - return defaultContentType; - } - let index = header.indexOf(";"); - const type = index !== -1 ? header.slice(0, index).trim() : header.trim(); - if (mediaTypeRE.test(type) === false) { - return defaultContentType; - } - const result = { - type: type.toLowerCase(), - parameters: new NullObject() - }; - if (index === -1) { - return result; - } - let key; - let match; - let value; - paramRE.lastIndex = index; - while (match = paramRE.exec(header)) { - if (match.index !== index) { - return defaultContentType; - } - index += match[0].length; - key = match[1].toLowerCase(); - value = match[2]; - if (value[0] === '"') { - value = value.slice(1, value.length - 1); - quotedPairRE.test(value) && (value = value.replace(quotedPairRE, "$1")); - } - result.parameters[key] = value; - } - if (index !== header.length) { - return defaultContentType; - } - return result; - } - module2.exports.default = { parse: parse3, safeParse: safeParse2 }; - module2.exports.parse = parse3; - module2.exports.safeParse = safeParse2; - module2.exports.defaultContentType = defaultContentType; - } -}); - // 
node_modules/@renovatebot/pep440/lib/version.js var require_version = __commonJS({ "node_modules/@renovatebot/pep440/lib/version.js"(exports2, module2) { @@ -21833,7 +21737,7 @@ var require_version = __commonJS({ valid: valid2, clean: clean2, explain, - parse: parse3, + parse: parse2, stringify: stringify2 }; var validRegex = new RegExp("^" + VERSION_PATTERN + "$", "i"); @@ -21842,9 +21746,9 @@ var require_version = __commonJS({ } var cleanRegex = new RegExp("^\\s*" + VERSION_PATTERN + "\\s*$", "i"); function clean2(version2) { - return stringify2(parse3(version2, cleanRegex)); + return stringify2(parse2(version2, cleanRegex)); } - function parse3(version2, regex) { + function parse2(version2, regex) { const { groups } = (regex || validRegex).exec(version2) || {}; if (!groups) { return null; @@ -21920,7 +21824,7 @@ var require_version = __commonJS({ return null; } function explain(version2) { - const parsed = parse3(version2); + const parsed = parse2(version2); if (!parsed) { return parsed; } @@ -21953,7 +21857,7 @@ var require_version = __commonJS({ // node_modules/@renovatebot/pep440/lib/operator.js var require_operator = __commonJS({ "node_modules/@renovatebot/pep440/lib/operator.js"(exports2, module2) { - var { parse: parse3 } = require_version(); + var { parse: parse2 } = require_version(); module2.exports = { compare, rcompare, @@ -21993,8 +21897,8 @@ var require_operator = __commonJS({ return version2.toLowerCase() === other.toLowerCase(); } function compare(version2, other) { - const parsedVersion = parse3(version2); - const parsedOther = parse3(other); + const parsedVersion = parse2(version2); + const parsedOther = parse2(other); const keyVersion = calculateKey(parsedVersion); const keyOther = calculateKey(parsedOther); return pyCompare(keyVersion, keyOther); @@ -22076,7 +21980,7 @@ var require_specifier = __commonJS({ ].join(""); module2.exports = { RANGE_PATTERN, - parse: parse3, + parse: parse2, satisfies: satisfies3, filter, validRange, @@ -22085,7 
+21989,7 @@ var require_specifier = __commonJS({ }; var isEqualityOperator = (op) => ["==", "!=", "==="].includes(op); var rangeRegex = new RegExp("^" + RANGE_PATTERN + "$", "i"); - function parse3(ranges) { + function parse2(ranges) { if (!ranges.trim()) { return []; } @@ -22137,7 +22041,7 @@ var require_specifier = __commonJS({ return found.length === 0 ? null : found[0]; } function pick(versions, specifier, options) { - const parsed = parse3(specifier); + const parsed = parse2(specifier); if (!parsed) { return []; } @@ -22206,7 +22110,7 @@ var require_specifier = __commonJS({ return op(version2, spec.version || spec.legacy); } function validRange(specifier) { - return Boolean(parse3(specifier)); + return Boolean(parse2(specifier)); } } }); @@ -22214,7 +22118,7 @@ var require_specifier = __commonJS({ // node_modules/@renovatebot/pep440/lib/semantic.js var require_semantic = __commonJS({ "node_modules/@renovatebot/pep440/lib/semantic.js"(exports2, module2) { - var { explain, parse: parse3, stringify: stringify2 } = require_version(); + var { explain, parse: parse2, stringify: stringify2 } = require_version(); module2.exports = { major, minor, @@ -22250,7 +22154,7 @@ var require_semantic = __commonJS({ } function inc(input, release, preReleaseIdentifier) { let identifier = preReleaseIdentifier || `a`; - const version2 = parse3(input); + const version2 = parse2(input); if (!version2) { return null; } @@ -22366,7 +22270,7 @@ var require_semantic = __commonJS({ // node_modules/@renovatebot/pep440/index.js var require_pep440 = __commonJS({ "node_modules/@renovatebot/pep440/index.js"(exports2, module2) { - var { valid: valid2, clean: clean2, explain, parse: parse3 } = require_version(); + var { valid: valid2, clean: clean2, explain, parse: parse2 } = require_version(); var { lt: lt2, le, eq, ne, ge, gt: gt2, compare, rcompare } = require_operator(); var { filter, @@ -22382,7 +22286,7 @@ var require_pep440 = __commonJS({ valid: valid2, clean: clean2, explain, - parse: 
parse3, + parse: parse2, // operator lt: lt2, le, @@ -22718,7 +22622,7 @@ var HttpClientResponse = class { } }; var HttpClient = class { - constructor(userAgent3, handlers, requestOptions) { + constructor(userAgent2, handlers, requestOptions) { this._ignoreSslError = false; this._allowRedirects = true; this._allowRedirectDowngrade = false; @@ -22727,7 +22631,7 @@ var HttpClient = class { this._maxRetries = 1; this._keepAlive = false; this._disposed = false; - this.userAgent = this._getUserAgentWithOrchestrationId(userAgent3); + this.userAgent = this._getUserAgentWithOrchestrationId(userAgent2); this.handlers = handlers || []; this.requestOptions = requestOptions; if (requestOptions) { @@ -22852,9 +22756,9 @@ var HttpClient = class { response = yield this.requestRaw(info2, data); if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) { let authenticationHandler; - for (const handler2 of this.handlers) { - if (handler2.canHandleAuthentication(response)) { - authenticationHandler = handler2; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; break; } } @@ -23012,8 +22916,8 @@ var HttpClient = class { } info2.options.agent = this._getAgent(info2.parsedUrl); if (this.handlers) { - for (const handler2 of this.handlers) { - handler2.prepareRequest(info2.options); + for (const handler of this.handlers) { + handler.prepareRequest(info2.options); } } return info2; @@ -23152,8 +23056,8 @@ var HttpClient = class { } return proxyAgent; } - _getUserAgentWithOrchestrationId(userAgent3) { - const baseUserAgent = userAgent3 || "actions/http-client"; + _getUserAgentWithOrchestrationId(userAgent2) { + const baseUserAgent = userAgent2 || "actions/http-client"; const orchId = process.env["ACTIONS_ORCHESTRATION_ID"]; if (orchId) { const sanitizedId = orchId.replace(/[^a-z0-9_.-]/gi, "_"); @@ -24529,7 +24433,7 @@ var HTTPError = class extends Error { var IS_WINDOWS3 = 
process.platform === "win32"; var IS_MAC = process.platform === "darwin"; var userAgent = "actions/tool-cache"; -function downloadTool(url, dest, auth2, headers) { +function downloadTool(url, dest, auth, headers) { return __awaiter8(this, void 0, void 0, function* () { dest = dest || path5.join(_getTempDirectory(), crypto2.randomUUID()); yield mkdirP(path5.dirname(dest)); @@ -24540,7 +24444,7 @@ function downloadTool(url, dest, auth2, headers) { const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); const retryHelper = new RetryHelper(maxAttempts, minSeconds, maxSeconds); return yield retryHelper.execute(() => __awaiter8(this, void 0, void 0, function* () { - return yield downloadToolAttempt(url, dest || "", auth2, headers); + return yield downloadToolAttempt(url, dest || "", auth, headers); }), (err) => { if (err instanceof HTTPError && err.httpStatusCode) { if (err.httpStatusCode < 500 && err.httpStatusCode !== 408 && err.httpStatusCode !== 429) { @@ -24551,7 +24455,7 @@ function downloadTool(url, dest, auth2, headers) { }); }); } -function downloadToolAttempt(url, dest, auth2, headers) { +function downloadToolAttempt(url, dest, auth, headers) { return __awaiter8(this, void 0, void 0, function* () { if (fs3.existsSync(dest)) { throw new Error(`Destination file path ${dest} already exists`); @@ -24559,12 +24463,12 @@ function downloadToolAttempt(url, dest, auth2, headers) { const http2 = new HttpClient(userAgent, [], { allowRetries: false }); - if (auth2) { + if (auth) { debug("set auth"); if (headers === void 0) { headers = {}; } - headers.authorization = auth2; + headers.authorization = auth; } const response = yield http2.get(url, headers); if (response.message.statusCode !== 200) { @@ -24841,3313 +24745,15 @@ function _getGlobal(key, defaultValue) { return value !== void 0 ? 
value : defaultValue; } -// node_modules/universal-user-agent/index.js -function getUserAgent() { - if (typeof navigator === "object" && "userAgent" in navigator) { - return navigator.userAgent; - } - if (typeof process === "object" && process.version !== void 0) { - return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; - } - return ""; -} - -// node_modules/before-after-hook/lib/register.js -function register(state, name, method, options) { - if (typeof method !== "function") { - throw new Error("method for before hook must be a function"); - } - if (!options) { - options = {}; - } - if (Array.isArray(name)) { - return name.reverse().reduce((callback, name2) => { - return register.bind(null, state, name2, callback, options); - }, method)(); - } - return Promise.resolve().then(() => { - if (!state.registry[name]) { - return method(options); - } - return state.registry[name].reduce((method2, registered) => { - return registered.hook.bind(null, method2, options); - }, method)(); - }); -} - -// node_modules/before-after-hook/lib/add.js -function addHook(state, kind, name, hook2) { - const orig = hook2; - if (!state.registry[name]) { - state.registry[name] = []; - } - if (kind === "before") { - hook2 = (method, options) => { - return Promise.resolve().then(orig.bind(null, options)).then(method.bind(null, options)); - }; - } - if (kind === "after") { - hook2 = (method, options) => { - let result; - return Promise.resolve().then(method.bind(null, options)).then((result_) => { - result = result_; - return orig(result, options); - }).then(() => { - return result; - }); - }; - } - if (kind === "error") { - hook2 = (method, options) => { - return Promise.resolve().then(method.bind(null, options)).catch((error2) => { - return orig(error2, options); - }); - }; - } - state.registry[name].push({ - hook: hook2, - orig - }); -} - -// node_modules/before-after-hook/lib/remove.js -function removeHook(state, name, method) { - if (!state.registry[name]) 
{ - return; - } - const index = state.registry[name].map((registered) => { - return registered.orig; - }).indexOf(method); - if (index === -1) { - return; - } - state.registry[name].splice(index, 1); -} - -// node_modules/before-after-hook/index.js -var bind = Function.bind; -var bindable = bind.bind(bind); -function bindApi(hook2, state, name) { - const removeHookRef = bindable(removeHook, null).apply( - null, - name ? [state, name] : [state] - ); - hook2.api = { remove: removeHookRef }; - hook2.remove = removeHookRef; - ["before", "error", "after", "wrap"].forEach((kind) => { - const args2 = name ? [state, kind, name] : [state, kind]; - hook2[kind] = hook2.api[kind] = bindable(addHook, null).apply(null, args2); - }); -} -function Singular() { - const singularHookName = /* @__PURE__ */ Symbol("Singular"); - const singularHookState = { - registry: {} - }; - const singularHook = register.bind(null, singularHookState, singularHookName); - bindApi(singularHook, singularHookState, singularHookName); - return singularHook; -} -function Collection() { - const state = { - registry: {} - }; - const hook2 = register.bind(null, state); - bindApi(hook2, state); - return hook2; -} -var before_after_hook_default = { Singular, Collection }; - -// node_modules/@octokit/endpoint/dist-bundle/index.js -var VERSION = "0.0.0-development"; -var userAgent2 = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`; -var DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent2 - }, - mediaType: { - format: "" - } -}; -function lowercaseKeys2(object) { - if (!object) { - return {}; - } - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} -function isPlainObject(value) { - if (typeof value !== "object" || value === null) return false; - if (Object.prototype.toString.call(value) !== "[object Object]") return false; - const proto = 
Object.getPrototypeOf(value); - if (proto === null) return true; - const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; - return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); -} -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach((key) => { - if (isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { [key]: options[key] }); - else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { [key]: options[key] }); - } - }); - return result; -} -function removeUndefinedProperties(obj) { - for (const key in obj) { - if (obj[key] === void 0) { - delete obj[key]; - } - } - return obj; -} -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? { method, url } : { url: method }, options); - } else { - options = Object.assign({}, route); - } - options.headers = lowercaseKeys2(options.headers); - removeUndefinedProperties(options); - removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); - if (options.url === "/graphql") { - if (defaults && defaults.mediaType.previews?.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( - (preview) => !mergedOptions.mediaType.previews.includes(preview) - ).concat(mergedOptions.mediaType.previews); - } - mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); - } - return mergedOptions; -} -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? 
"&" : "?"; - const names = Object.keys(parameters); - if (names.length === 0) { - return url; - } - return url + separator + names.map((name) => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); - } - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} -var urlVariableRegex = /\{[^{}}]+\}/g; -function removeNonChars(variableName) { - return variableName.replace(/(?:^\W+)|(?:(? a.concat(b), []); -} -function omit(object, keysToOmit) { - const result = { __proto__: null }; - for (const key of Object.keys(object)) { - if (keysToOmit.indexOf(key) === -1) { - result[key] = object[key]; - } - } - return result; -} -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } - return part; - }).join(""); -} -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} -function isDefined(value) { - return value !== void 0 && value !== null; -} -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} -function getValues(context, operator, key, modifier) { - var value = context[key], result = []; - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - result.push( - encodeValue(operator, value, isKeyOperator(operator) ? 
key : "") - ); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function(value2) { - result.push( - encodeValue(operator, value2, isKeyOperator(operator) ? key : "") - ); - }); - } else { - Object.keys(value).forEach(function(k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function(value2) { - tmp.push(encodeValue(operator, value2)); - }); - } else { - Object.keys(value).forEach(function(k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } - } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); - } - } - return result; -} -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; -} -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - template = template.replace( - /\{([^\{\}]+)\}|([^\{\}]+)/g, - function(_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - expression.split(/,/g).forEach(function(variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - if (operator && operator !== "+") { - var separator = ","; - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - 
separator = operator; - } - return (values.length !== 0 ? operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); - } - } - ); - if (template === "/") { - return template; - } else { - return template.replace(/\/$/, ""); - } -} -function parse(options) { - let method = options.method.toUpperCase(); - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, [ - "method", - "baseUrl", - "url", - "headers", - "request", - "mediaType" - ]); - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - if (!isBinaryRequest) { - if (options.mediaType.format) { - headers.accept = headers.accept.split(/,/).map( - (format) => format.replace( - /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, - `application/vnd$1$2.${options.mediaType.format}` - ) - ).join(","); - } - if (url.endsWith("/graphql")) { - if (options.mediaType.previews?.length) { - const previewsFromAcceptHeader = headers.accept.match(/(? { - const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } - } - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } - } - } - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } - return Object.assign( - { method, url, headers }, - typeof body !== "undefined" ? { body } : null, - options.request ? { request: options.request } : null - ); -} -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS2 = merge(oldDefaults, newDefaults); - const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); - return Object.assign(endpoint2, { - DEFAULTS: DEFAULTS2, - defaults: withDefaults.bind(null, DEFAULTS2), - merge: merge.bind(null, DEFAULTS2), - parse - }); -} -var endpoint = withDefaults(null, DEFAULTS); - -// node_modules/@octokit/request/dist-bundle/index.js -var import_fast_content_type_parse = __toESM(require_fast_content_type_parse(), 1); - -// node_modules/@octokit/request-error/dist-src/index.js -var RequestError = class extends Error { - name; - /** - * http status code - */ - status; - /** - * Request options that lead to the error. 
- */ - request; - /** - * Response object if a response was received - */ - response; - constructor(message, statusCode, options) { - super(message); - this.name = "HttpError"; - this.status = Number.parseInt(statusCode); - if (Number.isNaN(this.status)) { - this.status = 0; - } - if ("response" in options) { - this.response = options.response; - } - const requestCopy = Object.assign({}, options.request); - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace( - /(? [ - name, - String(value) - ]) - ); - let fetchResponse; - try { - fetchResponse = await fetch(requestOptions.url, { - method: requestOptions.method, - body, - redirect: requestOptions.request?.redirect, - headers: requestHeaders, - signal: requestOptions.request?.signal, - // duplex must be set if request.body is ReadableStream or Async Iterables. - // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. 
- ...requestOptions.body && { duplex: "half" } - }); - } catch (error2) { - let message = "Unknown Error"; - if (error2 instanceof Error) { - if (error2.name === "AbortError") { - error2.status = 500; - throw error2; - } - message = error2.message; - if (error2.name === "TypeError" && "cause" in error2) { - if (error2.cause instanceof Error) { - message = error2.cause.message; - } else if (typeof error2.cause === "string") { - message = error2.cause; - } - } - } - const requestError = new RequestError(message, 500, { - request: requestOptions - }); - requestError.cause = error2; - throw requestError; - } - const status = fetchResponse.status; - const url = fetchResponse.url; - const responseHeaders = {}; - for (const [key, value] of fetchResponse.headers) { - responseHeaders[key] = value; - } - const octokitResponse = { - url, - status, - headers: responseHeaders, - data: "" - }; - if ("deprecation" in responseHeaders) { - const matches = responseHeaders.link && responseHeaders.link.match(/<([^<>]+)>; rel="deprecation"/); - const deprecationLink = matches && matches.pop(); - log.warn( - `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${responseHeaders.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : ""}` - ); - } - if (status === 204 || status === 205) { - return octokitResponse; - } - if (requestOptions.method === "HEAD") { - if (status < 400) { - return octokitResponse; - } - throw new RequestError(fetchResponse.statusText, status, { - response: octokitResponse, - request: requestOptions - }); - } - if (status === 304) { - octokitResponse.data = await getResponseData(fetchResponse); - throw new RequestError("Not modified", status, { - response: octokitResponse, - request: requestOptions - }); - } - if (status >= 400) { - octokitResponse.data = await getResponseData(fetchResponse); - throw new RequestError(toErrorMessage(octokitResponse.data), status, { - response: octokitResponse, - request: requestOptions - }); - } - octokitResponse.data = parseSuccessResponseBody ? await getResponseData(fetchResponse) : fetchResponse.body; - return octokitResponse; -} -async function getResponseData(response) { - const contentType = response.headers.get("content-type"); - if (!contentType) { - return response.text().catch(() => ""); - } - const mimetype = (0, import_fast_content_type_parse.safeParse)(contentType); - if (isJSONResponse(mimetype)) { - let text = ""; - try { - text = await response.text(); - return JSON.parse(text); - } catch (err) { - return text; - } - } else if (mimetype.type.startsWith("text/") || mimetype.parameters.charset?.toLowerCase() === "utf-8") { - return response.text().catch(() => ""); - } else { - return response.arrayBuffer().catch(() => new ArrayBuffer(0)); - } -} -function isJSONResponse(mimetype) { - return mimetype.type === "application/json" || mimetype.type === "application/scim+json"; -} -function toErrorMessage(data) { - if (typeof data === "string") { - return data; - } - if (data instanceof ArrayBuffer) { - return "Unknown error"; - } - if ("message" in data) { - const suffix = "documentation_url" in data ? ` - ${data.documentation_url}` : ""; - return Array.isArray(data.errors) ? 
`${data.message}: ${data.errors.map((v) => JSON.stringify(v)).join(", ")}${suffix}` : `${data.message}${suffix}`; - } - return `Unknown error: ${JSON.stringify(data)}`; -} -function withDefaults2(oldEndpoint, newDefaults) { - const endpoint2 = oldEndpoint.defaults(newDefaults); - const newApi = function(route, parameters) { - const endpointOptions = endpoint2.merge(route, parameters); - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint2.parse(endpointOptions)); - } - const request2 = (route2, parameters2) => { - return fetchWrapper( - endpoint2.parse(endpoint2.merge(route2, parameters2)) - ); - }; - Object.assign(request2, { - endpoint: endpoint2, - defaults: withDefaults2.bind(null, endpoint2) - }); - return endpointOptions.request.hook(request2, endpointOptions); - }; - return Object.assign(newApi, { - endpoint: endpoint2, - defaults: withDefaults2.bind(null, endpoint2) - }); -} -var request = withDefaults2(endpoint, defaults_default); - -// node_modules/@octokit/graphql/dist-bundle/index.js -var VERSION3 = "0.0.0-development"; -function _buildMessageForResponseErrors(data) { - return `Request failed due to following response errors: -` + data.errors.map((e) => ` - ${e.message}`).join("\n"); -} -var GraphqlResponseError = class extends Error { - constructor(request2, headers, response) { - super(_buildMessageForResponseErrors(response)); - this.request = request2; - this.headers = headers; - this.response = response; - this.errors = response.errors; - this.data = response.data; - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - } - name = "GraphqlResponseError"; - errors; - data; -}; -var NON_VARIABLE_OPTIONS = [ - "method", - "baseUrl", - "url", - "headers", - "request", - "query", - "mediaType", - "operationName" -]; -var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; -var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; -function graphql(request2, query, options) { - if 
(options) { - if (typeof query === "string" && "query" in options) { - return Promise.reject( - new Error(`[@octokit/graphql] "query" cannot be used as variable name`) - ); - } - for (const key in options) { - if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; - return Promise.reject( - new Error( - `[@octokit/graphql] "${key}" cannot be used as variable name` - ) - ); - } - } - const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; - const requestOptions = Object.keys( - parsedOptions - ).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = parsedOptions[key]; - return result; - } - if (!result.variables) { - result.variables = {}; - } - result.variables[key] = parsedOptions[key]; - return result; - }, {}); - const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; - if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { - requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); - } - return request2(requestOptions).then((response) => { - if (response.data.errors) { - const headers = {}; - for (const key of Object.keys(response.headers)) { - headers[key] = response.headers[key]; - } - throw new GraphqlResponseError( - requestOptions, - headers, - response.data - ); - } - return response.data.data; - }); -} -function withDefaults3(request2, newDefaults) { - const newRequest = request2.defaults(newDefaults); - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; - return Object.assign(newApi, { - defaults: withDefaults3.bind(null, newRequest), - endpoint: newRequest.endpoint - }); -} -var graphql2 = withDefaults3(request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION3} ${getUserAgent()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults3(customRequest, { - method: "POST", - url: "/graphql" - }); -} - -// 
node_modules/@octokit/auth-token/dist-bundle/index.js -var b64url = "(?:[a-zA-Z0-9_-]+)"; -var sep2 = "\\."; -var jwtRE = new RegExp(`^${b64url}${sep2}${b64url}${sep2}${b64url}$`); -var isJWT = jwtRE.test.bind(jwtRE); -async function auth(token) { - const isApp = isJWT(token); - const isInstallation = token.startsWith("v1.") || token.startsWith("ghs_"); - const isUserToServer = token.startsWith("ghu_"); - const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; - return { - type: "token", - token, - tokenType - }; -} -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - return `token ${token}`; -} -async function hook(token, request2, route, parameters) { - const endpoint2 = request2.endpoint.merge( - route, - parameters - ); - endpoint2.headers.authorization = withAuthorizationPrefix(token); - return request2(endpoint2); -} -var createTokenAuth = function createTokenAuth2(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); - } - if (typeof token !== "string") { - throw new Error( - "[@octokit/auth-token] Token passed to createTokenAuth is not a string" - ); - } - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; - -// node_modules/@octokit/core/dist-src/version.js -var VERSION4 = "7.0.3"; - -// node_modules/@octokit/core/dist-src/index.js -var noop = () => { -}; -var consoleWarn = console.warn.bind(console); -var consoleError = console.error.bind(console); -function createLogger(logger = {}) { - if (typeof logger.debug !== "function") { - logger.debug = noop; - } - if (typeof logger.info !== "function") { - logger.info = noop; - } - if (typeof logger.warn !== "function") { - logger.warn = consoleWarn; - } - if (typeof logger.error !== "function") { - logger.error = consoleError; - } - return logger; -} -var 
userAgentTrail = `octokit-core.js/${VERSION4} ${getUserAgent()}`; -var Octokit = class { - static VERSION = VERSION4; - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args2) { - const options = args2[0] || {}; - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - super( - Object.assign( - {}, - defaults, - options, - options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null - ) - ); - } - }; - return OctokitWithDefaults; - } - static plugins = []; - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) - */ - static plugin(...newPlugins) { - const currentPlugins = this.plugins; - const NewOctokit = class extends this { - static plugins = currentPlugins.concat( - newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) - ); - }; - return NewOctokit; - } - constructor(options = {}) { - const hook2 = new before_after_hook_default.Collection(); - const requestDefaults = { - baseUrl: request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - // @ts-ignore internal usage only, no need to type - hook: hook2.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; - requestDefaults.headers["user-agent"] = options.userAgent ? 
`${options.userAgent} ${userAgentTrail}` : userAgentTrail; - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; - } - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; - } - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; - } - this.request = request.defaults(requestDefaults); - this.graphql = withCustomRequest(this.request).defaults(requestDefaults); - this.log = createLogger(options.log); - this.hook = hook2; - if (!options.authStrategy) { - if (!options.auth) { - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - const auth2 = createTokenAuth(options.auth); - hook2.wrap("request", auth2.hook); - this.auth = auth2; - } - } else { - const { authStrategy, ...otherOptions } = options; - const auth2 = authStrategy( - Object.assign( - { - request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. 
- octokit: this, - octokitOptions: otherOptions - }, - options.auth - ) - ); - hook2.wrap("request", auth2.hook); - this.auth = auth2; - } - const classConstructor = this.constructor; - for (let i = 0; i < classConstructor.plugins.length; ++i) { - Object.assign(this, classConstructor.plugins[i](this, options)); - } - } - // assigned during constructor - request; - graphql; - log; - hook; - // TODO: type `octokit.auth` based on passed options.authStrategy - auth; -}; - -// node_modules/@octokit/plugin-paginate-rest/dist-bundle/index.js -var VERSION5 = "0.0.0-development"; -function normalizePaginatedListResponse(response) { - if (!response.data) { - return { - ...response, - data: [] - }; - } - const responseNeedsNormalization = ("total_count" in response.data || "total_commits" in response.data) && !("url" in response.data); - if (!responseNeedsNormalization) return response; - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - const totalCommits = response.data.total_commits; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - delete response.data.total_commits; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - response.data.total_count = totalCount; - response.data.total_commits = totalCommits; - return response; -} -function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? 
route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - async next() { - if (!url) return { done: true }; - try { - const response = await requestMethod({ method, url, headers }); - const normalizedResponse = normalizePaginatedListResponse(response); - url = ((normalizedResponse.headers.link || "").match( - /<([^<>]+)>;\s*rel="next"/ - ) || [])[1]; - if (!url && "total_commits" in normalizedResponse.data) { - const parsedUrl = new URL(normalizedResponse.url); - const params = parsedUrl.searchParams; - const page = parseInt(params.get("page") || "1", 10); - const per_page = parseInt(params.get("per_page") || "250", 10); - if (page * per_page < normalizedResponse.data.total_commits) { - params.set("page", String(page + 1)); - url = parsedUrl.toString(); - } - } - return { value: normalizedResponse }; - } catch (error2) { - if (error2.status !== 409) throw error2; - url = ""; - return { - value: { - status: 200, - headers: {}, - data: [] - } - }; - } - } - }) - }; -} -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = void 0; - } - return gather( - octokit, - [], - iterator(octokit, route, parameters)[Symbol.asyncIterator](), - mapFn - ); -} -function gather(octokit, results, iterator2, mapFn) { - return iterator2.next().then((result) => { - if (result.done) { - return results; - } - let earlyExit = false; - function done() { - earlyExit = true; - } - results = results.concat( - mapFn ? 
mapFn(result.value, done) : result.value.data - ); - if (earlyExit) { - return results; - } - return gather(octokit, results, iterator2, mapFn); - }); -} -var composePaginateRest = Object.assign(paginate, { - iterator -}); -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; -} -paginateRest.VERSION = VERSION5; - -// node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js -var VERSION6 = "16.0.0"; - -// node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js -var Endpoints = { - actions: { - addCustomLabelsToSelfHostedRunnerForOrg: [ - "POST /orgs/{org}/actions/runners/{runner_id}/labels" - ], - addCustomLabelsToSelfHostedRunnerForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - addRepoAccessToSelfHostedRunnerGroupInOrg: [ - "PUT /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories/{repository_id}" - ], - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" - ], - addSelectedRepoToOrgVariable: [ - "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" - ], - approveWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" - ], - cancelWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" - ], - createEnvironmentVariable: [ - "POST /repos/{owner}/{repo}/environments/{environment_name}/variables" - ], - createHostedRunnerForOrg: ["POST /orgs/{org}/actions/hosted-runners"], - createOrUpdateEnvironmentSecret: [ - "PUT /repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}" - ], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: [ - "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" - ], - createOrgVariable: ["POST /orgs/{org}/actions/variables"], - createRegistrationTokenForOrg: [ - "POST 
/orgs/{org}/actions/runners/registration-token" - ], - createRegistrationTokenForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/registration-token" - ], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/remove-token" - ], - createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], - createWorkflowDispatch: [ - "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" - ], - deleteActionsCacheById: [ - "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" - ], - deleteActionsCacheByKey: [ - "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" - ], - deleteArtifact: [ - "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" - ], - deleteEnvironmentSecret: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}" - ], - deleteEnvironmentVariable: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}/variables/{name}" - ], - deleteHostedRunnerForOrg: [ - "DELETE /orgs/{org}/actions/hosted-runners/{hosted_runner_id}" - ], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" - ], - deleteRepoVariable: [ - "DELETE /repos/{owner}/{repo}/actions/variables/{name}" - ], - deleteSelfHostedRunnerFromOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}" - ], - deleteSelfHostedRunnerFromRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" - ], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: [ - "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" - ], - disableSelectedRepositoryGithubActionsOrganization: [ - "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" - ], - disableWorkflow: [ - "PUT 
/repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" - ], - downloadArtifact: [ - "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" - ], - downloadJobLogsForWorkflowRun: [ - "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" - ], - downloadWorkflowRunAttemptLogs: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" - ], - downloadWorkflowRunLogs: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" - ], - enableSelectedRepositoryGithubActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" - ], - enableWorkflow: [ - "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" - ], - forceCancelWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" - ], - generateRunnerJitconfigForOrg: [ - "POST /orgs/{org}/actions/runners/generate-jitconfig" - ], - generateRunnerJitconfigForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" - ], - getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], - getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], - getActionsCacheUsageByRepoForOrg: [ - "GET /orgs/{org}/actions/cache/usage-by-repository" - ], - getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], - getAllowedActionsOrganization: [ - "GET /orgs/{org}/actions/permissions/selected-actions" - ], - getAllowedActionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" - ], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getCustomOidcSubClaimForRepo: [ - "GET /repos/{owner}/{repo}/actions/oidc/customization/sub" - ], - getEnvironmentPublicKey: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/secrets/public-key" - ], - getEnvironmentSecret: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}" - ], - getEnvironmentVariable: [ - "GET 
/repos/{owner}/{repo}/environments/{environment_name}/variables/{name}" - ], - getGithubActionsDefaultWorkflowPermissionsOrganization: [ - "GET /orgs/{org}/actions/permissions/workflow" - ], - getGithubActionsDefaultWorkflowPermissionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/workflow" - ], - getGithubActionsPermissionsOrganization: [ - "GET /orgs/{org}/actions/permissions" - ], - getGithubActionsPermissionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions" - ], - getHostedRunnerForOrg: [ - "GET /orgs/{org}/actions/hosted-runners/{hosted_runner_id}" - ], - getHostedRunnersGithubOwnedImagesForOrg: [ - "GET /orgs/{org}/actions/hosted-runners/images/github-owned" - ], - getHostedRunnersLimitsForOrg: [ - "GET /orgs/{org}/actions/hosted-runners/limits" - ], - getHostedRunnersMachineSpecsForOrg: [ - "GET /orgs/{org}/actions/hosted-runners/machine-sizes" - ], - getHostedRunnersPartnerImagesForOrg: [ - "GET /orgs/{org}/actions/hosted-runners/images/partner" - ], - getHostedRunnersPlatformsForOrg: [ - "GET /orgs/{org}/actions/hosted-runners/platforms" - ], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], - getPendingDeploymentsForRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" - ], - getRepoPermissions: [ - "GET /repos/{owner}/{repo}/actions/permissions", - {}, - { renamed: ["actions", "getGithubActionsPermissionsRepository"] } - ], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], - getReviewsForRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" - ], - getSelfHostedRunnerForOrg: ["GET 
/orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" - ], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowAccessToRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/access" - ], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunAttempt: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" - ], - getWorkflowRunUsage: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" - ], - getWorkflowUsage: [ - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" - ], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/secrets" - ], - listEnvironmentVariables: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/variables" - ], - listGithubHostedRunnersInGroupForOrg: [ - "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/hosted-runners" - ], - listHostedRunnersForOrg: ["GET /orgs/{org}/actions/hosted-runners"], - listJobsForWorkflowRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" - ], - listJobsForWorkflowRunAttempt: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" - ], - listLabelsForSelfHostedRunnerForOrg: [ - "GET /orgs/{org}/actions/runners/{runner_id}/labels" - ], - listLabelsForSelfHostedRunnerForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listOrgVariables: ["GET /orgs/{org}/actions/variables"], - listRepoOrganizationSecrets: [ - "GET /repos/{owner}/{repo}/actions/organization-secrets" - ], - listRepoOrganizationVariables: [ - "GET /repos/{owner}/{repo}/actions/organization-variables" - ], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoVariables: ["GET 
/repos/{owner}/{repo}/actions/variables"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/downloads" - ], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" - ], - listSelectedReposForOrgVariable: [ - "GET /orgs/{org}/actions/variables/{name}/repositories" - ], - listSelectedRepositoriesEnabledGithubActionsOrganization: [ - "GET /orgs/{org}/actions/permissions/repositories" - ], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" - ], - listWorkflowRuns: [ - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" - ], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunJobForWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" - ], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - reRunWorkflowFailedJobs: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" - ], - removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" - ], - removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - removeCustomLabelFromSelfHostedRunnerForOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" - ], - removeCustomLabelFromSelfHostedRunnerForRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" - ], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" - ], - removeSelectedRepoFromOrgVariable: [ - "DELETE 
/orgs/{org}/actions/variables/{name}/repositories/{repository_id}" - ], - reviewCustomGatesForRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" - ], - reviewPendingDeploymentsForRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" - ], - setAllowedActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/selected-actions" - ], - setAllowedActionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" - ], - setCustomLabelsForSelfHostedRunnerForOrg: [ - "PUT /orgs/{org}/actions/runners/{runner_id}/labels" - ], - setCustomLabelsForSelfHostedRunnerForRepo: [ - "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - setCustomOidcSubClaimForRepo: [ - "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub" - ], - setGithubActionsDefaultWorkflowPermissionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/workflow" - ], - setGithubActionsDefaultWorkflowPermissionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/workflow" - ], - setGithubActionsPermissionsOrganization: [ - "PUT /orgs/{org}/actions/permissions" - ], - setGithubActionsPermissionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions" - ], - setSelectedReposForOrgSecret: [ - "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" - ], - setSelectedReposForOrgVariable: [ - "PUT /orgs/{org}/actions/variables/{name}/repositories" - ], - setSelectedRepositoriesEnabledGithubActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/repositories" - ], - setWorkflowAccessToRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/access" - ], - updateEnvironmentVariable: [ - "PATCH /repos/{owner}/{repo}/environments/{environment_name}/variables/{name}" - ], - updateHostedRunnerForOrg: [ - "PATCH /orgs/{org}/actions/hosted-runners/{hosted_runner_id}" - ], - updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], - updateRepoVariable: [ - "PATCH 
/repos/{owner}/{repo}/actions/variables/{name}" - ] - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: [ - "DELETE /notifications/threads/{thread_id}/subscription" - ], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: [ - "GET /notifications/threads/{thread_id}/subscription" - ], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: [ - "GET /users/{username}/events/orgs/{org}" - ], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: [ - "GET /users/{username}/received_events/public" - ], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/notifications" - ], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsDone: ["DELETE /notifications/threads/{thread_id}"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - 
setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: [ - "PUT /notifications/threads/{thread_id}/subscription" - ], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] - }, - apps: { - addRepoToInstallation: [ - "PUT /user/installations/{installation_id}/repositories/{repository_id}", - {}, - { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } - ], - addRepoToInstallationForAuthenticatedUser: [ - "PUT /user/installations/{installation_id}/repositories/{repository_id}" - ], - checkToken: ["POST /applications/{client_id}/token"], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: [ - "POST /app/installations/{installation_id}/access_tokens" - ], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}"], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app"], - getBySlug: ["GET /apps/{app_slug}"], - getInstallation: ["GET /app/installations/{installation_id}"], - getOrgInstallation: ["GET /orgs/{org}/installation"], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: [ - "GET /marketplace_listing/accounts/{account_id}" - ], - getSubscriptionPlanForAccountStubbed: [ - "GET /marketplace_listing/stubbed/accounts/{account_id}" - ], - getUserInstallation: ["GET /users/{username}/installation"], - getWebhookConfigForApp: ["GET /app/hook/config"], - getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: [ - "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" - ], - listInstallationReposForAuthenticatedUser: [ - "GET /user/installations/{installation_id}/repositories" - ], - 
listInstallationRequestsForAuthenticatedApp: [ - "GET /app/installation-requests" - ], - listInstallations: ["GET /app/installations"], - listInstallationsForAuthenticatedUser: ["GET /user/installations"], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories"], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: [ - "GET /user/marketplace_purchases/stubbed" - ], - listWebhookDeliveries: ["GET /app/hook/deliveries"], - redeliverWebhookDelivery: [ - "POST /app/hook/deliveries/{delivery_id}/attempts" - ], - removeRepoFromInstallation: [ - "DELETE /user/installations/{installation_id}/repositories/{repository_id}", - {}, - { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } - ], - removeRepoFromInstallationForAuthenticatedUser: [ - "DELETE /user/installations/{installation_id}/repositories/{repository_id}" - ], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - scopeToken: ["POST /applications/{client_id}/token/scoped"], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: [ - "DELETE /app/installations/{installation_id}/suspended" - ], - updateWebhookConfigForApp: ["PATCH /app/hook/config"] - }, - billing: { - getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: [ - "GET /users/{username}/settings/billing/actions" - ], - getGithubBillingUsageReportOrg: [ - "GET /organizations/{org}/settings/billing/usage" - ], - getGithubBillingUsageReportUser: [ - "GET /users/{username}/settings/billing/usage" - ], - getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: [ - "GET /users/{username}/settings/billing/packages" - ], - 
getSharedStorageBillingOrg: [ - "GET /orgs/{org}/settings/billing/shared-storage" - ], - getSharedStorageBillingUser: [ - "GET /users/{username}/settings/billing/shared-storage" - ] - }, - campaigns: { - createCampaign: ["POST /orgs/{org}/campaigns"], - deleteCampaign: ["DELETE /orgs/{org}/campaigns/{campaign_number}"], - getCampaignSummary: ["GET /orgs/{org}/campaigns/{campaign_number}"], - listOrgCampaigns: ["GET /orgs/{org}/campaigns"], - updateCampaign: ["PATCH /orgs/{org}/campaigns/{campaign_number}"] - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs"], - createSuite: ["POST /repos/{owner}/{repo}/check-suites"], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], - listAnnotations: [ - "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" - ], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], - listForSuite: [ - "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" - ], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], - rerequestRun: [ - "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" - ], - rerequestSuite: [ - "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" - ], - setSuitesPreferences: [ - "PATCH /repos/{owner}/{repo}/check-suites/preferences" - ], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] - }, - codeScanning: { - commitAutofix: [ - "POST /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/autofix/commits" - ], - createAutofix: [ - "POST /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/autofix" - ], - createVariantAnalysis: [ - "POST /repos/{owner}/{repo}/code-scanning/codeql/variant-analyses" - ], - deleteAnalysis: [ - "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" - ], - deleteCodeqlDatabase: [ - "DELETE /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" 
- ], - getAlert: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", - {}, - { renamedParameters: { alert_id: "alert_number" } } - ], - getAnalysis: [ - "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" - ], - getAutofix: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/autofix" - ], - getCodeqlDatabase: [ - "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" - ], - getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], - getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], - getVariantAnalysis: [ - "GET /repos/{owner}/{repo}/code-scanning/codeql/variant-analyses/{codeql_variant_analysis_id}" - ], - getVariantAnalysisRepoTask: [ - "GET /repos/{owner}/{repo}/code-scanning/codeql/variant-analyses/{codeql_variant_analysis_id}/repos/{repo_owner}/{repo_name}" - ], - listAlertInstances: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" - ], - listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - listAlertsInstances: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", - {}, - { renamed: ["codeScanning", "listAlertInstances"] } - ], - listCodeqlDatabases: [ - "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" - ], - listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" - ], - updateDefaultSetup: [ - "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" - ], - uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] - }, - codeSecurity: { - attachConfiguration: [ - "POST /orgs/{org}/code-security/configurations/{configuration_id}/attach" - ], - attachEnterpriseConfiguration: [ - "POST /enterprises/{enterprise}/code-security/configurations/{configuration_id}/attach" - ], - createConfiguration: ["POST 
/orgs/{org}/code-security/configurations"], - createConfigurationForEnterprise: [ - "POST /enterprises/{enterprise}/code-security/configurations" - ], - deleteConfiguration: [ - "DELETE /orgs/{org}/code-security/configurations/{configuration_id}" - ], - deleteConfigurationForEnterprise: [ - "DELETE /enterprises/{enterprise}/code-security/configurations/{configuration_id}" - ], - detachConfiguration: [ - "DELETE /orgs/{org}/code-security/configurations/detach" - ], - getConfiguration: [ - "GET /orgs/{org}/code-security/configurations/{configuration_id}" - ], - getConfigurationForRepository: [ - "GET /repos/{owner}/{repo}/code-security-configuration" - ], - getConfigurationsForEnterprise: [ - "GET /enterprises/{enterprise}/code-security/configurations" - ], - getConfigurationsForOrg: ["GET /orgs/{org}/code-security/configurations"], - getDefaultConfigurations: [ - "GET /orgs/{org}/code-security/configurations/defaults" - ], - getDefaultConfigurationsForEnterprise: [ - "GET /enterprises/{enterprise}/code-security/configurations/defaults" - ], - getRepositoriesForConfiguration: [ - "GET /orgs/{org}/code-security/configurations/{configuration_id}/repositories" - ], - getRepositoriesForEnterpriseConfiguration: [ - "GET /enterprises/{enterprise}/code-security/configurations/{configuration_id}/repositories" - ], - getSingleConfigurationForEnterprise: [ - "GET /enterprises/{enterprise}/code-security/configurations/{configuration_id}" - ], - setConfigurationAsDefault: [ - "PUT /orgs/{org}/code-security/configurations/{configuration_id}/defaults" - ], - setConfigurationAsDefaultForEnterprise: [ - "PUT /enterprises/{enterprise}/code-security/configurations/{configuration_id}/defaults" - ], - updateConfiguration: [ - "PATCH /orgs/{org}/code-security/configurations/{configuration_id}" - ], - updateEnterpriseConfiguration: [ - "PATCH /enterprises/{enterprise}/code-security/configurations/{configuration_id}" - ] - }, - codesOfConduct: { - getAllCodesOfConduct: ["GET 
/codes_of_conduct"], - getConductCode: ["GET /codes_of_conduct/{key}"] - }, - codespaces: { - addRepositoryForSecretForAuthenticatedUser: [ - "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" - ], - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" - ], - checkPermissionsForDevcontainer: [ - "GET /repos/{owner}/{repo}/codespaces/permissions_check" - ], - codespaceMachinesForAuthenticatedUser: [ - "GET /user/codespaces/{codespace_name}/machines" - ], - createForAuthenticatedUser: ["POST /user/codespaces"], - createOrUpdateOrgSecret: [ - "PUT /orgs/{org}/codespaces/secrets/{secret_name}" - ], - createOrUpdateRepoSecret: [ - "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" - ], - createOrUpdateSecretForAuthenticatedUser: [ - "PUT /user/codespaces/secrets/{secret_name}" - ], - createWithPrForAuthenticatedUser: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" - ], - createWithRepoForAuthenticatedUser: [ - "POST /repos/{owner}/{repo}/codespaces" - ], - deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], - deleteFromOrganization: [ - "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" - ], - deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" - ], - deleteSecretForAuthenticatedUser: [ - "DELETE /user/codespaces/secrets/{secret_name}" - ], - exportForAuthenticatedUser: [ - "POST /user/codespaces/{codespace_name}/exports" - ], - getCodespacesForUserInOrg: [ - "GET /orgs/{org}/members/{username}/codespaces" - ], - getExportDetailsForAuthenticatedUser: [ - "GET /user/codespaces/{codespace_name}/exports/{export_id}" - ], - getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], - getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], - getOrgSecret: ["GET 
/orgs/{org}/codespaces/secrets/{secret_name}"], - getPublicKeyForAuthenticatedUser: [ - "GET /user/codespaces/secrets/public-key" - ], - getRepoPublicKey: [ - "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" - ], - getRepoSecret: [ - "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" - ], - getSecretForAuthenticatedUser: [ - "GET /user/codespaces/secrets/{secret_name}" - ], - listDevcontainersInRepositoryForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces/devcontainers" - ], - listForAuthenticatedUser: ["GET /user/codespaces"], - listInOrganization: [ - "GET /orgs/{org}/codespaces", - {}, - { renamedParameters: { org_id: "org" } } - ], - listInRepositoryForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces" - ], - listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], - listRepositoriesForSecretForAuthenticatedUser: [ - "GET /user/codespaces/secrets/{secret_name}/repositories" - ], - listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" - ], - preFlightWithRepoForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces/new" - ], - publishForAuthenticatedUser: [ - "POST /user/codespaces/{codespace_name}/publish" - ], - removeRepositoryForSecretForAuthenticatedUser: [ - "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" - ], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" - ], - repoMachinesForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces/machines" - ], - setRepositoriesForSecretForAuthenticatedUser: [ - "PUT /user/codespaces/secrets/{secret_name}/repositories" - ], - setSelectedReposForOrgSecret: [ - "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" - ], - startForAuthenticatedUser: ["POST 
/user/codespaces/{codespace_name}/start"], - stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], - stopInOrganization: [ - "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" - ], - updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] - }, - copilot: { - addCopilotSeatsForTeams: [ - "POST /orgs/{org}/copilot/billing/selected_teams" - ], - addCopilotSeatsForUsers: [ - "POST /orgs/{org}/copilot/billing/selected_users" - ], - cancelCopilotSeatAssignmentForTeams: [ - "DELETE /orgs/{org}/copilot/billing/selected_teams" - ], - cancelCopilotSeatAssignmentForUsers: [ - "DELETE /orgs/{org}/copilot/billing/selected_users" - ], - copilotMetricsForOrganization: ["GET /orgs/{org}/copilot/metrics"], - copilotMetricsForTeam: ["GET /orgs/{org}/team/{team_slug}/copilot/metrics"], - getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], - getCopilotSeatDetailsForUser: [ - "GET /orgs/{org}/members/{username}/copilot" - ], - listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] - }, - credentials: { revoke: ["POST /credentials/revoke"] }, - dependabot: { - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" - ], - createOrUpdateOrgSecret: [ - "PUT /orgs/{org}/dependabot/secrets/{secret_name}" - ], - createOrUpdateRepoSecret: [ - "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" - ], - deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" - ], - getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], - getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], - getRepoPublicKey: [ - "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" - ], - getRepoSecret: [ - "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" - ], - 
listAlertsForEnterprise: [ - "GET /enterprises/{enterprise}/dependabot/alerts" - ], - listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], - listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" - ], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" - ], - setSelectedReposForOrgSecret: [ - "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" - ], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" - ] - }, - dependencyGraph: { - createRepositorySnapshot: [ - "POST /repos/{owner}/{repo}/dependency-graph/snapshots" - ], - diffRange: [ - "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" - ], - exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] - }, - emojis: { get: ["GET /emojis"] }, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - listForUser: ["GET /users/{username}/gists"], - listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] - }, - git: { 
- createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST /repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET /gitignore/templates/{name}"] - }, - hostedCompute: { - createNetworkConfigurationForOrg: [ - "POST /orgs/{org}/settings/network-configurations" - ], - deleteNetworkConfigurationFromOrg: [ - "DELETE /orgs/{org}/settings/network-configurations/{network_configuration_id}" - ], - getNetworkConfigurationForOrg: [ - "GET /orgs/{org}/settings/network-configurations/{network_configuration_id}" - ], - getNetworkSettingsForOrg: [ - "GET /orgs/{org}/settings/network-settings/{network_settings_id}" - ], - listNetworkConfigurationsForOrg: [ - "GET /orgs/{org}/settings/network-configurations" - ], - updateNetworkConfigurationForOrg: [ - "PATCH /orgs/{org}/settings/network-configurations/{network_configuration_id}" - ] - }, - interactions: { - getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], - getRestrictionsForYourPublicRepos: [ - "GET /user/interaction-limits", - {}, - { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } - ], - 
removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], - removeRestrictionsForRepo: [ - "DELETE /repos/{owner}/{repo}/interaction-limits" - ], - removeRestrictionsForYourPublicRepos: [ - "DELETE /user/interaction-limits", - {}, - { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } - ], - setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], - setRestrictionsForYourPublicRepos: [ - "PUT /user/interaction-limits", - {}, - { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } - ] - }, - issues: { - addAssignees: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" - ], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - addSubIssue: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/sub_issues" - ], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - checkUserCanBeAssignedToIssue: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" - ], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" - ], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: [ - "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" - ], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: [ - "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" - ], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET 
/repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], - listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" - ], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: [ - "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" - ], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" - ], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - listSubIssues: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/sub_issues" - ], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" - ], - removeAssignees: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" - ], - removeLabel: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" - ], - removeSubIssue: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/sub_issue" - ], - reprioritizeSubIssue: [ - "PATCH /repos/{owner}/{repo}/issues/{issue_number}/sub_issues/priority" - ], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: [ - "PATCH 
/repos/{owner}/{repo}/milestones/{milestone_number}" - ] - }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"] - }, - markdown: { - render: ["POST /markdown"], - renderRaw: [ - "POST /markdown/raw", - { headers: { "content-type": "text/plain; charset=utf-8" } } - ] - }, - meta: { - get: ["GET /meta"], - getAllVersions: ["GET /versions"], - getOctocat: ["GET /octocat"], - getZen: ["GET /zen"], - root: ["GET /"] - }, - migrations: { - deleteArchiveForAuthenticatedUser: [ - "DELETE /user/migrations/{migration_id}/archive" - ], - deleteArchiveForOrg: [ - "DELETE /orgs/{org}/migrations/{migration_id}/archive" - ], - downloadArchiveForOrg: [ - "GET /orgs/{org}/migrations/{migration_id}/archive" - ], - getArchiveForAuthenticatedUser: [ - "GET /user/migrations/{migration_id}/archive" - ], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], - listForAuthenticatedUser: ["GET /user/migrations"], - listForOrg: ["GET /orgs/{org}/migrations"], - listReposForAuthenticatedUser: [ - "GET /user/migrations/{migration_id}/repositories" - ], - listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], - listReposForUser: [ - "GET /user/migrations/{migration_id}/repositories", - {}, - { renamed: ["migrations", "listReposForAuthenticatedUser"] } - ], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - unlockRepoForAuthenticatedUser: [ - "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" - ], - unlockRepoForOrg: [ - "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" - ] - }, - oidc: { - getOidcCustomSubTemplateForOrg: [ - "GET /orgs/{org}/actions/oidc/customization/sub" - ], - updateOidcCustomSubTemplateForOrg: [ - "PUT /orgs/{org}/actions/oidc/customization/sub" - ] - }, - orgs: { - addSecurityManagerTeam: 
[ - "PUT /orgs/{org}/security-managers/teams/{team_slug}", - {}, - { - deprecated: "octokit.rest.orgs.addSecurityManagerTeam() is deprecated, see https://docs.github.com/rest/orgs/security-managers#add-a-security-manager-team" - } - ], - assignTeamToOrgRole: [ - "PUT /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" - ], - assignUserToOrgRole: [ - "PUT /orgs/{org}/organization-roles/users/{username}/{role_id}" - ], - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: [ - "PUT /orgs/{org}/outside_collaborators/{username}" - ], - createInvitation: ["POST /orgs/{org}/invitations"], - createIssueType: ["POST /orgs/{org}/issue-types"], - createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], - createOrUpdateCustomPropertiesValuesForRepos: [ - "PATCH /orgs/{org}/properties/values" - ], - createOrUpdateCustomProperty: [ - "PUT /orgs/{org}/properties/schema/{custom_property_name}" - ], - createWebhook: ["POST /orgs/{org}/hooks"], - delete: ["DELETE /orgs/{org}"], - deleteIssueType: ["DELETE /orgs/{org}/issue-types/{issue_type_id}"], - deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - enableOrDisableSecurityProductOnAllOrgRepos: [ - "POST /orgs/{org}/{security_product}/{enablement}", - {}, - { - deprecated: "octokit.rest.orgs.enableOrDisableSecurityProductOnAllOrgRepos() is deprecated, see https://docs.github.com/rest/orgs/orgs#enable-or-disable-a-security-feature-for-an-organization" - } - ], - get: ["GET /orgs/{org}"], - getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], - getCustomProperty: [ - "GET /orgs/{org}/properties/schema/{custom_property_name}" - ], - getMembershipForAuthenticatedUser: ["GET 
/user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getOrgRole: ["GET /orgs/{org}/organization-roles/{role_id}"], - getOrgRulesetHistory: ["GET /orgs/{org}/rulesets/{ruleset_id}/history"], - getOrgRulesetVersion: [ - "GET /orgs/{org}/rulesets/{ruleset_id}/history/{version_id}" - ], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], - getWebhookDelivery: [ - "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" - ], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations"], - listAttestations: ["GET /orgs/{org}/attestations/{subject_digest}"], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], - listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listIssueTypes: ["GET /orgs/{org}/issue-types"], - listMembers: ["GET /orgs/{org}/members"], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOrgRoleTeams: ["GET /orgs/{org}/organization-roles/{role_id}/teams"], - listOrgRoleUsers: ["GET /orgs/{org}/organization-roles/{role_id}/users"], - listOrgRoles: ["GET /orgs/{org}/organization-roles"], - listOrganizationFineGrainedPermissions: [ - "GET /orgs/{org}/organization-fine-grained-permissions" - ], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPatGrantRepositories: [ - "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" - ], - listPatGrantRequestRepositories: [ - "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" - ], - listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], - listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], - 
listPendingInvitations: ["GET /orgs/{org}/invitations"], - listPublicMembers: ["GET /orgs/{org}/public_members"], - listSecurityManagerTeams: [ - "GET /orgs/{org}/security-managers", - {}, - { - deprecated: "octokit.rest.orgs.listSecurityManagerTeams() is deprecated, see https://docs.github.com/rest/orgs/security-managers#list-security-manager-teams" - } - ], - listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], - listWebhooks: ["GET /orgs/{org}/hooks"], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: [ - "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" - ], - removeCustomProperty: [ - "DELETE /orgs/{org}/properties/schema/{custom_property_name}" - ], - removeMember: ["DELETE /orgs/{org}/members/{username}"], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: [ - "DELETE /orgs/{org}/outside_collaborators/{username}" - ], - removePublicMembershipForAuthenticatedUser: [ - "DELETE /orgs/{org}/public_members/{username}" - ], - removeSecurityManagerTeam: [ - "DELETE /orgs/{org}/security-managers/teams/{team_slug}", - {}, - { - deprecated: "octokit.rest.orgs.removeSecurityManagerTeam() is deprecated, see https://docs.github.com/rest/orgs/security-managers#remove-a-security-manager-team" - } - ], - reviewPatGrantRequest: [ - "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" - ], - reviewPatGrantRequestsInBulk: [ - "POST /orgs/{org}/personal-access-token-requests" - ], - revokeAllOrgRolesTeam: [ - "DELETE /orgs/{org}/organization-roles/teams/{team_slug}" - ], - revokeAllOrgRolesUser: [ - "DELETE /orgs/{org}/organization-roles/users/{username}" - ], - revokeOrgRoleTeam: [ - "DELETE /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" - ], - revokeOrgRoleUser: [ - "DELETE /orgs/{org}/organization-roles/users/{username}/{role_id}" - ], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - 
setPublicMembershipForAuthenticatedUser: [ - "PUT /orgs/{org}/public_members/{username}" - ], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateIssueType: ["PUT /orgs/{org}/issue-types/{issue_type_id}"], - updateMembershipForAuthenticatedUser: [ - "PATCH /user/memberships/orgs/{org}" - ], - updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], - updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], - updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] - }, - packages: { - deletePackageForAuthenticatedUser: [ - "DELETE /user/packages/{package_type}/{package_name}" - ], - deletePackageForOrg: [ - "DELETE /orgs/{org}/packages/{package_type}/{package_name}" - ], - deletePackageForUser: [ - "DELETE /users/{username}/packages/{package_type}/{package_name}" - ], - deletePackageVersionForAuthenticatedUser: [ - "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - deletePackageVersionForOrg: [ - "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - deletePackageVersionForUser: [ - "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - getAllPackageVersionsForAPackageOwnedByAnOrg: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", - {}, - { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } - ], - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions", - {}, - { - renamed: [ - "packages", - "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" - ] - } - ], - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions" - ], - getAllPackageVersionsForPackageOwnedByOrg: [ - "GET 
/orgs/{org}/packages/{package_type}/{package_name}/versions" - ], - getAllPackageVersionsForPackageOwnedByUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}/versions" - ], - getPackageForAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}" - ], - getPackageForOrganization: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}" - ], - getPackageForUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}" - ], - getPackageVersionForAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - getPackageVersionForOrganization: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - getPackageVersionForUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - listDockerMigrationConflictingPackagesForAuthenticatedUser: [ - "GET /user/docker/conflicts" - ], - listDockerMigrationConflictingPackagesForOrganization: [ - "GET /orgs/{org}/docker/conflicts" - ], - listDockerMigrationConflictingPackagesForUser: [ - "GET /users/{username}/docker/conflicts" - ], - listPackagesForAuthenticatedUser: ["GET /user/packages"], - listPackagesForOrganization: ["GET /orgs/{org}/packages"], - listPackagesForUser: ["GET /users/{username}/packages"], - restorePackageForAuthenticatedUser: [ - "POST /user/packages/{package_type}/{package_name}/restore{?token}" - ], - restorePackageForOrg: [ - "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" - ], - restorePackageForUser: [ - "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" - ], - restorePackageVersionForAuthenticatedUser: [ - "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" - ], - restorePackageVersionForOrg: [ - "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" - ], - restorePackageVersionForUser: [ - 
"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" - ] - }, - privateRegistries: { - createOrgPrivateRegistry: ["POST /orgs/{org}/private-registries"], - deleteOrgPrivateRegistry: [ - "DELETE /orgs/{org}/private-registries/{secret_name}" - ], - getOrgPrivateRegistry: ["GET /orgs/{org}/private-registries/{secret_name}"], - getOrgPublicKey: ["GET /orgs/{org}/private-registries/public-key"], - listOrgPrivateRegistries: ["GET /orgs/{org}/private-registries"], - updateOrgPrivateRegistry: [ - "PATCH /orgs/{org}/private-registries/{secret_name}" - ] - }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" - ], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" - ], - deletePendingReview: [ - "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" - ], - deleteReviewComment: [ - "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" - ], - dismissReview: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" - ], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" - ], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" - ], - listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" - ], - listReviewComments: [ - "GET 
/repos/{owner}/{repo}/pulls/{pull_number}/comments" - ], - listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: [ - "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" - ], - requestReviewers: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" - ], - submitReview: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" - ], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" - ], - updateReview: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" - ], - updateReviewComment: [ - "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" - ] - }, - rateLimit: { get: ["GET /rate_limit"] }, - reactions: { - createForCommitComment: [ - "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" - ], - createForIssue: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" - ], - createForIssueComment: [ - "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" - ], - createForPullRequestReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" - ], - createForRelease: [ - "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" - ], - createForTeamDiscussionCommentInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" - ], - createForTeamDiscussionInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" - ], - deleteForCommitComment: [ - "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" - ], - deleteForIssue: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" - ], - deleteForIssueComment: [ - "DELETE 
/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" - ], - deleteForPullRequestComment: [ - "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" - ], - deleteForRelease: [ - "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" - ], - deleteForTeamDiscussion: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" - ], - deleteForTeamDiscussionComment: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" - ], - listForCommitComment: [ - "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" - ], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - listForIssueComment: [ - "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" - ], - listForPullRequestReviewComment: [ - "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" - ], - listForRelease: [ - "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" - ], - listForTeamDiscussionCommentInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" - ], - listForTeamDiscussionInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" - ] - }, - repos: { - acceptInvitation: [ - "PATCH /user/repository_invitations/{invitation_id}", - {}, - { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } - ], - acceptInvitationForAuthenticatedUser: [ - "PATCH /user/repository_invitations/{invitation_id}" - ], - addAppAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" } - ], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" } - ], - 
addTeamAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" } - ], - addUserAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" } - ], - cancelPagesDeployment: [ - "POST /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel" - ], - checkAutomatedSecurityFixes: [ - "GET /repos/{owner}/{repo}/automated-security-fixes" - ], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkPrivateVulnerabilityReporting: [ - "GET /repos/{owner}/{repo}/private-vulnerability-reporting" - ], - checkVulnerabilityAlerts: [ - "GET /repos/{owner}/{repo}/vulnerability-alerts" - ], - codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - compareCommitsWithBasehead: [ - "GET /repos/{owner}/{repo}/compare/{basehead}" - ], - createAttestation: ["POST /repos/{owner}/{repo}/attestations"], - createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], - createCommitComment: [ - "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" - ], - createCommitSignatureProtection: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" - ], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentBranchPolicy: [ - "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" - ], - createDeploymentProtectionRule: [ - "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" - ], - createDeploymentStatus: [ - "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" - ], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - createForAuthenticatedUser: ["POST 
/user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateCustomPropertiesValues: [ - "PATCH /repos/{owner}/{repo}/properties/values" - ], - createOrUpdateEnvironment: [ - "PUT /repos/{owner}/{repo}/environments/{environment_name}" - ], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createOrgRuleset: ["POST /orgs/{org}/rulesets"], - createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployments"], - createPagesSite: ["POST /repos/{owner}/{repo}/pages"], - createRelease: ["POST /repos/{owner}/{repo}/releases"], - createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], - createUsingTemplate: [ - "POST /repos/{template_owner}/{template_repo}/generate" - ], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: [ - "DELETE /user/repository_invitations/{invitation_id}", - {}, - { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } - ], - declineInvitationForAuthenticatedUser: [ - "DELETE /user/repository_invitations/{invitation_id}" - ], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" - ], - deleteAdminBranchProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" - ], - deleteAnEnvironment: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}" - ], - deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], - deleteBranchProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" - ], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" - ], - deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: [ - "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" - ], - 
deleteDeploymentBranchPolicy: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" - ], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: [ - "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" - ], - deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], - deletePullRequestReviewProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" - ], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: [ - "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" - ], - deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: [ - "DELETE /repos/{owner}/{repo}/automated-security-fixes" - ], - disableDeploymentProtectionRule: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" - ], - disablePrivateVulnerabilityReporting: [ - "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" - ], - disableVulnerabilityAlerts: [ - "DELETE /repos/{owner}/{repo}/vulnerability-alerts" - ], - downloadArchive: [ - "GET /repos/{owner}/{repo}/zipball/{ref}", - {}, - { renamed: ["repos", "downloadZipballArchive"] } - ], - downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], - downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], - enableAutomatedSecurityFixes: [ - "PUT /repos/{owner}/{repo}/automated-security-fixes" - ], - enablePrivateVulnerabilityReporting: [ - "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" - ], - enableVulnerabilityAlerts: [ - "PUT /repos/{owner}/{repo}/vulnerability-alerts" - ], - generateReleaseNotes: [ - "POST /repos/{owner}/{repo}/releases/generate-notes" - ], - get: ["GET 
/repos/{owner}/{repo}"], - getAccessRestrictions: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" - ], - getAdminBranchProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" - ], - getAllDeploymentProtectionRules: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" - ], - getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], - getAllStatusCheckContexts: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" - ], - getAllTopics: ["GET /repos/{owner}/{repo}/topics"], - getAppsWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" - ], - getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], - getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection" - ], - getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], - getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], - getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: [ - "GET /repos/{owner}/{repo}/collaborators/{username}/permission" - ], - getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], - getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], - getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], - getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" - ], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], - getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], - getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], - getCustomDeploymentProtectionRule: [ - "GET 
/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" - ], - getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], - getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], - getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentBranchPolicy: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" - ], - getDeploymentStatus: [ - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" - ], - getEnvironment: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}" - ], - getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], - getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], - getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], - getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], - getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], - getOrgRulesets: ["GET /orgs/{org}/rulesets"], - getPages: ["GET /repos/{owner}/{repo}/pages"], - getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], - getPagesDeployment: [ - "GET /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}" - ], - getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], - getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" - ], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getRepoRuleSuite: [ - "GET 
/repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" - ], - getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], - getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], - getRepoRulesetHistory: [ - "GET /repos/{owner}/{repo}/rulesets/{ruleset_id}/history" - ], - getRepoRulesetVersion: [ - "GET /repos/{owner}/{repo}/rulesets/{ruleset_id}/history/{version_id}" - ], - getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], - getStatusChecksProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" - ], - getTeamsWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" - ], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" - ], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - getWebhookConfigForRepo: [ - "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" - ], - getWebhookDelivery: [ - "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" - ], - listActivities: ["GET /repos/{owner}/{repo}/activity"], - listAttestations: [ - "GET /repos/{owner}/{repo}/attestations/{subject_digest}" - ], - listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" - ], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" - ], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: [ - "GET /repos/{owner}/{repo}/commits/{ref}/statuses" - ], - listCommits: ["GET 
/repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listCustomDeploymentRuleIntegrations: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" - ], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentBranchPolicies: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" - ], - listDeploymentStatuses: [ - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" - ], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" - ], - listReleaseAssets: [ - "GET /repos/{owner}/{repo}/releases/{release_id}/assets" - ], - listReleases: ["GET /repos/{owner}/{repo}/releases"], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhookDeliveries: [ - "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" - ], - listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: [ - "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" - ], - removeAppAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" } - 
], - removeCollaborator: [ - "DELETE /repos/{owner}/{repo}/collaborators/{username}" - ], - removeStatusCheckContexts: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" } - ], - removeStatusCheckProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" - ], - removeTeamAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" } - ], - removeUserAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" } - ], - renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" - ], - setAppAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" } - ], - setStatusCheckContexts: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" } - ], - setTeamAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" } - ], - setUserAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" } - ], - testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection" - ], - updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateDeploymentBranchPolicy: [ - "PUT 
/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" - ], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: [ - "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" - ], - updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], - updatePullRequestReviewProtection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" - ], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: [ - "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" - ], - updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], - updateStatusCheckPotection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", - {}, - { renamed: ["repos", "updateStatusCheckProtection"] } - ], - updateStatusCheckProtection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" - ], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - updateWebhookConfigForRepo: [ - "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" - ], - uploadReleaseAsset: [ - "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", - { baseUrl: "https://uploads.github.com" } - ] - }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits"], - issuesAndPullRequests: [ - "GET /search/issues", - {}, - { - deprecated: "octokit.rest.search.issuesAndPullRequests() is deprecated, see https://docs.github.com/rest/search/search#search-issues-and-pull-requests" - } - ], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics"], - users: ["GET /search/users"] - }, - secretScanning: { - createPushProtectionBypass: [ - "POST /repos/{owner}/{repo}/secret-scanning/push-protection-bypasses" - ], - getAlert: [ - "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" - ], - getScanHistory: ["GET 
/repos/{owner}/{repo}/secret-scanning/scan-history"], - listAlertsForEnterprise: [ - "GET /enterprises/{enterprise}/secret-scanning/alerts" - ], - listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], - listLocationsForAlert: [ - "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" - ], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" - ] - }, - securityAdvisories: { - createFork: [ - "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks" - ], - createPrivateVulnerabilityReport: [ - "POST /repos/{owner}/{repo}/security-advisories/reports" - ], - createRepositoryAdvisory: [ - "POST /repos/{owner}/{repo}/security-advisories" - ], - createRepositoryAdvisoryCveRequest: [ - "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" - ], - getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], - getRepositoryAdvisory: [ - "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" - ], - listGlobalAdvisories: ["GET /advisories"], - listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], - listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], - updateRepositoryAdvisory: [ - "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" - ] - }, - teams: { - addOrUpdateMembershipForUserInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" - ], - addOrUpdateRepoPermissionsInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" - ], - checkPermissionsForRepoInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" - ], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" - ], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: [ - "DELETE 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" - ], - deleteDiscussionInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" - ], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" - ], - getDiscussionInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" - ], - getMembershipForUserInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" - ], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" - ], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/invitations" - ], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" - ], - removeRepoInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" - ], - updateDiscussionCommentInOrg: [ - "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" - ], - updateDiscussionInOrg: [ - "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" - ], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] - }, - users: { - addEmailForAuthenticated: [ - "POST /user/emails", - {}, - { renamed: ["users", "addEmailForAuthenticatedUser"] } - ], - addEmailForAuthenticatedUser: ["POST /user/emails"], - addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], - block: ["PUT /user/blocks/{username}"], - 
checkBlocked: ["GET /user/blocks/{username}"], - checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], - checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: [ - "POST /user/gpg_keys", - {}, - { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } - ], - createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: [ - "POST /user/keys", - {}, - { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } - ], - createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], - createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], - deleteEmailForAuthenticated: [ - "DELETE /user/emails", - {}, - { renamed: ["users", "deleteEmailForAuthenticatedUser"] } - ], - deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: [ - "DELETE /user/gpg_keys/{gpg_key_id}", - {}, - { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } - ], - deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: [ - "DELETE /user/keys/{key_id}", - {}, - { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } - ], - deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], - deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], - deleteSshSigningKeyForAuthenticatedUser: [ - "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" - ], - follow: ["PUT /user/following/{username}"], - getAuthenticated: ["GET /user"], - getById: ["GET /user/{account_id}"], - getByUsername: ["GET /users/{username}"], - getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: [ - "GET /user/gpg_keys/{gpg_key_id}", - {}, - { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } - ], - getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: [ - "GET /user/keys/{key_id}", - {}, - { renamed: 
["users", "getPublicSshKeyForAuthenticatedUser"] } - ], - getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], - getSshSigningKeyForAuthenticatedUser: [ - "GET /user/ssh_signing_keys/{ssh_signing_key_id}" - ], - list: ["GET /users"], - listAttestations: ["GET /users/{username}/attestations/{subject_digest}"], - listBlockedByAuthenticated: [ - "GET /user/blocks", - {}, - { renamed: ["users", "listBlockedByAuthenticatedUser"] } - ], - listBlockedByAuthenticatedUser: ["GET /user/blocks"], - listEmailsForAuthenticated: [ - "GET /user/emails", - {}, - { renamed: ["users", "listEmailsForAuthenticatedUser"] } - ], - listEmailsForAuthenticatedUser: ["GET /user/emails"], - listFollowedByAuthenticated: [ - "GET /user/following", - {}, - { renamed: ["users", "listFollowedByAuthenticatedUser"] } - ], - listFollowedByAuthenticatedUser: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET /users/{username}/followers"], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: [ - "GET /user/gpg_keys", - {}, - { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } - ], - listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: [ - "GET /user/public_emails", - {}, - { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } - ], - listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: [ - "GET /user/keys", - {}, - { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } - ], - listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], - listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], - listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], - listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], - 
listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], - setPrimaryEmailVisibilityForAuthenticated: [ - "PATCH /user/email/visibility", - {}, - { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } - ], - setPrimaryEmailVisibilityForAuthenticatedUser: [ - "PATCH /user/email/visibility" - ], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"] - } -}; -var endpoints_default = Endpoints; - -// node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/endpoints-to-methods.js -var endpointMethodsMap = /* @__PURE__ */ new Map(); -for (const [scope, endpoints] of Object.entries(endpoints_default)) { - for (const [methodName, endpoint2] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint2; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign( - { - method, - url - }, - defaults - ); - if (!endpointMethodsMap.has(scope)) { - endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); - } - endpointMethodsMap.get(scope).set(methodName, { - scope, - methodName, - endpointDefaults, - decorations - }); - } -} -var handler = { - has({ scope }, methodName) { - return endpointMethodsMap.get(scope).has(methodName); - }, - getOwnPropertyDescriptor(target, methodName) { - return { - value: this.get(target, methodName), - // ensures method is in the cache - configurable: true, - writable: true, - enumerable: true - }; - }, - defineProperty(target, methodName, descriptor) { - Object.defineProperty(target.cache, methodName, descriptor); - return true; - }, - deleteProperty(target, methodName) { - delete target.cache[methodName]; - return true; - }, - ownKeys({ scope }) { - return [...endpointMethodsMap.get(scope).keys()]; - }, - set(target, methodName, value) { - return target.cache[methodName] = value; - }, - get({ octokit, scope, cache }, methodName) { - if (cache[methodName]) { - return cache[methodName]; - 
} - const method = endpointMethodsMap.get(scope).get(methodName); - if (!method) { - return void 0; - } - const { endpointDefaults, decorations } = method; - if (decorations) { - cache[methodName] = decorate( - octokit, - scope, - methodName, - endpointDefaults, - decorations - ); - } else { - cache[methodName] = octokit.request.defaults(endpointDefaults); - } - return cache[methodName]; - } -}; -function endpointsToMethods(octokit) { - const newMethods = {}; - for (const scope of endpointMethodsMap.keys()) { - newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); - } - return newMethods; -} -function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = octokit.request.defaults(defaults); - function withDecorations(...args2) { - let options = requestWithDefaults.endpoint.merge(...args2); - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: void 0 - }); - return requestWithDefaults(options); - } - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn( - `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` - ); - } - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); - } - if (decorations.renamedParameters) { - const options2 = requestWithDefaults.endpoint.merge(...args2); - for (const [name, alias] of Object.entries( - decorations.renamedParameters - )) { - if (name in options2) { - octokit.log.warn( - `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead` - ); - if (!(alias in options2)) { - options2[alias] = options2[name]; - } - delete options2[name]; - } - } - return requestWithDefaults(options2); - } - return requestWithDefaults(...args2); - } - return Object.assign(withDecorations, requestWithDefaults); -} - -// node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js -function restEndpointMethods(octokit) { - const api = endpointsToMethods(octokit); - return { - rest: api - }; -} -restEndpointMethods.VERSION = VERSION6; -function legacyRestEndpointMethods(octokit) { - const api = endpointsToMethods(octokit); - return { - ...api, - rest: api - }; -} -legacyRestEndpointMethods.VERSION = VERSION6; - // src/download/download-version.ts var pep440 = __toESM(require_pep440(), 1); var semver3 = __toESM(require_semver(), 1); // src/utils/constants.ts -var REPO = "ruff"; -var OWNER = "astral-sh"; var TOOL_CACHE_NAME = "ruff"; +var VERSIONS_MANIFEST_URL = "https://raw.githubusercontent.com/astral-sh/versions/main/v1/ruff.ndjson"; +var GITHUB_RELEASES_PREFIX = "https://github.com/astral-sh/ruff/releases/download/"; +var ASTRAL_MIRROR_PREFIX = "https://releases.astral.sh/github/ruff/releases/download/"; // src/download/checksum/checksum.ts var crypto3 = __toESM(require("node:crypto"), 1); @@ -31208,8 +27814,195 @@ async function validateFileCheckSum(filePath, expected) { }); } +// src/utils/fetch.ts +var import_undici2 = __toESM(require_undici(), 1); +function getProxyAgent() { + const httpProxy = process.env.HTTP_PROXY || process.env.http_proxy; + if (httpProxy) { + return new import_undici2.ProxyAgent(httpProxy); + } + const httpsProxy = process.env.HTTPS_PROXY || process.env.https_proxy; + if (httpsProxy) { + return new import_undici2.ProxyAgent(httpsProxy); + } + return void 0; +} +var fetch = async (url, opts) => await (0, import_undici2.fetch)(url, { + dispatcher: getProxyAgent(), + ...opts +}); + +// src/download/variant-selection.ts +function selectDefaultVariant(entries, 
duplicateEntryDescription) { + const firstEntry = entries[0]; + if (firstEntry === void 0) { + throw new Error("selectDefaultVariant requires at least one candidate."); + } + if (entries.length === 1) { + return firstEntry; + } + const defaultEntries = entries.filter( + (entry) => isDefaultVariant(entry.variant) + ); + if (defaultEntries.length === 1) { + return defaultEntries[0]; + } + throw new Error( + `${duplicateEntryDescription} with variants ${formatVariants(entries)}. ruff-action currently requires a single default variant for duplicate platform entries.` + ); +} +function isDefaultVariant(variant) { + return variant === void 0 || variant === "default"; +} +function formatVariants(entries) { + return entries.map((entry) => entry.variant ?? "default").sort((left, right) => left.localeCompare(right)).join(", "); +} + +// src/download/manifest.ts +var cachedManifestData = /* @__PURE__ */ new Map(); +async function fetchManifest(manifestUrl = VERSIONS_MANIFEST_URL) { + const cachedVersions = cachedManifestData.get(manifestUrl); + if (cachedVersions !== void 0) { + debug(`Using cached manifest data from ${manifestUrl}`); + return cachedVersions; + } + info(`Fetching manifest data from ${manifestUrl} ...`); + const response = await fetch(manifestUrl, {}); + if (!response.ok) { + throw new Error( + `Failed to fetch manifest data: ${response.status} ${response.statusText}` + ); + } + const body = await response.text(); + const versions = parseManifest(body, manifestUrl); + cachedManifestData.set(manifestUrl, versions); + return versions; +} +function parseManifest(data, sourceDescription) { + const trimmed = data.trim(); + if (trimmed === "") { + throw new Error(`Manifest at ${sourceDescription} is empty.`); + } + if (trimmed.startsWith("[")) { + throw new Error( + `Legacy JSON array manifests are no longer supported in ${sourceDescription}. 
Use the astral-sh/versions manifest format instead.` + ); + } + const versions = []; + for (const [index, line] of data.split("\n").entries()) { + const record = line.trim(); + if (record === "") { + continue; + } + let parsed; + try { + parsed = JSON.parse(record); + } catch (error2) { + throw new Error( + `Failed to parse manifest data from ${sourceDescription} at line ${index + 1}: ${error2.message}` + ); + } + if (!isManifestVersion(parsed)) { + throw new Error( + `Invalid manifest record in ${sourceDescription} at line ${index + 1}.` + ); + } + versions.push(parsed); + } + if (versions.length === 0) { + throw new Error(`No manifest data found in ${sourceDescription}.`); + } + return versions; +} +async function getLatestVersion(manifestUrl = VERSIONS_MANIFEST_URL) { + const latestVersion = (await fetchManifest(manifestUrl))[0]?.version; + if (latestVersion === void 0) { + throw new Error("No versions found in manifest data"); + } + debug(`Latest version from manifest: ${latestVersion}`); + return latestVersion; +} +async function getAllVersions(manifestUrl = VERSIONS_MANIFEST_URL) { + info( + `Getting available versions from ${manifestSource(manifestUrl)} ...` + ); + const versions = await fetchManifest(manifestUrl); + return versions.map((versionData) => versionData.version); +} +async function getArtifact(version2, arch3, platform2, manifestUrl = VERSIONS_MANIFEST_URL) { + const versions = await fetchManifest(manifestUrl); + const versionData = versions.find( + (candidate) => matchesManifestVersion(candidate.version, version2) + ); + if (!versionData) { + debug(`Version ${version2} not found in manifest ${manifestUrl}`); + return void 0; + } + const targetPlatforms = getTargetPlatforms( + versionData.version, + arch3, + platform2 + ); + const matchingArtifacts = versionData.artifacts.filter( + (candidate) => targetPlatforms.includes(candidate.platform) + ); + if (matchingArtifacts.length === 0) { + debug( + `Artifact for ${targetPlatforms.join(" or ")} not 
found in version ${version2}. Available platforms: ${versionData.artifacts.map((candidate) => candidate.platform).join(", ")}` + ); + return void 0; + } + const artifact = selectDefaultVariant( + matchingArtifacts, + `Multiple artifacts found for ${targetPlatforms.join(" or ")} in version ${version2}` + ); + return { + archiveFormat: artifact.archive_format, + checksum: artifact.sha256, + downloadUrl: artifact.url + }; +} +function manifestSource(manifestUrl) { + if (manifestUrl === VERSIONS_MANIFEST_URL) { + return VERSIONS_MANIFEST_URL; + } + return `manifest-file ${manifestUrl}`; +} +function matchesManifestVersion(manifestVersion, requestedVersion) { + return manifestVersion === requestedVersion || manifestVersion === withVersionPrefix(requestedVersion); +} +function getTargetPlatforms(manifestVersion, arch3, platform2) { + const targetPlatform = `${arch3}-${platform2}`; + const versionPrefixedTargetPlatform = `${stripVersionPrefix(manifestVersion)}-${targetPlatform}`; + return [targetPlatform, versionPrefixedTargetPlatform]; +} +function withVersionPrefix(version2) { + return version2.startsWith("v") ? version2 : `v${version2}`; +} +function stripVersionPrefix(version2) { + return version2.startsWith("v") ? 
version2.slice(1) : version2; +} +function isManifestVersion(value) { + if (!isRecord(value)) { + return false; + } + if (typeof value.version !== "string" || !Array.isArray(value.artifacts)) { + return false; + } + return value.artifacts.every(isManifestArtifact); +} +function isManifestArtifact(value) { + if (!isRecord(value)) { + return false; + } + const variantIsValid = typeof value.variant === "string" || value.variant === void 0; + return typeof value.archive_format === "string" && typeof value.platform === "string" && typeof value.sha256 === "string" && typeof value.url === "string" && variantIsValid; +} +function isRecord(value) { + return typeof value === "object" && value !== null; +} + // src/download/download-version.ts -var PaginatingOctokit = Octokit.plugin(paginateRest, restEndpointMethods); function tryGetFromToolCache(arch3, version2) { debug(`Trying to get ruff from tool cache for ${version2}...`); const cachedVersions = findAllVersions(TOOL_CACHE_NAME, arch3); @@ -31221,27 +28014,34 @@ function tryGetFromToolCache(arch3, version2) { const installedPath = find(TOOL_CACHE_NAME, resolvedVersion, arch3); return { installedPath, version: resolvedVersion }; } -async function downloadVersion(platform2, arch3, version2, checkSum2, githubToken2) { - const artifact = `ruff-${arch3}-${platform2}`; - let extension = ".tar.gz"; - if (platform2 === "pc-windows-msvc") { - extension = ".zip"; +async function downloadVersion(platform2, arch3, version2, checksum, githubToken2, manifestUrl) { + const artifact = await getArtifact(version2, arch3, platform2, manifestUrl); + if (!artifact) { + throw new Error( + getMissingArtifactMessage(version2, arch3, platform2, manifestUrl) + ); } - const downloadUrl = constructDownloadUrl(version2, platform2, arch3); - debug(`Downloading ruff from "${downloadUrl}" ...`); - const downloadPath = await downloadTool( - downloadUrl, - void 0, - githubToken2 + const resolvedChecksum = manifestUrl === void 0 ? 
checksum : resolveChecksum(checksum, artifact.checksum); + const downloadPath = await downloadArtifact( + artifact.downloadUrl, + platform2, + arch3, + version2, + getDownloadToken(artifact.downloadUrl, githubToken2) + ); + await validateChecksum( + resolvedChecksum, + downloadPath, + arch3, + platform2, + version2 ); - debug(`Downloaded ruff to "${downloadPath}"`); - await validateChecksum(checkSum2, downloadPath, arch3, platform2, version2); const extractedDir = await extractDownloadedArtifact( version2, downloadPath, - extension, + getExtension(platform2), platform2, - artifact + `ruff-${arch3}-${platform2}` ); const cachedToolDir = await cacheDir( extractedDir, @@ -31251,15 +28051,42 @@ async function downloadVersion(platform2, arch3, version2, checkSum2, githubToke ); return { cachedToolDir, version: version2 }; } -function constructDownloadUrl(version2, platform2, arch3) { - const artifactVersionSuffix = semver3.lte(version2, "v0.4.10") && semver3.gte(version2, "v0.1.8") ? `-${version2}` : ""; - const artifact = `ruff${artifactVersionSuffix}-${arch3}-${platform2}`; - let extension = ".tar.gz"; - if (platform2 === "pc-windows-msvc") { - extension = ".zip"; +function rewriteToMirror(url) { + if (!url.startsWith(GITHUB_RELEASES_PREFIX)) { + return void 0; } - const versionPrefix = semver3.lte(version2, "v0.4.10") ? "v" : ""; - return `https://github.com/${OWNER}/${REPO}/releases/download/${versionPrefix}${version2}/${artifact}${extension}`; + return ASTRAL_MIRROR_PREFIX + url.slice(GITHUB_RELEASES_PREFIX.length); +} +async function downloadArtifact(downloadUrl, platform2, arch3, version2, githubToken2) { + const mirrorUrl = rewriteToMirror(downloadUrl); + const resolvedDownloadUrl = mirrorUrl ?? downloadUrl; + try { + return await downloadFile( + resolvedDownloadUrl, + mirrorUrl !== void 0 ? 
void 0 : githubToken2 + ); + } catch (err) { + if (mirrorUrl === void 0) { + throw err; + } + warning( + `Failed to download from mirror, falling back to GitHub Releases: ${err.message}` + ); + return await downloadFile( + constructDownloadUrl(version2, platform2, arch3), + githubToken2 + ); + } +} +async function downloadFile(downloadUrl, githubToken2) { + info(`Downloading ruff from "${downloadUrl}" ...`); + const downloadPath = await downloadTool( + downloadUrl, + void 0, + githubToken2 + ); + debug(`Downloaded ruff to "${downloadPath}"`); + return downloadPath; } async function extractDownloadedArtifact(version2, downloadPath, extension, platform2, artifact) { let ruffDir; @@ -31277,14 +28104,14 @@ async function extractDownloadedArtifact(version2, downloadPath, extension, plat debug(`Contents of ${ruffDir}: ${files.join(", ")}`); return ruffDir; } -async function resolveVersion(versionInput, githubToken2) { +async function resolveVersion(versionInput, manifestUrl) { debug(`Resolving ${versionInput}...`); - const version2 = versionInput === "latest" ? await getLatestVersion(githubToken2) : versionInput; + const version2 = versionInput === "latest" ? 
await getLatestVersion(manifestUrl) : versionInput; if (isExplicitVersion(version2)) { debug(`Version ${version2} is an explicit version.`); return version2; } - const availableVersions = await getAvailableVersions(githubToken2); + const availableVersions = await getAvailableVersions(manifestUrl); const resolvedVersion = maxSatisfying2(availableVersions, version2); if (resolvedVersion === void 0) { throw new Error(`No version found for ${version2}`); @@ -31292,68 +28119,33 @@ async function resolveVersion(versionInput, githubToken2) { debug(`Resolved version: ${resolvedVersion}`); return resolvedVersion; } -async function getAvailableVersions(githubToken2) { - try { - const octokit = new PaginatingOctokit({ - auth: githubToken2 - }); - return await getReleaseTagNames(octokit); - } catch (err) { - if (err.message.includes("Bad credentials")) { - info( - "No (valid) GitHub token provided. Falling back to anonymous. Requests might be rate limited." - ); - const octokit = new PaginatingOctokit(); - return await getReleaseTagNames(octokit); - } - throw err; - } +async function getAvailableVersions(manifestUrl) { + return await getAllVersions(manifestUrl); } -async function getReleaseTagNames(octokit) { - const response = await octokit.paginate(octokit.rest.repos.listReleases, { - owner: OWNER, - repo: REPO - }); - const releaseTagNames = response.map((release) => release.tag_name); - if (releaseTagNames.length === 0) { - throw Error( - "Github API request failed while getting releases. Check the GitHub status page for outages. Try again later." 
- ); +function getMissingArtifactMessage(version2, arch3, platform2, manifestUrl) { + if (manifestUrl === void 0) { + return `Could not find artifact for version ${version2}, arch ${arch3}, platform ${platform2} in ${VERSIONS_MANIFEST_URL} .`; } - return response.map((release) => release.tag_name); + return `manifest-file does not contain version ${version2}, arch ${arch3}, platform ${platform2}.`; } -async function getLatestVersion(githubToken2) { - const octokit = new PaginatingOctokit({ - auth: githubToken2 - }); - let latestRelease; - try { - latestRelease = await getLatestRelease(octokit); - } catch (err) { - if (err.message.includes("Bad credentials")) { - info( - "No (valid) GitHub token provided. Falling back to anonymous. Requests might be rate limited." - ); - const octokit2 = new PaginatingOctokit(); - latestRelease = await getLatestRelease(octokit2); - } else { - error( - "Github API request failed while getting latest release. Check the GitHub status page for outages. Try again later." - ); - throw err; - } - } - if (!latestRelease) { - throw new Error("Could not determine latest release."); - } - return latestRelease.tag_name; +function resolveChecksum(checksum, manifestChecksum) { + return checksum !== void 0 && checksum !== "" ? checksum : manifestChecksum; } -async function getLatestRelease(octokit) { - const { data: latestRelease } = await octokit.rest.repos.getLatestRelease({ - owner: OWNER, - repo: REPO - }); - return latestRelease; +function getDownloadToken(downloadUrl, githubToken2) { + return downloadUrl.startsWith(GITHUB_RELEASES_PREFIX) ? githubToken2 : void 0; +} +function constructDownloadUrl(version2, platform2, arch3) { + const normalizedVersion = stripVersionPrefix2(version2); + const artifactVersionSuffix = semver3.lte(version2, "v0.4.10") && semver3.gte(version2, "v0.1.8") ? 
`-${normalizedVersion}` : ""; + const artifact = `ruff${artifactVersionSuffix}-${arch3}-${platform2}`; + const versionPrefix = semver3.lte(version2, "v0.4.10") ? "v" : ""; + return `${GITHUB_RELEASES_PREFIX}${versionPrefix}${normalizedVersion}/${artifact}${getExtension(platform2)}`; +} +function stripVersionPrefix2(version2) { + return version2.startsWith("v") ? version2.slice(1) : version2; +} +function getExtension(platform2) { + return platform2 === "pc-windows-msvc" ? ".zip" : ".tar.gz"; } function maxSatisfying2(versions, version2) { const maxSemver = evaluateVersions(versions, version2); @@ -31378,6 +28170,7 @@ var githubToken = getInput("github-token"); var args = getInput("args"); var src = getInput("src"); var versionFile = getInput("version-file"); +var manifestFile = getInput("manifest-file"); // src/utils/platforms.ts function getArch() { @@ -31486,7 +28279,7 @@ function skipVoid(str, ptr, banNewLines, banComments) { } return ptr; } -function skipUntil(str, ptr, sep3, end, banNewLines = false) { +function skipUntil(str, ptr, sep2, end, banNewLines = false) { if (!end) { ptr = indexOfNewline(str, ptr); return ptr < 0 ? 
str.length : ptr; @@ -31495,7 +28288,7 @@ function skipUntil(str, ptr, sep3, end, banNewLines = false) { let c = str[i]; if (c === "#") { i = indexOfNewline(str, i); - } else if (c === sep3) { + } else if (c === sep2) { return i + 1; } else if (c === end || banNewLines && (c === "\n" || c === "\r" && str[i + 1] === "\n")) { return i; @@ -32031,7 +28824,7 @@ function peekTable(key, table, meta, type) { } return [k, t, state.c]; } -function parse2(toml, { maxDepth = 1e3, integersAsBigInt } = {}) { +function parse(toml, { maxDepth = 1e3, integersAsBigInt } = {}) { let res = {}; let meta = {}; let tbl = res; @@ -32129,7 +28922,7 @@ function getRuffVersionFromAllDependencies(allDependencies) { return allDependencies.map((dep) => findRuffVersionInSpec(dep)).find((version2) => version2 !== void 0); } function parsePyproject(pyprojectContent) { - const pyproject = parse2(pyprojectContent); + const pyproject = parse(pyprojectContent); const dependencies = pyproject?.project?.dependencies || []; const optionalDependencies = Object.values( pyproject?.project?.["optional-dependencies"] || {} @@ -32229,6 +29022,7 @@ async function run() { } async function setupRuff(platform2, arch3, checkSum2, githubToken2) { const resolvedVersion = await determineVersion(); + const manifestUrl = manifestFile || void 0; if (semver4.lt(resolvedVersion, "v0.0.247")) { throw Error( "This action does not support ruff versions older than 0.0.247" @@ -32247,7 +29041,8 @@ async function setupRuff(platform2, arch3, checkSum2, githubToken2) { arch3, resolvedVersion, checkSum2, - githubToken2 + githubToken2, + manifestUrl ); return { ruffDir: downloadVersionResult.cachedToolDir, @@ -32259,7 +29054,7 @@ async function determineVersion() { throw Error("It is not allowed to specify both version and version-file"); } if (version !== "") { - return await resolveVersion(version, githubToken); + return await resolveVersion(version, manifestFile || void 0); } if (versionFile !== "") { const versionFromPyproject2 
= getRuffVersionFromRequirementsFile(versionFile); @@ -32268,7 +29063,10 @@ async function determineVersion() { `Could not parse version from ${versionFile}. Using latest version.` ); } - return await resolveVersion(versionFromPyproject2 || "latest", githubToken); + return await resolveVersion( + versionFromPyproject2 || "latest", + manifestFile || void 0 + ); } const pyProjectPath = findPyprojectToml( src, @@ -32276,7 +29074,7 @@ async function determineVersion() { ); if (!pyProjectPath) { info(`Could not find pyproject.toml. Using latest version.`); - return await resolveVersion("latest", githubToken); + return await resolveVersion("latest", manifestFile || void 0); } const versionFromPyproject = getRuffVersionFromRequirementsFile(pyProjectPath); if (versionFromPyproject === void 0) { @@ -32284,7 +29082,10 @@ async function determineVersion() { `Could not parse version from ${pyProjectPath}. Using latest version.` ); } - return await resolveVersion(versionFromPyproject || "latest", githubToken); + return await resolveVersion( + versionFromPyproject || "latest", + manifestFile || void 0 + ); } function addRuffToPath(cachedPath) { addPath(cachedPath); diff --git a/package-lock.json b/package-lock.json index 703e8bc..37e2ec1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -16,7 +16,8 @@ "@octokit/plugin-paginate-rest": "^13.1.1", "@octokit/plugin-rest-endpoint-methods": "^16.0.0", "@renovatebot/pep440": "^4.2.1", - "smol-toml": "^1.6.0" + "smol-toml": "^1.6.0", + "undici": "^6.24.1" }, "devDependencies": { "@biomejs/biome": "^2.4.7", diff --git a/package.json b/package.json index 1368edf..3425847 100644 --- a/package.json +++ b/package.json @@ -35,7 +35,8 @@ "@octokit/plugin-paginate-rest": "^13.1.1", "@octokit/plugin-rest-endpoint-methods": "^16.0.0", "@renovatebot/pep440": "^4.2.1", - "smol-toml": "^1.6.0" + "smol-toml": "^1.6.0", + "undici": "^6.24.1" }, "devDependencies": { "@biomejs/biome": "^2.4.7", diff --git a/src/download/download-version.ts 
b/src/download/download-version.ts index ecd269d..283cef7 100644 --- a/src/download/download-version.ts +++ b/src/download/download-version.ts @@ -2,16 +2,17 @@ import { promises as fs } from "node:fs"; import * as path from "node:path"; import * as core from "@actions/core"; import * as tc from "@actions/tool-cache"; -import { Octokit } from "@octokit/core"; -import { paginateRest } from "@octokit/plugin-paginate-rest"; -import { restEndpointMethods } from "@octokit/plugin-rest-endpoint-methods"; import * as pep440 from "@renovatebot/pep440"; import * as semver from "semver"; -import { OWNER, REPO, TOOL_CACHE_NAME } from "../utils/constants"; +import { + ASTRAL_MIRROR_PREFIX, + GITHUB_RELEASES_PREFIX, + TOOL_CACHE_NAME, + VERSIONS_MANIFEST_URL, +} from "../utils/constants"; import type { Architecture, Platform } from "../utils/platforms"; import { validateChecksum } from "./checksum/checksum"; - -const PaginatingOctokit = Octokit.plugin(paginateRest, restEndpointMethods); +import { getAllVersions, getArtifact, getLatestVersion } from "./manifest"; export function tryGetFromToolCache( arch: Architecture, @@ -32,31 +33,46 @@ export async function downloadVersion( platform: Platform, arch: Architecture, version: string, - checkSum: string | undefined, + checksum: string | undefined, githubToken: string, + manifestUrl?: string, ): Promise<{ version: string; cachedToolDir: string }> { - const artifact = `ruff-${arch}-${platform}`; - let extension = ".tar.gz"; - if (platform === "pc-windows-msvc") { - extension = ".zip"; - } - const downloadUrl = constructDownloadUrl(version, platform, arch); - core.debug(`Downloading ruff from "${downloadUrl}" ...`); + const artifact = await getArtifact(version, arch, platform, manifestUrl); - const downloadPath = await tc.downloadTool( - downloadUrl, - undefined, - githubToken, + if (!artifact) { + throw new Error( + getMissingArtifactMessage(version, arch, platform, manifestUrl), + ); + } + + // For the default astral-sh/versions 
source, checksum validation relies on + // user input or the built-in KNOWN_CHECKSUMS table, not manifest sha256 values. + const resolvedChecksum = + manifestUrl === undefined + ? checksum + : resolveChecksum(checksum, artifact.checksum); + + const downloadPath = await downloadArtifact( + artifact.downloadUrl, + platform, + arch, + version, + getDownloadToken(artifact.downloadUrl, githubToken), + ); + await validateChecksum( + resolvedChecksum, + downloadPath, + arch, + platform, + version, ); - core.debug(`Downloaded ruff to "${downloadPath}"`); - await validateChecksum(checkSum, downloadPath, arch, platform, version); const extractedDir = await extractDownloadedArtifact( version, downloadPath, - extension, + getExtension(platform), platform, - artifact, + `ruff-${arch}-${platform}`, ); const cachedToolDir = await tc.cacheDir( @@ -65,25 +81,60 @@ export async function downloadVersion( version, arch, ); - return { cachedToolDir, version: version }; + return { cachedToolDir, version }; } -function constructDownloadUrl( - version: string, +export function rewriteToMirror(url: string): string | undefined { + if (!url.startsWith(GITHUB_RELEASES_PREFIX)) { + return undefined; + } + + return ASTRAL_MIRROR_PREFIX + url.slice(GITHUB_RELEASES_PREFIX.length); +} + +async function downloadArtifact( + downloadUrl: string, platform: Platform, arch: Architecture, -): string { - const artifactVersionSuffix = - semver.lte(version, "v0.4.10") && semver.gte(version, "v0.1.8") - ? `-${version}` - : ""; - const artifact = `ruff${artifactVersionSuffix}-${arch}-${platform}`; - let extension = ".tar.gz"; - if (platform === "pc-windows-msvc") { - extension = ".zip"; + version: string, + githubToken: string | undefined, +): Promise { + const mirrorUrl = rewriteToMirror(downloadUrl); + const resolvedDownloadUrl = mirrorUrl ?? downloadUrl; + + try { + return await downloadFile( + resolvedDownloadUrl, + mirrorUrl !== undefined ? 
undefined : githubToken, + ); + } catch (err) { + if (mirrorUrl === undefined) { + throw err; + } + + core.warning( + `Failed to download from mirror, falling back to GitHub Releases: ${(err as Error).message}`, + ); + + return await downloadFile( + constructDownloadUrl(version, platform, arch), + githubToken, + ); } - const versionPrefix = semver.lte(version, "v0.4.10") ? "v" : ""; - return `https://github.com/${OWNER}/${REPO}/releases/download/${versionPrefix}${version}/${artifact}${extension}`; +} + +async function downloadFile( + downloadUrl: string, + githubToken: string | undefined, +): Promise { + core.info(`Downloading ruff from "${downloadUrl}" ...`); + const downloadPath = await tc.downloadTool( + downloadUrl, + undefined, + githubToken, + ); + core.debug(`Downloaded ruff to "${downloadPath}"`); + return downloadPath; } async function extractDownloadedArtifact( @@ -98,7 +149,7 @@ async function extractDownloadedArtifact( const fullPathWithExtension = `${downloadPath}${extension}`; await fs.copyFile(downloadPath, fullPathWithExtension); ruffDir = await tc.extractZip(fullPathWithExtension); - // On windows extracting the zip does not create an intermediate directory + // On windows extracting the zip does not create an intermediate directory. } else { ruffDir = await tc.extractTar(downloadPath); if (semver.gte(version, "v0.5.0")) { @@ -113,18 +164,21 @@ async function extractDownloadedArtifact( export async function resolveVersion( versionInput: string, - githubToken: string, + manifestUrl?: string, ): Promise { core.debug(`Resolving ${versionInput}...`); + const version = versionInput === "latest" - ? await getLatestVersion(githubToken) + ? 
+      await getLatestVersion(manifestUrl)
+    : versionInput;
+
   if (tc.isExplicitVersion(version)) {
     core.debug(`Version ${version} is an explicit version.`);
     return version;
   }
-  const availableVersions = await getAvailableVersions(githubToken);
+
+  const availableVersions = await getAvailableVersions(manifestUrl);
   const resolvedVersion = maxSatisfying(availableVersions, version);
   if (resolvedVersion === undefined) {
     throw new Error(`No version found for ${version}`);
@@ -133,77 +187,63 @@ export async function resolveVersion(
   return resolvedVersion;
 }
 
-async function getAvailableVersions(githubToken: string): Promise<string[]> {
-  try {
-    const octokit = new PaginatingOctokit({
-      auth: githubToken,
-    });
-    return await getReleaseTagNames(octokit);
-  } catch (err) {
-    if ((err as Error).message.includes("Bad credentials")) {
-      core.info(
-        "No (valid) GitHub token provided. Falling back to anonymous. Requests might be rate limited.",
-      );
-      const octokit = new PaginatingOctokit();
-      return await getReleaseTagNames(octokit);
-    }
-    throw err;
-  }
+async function getAvailableVersions(manifestUrl?: string): Promise<string[]> {
+  return await getAllVersions(manifestUrl);
 }
 
-async function getReleaseTagNames(
-  octokit: InstanceType<typeof PaginatingOctokit>,
-): Promise<string[]> {
-  const response = await octokit.paginate(octokit.rest.repos.listReleases, {
-    owner: OWNER,
-    repo: REPO,
-  });
-  const releaseTagNames = response.map((release) => release.tag_name);
-  if (releaseTagNames.length === 0) {
-    throw Error(
-      "Github API request failed while getting releases. Check the GitHub status page for outages. Try again later.",
-    );
+function getMissingArtifactMessage(
+  version: string,
+  arch: Architecture,
+  platform: Platform,
+  manifestUrl?: string,
+): string {
+  if (manifestUrl === undefined) {
+    return `Could not find artifact for version ${version}, arch ${arch}, platform ${platform} in ${VERSIONS_MANIFEST_URL} .`;
   }
-  return response.map((release) => release.tag_name);
+
+  return `manifest-file does not contain version ${version}, arch ${arch}, platform ${platform}.`;
 }
 
-async function getLatestVersion(githubToken: string) {
-  const octokit = new PaginatingOctokit({
-    auth: githubToken,
-  });
-
-  let latestRelease: { tag_name: string } | undefined;
-  try {
-    latestRelease = await getLatestRelease(octokit);
-  } catch (err) {
-    if ((err as Error).message.includes("Bad credentials")) {
-      core.info(
-        "No (valid) GitHub token provided. Falling back to anonymous. Requests might be rate limited.",
-      );
-      const octokit = new PaginatingOctokit();
-      latestRelease = await getLatestRelease(octokit);
-    } else {
-      core.error(
-        "Github API request failed while getting latest release. Check the GitHub status page for outages. Try again later.",
-      );
-      throw err;
-    }
-  }
-
-  if (!latestRelease) {
-    throw new Error("Could not determine latest release.");
-  }
-  return latestRelease.tag_name;
+function resolveChecksum(
+  checksum: string | undefined,
+  manifestChecksum: string,
+): string {
+  return checksum !== undefined && checksum !== ""
+    ? checksum
+    : manifestChecksum;
 }
 
-async function getLatestRelease(
-  octokit: InstanceType<typeof PaginatingOctokit>,
-) {
-  const { data: latestRelease } = await octokit.rest.repos.getLatestRelease({
-    owner: OWNER,
-    repo: REPO,
-  });
-  return latestRelease;
+function getDownloadToken(
+  downloadUrl: string,
+  githubToken: string,
+): string | undefined {
+  return downloadUrl.startsWith(GITHUB_RELEASES_PREFIX)
+    ? githubToken
+    : undefined;
+}
+
+function constructDownloadUrl(
+  version: string,
+  platform: Platform,
+  arch: Architecture,
+): string {
+  const normalizedVersion = stripVersionPrefix(version);
+  const artifactVersionSuffix =
+    semver.lte(version, "v0.4.10") && semver.gte(version, "v0.1.8")
+      ? `-${normalizedVersion}`
+      : "";
+  const artifact = `ruff${artifactVersionSuffix}-${arch}-${platform}`;
+  const versionPrefix = semver.lte(version, "v0.4.10") ? "v" : "";
+
+  return `${GITHUB_RELEASES_PREFIX}${versionPrefix}${normalizedVersion}/${artifact}${getExtension(platform)}`;
+}
+
+function stripVersionPrefix(version: string): string {
+  return version.startsWith("v") ? version.slice(1) : version;
+}
+
+function getExtension(platform: Platform): string {
+  return platform === "pc-windows-msvc" ? ".zip" : ".tar.gz";
+}
 
 function maxSatisfying(
diff --git a/src/download/manifest.ts b/src/download/manifest.ts
new file mode 100644
index 0000000..e79569a
--- /dev/null
+++ b/src/download/manifest.ts
@@ -0,0 +1,243 @@
+import * as core from "@actions/core";
+import { VERSIONS_MANIFEST_URL } from "../utils/constants";
+import { fetch } from "../utils/fetch";
+import { selectDefaultVariant } from "./variant-selection";
+
+export interface ManifestArtifact {
+  platform: string;
+  variant?: string;
+  url: string;
+  archive_format: string;
+  sha256: string;
+}
+
+export interface ManifestVersion {
+  version: string;
+  artifacts: ManifestArtifact[];
+}
+
+export interface ArtifactResult {
+  archiveFormat: string;
+  checksum: string;
+  downloadUrl: string;
+}
+
+const cachedManifestData = new Map<string, ManifestVersion[]>();
+
+export async function fetchManifest(
+  manifestUrl: string = VERSIONS_MANIFEST_URL,
+): Promise<ManifestVersion[]> {
+  const cachedVersions = cachedManifestData.get(manifestUrl);
+  if (cachedVersions !== undefined) {
+    core.debug(`Using cached manifest data from ${manifestUrl}`);
+    return cachedVersions;
+  }
+
+  core.info(`Fetching manifest data from ${manifestUrl} ...`);
+  const response = await fetch(manifestUrl, {});
+  if (!response.ok) {
+    throw new Error(
+      `Failed to fetch manifest data: ${response.status} ${response.statusText}`,
+    );
+  }
+
+  const body = await response.text();
+  const versions = parseManifest(body, manifestUrl);
+  cachedManifestData.set(manifestUrl, versions);
+  return versions;
+}
+
+export function parseManifest(
+  data: string,
+  sourceDescription: string,
+): ManifestVersion[] {
+  const trimmed = data.trim();
+  if (trimmed === "") {
+    throw new Error(`Manifest at ${sourceDescription} is empty.`);
+  }
+
+  if (trimmed.startsWith("[")) {
+    throw new Error(
+      `Legacy JSON array manifests are no longer supported in ${sourceDescription}. Use the astral-sh/versions manifest format instead.`,
+    );
+  }
+
+  const versions: ManifestVersion[] = [];
+
+  for (const [index, line] of data.split("\n").entries()) {
+    const record = line.trim();
+    if (record === "") {
+      continue;
+    }
+
+    let parsed: unknown;
+    try {
+      parsed = JSON.parse(record);
+    } catch (error) {
+      throw new Error(
+        `Failed to parse manifest data from ${sourceDescription} at line ${index + 1}: ${(error as Error).message}`,
+      );
+    }
+
+    if (!isManifestVersion(parsed)) {
+      throw new Error(
+        `Invalid manifest record in ${sourceDescription} at line ${index + 1}.`,
+      );
+    }
+
+    versions.push(parsed);
+  }
+
+  if (versions.length === 0) {
+    throw new Error(`No manifest data found in ${sourceDescription}.`);
+  }
+
+  return versions;
+}
+
+export async function getLatestVersion(
+  manifestUrl: string = VERSIONS_MANIFEST_URL,
+): Promise<string> {
+  const latestVersion = (await fetchManifest(manifestUrl))[0]?.version;
+
+  if (latestVersion === undefined) {
+    throw new Error("No versions found in manifest data");
+  }
+
+  core.debug(`Latest version from manifest: ${latestVersion}`);
+  return latestVersion;
+}
+
+export async function getAllVersions(
+  manifestUrl: string = VERSIONS_MANIFEST_URL,
+): Promise<string[]> {
+  core.info(
+    `Getting available versions from ${manifestSource(manifestUrl)} ...`,
+  );
+  const versions = await fetchManifest(manifestUrl);
+  return versions.map((versionData) => versionData.version);
+}
+
+export async function getArtifact(
+  version: string,
+  arch: string,
+  platform: string,
+  manifestUrl: string = VERSIONS_MANIFEST_URL,
+): Promise<ArtifactResult | undefined> {
+  const versions = await fetchManifest(manifestUrl);
+  const versionData = versions.find((candidate) =>
+    matchesManifestVersion(candidate.version, version),
+  );
+  if (!versionData) {
+    core.debug(`Version ${version} not found in manifest ${manifestUrl}`);
+    return undefined;
+  }
+
+  const targetPlatforms = getTargetPlatforms(
+    versionData.version,
+    arch,
+    platform,
+  );
+  const matchingArtifacts = versionData.artifacts.filter((candidate) =>
+    targetPlatforms.includes(candidate.platform),
+  );
+
+  if (matchingArtifacts.length === 0) {
+    core.debug(
+      `Artifact for ${targetPlatforms.join(" or ")} not found in version ${version}. Available platforms: ${versionData.artifacts
+        .map((candidate) => candidate.platform)
+        .join(", ")}`,
+    );
+    return undefined;
+  }
+
+  const artifact = selectDefaultVariant(
+    matchingArtifacts,
+    `Multiple artifacts found for ${targetPlatforms.join(" or ")} in version ${version}`,
+  );
+
+  return {
+    archiveFormat: artifact.archive_format,
+    checksum: artifact.sha256,
+    downloadUrl: artifact.url,
+  };
+}
+
+export function clearManifestCache(manifestUrl?: string): void {
+  if (manifestUrl === undefined) {
+    cachedManifestData.clear();
+    return;
+  }
+
+  cachedManifestData.delete(manifestUrl);
+}
+
+function manifestSource(manifestUrl: string): string {
+  if (manifestUrl === VERSIONS_MANIFEST_URL) {
+    return VERSIONS_MANIFEST_URL;
+  }
+
+  return `manifest-file ${manifestUrl}`;
+}
+
+function matchesManifestVersion(
+  manifestVersion: string,
+  requestedVersion: string,
+): boolean {
+  return (
+    manifestVersion === requestedVersion ||
+    manifestVersion === withVersionPrefix(requestedVersion)
+  );
+}
+
+function getTargetPlatforms(
+  manifestVersion: string,
+  arch: string,
+  platform: string,
+): string[] {
+  const targetPlatform = `${arch}-${platform}`;
+  const versionPrefixedTargetPlatform = `${stripVersionPrefix(manifestVersion)}-${targetPlatform}`;
+
+  return [targetPlatform, versionPrefixedTargetPlatform];
+}
+
+function withVersionPrefix(version: string): string {
+  return version.startsWith("v") ? version : `v${version}`;
+}
+
+function stripVersionPrefix(version: string): string {
+  return version.startsWith("v") ? version.slice(1) : version;
+}
+
+function isManifestVersion(value: unknown): value is ManifestVersion {
+  if (!isRecord(value)) {
+    return false;
+  }
+
+  if (typeof value.version !== "string" || !Array.isArray(value.artifacts)) {
+    return false;
+  }
+
+  return value.artifacts.every(isManifestArtifact);
+}
+
+function isManifestArtifact(value: unknown): value is ManifestArtifact {
+  if (!isRecord(value)) {
+    return false;
+  }
+
+  const variantIsValid =
+    typeof value.variant === "string" || value.variant === undefined;
+
+  return (
+    typeof value.archive_format === "string" &&
+    typeof value.platform === "string" &&
+    typeof value.sha256 === "string" &&
+    typeof value.url === "string" &&
+    variantIsValid
+  );
+}
+
+function isRecord(value: unknown): value is Record<string, unknown> {
+  return typeof value === "object" && value !== null;
+}
diff --git a/src/download/variant-selection.ts b/src/download/variant-selection.ts
new file mode 100644
index 0000000..622fee2
--- /dev/null
+++ b/src/download/variant-selection.ts
@@ -0,0 +1,39 @@
+interface VariantAwareEntry {
+  variant?: string;
+}
+
+export function selectDefaultVariant<T extends VariantAwareEntry>(
+  entries: T[],
+  duplicateEntryDescription: string,
+): T {
+  const firstEntry = entries[0];
+  if (firstEntry === undefined) {
+    throw new Error("selectDefaultVariant requires at least one candidate.");
+  }
+
+  if (entries.length === 1) {
+    return firstEntry;
+  }
+
+  const defaultEntries = entries.filter((entry) =>
+    isDefaultVariant(entry.variant),
+  );
+  if (defaultEntries.length === 1) {
+    return defaultEntries[0];
+  }
+
+  throw new Error(
+    `${duplicateEntryDescription} with variants ${formatVariants(entries)}. ruff-action currently requires a single default variant for duplicate platform entries.`,
+  );
+}
+
+function isDefaultVariant(variant: string | undefined): boolean {
+  return variant === undefined || variant === "default";
+}
+
+function formatVariants<T extends VariantAwareEntry>(entries: T[]): string {
+  return entries
+    .map((entry) => entry.variant ?? "default")
+    .sort((left, right) => left.localeCompare(right))
+    .join(", ");
+}
diff --git a/src/ruff-action.ts b/src/ruff-action.ts
index 4e659d3..68d873a 100644
--- a/src/ruff-action.ts
+++ b/src/ruff-action.ts
@@ -11,6 +11,7 @@ import {
   args,
   checkSum,
   githubToken,
+  manifestFile,
   src,
   version,
   versionFile as versionFileInput,
@@ -62,6 +63,7 @@ async function setupRuff(
   githubToken: string,
 ): Promise<{ ruffDir: string; version: string }> {
   const resolvedVersion = await determineVersion();
+  const manifestUrl = manifestFile || undefined;
   if (semver.lt(resolvedVersion, "v0.0.247")) {
     throw Error(
       "This action does not support ruff versions older than 0.0.247",
@@ -82,6 +84,7 @@ async function setupRuff(
     resolvedVersion,
     checkSum,
     githubToken,
+    manifestUrl,
   );
 
   return {
@@ -95,7 +98,7 @@ async function determineVersion(): Promise<string> {
     throw Error("It is not allowed to specify both version and version-file");
   }
   if (version !== "") {
-    return await resolveVersion(version, githubToken);
+    return await resolveVersion(version, manifestFile || undefined);
   }
   if (versionFileInput !== "") {
     const versionFromPyproject =
@@ -105,7 +108,7 @@ async function determineVersion(): Promise<string> {
       `Could not parse version from ${versionFileInput}. Using latest version.`,
     );
   }
-  return await resolveVersion(versionFromPyproject || "latest", githubToken);
+  return await resolveVersion(
+    versionFromPyproject || "latest",
+    manifestFile || undefined,
+  );
 }
   const pyProjectPath = findPyprojectToml(
     src,
@@ -113,7 +119,7 @@ async function determineVersion(): Promise<string> {
   );
   if (!pyProjectPath) {
     core.info(`Could not find pyproject.toml. Using latest version.`);
-    return await resolveVersion("latest", githubToken);
+    return await resolveVersion("latest", manifestFile || undefined);
   }
   const versionFromPyproject =
     getRuffVersionFromRequirementsFile(pyProjectPath);
@@ -122,7 +128,7 @@
     `Could not parse version from ${pyProjectPath}. Using latest version.`,
   );
 }
-  return await resolveVersion(versionFromPyproject || "latest", githubToken);
+  return await resolveVersion(
+    versionFromPyproject || "latest",
+    manifestFile || undefined,
+  );
 }
 
 function addRuffToPath(cachedPath: string): void {
diff --git a/src/utils/constants.ts b/src/utils/constants.ts
index 35e0414..bf0f794 100644
--- a/src/utils/constants.ts
+++ b/src/utils/constants.ts
@@ -1,3 +1,9 @@
 export const REPO = "ruff";
 export const OWNER = "astral-sh";
 export const TOOL_CACHE_NAME = "ruff";
+export const VERSIONS_MANIFEST_URL =
+  "https://raw.githubusercontent.com/astral-sh/versions/main/v1/ruff.ndjson";
+export const GITHUB_RELEASES_PREFIX =
+  "https://github.com/astral-sh/ruff/releases/download/";
+export const ASTRAL_MIRROR_PREFIX =
+  "https://releases.astral.sh/github/ruff/releases/download/";
diff --git a/src/utils/fetch.ts b/src/utils/fetch.ts
new file mode 100644
index 0000000..0830dfd
--- /dev/null
+++ b/src/utils/fetch.ts
@@ -0,0 +1,21 @@
+import { ProxyAgent, type RequestInit, fetch as undiciFetch } from "undici";
+
+export function getProxyAgent() {
+  const httpProxy = process.env.HTTP_PROXY || process.env.http_proxy;
+  if (httpProxy) {
+    return new ProxyAgent(httpProxy);
+  }
+
+  const httpsProxy = process.env.HTTPS_PROXY || process.env.https_proxy;
+  if (httpsProxy) {
+    return new ProxyAgent(httpsProxy);
+  }
+
+  return undefined;
+}
+
+export const fetch = async (url: string, opts: RequestInit) =>
+  await undiciFetch(url, {
+    dispatcher: getProxyAgent(),
+    ...opts,
+  });
diff --git a/src/utils/inputs.ts b/src/utils/inputs.ts
index 6b59b93..ec4c1fa 100644
--- a/src/utils/inputs.ts
+++ b/src/utils/inputs.ts
@@ -6,3 +6,4 @@ export const githubToken = core.getInput("github-token");
 export const args = core.getInput("args");
 export const src = core.getInput("src");
 export const versionFile = core.getInput("version-file");
+export const manifestFile = core.getInput("manifest-file");