initial commit of actions
This commit is contained in:
commit
949ece5785
44660 changed files with 12034344 additions and 0 deletions
26
dawidd6/action-download-artifact-v3/.github/dependabot.yml
vendored
Normal file
26
dawidd6/action-download-artifact-v3/.github/dependabot.yml
vendored
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: npm
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
time: "04:00"
|
||||
pull-request-branch-name:
|
||||
separator: "-"
|
||||
open-pull-requests-limit: 10
|
||||
ignore:
|
||||
- dependency-name: filesize
|
||||
versions:
|
||||
- 6.2.6
|
||||
- 6.3.0
|
||||
- dependency-name: adm-zip
|
||||
versions:
|
||||
- 0.5.3
|
||||
- package-ecosystem: github-actions
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
time: "04:00"
|
||||
pull-request-branch-name:
|
||||
separator: "-"
|
||||
open-pull-requests-limit: 10
|
||||
239
dawidd6/action-download-artifact-v3/.github/workflows/download.yml
vendored
Normal file
239
dawidd6/action-download-artifact-v3/.github/workflows/download.yml
vendored
Normal file
|
|
@ -0,0 +1,239 @@
|
|||
name: Download
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
wait:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Wait
|
||||
run: sleep 60
|
||||
download-latest:
|
||||
runs-on: ubuntu-latest
|
||||
needs: wait
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Download
|
||||
uses: ./
|
||||
with:
|
||||
workflow: upload.yml
|
||||
name: artifact
|
||||
path: artifact
|
||||
- name: Test
|
||||
run: cat artifact/sha | grep $GITHUB_SHA
|
||||
download-search-workflow:
|
||||
runs-on: ubuntu-latest
|
||||
needs: wait
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Download
|
||||
uses: ./
|
||||
with:
|
||||
name: artifact
|
||||
path: artifact
|
||||
workflow_search: true
|
||||
- name: Test
|
||||
run: cat artifact/sha | grep $GITHUB_SHA
|
||||
download-branch:
|
||||
runs-on: ubuntu-latest
|
||||
needs: wait
|
||||
if: github.ref == 'refs/heads/master'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Download
|
||||
uses: ./
|
||||
with:
|
||||
workflow: upload.yml
|
||||
name: artifact
|
||||
path: artifact
|
||||
branch: master
|
||||
- name: Test
|
||||
run: cat artifact/sha | grep $GITHUB_SHA
|
||||
download-pr:
|
||||
runs-on: ubuntu-latest
|
||||
needs: wait
|
||||
if: github.ref != 'refs/heads/master'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Download
|
||||
uses: ./
|
||||
with:
|
||||
workflow: upload.yml
|
||||
name: artifact
|
||||
path: artifact
|
||||
pr: ${{github.event.pull_request.number}}
|
||||
- name: Test
|
||||
run: cat artifact/sha | grep $GITHUB_SHA
|
||||
download-commit:
|
||||
runs-on: ubuntu-latest
|
||||
needs: wait
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Download
|
||||
uses: ./
|
||||
with:
|
||||
workflow: upload.yml
|
||||
name: artifact
|
||||
path: artifact
|
||||
commit: ${{ github.event.workflow_run.head_sha }}
|
||||
- name: Test
|
||||
run: cat artifact/sha | grep $GITHUB_SHA
|
||||
download-multiple:
|
||||
runs-on: ubuntu-latest
|
||||
needs: wait
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Download
|
||||
uses: ./
|
||||
with:
|
||||
workflow: upload.yml
|
||||
- name: Test
|
||||
run: |
|
||||
cat artifact/sha | grep $GITHUB_SHA
|
||||
cat artifact1/sha1 | grep $GITHUB_SHA
|
||||
cat artifact2/sha2 | grep $GITHUB_SHA
|
||||
download-regexp:
|
||||
runs-on: ubuntu-latest
|
||||
needs: wait
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Download
|
||||
uses: ./
|
||||
with:
|
||||
workflow: upload.yml
|
||||
name: artifact.
|
||||
name_is_regexp: true
|
||||
- name: Test
|
||||
run: |
|
||||
cat artifact1/sha1 | grep $GITHUB_SHA
|
||||
cat artifact2/sha2 | grep $GITHUB_SHA
|
||||
! test -d artifact/artifact
|
||||
! test -f artifact.zip
|
||||
download-empty-conclusion:
|
||||
runs-on: ubuntu-latest
|
||||
needs: wait
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Download
|
||||
uses: ./
|
||||
with:
|
||||
workflow: upload.yml
|
||||
name: artifact
|
||||
path: artifact
|
||||
workflow_conclusion:
|
||||
- name: Test
|
||||
run: cat artifact/sha | grep $GITHUB_SHA
|
||||
download-skip-unpack:
|
||||
runs-on: ubuntu-latest
|
||||
needs: wait
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Download
|
||||
uses: ./
|
||||
with:
|
||||
workflow: upload.yml
|
||||
name: artifact
|
||||
path: artifact
|
||||
skip_unpack: true
|
||||
- name: Test
|
||||
run: |
|
||||
test -d artifact
|
||||
test -f artifact/artifact.zip
|
||||
! test -d artifact/artifact
|
||||
! test -f artifact.zip
|
||||
unzip -l artifact/artifact.zip
|
||||
download-dry-run-exists:
|
||||
runs-on: ubuntu-latest
|
||||
needs: wait
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Download
|
||||
id: download
|
||||
uses: ./
|
||||
with:
|
||||
workflow: upload.yml
|
||||
name: artifact
|
||||
path: artifact
|
||||
dry_run: true
|
||||
- name: Test
|
||||
run: test ${{ steps.download.outputs.dry_run }} == true
|
||||
download-dry-run-not-exists:
|
||||
runs-on: ubuntu-latest
|
||||
needs: wait
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Download
|
||||
id: download
|
||||
uses: ./
|
||||
with:
|
||||
workflow: upload.yml
|
||||
name: not-artifact
|
||||
path: artifact
|
||||
dry_run: true
|
||||
- name: Test
|
||||
run: test ${{ steps.download.outputs.dry_run }} == false
|
||||
download-with-check-artifacts:
|
||||
runs-on: ubuntu-latest
|
||||
needs: wait
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Download
|
||||
uses: ./
|
||||
with:
|
||||
workflow: upload.yml
|
||||
name: artifact
|
||||
path: artifact
|
||||
check_artifacts: true
|
||||
- name: Test
|
||||
run: cat artifact/sha | grep $GITHUB_SHA
|
||||
download-with-search-artifacts:
|
||||
runs-on: ubuntu-latest
|
||||
needs: wait
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Download
|
||||
uses: ./
|
||||
with:
|
||||
workflow: upload.yml
|
||||
name: artifact
|
||||
path: artifact
|
||||
search_artifacts: true
|
||||
- name: Test
|
||||
run: cat artifact/sha | grep $GITHUB_SHA
|
||||
download-regexp-with-search-artifacts:
|
||||
runs-on: ubuntu-latest
|
||||
needs: wait
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Download
|
||||
uses: ./
|
||||
with:
|
||||
workflow: upload.yml
|
||||
name: artifact.
|
||||
name_is_regexp: true
|
||||
path: artifact
|
||||
search_artifacts: true
|
||||
- name: Test
|
||||
run: |
|
||||
cat artifact/artifact1/sha1 | grep $GITHUB_SHA
|
||||
cat artifact/artifact2/sha2 | grep $GITHUB_SHA
|
||||
! test -d artifact/artifact/artifact
|
||||
! test -f artifact/artifact.zip
|
||||
39
dawidd6/action-download-artifact-v3/.github/workflows/upload.yml
vendored
Normal file
39
dawidd6/action-download-artifact-v3/.github/workflows/upload.yml
vendored
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
name: Upload
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
upload:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Dump
|
||||
run: |
|
||||
mkdir artifact
|
||||
echo $GITHUB_SHA > artifact/sha
|
||||
- name: Upload
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifact
|
||||
path: artifact
|
||||
upload-multiple:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Dump
|
||||
run: |
|
||||
mkdir artifact1 artifact2
|
||||
echo $GITHUB_SHA > artifact1/sha1
|
||||
echo $GITHUB_SHA > artifact2/sha2
|
||||
- name: Upload first
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifact1
|
||||
path: artifact1
|
||||
- name: Upload second
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifact2
|
||||
path: artifact2
|
||||
21
dawidd6/action-download-artifact-v3/LICENSE
Normal file
21
dawidd6/action-download-artifact-v3/LICENSE
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2020 Dawid Dziurla
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
88
dawidd6/action-download-artifact-v3/README.md
Normal file
88
dawidd6/action-download-artifact-v3/README.md
Normal file
|
|
@ -0,0 +1,88 @@
|
|||
# Download workflow artifact GitHub Action
|
||||
|
||||
An action that downloads and extracts uploaded artifacts associated with a given workflow and commit or other criteria.
|
||||
|
||||
Let's suppose you have a workflow with a job in it that at the end uploads an artifact using `actions/upload-artifact` action and you want to download this artifact in another workflow that is run after the first one. Official `actions/download-artifact` does not allow this. That's why I decided to create this action. By knowing only the workflow name and commit SHA or other details, you can download the previously uploaded artifact from different workflow associated with that commit or other criteria and use it.
|
||||
|
||||
## Usage
|
||||
|
||||
> If `commit` or `pr` or `branch` or `run_id` or `workflow_conclusion` is not specified then the artifact from the most recent successfully completed workflow run will be downloaded.
|
||||
|
||||
**Do not specify `pr`, `commit`, `branch`, `run_id` together or `workflow_conclusion` and `run_id` together. Pick just one of each or none.**
|
||||
|
||||
```yaml
|
||||
- name: Download artifact
|
||||
id: download-artifact
|
||||
uses: dawidd6/action-download-artifact@v3
|
||||
with:
|
||||
# Optional, GitHub token, a Personal Access Token with `public_repo` scope if needed
|
||||
# Required, if the artifact is from a different repo
|
||||
# Required, if the repo is private a Personal Access Token with `repo` scope is needed or GitHub token in a job where the permissions `action` scope set to `read`
|
||||
github_token: ${{secrets.GITHUB_TOKEN}}
|
||||
# Optional, workflow file name or ID
|
||||
# If not specified, will be inferred from run_id (if run_id is specified), or will be the current workflow
|
||||
workflow: workflow_name.yml
|
||||
# If no workflow is set and workflow_search set to true, then the most recent workflow matching
|
||||
# all other criteria will be looked up instead of using the current workflow
|
||||
workflow_search: false
|
||||
# Optional, the status or conclusion of a completed workflow to search for
|
||||
# Can be one of a workflow conclusion:
|
||||
# "failure", "success", "neutral", "cancelled", "skipped", "timed_out", "action_required"
|
||||
# Or a workflow status:
|
||||
# "completed", "in_progress", "queued"
|
||||
# Use the empty string ("") to ignore status or conclusion in the search
|
||||
workflow_conclusion: success
|
||||
# Optional, will get head commit SHA
|
||||
pr: ${{github.event.pull_request.number}}
|
||||
# Optional, no need to specify if PR is
|
||||
commit: ${{github.event.pull_request.head.sha}}
|
||||
# Optional, will use the specified branch. Defaults to all branches
|
||||
branch: master
|
||||
# Optional, defaults to all types
|
||||
event: push
|
||||
# Optional, will use specified workflow run
|
||||
# use ${{ github.event.workflow_run.id }} when your action runs in a workflow_run event
|
||||
# and wants to download from the triggering workflow run
|
||||
run_id: 1122334455
|
||||
# Optional, run number from the workflow
|
||||
run_number: 34
|
||||
# Optional, uploaded artifact name,
|
||||
# will download all artifacts if not specified
|
||||
# and extract them into respective subdirectories
|
||||
# https://github.com/actions/download-artifact#download-all-artifacts
|
||||
# is treated as a regular expression if input name_is_regexp is true
|
||||
# will download only those artifacts with a name that matches this regular expression
|
||||
# https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_expressions
|
||||
name: artifact_name
|
||||
# Optional, name is treated as a regular expression if set true
|
||||
name_is_regexp: true
|
||||
# Optional, a directory where to extract artifact(s), defaults to the current directory
|
||||
path: extract_here
|
||||
# Optional, defaults to current repo
|
||||
repo: ${{ github.repository }}
|
||||
# Optional, check the workflow run to whether it has an artifact
|
||||
# then will get the last available artifact from the previous workflow
|
||||
# default false, just try to download from the last one
|
||||
check_artifacts: false
|
||||
# Optional, search for the last workflow run whose stored an artifact named as in `name` input
|
||||
# default false
|
||||
search_artifacts: false
|
||||
# Optional, choose to skip unpacking the downloaded artifact(s)
|
||||
# default false
|
||||
skip_unpack: false
|
||||
# Optional, choose how to exit the action if no artifact is found
|
||||
# can be one of:
|
||||
# "fail", "warn", "ignore"
|
||||
# default fail
|
||||
if_no_artifact_found: fail
|
||||
# Optional, ignore forks when searching for artifacts
|
||||
# default true
|
||||
allow_forks: false
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### GLIBC_2.28 not found
|
||||
|
||||
`v3` release of this action switched from `node16` to `node20` as runtime.
|
||||
Node 20 requires `glibc>=2.28`. If your self-hosted runner has older `glibc`, pin to `v2` release, but note it won't receive any updates.
|
||||
103
dawidd6/action-download-artifact-v3/action.yml
Normal file
103
dawidd6/action-download-artifact-v3/action.yml
Normal file
|
|
@ -0,0 +1,103 @@
|
|||
name: Download workflow artifact
|
||||
description: Download and extract an artifact associated with given workflow and commit or other criteria
|
||||
author: dawidd6
|
||||
branding:
|
||||
icon: download
|
||||
color: blue
|
||||
inputs:
|
||||
github_token:
|
||||
description: GitHub token
|
||||
required: false
|
||||
default: ${{ github.token }}
|
||||
workflow:
|
||||
description: |
|
||||
Workflow name.
|
||||
|
||||
If not specified, will be inferred from run_id (if run_id is specified), or will be the current workflow
|
||||
required: false
|
||||
workflow_search:
|
||||
description: |
|
||||
Most recent workflow matching all other criteria will be looked up instead of using the current workflow
|
||||
|
||||
https://docs.github.com/de/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-repository
|
||||
required: false
|
||||
default: false
|
||||
workflow_conclusion:
|
||||
description: |
|
||||
Wanted status or conclusion to search for in recent runs
|
||||
|
||||
https://docs.github.com/de/rest/actions/workflow-runs?apiVersion=2022-11-28#list-workflow-runs-for-a-workflow
|
||||
required: false
|
||||
default: success
|
||||
repo:
|
||||
description: Repository name with owner (like actions/checkout)
|
||||
required: false
|
||||
default: ${{ github.repository }}
|
||||
pr:
|
||||
description: Pull request number
|
||||
required: false
|
||||
commit:
|
||||
description: Commit hash
|
||||
required: false
|
||||
branch:
|
||||
description: Branch name
|
||||
required: false
|
||||
event:
|
||||
description: Event type
|
||||
required: false
|
||||
run_id:
|
||||
description: Workflow run id
|
||||
required: false
|
||||
run_number:
|
||||
description: Workflow run number
|
||||
required: false
|
||||
name:
|
||||
description: Artifact name (download all artifacts if not specified)
|
||||
required: false
|
||||
name_is_regexp:
|
||||
description: Treat artifact name as a regular expression and download only artifacts with matching names
|
||||
required: false
|
||||
default: false
|
||||
path:
|
||||
description: Where to unpack the artifact
|
||||
required: false
|
||||
default: "./"
|
||||
allow_forks:
|
||||
description: Allow forks
|
||||
required: false
|
||||
default: true
|
||||
check_artifacts:
|
||||
description: Check workflow run whether it has an artifact
|
||||
required: false
|
||||
default: false
|
||||
search_artifacts:
|
||||
description: Search workflow runs for artifact with specified name
|
||||
required: false
|
||||
default: false
|
||||
skip_unpack:
|
||||
description: Choose to skip unpacking the downloaded artifact(s)
|
||||
required: false
|
||||
default: false
|
||||
dry_run:
|
||||
description: Check the existence of artifact(s) without downloading
|
||||
required: false
|
||||
if_no_artifact_found:
|
||||
required: false
|
||||
description: |
|
||||
Choose how to exit the action if no artifact is found
|
||||
|
||||
fail, warn or ignore
|
||||
default: fail
|
||||
outputs:
|
||||
error_message:
|
||||
description: The error message, if an error occurs
|
||||
# TODO: dry_run should be merged with found_artifact output
|
||||
dry_run:
|
||||
description: Boolean output which is true if the dry run was successful and false otherwise
|
||||
found_artifact:
|
||||
description: Boolean output which is true if the artifact was found and false otherwise
|
||||
artifacts:
|
||||
description: JSON array with details about found artifacts
|
||||
runs:
|
||||
using: node20
|
||||
main: main.js
|
||||
310
dawidd6/action-download-artifact-v3/main.js
Normal file
310
dawidd6/action-download-artifact-v3/main.js
Normal file
|
|
@ -0,0 +1,310 @@
|
|||
const core = require('@actions/core')
|
||||
const github = require('@actions/github')
|
||||
const artifact = require('@actions/artifact')
|
||||
const AdmZip = require('adm-zip')
|
||||
const filesize = require('filesize')
|
||||
const pathname = require('path')
|
||||
const fs = require('fs')
|
||||
|
||||
async function downloadAction(name, path) {
|
||||
const artifactClient = artifact.create()
|
||||
const downloadOptions = {
|
||||
createArtifactFolder: false
|
||||
}
|
||||
const downloadResponse = await artifactClient.downloadArtifact(
|
||||
name,
|
||||
path,
|
||||
downloadOptions
|
||||
)
|
||||
core.setOutput("found_artifact", true)
|
||||
}
|
||||
|
||||
async function getWorkflow(client, owner, repo, runID) {
|
||||
const run = await client.rest.actions.getWorkflowRun({
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
run_id: runID || github.context.runId,
|
||||
})
|
||||
return run.data.workflow_id
|
||||
}
|
||||
|
||||
async function main() {
|
||||
try {
|
||||
const token = core.getInput("github_token", { required: true })
|
||||
const [owner, repo] = core.getInput("repo", { required: true }).split("/")
|
||||
const path = core.getInput("path", { required: true })
|
||||
const name = core.getInput("name")
|
||||
const nameIsRegExp = core.getBooleanInput("name_is_regexp")
|
||||
const skipUnpack = core.getBooleanInput("skip_unpack")
|
||||
const ifNoArtifactFound = core.getInput("if_no_artifact_found")
|
||||
let workflow = core.getInput("workflow")
|
||||
let workflowSearch = core.getBooleanInput("workflow_search")
|
||||
let workflowConclusion = core.getInput("workflow_conclusion")
|
||||
let pr = core.getInput("pr")
|
||||
let commit = core.getInput("commit")
|
||||
let branch = core.getInput("branch")
|
||||
let event = core.getInput("event")
|
||||
let runID = core.getInput("run_id")
|
||||
let runNumber = core.getInput("run_number")
|
||||
let checkArtifacts = core.getBooleanInput("check_artifacts")
|
||||
let searchArtifacts = core.getBooleanInput("search_artifacts")
|
||||
const allowForks = core.getBooleanInput("allow_forks")
|
||||
let dryRun = core.getInput("dry_run")
|
||||
|
||||
const client = github.getOctokit(token)
|
||||
|
||||
core.info(`==> Repository: ${owner}/${repo}`)
|
||||
core.info(`==> Artifact name: ${name}`)
|
||||
core.info(`==> Local path: ${path}`)
|
||||
|
||||
if (!workflow && !workflowSearch) {
|
||||
workflow = await getWorkflow(client, owner, repo, runID)
|
||||
}
|
||||
|
||||
if (workflow) {
|
||||
core.info(`==> Workflow name: ${workflow}`)
|
||||
}
|
||||
core.info(`==> Workflow conclusion: ${workflowConclusion}`)
|
||||
|
||||
const uniqueInputSets = [
|
||||
{
|
||||
"pr": pr,
|
||||
"commit": commit,
|
||||
"branch": branch,
|
||||
"run_id": runID
|
||||
}
|
||||
]
|
||||
uniqueInputSets.forEach((inputSet) => {
|
||||
const inputs = Object.values(inputSet)
|
||||
const providedInputs = inputs.filter(input => input !== '')
|
||||
if (providedInputs.length > 1) {
|
||||
throw new Error(`The following inputs cannot be used together: ${Object.keys(inputSet).join(", ")}`)
|
||||
}
|
||||
})
|
||||
|
||||
if (pr) {
|
||||
core.info(`==> PR: ${pr}`)
|
||||
const pull = await client.rest.pulls.get({
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
pull_number: pr,
|
||||
})
|
||||
commit = pull.data.head.sha
|
||||
//branch = pull.data.head.ref
|
||||
}
|
||||
|
||||
if (commit) {
|
||||
core.info(`==> Commit: ${commit}`)
|
||||
}
|
||||
|
||||
if (branch) {
|
||||
branch = branch.replace(/^refs\/heads\//, "")
|
||||
core.info(`==> Branch: ${branch}`)
|
||||
}
|
||||
|
||||
if (event) {
|
||||
core.info(`==> Event: ${event}`)
|
||||
}
|
||||
|
||||
if (runNumber) {
|
||||
core.info(`==> Run number: ${runNumber}`)
|
||||
}
|
||||
|
||||
core.info(`==> Allow forks: ${allowForks}`)
|
||||
|
||||
if (!runID) {
|
||||
const runGetter = workflow ? client.rest.actions.listWorkflowRuns : client.rest.actions.listWorkflowRunsForRepo
|
||||
// Note that the runs are returned in most recent first order.
|
||||
for await (const runs of client.paginate.iterator(runGetter, {
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
...(workflow ? { workflow_id: workflow } : {}),
|
||||
...(branch ? { branch } : {}),
|
||||
...(event ? { event } : {}),
|
||||
...(commit ? { head_sha: commit } : {}),
|
||||
}
|
||||
)) {
|
||||
for (const run of runs.data) {
|
||||
if (runNumber && run.run_number != runNumber) {
|
||||
continue
|
||||
}
|
||||
if (workflowConclusion && (workflowConclusion != run.conclusion && workflowConclusion != run.status)) {
|
||||
continue
|
||||
}
|
||||
if (!allowForks && run.head_repository.full_name !== `${owner}/${repo}`) {
|
||||
core.info(`==> Skipping run from fork: ${run.head_repository.full_name}`)
|
||||
continue
|
||||
}
|
||||
if (checkArtifacts || searchArtifacts) {
|
||||
let artifacts = await client.paginate(client.rest.actions.listWorkflowRunArtifacts, {
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
run_id: run.id,
|
||||
})
|
||||
if (!artifacts || artifacts.length == 0) {
|
||||
continue
|
||||
}
|
||||
if (searchArtifacts) {
|
||||
const artifact = artifacts.find((artifact) => {
|
||||
if (nameIsRegExp) {
|
||||
return artifact.name.match(name) !== null
|
||||
}
|
||||
return artifact.name == name
|
||||
})
|
||||
if (!artifact) {
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
runID = run.id
|
||||
core.info(`==> (found) Run ID: ${runID}`)
|
||||
core.info(`==> (found) Run date: ${run.created_at}`)
|
||||
|
||||
if (!workflow) {
|
||||
workflow = await getWorkflow(client, owner, repo, runID)
|
||||
core.info(`==> (found) Workflow: ${workflow}`)
|
||||
}
|
||||
break
|
||||
}
|
||||
if (runID) {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!runID) {
|
||||
if (workflowConclusion && (workflowConclusion != 'in_progress')) {
|
||||
return setExitMessage(ifNoArtifactFound, "no matching workflow run found with any artifacts?")
|
||||
}
|
||||
|
||||
try {
|
||||
return await downloadAction(name, path)
|
||||
} catch (error) {
|
||||
return setExitMessage(ifNoArtifactFound, "no matching artifact in this workflow?")
|
||||
}
|
||||
}
|
||||
|
||||
let artifacts = await client.paginate(client.rest.actions.listWorkflowRunArtifacts, {
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
run_id: runID,
|
||||
})
|
||||
|
||||
// One artifact if 'name' input is specified, one or more if `name` is a regular expression, all otherwise.
|
||||
if (name) {
|
||||
filtered = artifacts.filter((artifact) => {
|
||||
if (nameIsRegExp) {
|
||||
return artifact.name.match(name) !== null
|
||||
}
|
||||
return artifact.name == name
|
||||
})
|
||||
if (filtered.length == 0) {
|
||||
core.info(`==> (not found) Artifact: ${name}`)
|
||||
core.info('==> Found the following artifacts instead:')
|
||||
for (const artifact of artifacts) {
|
||||
core.info(`\t==> (found) Artifact: ${artifact.name}`)
|
||||
}
|
||||
}
|
||||
artifacts = filtered
|
||||
}
|
||||
|
||||
core.setOutput("artifacts", artifacts)
|
||||
|
||||
if (dryRun) {
|
||||
if (artifacts.length == 0) {
|
||||
core.setOutput("dry_run", false)
|
||||
core.setOutput("found_artifact", false)
|
||||
return
|
||||
} else {
|
||||
core.setOutput("dry_run", true)
|
||||
core.setOutput("found_artifact", true)
|
||||
core.info('==> (found) Artifacts')
|
||||
for (const artifact of artifacts) {
|
||||
const size = filesize(artifact.size_in_bytes, { base: 10 })
|
||||
core.info(`\t==> Artifact:`)
|
||||
core.info(`\t==> ID: ${artifact.id}`)
|
||||
core.info(`\t==> Name: ${artifact.name}`)
|
||||
core.info(`\t==> Size: ${size}`)
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if (artifacts.length == 0) {
|
||||
return setExitMessage(ifNoArtifactFound, "no artifacts found")
|
||||
}
|
||||
|
||||
core.setOutput("found_artifact", true)
|
||||
|
||||
for (const artifact of artifacts) {
|
||||
core.info(`==> Artifact: ${artifact.id}`)
|
||||
|
||||
const size = filesize(artifact.size_in_bytes, { base: 10 })
|
||||
|
||||
core.info(`==> Downloading: ${artifact.name}.zip (${size})`)
|
||||
|
||||
let zip
|
||||
try {
|
||||
zip = await client.rest.actions.downloadArtifact({
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
artifact_id: artifact.id,
|
||||
archive_format: "zip",
|
||||
})
|
||||
} catch (error) {
|
||||
if (error.message.startsWith("Artifact has expired")) {
|
||||
return setExitMessage(ifNoArtifactFound, "no downloadable artifacts found (expired)")
|
||||
} else {
|
||||
throw new Error(error.message)
|
||||
}
|
||||
}
|
||||
|
||||
if (skipUnpack) {
|
||||
fs.mkdirSync(path, { recursive: true })
|
||||
fs.writeFileSync(`${pathname.join(path, artifact.name)}.zip`, Buffer.from(zip.data), 'binary')
|
||||
continue
|
||||
}
|
||||
|
||||
const dir = name && !nameIsRegExp ? path : pathname.join(path, artifact.name)
|
||||
|
||||
fs.mkdirSync(dir, { recursive: true })
|
||||
|
||||
const adm = new AdmZip(Buffer.from(zip.data))
|
||||
|
||||
core.startGroup(`==> Extracting: ${artifact.name}.zip`)
|
||||
adm.getEntries().forEach((entry) => {
|
||||
const action = entry.isDirectory ? "creating" : "inflating"
|
||||
const filepath = pathname.join(dir, entry.entryName)
|
||||
|
||||
core.info(` ${action}: ${filepath}`)
|
||||
})
|
||||
|
||||
adm.extractAllTo(dir, true)
|
||||
core.endGroup()
|
||||
}
|
||||
} catch (error) {
|
||||
core.setOutput("found_artifact", false)
|
||||
core.setOutput("error_message", error.message)
|
||||
core.setFailed(error.message)
|
||||
}
|
||||
|
||||
function setExitMessage(ifNoArtifactFound, message) {
|
||||
core.setOutput("found_artifact", false)
|
||||
|
||||
switch (ifNoArtifactFound) {
|
||||
case "fail":
|
||||
core.setFailed(message)
|
||||
break
|
||||
case "warn":
|
||||
core.warning(message)
|
||||
break
|
||||
case "ignore":
|
||||
default:
|
||||
core.info(message)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
main()
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/.bin/crc32
generated
vendored
Symbolic link
1
dawidd6/action-download-artifact-v3/node_modules/.bin/crc32
generated
vendored
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../crc-32/bin/crc32.njs
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/.bin/dot-object
generated
vendored
Symbolic link
1
dawidd6/action-download-artifact-v3/node_modules/.bin/dot-object
generated
vendored
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../dot-object/bin/dot-object
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/.bin/mkdirp
generated
vendored
Symbolic link
1
dawidd6/action-download-artifact-v3/node_modules/.bin/mkdirp
generated
vendored
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../mkdirp/bin/cmd.js
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/.bin/prettier
generated
vendored
Symbolic link
1
dawidd6/action-download-artifact-v3/node_modules/.bin/prettier
generated
vendored
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../prettier/bin-prettier.js
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/.bin/protoc
generated
vendored
Symbolic link
1
dawidd6/action-download-artifact-v3/node_modules/.bin/protoc
generated
vendored
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../@protobuf-ts/protoc/protoc.js
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/.bin/protoc-gen-dump
generated
vendored
Symbolic link
1
dawidd6/action-download-artifact-v3/node_modules/.bin/protoc-gen-dump
generated
vendored
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../@protobuf-ts/plugin/bin/protoc-gen-dump
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/.bin/protoc-gen-ts
generated
vendored
Symbolic link
1
dawidd6/action-download-artifact-v3/node_modules/.bin/protoc-gen-ts
generated
vendored
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../@protobuf-ts/plugin/bin/protoc-gen-ts
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/.bin/protoc-gen-twirp_ts
generated
vendored
Symbolic link
1
dawidd6/action-download-artifact-v3/node_modules/.bin/protoc-gen-twirp_ts
generated
vendored
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../twirp-ts/protoc-gen-twirp_ts
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/.bin/tsc
generated
vendored
Symbolic link
1
dawidd6/action-download-artifact-v3/node_modules/.bin/tsc
generated
vendored
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../typescript/bin/tsc
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/.bin/tsserver
generated
vendored
Symbolic link
1
dawidd6/action-download-artifact-v3/node_modules/.bin/tsserver
generated
vendored
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../typescript/bin/tsserver
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/.bin/uuid
generated
vendored
Symbolic link
1
dawidd6/action-download-artifact-v3/node_modules/.bin/uuid
generated
vendored
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../uuid/dist/bin/uuid
|
||||
1501
dawidd6/action-download-artifact-v3/node_modules/.package-lock.json
generated
vendored
Normal file
1501
dawidd6/action-download-artifact-v3/node_modules/.package-lock.json
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
9
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/LICENSE.md
generated
vendored
Normal file
9
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/LICENSE.md
generated
vendored
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright 2019 GitHub
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
192
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/README.md
generated
vendored
Normal file
192
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,192 @@
|
|||
# `@actions/artifact`
|
||||
|
||||
Interact programmatically with [Actions Artifacts](https://docs.github.com/en/actions/using-workflows/storing-workflow-data-as-artifacts).
|
||||
|
||||
This is the core library that powers the [`@actions/upload-artifact`](https://github.com/actions/upload-artifact) and [`@actions/download-artifact`](https://github.com/actions/download-artifact) actions.
|
||||
|
||||
|
||||
- [`@actions/artifact`](#actionsartifact)
|
||||
- [v2 - What's New](#v2---whats-new)
|
||||
- [Improvements](#improvements)
|
||||
- [Breaking changes](#breaking-changes)
|
||||
- [Quick Start](#quick-start)
|
||||
- [Examples](#examples)
|
||||
- [Upload and Download](#upload-and-download)
|
||||
- [Delete an Artifact](#delete-an-artifact)
|
||||
- [Downloading from other workflow runs or repos](#downloading-from-other-workflow-runs-or-repos)
|
||||
- [Speeding up large uploads](#speeding-up-large-uploads)
|
||||
- [Additional Resources](#additional-resources)
|
||||
|
||||
## v2 - What's New
|
||||
|
||||
> [!IMPORTANT]
|
||||
> @actions/artifact v2+, upload-artifact@v4+, and download-artifact@v4+ are not currently supported on GHES yet. The previous version of this package can be found at [this tag](https://github.com/actions/toolkit/tree/@actions/artifact@1.1.2/packages/artifact) and [on npm](https://www.npmjs.com/package/@actions/artifact/v/1.1.2).
|
||||
|
||||
The release of `@actions/artifact@v2` (including `upload-artifact@v4` and `download-artifact@v4`) are major changes to the backend architecture of Artifacts. They have numerous performance and behavioral improvements.
|
||||
|
||||
### Improvements
|
||||
|
||||
1. All upload and download operations are much quicker, up to 80% faster download times and 96% faster upload times in worst case scenarios.
|
||||
2. Once uploaded, an Artifact ID is returned and Artifacts are immediately available in the UI and [REST API](https://docs.github.com/en/rest/actions/artifacts). Previously, you would have to wait for the run to be completed before an ID was available or any APIs could be utilized.
|
||||
3. Artifacts can now be downloaded and deleted from the UI _before_ the entire workflow run finishes.
|
||||
4. The contents of an Artifact are uploaded together into an _immutable_ archive. They cannot be altered by subsequent jobs. Both of these factors help reduce the possibility of accidentally corrupting Artifact files. (Digest/integrity hash coming soon in the API!)
|
||||
5. This library (and `actions/download-artifact`) now support downloading Artifacts from _other_ repositories and runs if a `GITHUB_TOKEN` with sufficient `actions:read` permissions are provided.
|
||||
|
||||
### Breaking changes
|
||||
|
||||
1. Firewall rules required for self-hosted runners.
|
||||
|
||||
If you are using self-hosted runners behind a firewall, you must have flows open to [Actions endpoints](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github). If you cannot use wildcard rules for your firewall, see the GitHub [meta endpoint](https://api.github.com/meta) for specific endpoints.
|
||||
|
||||
e.g.
|
||||
|
||||
```bash
|
||||
curl https://api.github.com/meta | jq .domains.actions
|
||||
```
|
||||
|
||||
2. Uploading to the same named Artifact multiple times.
|
||||
|
||||
Due to how Artifacts are created in this new version, it is no longer possible to upload to the same named Artifact multiple times. You must either split the uploads into multiple Artifacts with different names, or only upload once.
|
||||
|
||||
3. Limit of Artifacts for an individual job.
|
||||
|
||||
Each job in a workflow run now has a limit of 10 artifacts.
|
||||
|
||||
## Quick Start
|
||||
|
||||
Install the package:
|
||||
|
||||
```bash
|
||||
npm i @actions/artifact
|
||||
```
|
||||
|
||||
Import the module:
|
||||
|
||||
```js
|
||||
// ES6 module
|
||||
import {DefaultArtifactClient} from '@actions/artifact'
|
||||
|
||||
// CommonJS
|
||||
const {DefaultArtifactClient} = require('@actions/artifact')
|
||||
```
|
||||
|
||||
Then instantiate:
|
||||
|
||||
```js
|
||||
const artifact = new DefaultArtifactClient()
|
||||
```
|
||||
|
||||
ℹ️ For a comprehensive list of classes, interfaces, functions and more, see the [generated documentation](./docs/generated/README.md).
|
||||
|
||||
## Examples
|
||||
|
||||
### Upload and Download
|
||||
|
||||
The most basic scenario is uploading one or more files to an Artifact, then downloading that Artifact. Downloads are based on the Artifact ID, which can be obtained in the response of `uploadArtifact`, `getArtifact`, `listArtifacts` or via the [REST API](https://docs.github.com/en/rest/actions/artifacts).
|
||||
|
||||
```js
|
||||
const {id, size} = await artifact.uploadArtifact(
|
||||
// name of the artifact
|
||||
'my-artifact',
|
||||
// files to include (supports absolute and relative paths)
|
||||
['/absolute/path/file1.txt', './relative/file2.txt'],
|
||||
{
|
||||
// optional: how long to retain the artifact
|
||||
// if unspecified, defaults to repository/org retention settings (the limit of this value)
|
||||
retentionDays: 10
|
||||
}
|
||||
)
|
||||
|
||||
console.log(`Created artifact with id: ${id} (bytes: ${size}`)
|
||||
|
||||
const {downloadPath} = await artifact.downloadArtifact(id, {
|
||||
// optional: download destination path. otherwise defaults to $GITHUB_WORKSPACE
|
||||
path: '/tmp/dst/path',
|
||||
})
|
||||
|
||||
console.log(`Downloaded artifact ${id} to: ${downloadPath}`)
|
||||
```
|
||||
|
||||
### Delete an Artifact
|
||||
|
||||
To delete an artifact, all you need is the name.
|
||||
|
||||
```js
|
||||
const {id} = await artifact.deleteArtifact(
|
||||
// name of the artifact
|
||||
'my-artifact'
|
||||
)
|
||||
|
||||
console.log(`Deleted Artifact ID '${id}'`)
|
||||
```
|
||||
|
||||
It also supports options to delete from other repos/runs given a github token with `actions:write` permissions on the target repository is supplied.
|
||||
|
||||
```js
|
||||
const findBy = {
|
||||
// must have actions:write permission on target repository
|
||||
token: process.env['GITHUB_TOKEN'],
|
||||
workflowRunId: 123,
|
||||
repositoryOwner: 'actions',
|
||||
repositoryName: 'toolkit'
|
||||
}
|
||||
|
||||
|
||||
const {id} = await artifact.deleteArtifact(
|
||||
// name of the artifact
|
||||
'my-artifact',
|
||||
// options to find by other repo/owner
|
||||
{ findBy }
|
||||
)
|
||||
|
||||
console.log(`Deleted Artifact ID '${id}' from ${findBy.repositoryOwner}/ ${findBy.repositoryName}`)
|
||||
```
|
||||
|
||||
### Downloading from other workflow runs or repos
|
||||
|
||||
It may be useful to download Artifacts from other workflow runs, or even other repositories. By default, the permissions are scoped so they can only download Artifacts within the current workflow run. To elevate permissions for this scenario, you must specify `options.findBy` to `downloadArtifact`.
|
||||
|
||||
```ts
|
||||
const findBy = {
|
||||
// must have actions:read permission on target repository
|
||||
token: process.env['GITHUB_TOKEN'],
|
||||
workflowRunId: 123,
|
||||
repositoryOwner: 'actions',
|
||||
repositoryName: 'toolkit'
|
||||
}
|
||||
|
||||
await artifact.downloadArtifact(1337, {
|
||||
findBy
|
||||
})
|
||||
|
||||
// can also be used in other methods
|
||||
|
||||
await artifact.getArtifact('my-artifact', {
|
||||
findBy
|
||||
})
|
||||
|
||||
await artifact.listArtifacts({
|
||||
findBy
|
||||
})
|
||||
```
|
||||
|
||||
### Speeding up large uploads
|
||||
|
||||
If you have large files that need to be uploaded (or file types that don't compress well), you may benefit from changing the compression level of the Artifact archive. NOTE: This is a tradeoff between artifact upload time and stored data size.
|
||||
|
||||
```ts
|
||||
await artifact.uploadArtifact('my-massive-artifact', ['big_file.bin'], {
|
||||
// The level of compression for Zlib to be applied to the artifact archive.
|
||||
// - 0: No compression
|
||||
// - 1: Best speed
|
||||
// - 6: Default compression (same as GNU Gzip)
|
||||
// - 9: Best compression
|
||||
compressionLevel: 0
|
||||
})
|
||||
```
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- [Releases](./RELEASES.md)
|
||||
- [Contribution Guide](./CONTRIBUTIONS.md)
|
||||
- [Frequently Asked Questions](./docs/faq.md)
|
||||
6
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/artifact.d.ts
generated
vendored
Normal file
6
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/artifact.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
import { ArtifactClient } from './internal/client';
|
||||
export * from './internal/shared/interfaces';
|
||||
export * from './internal/shared/errors';
|
||||
export * from './internal/client';
|
||||
declare const client: ArtifactClient;
|
||||
export default client;
|
||||
23
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/artifact.js
generated
vendored
Normal file
23
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/artifact.js
generated
vendored
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
||||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const client_1 = require("./internal/client");
|
||||
__exportStar(require("./internal/shared/interfaces"), exports);
|
||||
__exportStar(require("./internal/shared/errors"), exports);
|
||||
__exportStar(require("./internal/client"), exports);
|
||||
const client = new client_1.DefaultArtifactClient();
|
||||
exports.default = client;
|
||||
//# sourceMappingURL=artifact.js.map
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/artifact.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/artifact.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"artifact.js","sourceRoot":"","sources":["../src/artifact.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,8CAAuE;AAEvE,+DAA4C;AAC5C,2DAAwC;AACxC,oDAAiC;AAEjC,MAAM,MAAM,GAAmB,IAAI,8BAAqB,EAAE,CAAA;AAC1D,kBAAe,MAAM,CAAA"}
|
||||
145
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/google/protobuf/timestamp.d.ts
generated
vendored
Normal file
145
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/google/protobuf/timestamp.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,145 @@
|
|||
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||
import type { JsonValue } from "@protobuf-ts/runtime";
|
||||
import type { JsonReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
/**
|
||||
* A Timestamp represents a point in time independent of any time zone
|
||||
* or calendar, represented as seconds and fractions of seconds at
|
||||
* nanosecond resolution in UTC Epoch time. It is encoded using the
|
||||
* Proleptic Gregorian Calendar which extends the Gregorian calendar
|
||||
* backwards to year one. It is encoded assuming all minutes are 60
|
||||
* seconds long, i.e. leap seconds are "smeared" so that no leap second
|
||||
* table is needed for interpretation. Range is from
|
||||
* 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
|
||||
* By restricting to that range, we ensure that we can convert to
|
||||
* and from RFC 3339 date strings.
|
||||
* See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).
|
||||
*
|
||||
* # Examples
|
||||
*
|
||||
* Example 1: Compute Timestamp from POSIX `time()`.
|
||||
*
|
||||
* Timestamp timestamp;
|
||||
* timestamp.set_seconds(time(NULL));
|
||||
* timestamp.set_nanos(0);
|
||||
*
|
||||
* Example 2: Compute Timestamp from POSIX `gettimeofday()`.
|
||||
*
|
||||
* struct timeval tv;
|
||||
* gettimeofday(&tv, NULL);
|
||||
*
|
||||
* Timestamp timestamp;
|
||||
* timestamp.set_seconds(tv.tv_sec);
|
||||
* timestamp.set_nanos(tv.tv_usec * 1000);
|
||||
*
|
||||
* Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
|
||||
*
|
||||
* FILETIME ft;
|
||||
* GetSystemTimeAsFileTime(&ft);
|
||||
* UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
|
||||
*
|
||||
* // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
|
||||
* // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
|
||||
* Timestamp timestamp;
|
||||
* timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
|
||||
* timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
|
||||
*
|
||||
* Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
|
||||
*
|
||||
* long millis = System.currentTimeMillis();
|
||||
*
|
||||
* Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
|
||||
* .setNanos((int) ((millis % 1000) * 1000000)).build();
|
||||
*
|
||||
*
|
||||
* Example 5: Compute Timestamp from current time in Python.
|
||||
*
|
||||
* timestamp = Timestamp()
|
||||
* timestamp.GetCurrentTime()
|
||||
*
|
||||
* # JSON Mapping
|
||||
*
|
||||
* In JSON format, the Timestamp type is encoded as a string in the
|
||||
* [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
|
||||
* format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
|
||||
* where {year} is always expressed using four digits while {month}, {day},
|
||||
* {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
|
||||
* seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
|
||||
* are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
|
||||
* is required. A proto3 JSON serializer should always use UTC (as indicated by
|
||||
* "Z") when printing the Timestamp type and a proto3 JSON parser should be
|
||||
* able to accept both UTC and other timezones (as indicated by an offset).
|
||||
*
|
||||
* For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
|
||||
* 01:30 UTC on January 15, 2017.
|
||||
*
|
||||
* In JavaScript, one can convert a Date object to this format using the
|
||||
* standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString]
|
||||
* method. In Python, a standard `datetime.datetime` object can be converted
|
||||
* to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime)
|
||||
* with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one
|
||||
* can use the Joda Time's [`ISODateTimeFormat.dateTime()`](
|
||||
* http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime--
|
||||
* ) to obtain a formatter capable of generating timestamps in this format.
|
||||
*
|
||||
*
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.Timestamp
|
||||
*/
|
||||
export interface Timestamp {
|
||||
/**
|
||||
* Represents seconds of UTC time since Unix epoch
|
||||
* 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
|
||||
* 9999-12-31T23:59:59Z inclusive.
|
||||
*
|
||||
* @generated from protobuf field: int64 seconds = 1;
|
||||
*/
|
||||
seconds: string;
|
||||
/**
|
||||
* Non-negative fractions of a second at nanosecond resolution. Negative
|
||||
* second values with fractions must still have non-negative nanos values
|
||||
* that count forward in time. Must be from 0 to 999,999,999
|
||||
* inclusive.
|
||||
*
|
||||
* @generated from protobuf field: int32 nanos = 2;
|
||||
*/
|
||||
nanos: number;
|
||||
}
|
||||
declare class Timestamp$Type extends MessageType<Timestamp> {
|
||||
constructor();
|
||||
/**
|
||||
* Creates a new `Timestamp` for the current time.
|
||||
*/
|
||||
now(): Timestamp;
|
||||
/**
|
||||
* Converts a `Timestamp` to a JavaScript Date.
|
||||
*/
|
||||
toDate(message: Timestamp): Date;
|
||||
/**
|
||||
* Converts a JavaScript Date to a `Timestamp`.
|
||||
*/
|
||||
fromDate(date: Date): Timestamp;
|
||||
/**
|
||||
* In JSON format, the `Timestamp` type is encoded as a string
|
||||
* in the RFC 3339 format.
|
||||
*/
|
||||
internalJsonWrite(message: Timestamp, options: JsonWriteOptions): JsonValue;
|
||||
/**
|
||||
* In JSON format, the `Timestamp` type is encoded as a string
|
||||
* in the RFC 3339 format.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Timestamp): Timestamp;
|
||||
create(value?: PartialMessage<Timestamp>): Timestamp;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Timestamp): Timestamp;
|
||||
internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.Timestamp
|
||||
*/
|
||||
export declare const Timestamp: Timestamp$Type;
|
||||
export {};
|
||||
136
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/google/protobuf/timestamp.js
generated
vendored
Normal file
136
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/google/protobuf/timestamp.js
generated
vendored
Normal file
|
|
@ -0,0 +1,136 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Timestamp = void 0;
|
||||
const runtime_1 = require("@protobuf-ts/runtime");
|
||||
const runtime_2 = require("@protobuf-ts/runtime");
|
||||
const runtime_3 = require("@protobuf-ts/runtime");
|
||||
const runtime_4 = require("@protobuf-ts/runtime");
|
||||
const runtime_5 = require("@protobuf-ts/runtime");
|
||||
const runtime_6 = require("@protobuf-ts/runtime");
|
||||
const runtime_7 = require("@protobuf-ts/runtime");
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class Timestamp$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.Timestamp", [
|
||||
{ no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Creates a new `Timestamp` for the current time.
|
||||
*/
|
||||
now() {
|
||||
const msg = this.create();
|
||||
const ms = Date.now();
|
||||
msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString();
|
||||
msg.nanos = (ms % 1000) * 1000000;
|
||||
return msg;
|
||||
}
|
||||
/**
|
||||
* Converts a `Timestamp` to a JavaScript Date.
|
||||
*/
|
||||
toDate(message) {
|
||||
return new Date(runtime_6.PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000));
|
||||
}
|
||||
/**
|
||||
* Converts a JavaScript Date to a `Timestamp`.
|
||||
*/
|
||||
fromDate(date) {
|
||||
const msg = this.create();
|
||||
const ms = date.getTime();
|
||||
msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString();
|
||||
msg.nanos = (ms % 1000) * 1000000;
|
||||
return msg;
|
||||
}
|
||||
/**
|
||||
* In JSON format, the `Timestamp` type is encoded as a string
|
||||
* in the RFC 3339 format.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
let ms = runtime_6.PbLong.from(message.seconds).toNumber() * 1000;
|
||||
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
|
||||
throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
|
||||
if (message.nanos < 0)
|
||||
throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative.");
|
||||
let z = "Z";
|
||||
if (message.nanos > 0) {
|
||||
let nanosStr = (message.nanos + 1000000000).toString().substring(1);
|
||||
if (nanosStr.substring(3) === "000000")
|
||||
z = "." + nanosStr.substring(0, 3) + "Z";
|
||||
else if (nanosStr.substring(6) === "000")
|
||||
z = "." + nanosStr.substring(0, 6) + "Z";
|
||||
else
|
||||
z = "." + nanosStr + "Z";
|
||||
}
|
||||
return new Date(ms).toISOString().replace(".000Z", z);
|
||||
}
|
||||
/**
|
||||
* In JSON format, the `Timestamp` type is encoded as a string
|
||||
* in the RFC 3339 format.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (typeof json !== "string")
|
||||
throw new Error("Unable to parse Timestamp from JSON " + (0, runtime_5.typeofJsonValue)(json) + ".");
|
||||
let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);
|
||||
if (!matches)
|
||||
throw new Error("Unable to parse Timestamp from JSON. Invalid format.");
|
||||
let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z"));
|
||||
if (Number.isNaN(ms))
|
||||
throw new Error("Unable to parse Timestamp from JSON. Invalid value.");
|
||||
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
|
||||
throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.seconds = runtime_6.PbLong.from(ms / 1000).toString();
|
||||
target.nanos = 0;
|
||||
if (matches[7])
|
||||
target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000);
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { seconds: "0", nanos: 0 };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* int64 seconds */ 1:
|
||||
message.seconds = reader.int64().toString();
|
||||
break;
|
||||
case /* int32 nanos */ 2:
|
||||
message.nanos = reader.int32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* int64 seconds = 1; */
|
||||
if (message.seconds !== "0")
|
||||
writer.tag(1, runtime_1.WireType.Varint).int64(message.seconds);
|
||||
/* int32 nanos = 2; */
|
||||
if (message.nanos !== 0)
|
||||
writer.tag(2, runtime_1.WireType.Varint).int32(message.nanos);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.Timestamp
|
||||
*/
|
||||
exports.Timestamp = new Timestamp$Type();
|
||||
//# sourceMappingURL=timestamp.js.map
|
||||
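The fractional-second handling in `internalJsonWrite` above can be restated as a small standalone helper for clarity. This is a sketch for illustration only (the function name and sample values are not part of the vendored file): adding 1e9 before converting to a string guarantees nine digits, and trailing groups of zeros are trimmed so the output carries 3, 6, or 9 fractional digits.

```typescript
// Sketch (not part of the vendored file): mirrors Timestamp.internalJsonWrite's
// fractional-second formatting. `nanos` must lie in [0, 999999999].
function formatNanos(nanos: number): string {
  if (nanos === 0) return "Z";
  // Prepend 1e9 so the string always has 9 digits, then drop the leading "1".
  const nanosStr = (nanos + 1000000000).toString().substring(1);
  if (nanosStr.substring(3) === "000000") return "." + nanosStr.substring(0, 3) + "Z";
  if (nanosStr.substring(6) === "000") return "." + nanosStr.substring(0, 6) + "Z";
  return "." + nanosStr + "Z";
}

console.log(formatNanos(120_000_000)); // ".120Z"       (millisecond precision)
console.log(formatNanos(5_000));       // ".000005Z"    (microsecond precision)
console.log(formatNanos(7));           // ".000000007Z" (nanosecond precision)
```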
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/google/protobuf/timestamp.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
307
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/google/protobuf/wrappers.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,307 @@
|
|||
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||
import type { JsonValue } from "@protobuf-ts/runtime";
|
||||
import type { JsonReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
/**
|
||||
* Wrapper message for `double`.
|
||||
*
|
||||
* The JSON representation for `DoubleValue` is JSON number.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.DoubleValue
|
||||
*/
|
||||
export interface DoubleValue {
|
||||
/**
|
||||
* The double value.
|
||||
*
|
||||
* @generated from protobuf field: double value = 1;
|
||||
*/
|
||||
value: number;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `float`.
|
||||
*
|
||||
* The JSON representation for `FloatValue` is JSON number.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.FloatValue
|
||||
*/
|
||||
export interface FloatValue {
|
||||
/**
|
||||
* The float value.
|
||||
*
|
||||
* @generated from protobuf field: float value = 1;
|
||||
*/
|
||||
value: number;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `int64`.
|
||||
*
|
||||
* The JSON representation for `Int64Value` is JSON string.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.Int64Value
|
||||
*/
|
||||
export interface Int64Value {
|
||||
/**
|
||||
* The int64 value.
|
||||
*
|
||||
* @generated from protobuf field: int64 value = 1;
|
||||
*/
|
||||
value: string;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `uint64`.
|
||||
*
|
||||
* The JSON representation for `UInt64Value` is JSON string.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.UInt64Value
|
||||
*/
|
||||
export interface UInt64Value {
|
||||
/**
|
||||
* The uint64 value.
|
||||
*
|
||||
* @generated from protobuf field: uint64 value = 1;
|
||||
*/
|
||||
value: string;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `int32`.
|
||||
*
|
||||
* The JSON representation for `Int32Value` is JSON number.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.Int32Value
|
||||
*/
|
||||
export interface Int32Value {
|
||||
/**
|
||||
* The int32 value.
|
||||
*
|
||||
* @generated from protobuf field: int32 value = 1;
|
||||
*/
|
||||
value: number;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `uint32`.
|
||||
*
|
||||
* The JSON representation for `UInt32Value` is JSON number.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.UInt32Value
|
||||
*/
|
||||
export interface UInt32Value {
|
||||
/**
|
||||
* The uint32 value.
|
||||
*
|
||||
* @generated from protobuf field: uint32 value = 1;
|
||||
*/
|
||||
value: number;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `bool`.
|
||||
*
|
||||
* The JSON representation for `BoolValue` is JSON `true` and `false`.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.BoolValue
|
||||
*/
|
||||
export interface BoolValue {
|
||||
/**
|
||||
* The bool value.
|
||||
*
|
||||
* @generated from protobuf field: bool value = 1;
|
||||
*/
|
||||
value: boolean;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `string`.
|
||||
*
|
||||
* The JSON representation for `StringValue` is JSON string.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.StringValue
|
||||
*/
|
||||
export interface StringValue {
|
||||
/**
|
||||
* The string value.
|
||||
*
|
||||
* @generated from protobuf field: string value = 1;
|
||||
*/
|
||||
value: string;
|
||||
}
|
||||
/**
|
||||
* Wrapper message for `bytes`.
|
||||
*
|
||||
* The JSON representation for `BytesValue` is JSON string.
|
||||
*
|
||||
* @generated from protobuf message google.protobuf.BytesValue
|
||||
*/
|
||||
export interface BytesValue {
|
||||
/**
|
||||
* The bytes value.
|
||||
*
|
||||
* @generated from protobuf field: bytes value = 1;
|
||||
*/
|
||||
value: Uint8Array;
|
||||
}
|
||||
declare class DoubleValue$Type extends MessageType<DoubleValue> {
|
||||
constructor();
|
||||
/**
|
||||
* Encode `DoubleValue` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message: DoubleValue, options: JsonWriteOptions): JsonValue;
|
||||
/**
|
||||
* Decode `DoubleValue` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: DoubleValue): DoubleValue;
|
||||
create(value?: PartialMessage<DoubleValue>): DoubleValue;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DoubleValue): DoubleValue;
|
||||
internalBinaryWrite(message: DoubleValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.DoubleValue
|
||||
*/
|
||||
export declare const DoubleValue: DoubleValue$Type;
|
||||
declare class FloatValue$Type extends MessageType<FloatValue> {
|
||||
constructor();
|
||||
/**
|
||||
* Encode `FloatValue` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message: FloatValue, options: JsonWriteOptions): JsonValue;
|
||||
/**
|
||||
* Decode `FloatValue` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: FloatValue): FloatValue;
|
||||
create(value?: PartialMessage<FloatValue>): FloatValue;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FloatValue): FloatValue;
|
||||
internalBinaryWrite(message: FloatValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.FloatValue
|
||||
*/
|
||||
export declare const FloatValue: FloatValue$Type;
|
||||
declare class Int64Value$Type extends MessageType<Int64Value> {
|
||||
constructor();
|
||||
/**
|
||||
* Encode `Int64Value` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message: Int64Value, options: JsonWriteOptions): JsonValue;
|
||||
/**
|
||||
* Decode `Int64Value` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value;
|
||||
create(value?: PartialMessage<Int64Value>): Int64Value;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int64Value): Int64Value;
|
||||
internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.Int64Value
|
||||
*/
|
||||
export declare const Int64Value: Int64Value$Type;
|
||||
declare class UInt64Value$Type extends MessageType<UInt64Value> {
|
||||
constructor();
|
||||
/**
|
||||
* Encode `UInt64Value` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message: UInt64Value, options: JsonWriteOptions): JsonValue;
|
||||
/**
|
||||
* Decode `UInt64Value` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value;
|
||||
create(value?: PartialMessage<UInt64Value>): UInt64Value;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt64Value): UInt64Value;
|
||||
internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.UInt64Value
|
||||
*/
|
||||
export declare const UInt64Value: UInt64Value$Type;
|
||||
declare class Int32Value$Type extends MessageType<Int32Value> {
|
||||
constructor();
|
||||
/**
|
||||
* Encode `Int32Value` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message: Int32Value, options: JsonWriteOptions): JsonValue;
|
||||
/**
|
||||
* Decode `Int32Value` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int32Value): Int32Value;
|
||||
create(value?: PartialMessage<Int32Value>): Int32Value;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int32Value): Int32Value;
|
||||
internalBinaryWrite(message: Int32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.Int32Value
|
||||
*/
|
||||
export declare const Int32Value: Int32Value$Type;
|
||||
declare class UInt32Value$Type extends MessageType<UInt32Value> {
|
||||
constructor();
|
||||
/**
|
||||
* Encode `UInt32Value` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message: UInt32Value, options: JsonWriteOptions): JsonValue;
|
||||
/**
|
||||
* Decode `UInt32Value` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt32Value): UInt32Value;
|
||||
create(value?: PartialMessage<UInt32Value>): UInt32Value;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt32Value): UInt32Value;
|
||||
internalBinaryWrite(message: UInt32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.UInt32Value
|
||||
*/
|
||||
export declare const UInt32Value: UInt32Value$Type;
|
||||
declare class BoolValue$Type extends MessageType<BoolValue> {
|
||||
constructor();
|
||||
/**
|
||||
* Encode `BoolValue` to JSON bool.
|
||||
*/
|
||||
internalJsonWrite(message: BoolValue, options: JsonWriteOptions): JsonValue;
|
||||
/**
|
||||
* Decode `BoolValue` from JSON bool.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BoolValue): BoolValue;
|
||||
create(value?: PartialMessage<BoolValue>): BoolValue;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BoolValue): BoolValue;
|
||||
internalBinaryWrite(message: BoolValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.BoolValue
|
||||
*/
|
||||
export declare const BoolValue: BoolValue$Type;
|
||||
declare class StringValue$Type extends MessageType<StringValue> {
|
||||
constructor();
|
||||
/**
|
||||
* Encode `StringValue` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message: StringValue, options: JsonWriteOptions): JsonValue;
|
||||
/**
|
||||
* Decode `StringValue` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: StringValue): StringValue;
|
||||
create(value?: PartialMessage<StringValue>): StringValue;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StringValue): StringValue;
|
||||
internalBinaryWrite(message: StringValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.StringValue
|
||||
*/
|
||||
export declare const StringValue: StringValue$Type;
|
||||
declare class BytesValue$Type extends MessageType<BytesValue> {
|
||||
constructor();
|
||||
/**
|
||||
* Encode `BytesValue` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message: BytesValue, options: JsonWriteOptions): JsonValue;
|
||||
/**
|
||||
* Decode `BytesValue` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BytesValue): BytesValue;
|
||||
create(value?: PartialMessage<BytesValue>): BytesValue;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BytesValue): BytesValue;
|
||||
internalBinaryWrite(message: BytesValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.BytesValue
|
||||
*/
|
||||
export declare const BytesValue: BytesValue$Type;
|
||||
export {};
|
||||
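These declarations are the plain TypeScript shape of the proto3 wrapper types. Because the files were generated with the `long_type_string` option, the 64-bit wrappers expose `value` as a string while the 32-bit wrappers use `number`. A small, hypothetical usage sketch (the import path and literal values are illustrative, not part of the vendored package):

```typescript
import { Int64Value, Int32Value, StringValue } from "./wrappers";

// 64-bit values are strings, so integers beyond Number.MAX_SAFE_INTEGER survive intact.
const big: Int64Value = { value: "9007199254740993" };

// 32-bit values stay plain JavaScript numbers.
const small: Int32Value = { value: 42 };

// Wrapping a string lets callers distinguish "field absent" from the empty string "".
const label: StringValue = { value: "" };
```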
609
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/google/protobuf/wrappers.js
generated
vendored
Normal file
|
|
@ -0,0 +1,609 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.BytesValue = exports.StringValue = exports.BoolValue = exports.UInt32Value = exports.Int32Value = exports.UInt64Value = exports.Int64Value = exports.FloatValue = exports.DoubleValue = void 0;
|
||||
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||
// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3)
|
||||
// tslint:disable
|
||||
//
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
//
|
||||
//
|
||||
// Wrappers for primitive (non-message) types. These types are useful
|
||||
// for embedding primitives in the `google.protobuf.Any` type and for places
|
||||
// where we need to distinguish between the absence of a primitive
|
||||
// typed field and its default value.
|
||||
//
|
||||
const runtime_1 = require("@protobuf-ts/runtime");
|
||||
const runtime_2 = require("@protobuf-ts/runtime");
|
||||
const runtime_3 = require("@protobuf-ts/runtime");
|
||||
const runtime_4 = require("@protobuf-ts/runtime");
|
||||
const runtime_5 = require("@protobuf-ts/runtime");
|
||||
const runtime_6 = require("@protobuf-ts/runtime");
|
||||
const runtime_7 = require("@protobuf-ts/runtime");
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class DoubleValue$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.DoubleValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `DoubleValue` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return this.refJsonWriter.scalar(2, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `DoubleValue` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 1, undefined, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* double value */ 1:
|
||||
message.value = reader.double();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* double value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, runtime_3.WireType.Bit64).double(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.DoubleValue
|
||||
*/
|
||||
exports.DoubleValue = new DoubleValue$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FloatValue$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.FloatValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `FloatValue` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return this.refJsonWriter.scalar(1, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `FloatValue` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 1, undefined, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* float value */ 1:
|
||||
message.value = reader.float();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* float value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, runtime_3.WireType.Bit32).float(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.FloatValue
|
||||
*/
|
||||
exports.FloatValue = new FloatValue$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class Int64Value$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.Int64Value", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `Int64Value` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return this.refJsonWriter.scalar(runtime_1.ScalarType.INT64, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `Int64Value` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.INT64, runtime_2.LongType.STRING, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: "0" };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* int64 value */ 1:
|
||||
message.value = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* int64 value = 1; */
|
||||
if (message.value !== "0")
|
||||
writer.tag(1, runtime_3.WireType.Varint).int64(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.Int64Value
|
||||
*/
|
||||
exports.Int64Value = new Int64Value$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class UInt64Value$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.UInt64Value", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `UInt64Value` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return this.refJsonWriter.scalar(runtime_1.ScalarType.UINT64, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `UInt64Value` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.UINT64, runtime_2.LongType.STRING, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: "0" };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* uint64 value */ 1:
|
||||
message.value = reader.uint64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* uint64 value = 1; */
|
||||
if (message.value !== "0")
|
||||
writer.tag(1, runtime_3.WireType.Varint).uint64(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.UInt64Value
|
||||
*/
|
||||
exports.UInt64Value = new UInt64Value$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class Int32Value$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.Int32Value", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `Int32Value` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return this.refJsonWriter.scalar(5, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `Int32Value` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 5, undefined, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* int32 value */ 1:
|
||||
message.value = reader.int32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* int32 value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, runtime_3.WireType.Varint).int32(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.Int32Value
|
||||
*/
|
||||
exports.Int32Value = new Int32Value$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class UInt32Value$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.UInt32Value", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 13 /*ScalarType.UINT32*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `UInt32Value` to JSON number.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return this.refJsonWriter.scalar(13, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `UInt32Value` from JSON number.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 13, undefined, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: 0 };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* uint32 value */ 1:
|
||||
message.value = reader.uint32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* uint32 value = 1; */
|
||||
if (message.value !== 0)
|
||||
writer.tag(1, runtime_3.WireType.Varint).uint32(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.UInt32Value
|
||||
*/
|
||||
exports.UInt32Value = new UInt32Value$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class BoolValue$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.BoolValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `BoolValue` to JSON bool.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return message.value;
|
||||
}
|
||||
/**
|
||||
* Decode `BoolValue` from JSON bool.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 8, undefined, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: false };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool value */ 1:
|
||||
message.value = reader.bool();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bool value = 1; */
|
||||
if (message.value !== false)
|
||||
writer.tag(1, runtime_3.WireType.Varint).bool(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.BoolValue
|
||||
*/
|
||||
exports.BoolValue = new BoolValue$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class StringValue$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.StringValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `StringValue` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return message.value;
|
||||
}
|
||||
/**
|
||||
* Decode `StringValue` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 9, undefined, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string value */ 1:
|
||||
message.value = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string value = 1; */
|
||||
if (message.value !== "")
|
||||
writer.tag(1, runtime_3.WireType.LengthDelimited).string(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.StringValue
|
||||
*/
|
||||
exports.StringValue = new StringValue$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class BytesValue$Type extends runtime_7.MessageType {
|
||||
constructor() {
|
||||
super("google.protobuf.BytesValue", [
|
||||
{ no: 1, name: "value", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
|
||||
]);
|
||||
}
|
||||
/**
|
||||
* Encode `BytesValue` to JSON string.
|
||||
*/
|
||||
internalJsonWrite(message, options) {
|
||||
return this.refJsonWriter.scalar(12, message.value, "value", false, true);
|
||||
}
|
||||
/**
|
||||
* Decode `BytesValue` from JSON string.
|
||||
*/
|
||||
internalJsonRead(json, options, target) {
|
||||
if (!target)
|
||||
target = this.create();
|
||||
target.value = this.refJsonReader.scalar(json, 12, undefined, "value");
|
||||
return target;
|
||||
}
|
||||
create(value) {
|
||||
const message = { value: new Uint8Array(0) };
|
||||
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_5.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bytes value */ 1:
|
||||
message.value = reader.bytes();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bytes value = 1; */
|
||||
if (message.value.length)
|
||||
writer.tag(1, runtime_3.WireType.LengthDelimited).bytes(message.value);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message google.protobuf.BytesValue
|
||||
*/
|
||||
exports.BytesValue = new BytesValue$Type();
|
||||
//# sourceMappingURL=wrappers.js.map
|
||||
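Each `internalBinaryWrite` above skips fields that still hold their proto3 default (`0`, `"0"`, `""`, `false`, empty bytes), so a wrapper carrying its default value serializes to an empty byte array. A hedged sketch of that behavior, assuming the standard protobuf-ts `MessageType` helpers `toBinary`/`fromBinary` inherited by these generated types; the import path is illustrative:

```typescript
import { Int32Value } from "./wrappers";

// Default value: internalBinaryWrite emits no field, so the encoding is empty.
const empty = Int32Value.toBinary(Int32Value.create({ value: 0 }));
console.log(empty.length); // 0

// Non-default value: one varint field (tag + value) round-trips unchanged.
const bytes = Int32Value.toBinary(Int32Value.create({ value: 7 }));
const roundTrip = Int32Value.fromBinary(bytes);
console.log(roundTrip.value); // 7
```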
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/google/protobuf/wrappers.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
4
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/index.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
export * from './google/protobuf/timestamp';
|
||||
export * from './google/protobuf/wrappers';
|
||||
export * from './results/api/v1/artifact';
|
||||
export * from './results/api/v1/artifact.twirp';
|
||||
21
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
||||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
__exportStar(require("./google/protobuf/timestamp"), exports);
|
||||
__exportStar(require("./google/protobuf/wrappers"), exports);
|
||||
__exportStar(require("./results/api/v1/artifact"), exports);
|
||||
__exportStar(require("./results/api/v1/artifact.twirp"), exports);
|
||||
//# sourceMappingURL=index.js.map
|
||||
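The barrel above simply re-exports the four generated modules, so downstream code inside @actions/artifact can reach every generated type from a single path. A minimal sketch, assuming a consumer sitting next to the compiled output (the relative path and literal values are illustrative):

```typescript
import { Timestamp, Int64Value } from "./generated";

// Timestamp carries seconds as a string (long_type_string) plus int32 nanos.
const ts: Timestamp = Timestamp.create({ seconds: "1700000000", nanos: 0 });

// Int64Value likewise keeps its payload as a string.
const size: Int64Value = Int64Value.create({ value: "123456789" });
```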
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/index.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/generated/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,8DAA2C;AAC3C,6DAA0C;AAC1C,4DAAyC;AACzC,kEAA+C"}
|
||||
336
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,336 @@
|
|||
import { ServiceType } from "@protobuf-ts/runtime-rpc";
|
||||
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
import { Int64Value } from "../../../google/protobuf/wrappers";
|
||||
import { StringValue } from "../../../google/protobuf/wrappers";
|
||||
import { Timestamp } from "../../../google/protobuf/timestamp";
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.CreateArtifactRequest
|
||||
*/
|
||||
export interface CreateArtifactRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 3;
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @generated from protobuf field: google.protobuf.Timestamp expires_at = 4;
|
||||
*/
|
||||
expiresAt?: Timestamp;
|
||||
/**
|
||||
* @generated from protobuf field: int32 version = 5;
|
||||
*/
|
||||
version: number;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.CreateArtifactResponse
|
||||
*/
|
||||
export interface CreateArtifactResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: string signed_upload_url = 2;
|
||||
*/
|
||||
signedUploadUrl: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.FinalizeArtifactRequest
|
||||
*/
|
||||
export interface FinalizeArtifactRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 3;
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* @generated from protobuf field: int64 size = 4;
|
||||
*/
|
||||
size: string;
|
||||
/**
|
||||
* @generated from protobuf field: google.protobuf.StringValue hash = 5;
|
||||
*/
|
||||
hash?: StringValue;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.FinalizeArtifactResponse
|
||||
*/
|
||||
export interface FinalizeArtifactResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: int64 artifact_id = 2;
|
||||
*/
|
||||
artifactId: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsRequest
|
||||
*/
|
||||
export interface ListArtifactsRequest {
|
||||
/**
|
||||
* The backend plan ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* The backend job ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* Name of the artifact to filter on
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.StringValue name_filter = 3;
|
||||
*/
|
||||
nameFilter?: StringValue;
|
||||
/**
|
||||
* Monolith Database ID of the artifact to filter on
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.Int64Value id_filter = 4;
|
||||
*/
|
||||
idFilter?: Int64Value;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsResponse
|
||||
*/
|
||||
export interface ListArtifactsResponse {
|
||||
/**
|
||||
* @generated from protobuf field: repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts = 1;
|
||||
*/
|
||||
artifacts: ListArtifactsResponse_MonolithArtifact[];
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
|
||||
*/
|
||||
export interface ListArtifactsResponse_MonolithArtifact {
|
||||
/**
|
||||
* The backend plan ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* The backend job ID
|
||||
*
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* Monolith database ID of the artifact
|
||||
*
|
||||
* @generated from protobuf field: int64 database_id = 3;
|
||||
*/
|
||||
databaseId: string;
|
||||
/**
|
||||
* Name of the artifact
|
||||
*
|
||||
* @generated from protobuf field: string name = 4;
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* Size of the artifact in bytes
|
||||
*
|
||||
* @generated from protobuf field: int64 size = 5;
|
||||
*/
|
||||
size: string;
|
||||
/**
|
||||
* When the artifact was created in the monolith
|
||||
*
|
||||
* @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
|
||||
*/
|
||||
createdAt?: Timestamp;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
|
||||
*/
|
||||
export interface GetSignedArtifactURLRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 3;
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
|
||||
*/
|
||||
export interface GetSignedArtifactURLResponse {
|
||||
/**
|
||||
* @generated from protobuf field: string signed_url = 1;
|
||||
*/
|
||||
signedUrl: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.DeleteArtifactRequest
|
||||
*/
|
||||
export interface DeleteArtifactRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_run_backend_id = 1;
|
||||
*/
|
||||
workflowRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string workflow_job_run_backend_id = 2;
|
||||
*/
|
||||
workflowJobRunBackendId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string name = 3;
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message github.actions.results.api.v1.DeleteArtifactResponse
|
||||
*/
|
||||
export interface DeleteArtifactResponse {
|
||||
/**
|
||||
* @generated from protobuf field: bool ok = 1;
|
||||
*/
|
||||
ok: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: int64 artifact_id = 2;
|
||||
*/
|
||||
artifactId: string;
|
||||
}
|
||||
declare class CreateArtifactRequest$Type extends MessageType<CreateArtifactRequest> {
|
||||
constructor();
|
||||
create(value?: PartialMessage<CreateArtifactRequest>): CreateArtifactRequest;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateArtifactRequest): CreateArtifactRequest;
|
||||
internalBinaryWrite(message: CreateArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateArtifactRequest
|
||||
*/
|
||||
export declare const CreateArtifactRequest: CreateArtifactRequest$Type;
|
||||
declare class CreateArtifactResponse$Type extends MessageType<CreateArtifactResponse> {
|
||||
constructor();
|
||||
create(value?: PartialMessage<CreateArtifactResponse>): CreateArtifactResponse;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateArtifactResponse): CreateArtifactResponse;
|
||||
internalBinaryWrite(message: CreateArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateArtifactResponse
|
||||
*/
|
||||
export declare const CreateArtifactResponse: CreateArtifactResponse$Type;
|
||||
declare class FinalizeArtifactRequest$Type extends MessageType<FinalizeArtifactRequest> {
|
||||
constructor();
|
||||
create(value?: PartialMessage<FinalizeArtifactRequest>): FinalizeArtifactRequest;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeArtifactRequest): FinalizeArtifactRequest;
|
||||
internalBinaryWrite(message: FinalizeArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactRequest
|
||||
*/
|
||||
export declare const FinalizeArtifactRequest: FinalizeArtifactRequest$Type;
|
||||
declare class FinalizeArtifactResponse$Type extends MessageType<FinalizeArtifactResponse> {
|
||||
constructor();
|
||||
create(value?: PartialMessage<FinalizeArtifactResponse>): FinalizeArtifactResponse;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeArtifactResponse): FinalizeArtifactResponse;
|
||||
internalBinaryWrite(message: FinalizeArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactResponse
|
||||
*/
|
||||
export declare const FinalizeArtifactResponse: FinalizeArtifactResponse$Type;
|
||||
declare class ListArtifactsRequest$Type extends MessageType<ListArtifactsRequest> {
|
||||
constructor();
|
||||
create(value?: PartialMessage<ListArtifactsRequest>): ListArtifactsRequest;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsRequest): ListArtifactsRequest;
|
||||
internalBinaryWrite(message: ListArtifactsRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsRequest
|
||||
*/
|
||||
export declare const ListArtifactsRequest: ListArtifactsRequest$Type;
|
||||
declare class ListArtifactsResponse$Type extends MessageType<ListArtifactsResponse> {
|
||||
constructor();
|
||||
create(value?: PartialMessage<ListArtifactsResponse>): ListArtifactsResponse;
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsResponse): ListArtifactsResponse;
|
||||
    internalBinaryWrite(message: ListArtifactsResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse
 */
export declare const ListArtifactsResponse: ListArtifactsResponse$Type;
declare class ListArtifactsResponse_MonolithArtifact$Type extends MessageType<ListArtifactsResponse_MonolithArtifact> {
    constructor();
    create(value?: PartialMessage<ListArtifactsResponse_MonolithArtifact>): ListArtifactsResponse_MonolithArtifact;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListArtifactsResponse_MonolithArtifact): ListArtifactsResponse_MonolithArtifact;
    internalBinaryWrite(message: ListArtifactsResponse_MonolithArtifact, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
 */
export declare const ListArtifactsResponse_MonolithArtifact: ListArtifactsResponse_MonolithArtifact$Type;
declare class GetSignedArtifactURLRequest$Type extends MessageType<GetSignedArtifactURLRequest> {
    constructor();
    create(value?: PartialMessage<GetSignedArtifactURLRequest>): GetSignedArtifactURLRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetSignedArtifactURLRequest): GetSignedArtifactURLRequest;
    internalBinaryWrite(message: GetSignedArtifactURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
 */
export declare const GetSignedArtifactURLRequest: GetSignedArtifactURLRequest$Type;
declare class GetSignedArtifactURLResponse$Type extends MessageType<GetSignedArtifactURLResponse> {
    constructor();
    create(value?: PartialMessage<GetSignedArtifactURLResponse>): GetSignedArtifactURLResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetSignedArtifactURLResponse): GetSignedArtifactURLResponse;
    internalBinaryWrite(message: GetSignedArtifactURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
 */
export declare const GetSignedArtifactURLResponse: GetSignedArtifactURLResponse$Type;
declare class DeleteArtifactRequest$Type extends MessageType<DeleteArtifactRequest> {
    constructor();
    create(value?: PartialMessage<DeleteArtifactRequest>): DeleteArtifactRequest;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteArtifactRequest): DeleteArtifactRequest;
    internalBinaryWrite(message: DeleteArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactRequest
 */
export declare const DeleteArtifactRequest: DeleteArtifactRequest$Type;
declare class DeleteArtifactResponse$Type extends MessageType<DeleteArtifactResponse> {
    constructor();
    create(value?: PartialMessage<DeleteArtifactResponse>): DeleteArtifactResponse;
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteArtifactResponse): DeleteArtifactResponse;
    internalBinaryWrite(message: DeleteArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
 * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactResponse
 */
export declare const DeleteArtifactResponse: DeleteArtifactResponse$Type;
/**
 * @generated ServiceType for protobuf service github.actions.results.api.v1.ArtifactService
 */
export declare const ArtifactService: ServiceType;
export {};
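Aside (editor's sketch, not part of the vendored file): the constants declared above follow the usual protobuf-ts MessageType surface, so a message can be built with create() and round-tripped through toBinary()/fromBinary(). The field values below are placeholders, not values the action actually sends; the camelCase field names match the defaults visible in artifact.js further down.

import { GetSignedArtifactURLRequest } from "./artifact";

const req = GetSignedArtifactURLRequest.create({
    workflowRunBackendId: "run-backend-id",     // assumed placeholder value
    workflowJobRunBackendId: "job-backend-id",  // assumed placeholder value
    name: "artifact"
});
const bytes = GetSignedArtifactURLRequest.toBinary(req);            // serialize to protobuf wire format
const roundTripped = GetSignedArtifactURLRequest.fromBinary(bytes); // parse it back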
704
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.js
generated
vendored
Normal file
@ -0,0 +1,704 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = void 0;
|
||||
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
|
||||
// @generated from protobuf file "results/api/v1/artifact.proto" (package "github.actions.results.api.v1", syntax proto3)
|
||||
// tslint:disable
|
||||
const runtime_rpc_1 = require("@protobuf-ts/runtime-rpc");
|
||||
const runtime_1 = require("@protobuf-ts/runtime");
|
||||
const runtime_2 = require("@protobuf-ts/runtime");
|
||||
const runtime_3 = require("@protobuf-ts/runtime");
|
||||
const runtime_4 = require("@protobuf-ts/runtime");
|
||||
const runtime_5 = require("@protobuf-ts/runtime");
|
||||
const wrappers_1 = require("../../../google/protobuf/wrappers");
|
||||
const wrappers_2 = require("../../../google/protobuf/wrappers");
|
||||
const timestamp_1 = require("../../../google/protobuf/timestamp");
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CreateArtifactRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.CreateArtifactRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 4, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp },
|
||||
{ no: 5, name: "version", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "", version: 0 };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 3:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
case /* google.protobuf.Timestamp expires_at */ 4:
|
||||
message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
|
||||
break;
|
||||
case /* int32 version */ 5:
|
||||
message.version = reader.int32();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* string name = 3; */
|
||||
if (message.name !== "")
|
||||
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name);
|
||||
/* google.protobuf.Timestamp expires_at = 4; */
|
||||
if (message.expiresAt)
|
||||
timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(4, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
/* int32 version = 5; */
|
||||
if (message.version !== 0)
|
||||
writer.tag(5, runtime_1.WireType.Varint).int32(message.version);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateArtifactRequest
|
||||
*/
|
||||
exports.CreateArtifactRequest = new CreateArtifactRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CreateArtifactResponse$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.CreateArtifactResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { ok: false, signedUploadUrl: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* string signed_upload_url */ 2:
|
||||
message.signedUploadUrl = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
|
||||
/* string signed_upload_url = 2; */
|
||||
if (message.signedUploadUrl !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateArtifactResponse
|
||||
*/
|
||||
exports.CreateArtifactResponse = new CreateArtifactResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FinalizeArtifactRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.FinalizeArtifactRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 4, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 5, name: "hash", kind: "message", T: () => wrappers_2.StringValue }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "", size: "0" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 3:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
case /* int64 size */ 4:
|
||||
message.size = reader.int64().toString();
|
||||
break;
|
||||
case /* google.protobuf.StringValue hash */ 5:
|
||||
message.hash = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.hash);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* string name = 3; */
|
||||
if (message.name !== "")
|
||||
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name);
|
||||
/* int64 size = 4; */
|
||||
if (message.size !== "0")
|
||||
writer.tag(4, runtime_1.WireType.Varint).int64(message.size);
|
||||
/* google.protobuf.StringValue hash = 5; */
|
||||
if (message.hash)
|
||||
wrappers_2.StringValue.internalBinaryWrite(message.hash, writer.tag(5, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactRequest
|
||||
*/
|
||||
exports.FinalizeArtifactRequest = new FinalizeArtifactRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class FinalizeArtifactResponse$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.FinalizeArtifactResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { ok: false, artifactId: "0" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* int64 artifact_id */ 2:
|
||||
message.artifactId = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
|
||||
/* int64 artifact_id = 2; */
|
||||
if (message.artifactId !== "0")
|
||||
writer.tag(2, runtime_1.WireType.Varint).int64(message.artifactId);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeArtifactResponse
|
||||
*/
|
||||
exports.FinalizeArtifactResponse = new FinalizeArtifactResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class ListArtifactsRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.ListArtifactsRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name_filter", kind: "message", T: () => wrappers_2.StringValue },
|
||||
{ no: 4, name: "id_filter", kind: "message", T: () => wrappers_1.Int64Value }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* google.protobuf.StringValue name_filter */ 3:
|
||||
message.nameFilter = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.nameFilter);
|
||||
break;
|
||||
case /* google.protobuf.Int64Value id_filter */ 4:
|
||||
message.idFilter = wrappers_1.Int64Value.internalBinaryRead(reader, reader.uint32(), options, message.idFilter);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* google.protobuf.StringValue name_filter = 3; */
|
||||
if (message.nameFilter)
|
||||
wrappers_2.StringValue.internalBinaryWrite(message.nameFilter, writer.tag(3, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
/* google.protobuf.Int64Value id_filter = 4; */
|
||||
if (message.idFilter)
|
||||
wrappers_1.Int64Value.internalBinaryWrite(message.idFilter, writer.tag(4, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsRequest
|
||||
*/
|
||||
exports.ListArtifactsRequest = new ListArtifactsRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class ListArtifactsResponse$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.ListArtifactsResponse", [
|
||||
{ no: 1, name: "artifacts", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => exports.ListArtifactsResponse_MonolithArtifact }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { artifacts: [] };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts */ 1:
|
||||
message.artifacts.push(exports.ListArtifactsResponse_MonolithArtifact.internalBinaryRead(reader, reader.uint32(), options));
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts = 1; */
|
||||
for (let i = 0; i < message.artifacts.length; i++)
|
||||
exports.ListArtifactsResponse_MonolithArtifact.internalBinaryWrite(message.artifacts[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse
|
||||
*/
|
||||
exports.ListArtifactsResponse = new ListArtifactsResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", databaseId: "0", name: "", size: "0" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* int64 database_id */ 3:
|
||||
message.databaseId = reader.int64().toString();
|
||||
break;
|
||||
case /* string name */ 4:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
case /* int64 size */ 5:
|
||||
message.size = reader.int64().toString();
|
||||
break;
|
||||
case /* google.protobuf.Timestamp created_at */ 6:
|
||||
message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* int64 database_id = 3; */
|
||||
if (message.databaseId !== "0")
|
||||
writer.tag(3, runtime_1.WireType.Varint).int64(message.databaseId);
|
||||
/* string name = 4; */
|
||||
if (message.name !== "")
|
||||
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.name);
|
||||
/* int64 size = 5; */
|
||||
if (message.size !== "0")
|
||||
writer.tag(5, runtime_1.WireType.Varint).int64(message.size);
|
||||
/* google.protobuf.Timestamp created_at = 6; */
|
||||
if (message.createdAt)
|
||||
timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact
|
||||
*/
|
||||
exports.ListArtifactsResponse_MonolithArtifact = new ListArtifactsResponse_MonolithArtifact$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class GetSignedArtifactURLRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.GetSignedArtifactURLRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 3:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* string name = 3; */
|
||||
if (message.name !== "")
|
||||
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLRequest
|
||||
*/
|
||||
exports.GetSignedArtifactURLRequest = new GetSignedArtifactURLRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class GetSignedArtifactURLResponse$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.GetSignedArtifactURLResponse", [
|
||||
{ no: 1, name: "signed_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { signedUrl: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string signed_url */ 1:
|
||||
message.signedUrl = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string signed_url = 1; */
|
||||
if (message.signedUrl !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.signedUrl);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
|
||||
*/
|
||||
exports.GetSignedArtifactURLResponse = new GetSignedArtifactURLResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class DeleteArtifactRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.DeleteArtifactRequest", [
|
||||
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string workflow_run_backend_id */ 1:
|
||||
message.workflowRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string workflow_job_run_backend_id */ 2:
|
||||
message.workflowJobRunBackendId = reader.string();
|
||||
break;
|
||||
case /* string name */ 3:
|
||||
message.name = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string workflow_run_backend_id = 1; */
|
||||
if (message.workflowRunBackendId !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
|
||||
/* string workflow_job_run_backend_id = 2; */
|
||||
if (message.workflowJobRunBackendId !== "")
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
|
||||
/* string name = 3; */
|
||||
if (message.name !== "")
|
||||
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactRequest
|
||||
*/
|
||||
exports.DeleteArtifactRequest = new DeleteArtifactRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class DeleteArtifactResponse$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("github.actions.results.api.v1.DeleteArtifactResponse", [
|
||||
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { ok: false, artifactId: "0" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bool ok */ 1:
|
||||
message.ok = reader.bool();
|
||||
break;
|
||||
case /* int64 artifact_id */ 2:
|
||||
message.artifactId = reader.int64().toString();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bool ok = 1; */
|
||||
if (message.ok !== false)
|
||||
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
|
||||
/* int64 artifact_id = 2; */
|
||||
if (message.artifactId !== "0")
|
||||
writer.tag(2, runtime_1.WireType.Varint).int64(message.artifactId);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactResponse
|
||||
*/
|
||||
exports.DeleteArtifactResponse = new DeleteArtifactResponse$Type();
/**
 * @generated ServiceType for protobuf service github.actions.results.api.v1.ArtifactService
 */
exports.ArtifactService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.ArtifactService", [
    { name: "CreateArtifact", options: {}, I: exports.CreateArtifactRequest, O: exports.CreateArtifactResponse },
    { name: "FinalizeArtifact", options: {}, I: exports.FinalizeArtifactRequest, O: exports.FinalizeArtifactResponse },
    { name: "ListArtifacts", options: {}, I: exports.ListArtifactsRequest, O: exports.ListArtifactsResponse },
    { name: "GetSignedArtifactURL", options: {}, I: exports.GetSignedArtifactURLRequest, O: exports.GetSignedArtifactURLResponse },
    { name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse }
]);
//# sourceMappingURL=artifact.js.map
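Aside (editor's sketch, not part of the vendored file): the ServiceType built just above is the piece of metadata the Twirp client in artifact.twirp.js dispatches on. A minimal inspection pass, assuming the standard @protobuf-ts/runtime-rpc ServiceType shape (typeName plus a methods array whose entries carry name, I and O):

import { ArtifactService } from "./artifact";

for (const method of ArtifactService.methods) {
    // Prints e.g. "github.actions.results.api.v1.ArtifactService/CreateArtifact";
    // method.I / method.O point at the request/response MessageTypes registered above.
    console.log(`${ArtifactService.typeName}/${method.name}`);
}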
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
48
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp.d.ts
generated
vendored
Normal file
@ -0,0 +1,48 @@
/// <reference types="node" />
import { TwirpContext, TwirpServer } from "twirp-ts";
import { CreateArtifactRequest, CreateArtifactResponse, FinalizeArtifactRequest, FinalizeArtifactResponse, ListArtifactsRequest, ListArtifactsResponse, GetSignedArtifactURLRequest, GetSignedArtifactURLResponse, DeleteArtifactRequest, DeleteArtifactResponse } from "./artifact";
interface Rpc {
    request(service: string, method: string, contentType: "application/json" | "application/protobuf", data: object | Uint8Array): Promise<object | Uint8Array>;
}
export interface ArtifactServiceClient {
    CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
    FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
    ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
    GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
    DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
}
export declare class ArtifactServiceClientJSON implements ArtifactServiceClient {
    private readonly rpc;
    constructor(rpc: Rpc);
    CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
    FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
    ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
    GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
    DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
}
export declare class ArtifactServiceClientProtobuf implements ArtifactServiceClient {
    private readonly rpc;
    constructor(rpc: Rpc);
    CreateArtifact(request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
    FinalizeArtifact(request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
    ListArtifacts(request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
    GetSignedArtifactURL(request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
    DeleteArtifact(request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
}
export interface ArtifactServiceTwirp<T extends TwirpContext = TwirpContext> {
    CreateArtifact(ctx: T, request: CreateArtifactRequest): Promise<CreateArtifactResponse>;
    FinalizeArtifact(ctx: T, request: FinalizeArtifactRequest): Promise<FinalizeArtifactResponse>;
    ListArtifacts(ctx: T, request: ListArtifactsRequest): Promise<ListArtifactsResponse>;
    GetSignedArtifactURL(ctx: T, request: GetSignedArtifactURLRequest): Promise<GetSignedArtifactURLResponse>;
    DeleteArtifact(ctx: T, request: DeleteArtifactRequest): Promise<DeleteArtifactResponse>;
}
export declare enum ArtifactServiceMethod {
    CreateArtifact = "CreateArtifact",
    FinalizeArtifact = "FinalizeArtifact",
    ListArtifacts = "ListArtifacts",
    GetSignedArtifactURL = "GetSignedArtifactURL",
    DeleteArtifact = "DeleteArtifact"
}
export declare const ArtifactServiceMethodList: ArtifactServiceMethod[];
export declare function createArtifactServiceServer<T extends TwirpContext = TwirpContext>(service: ArtifactServiceTwirp<T>): TwirpServer<ArtifactServiceTwirp<TwirpContext<import("http").IncomingMessage, import("http").ServerResponse<import("http").IncomingMessage>>>, T>;
export {};
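Aside (editor's sketch, not part of the vendored file): ArtifactServiceClientJSON only needs an object with the request() signature of the Rpc interface declared above; anything that can POST JSON to the Twirp route will do. The base URL below is a made-up placeholder and fetch availability (Node 18+ or a browser) is an assumption for the example, not the action's actual wiring.

import { ArtifactServiceClientJSON } from "./artifact.twirp";

const rpc = {
    async request(service: string, method: string, contentType: "application/json" | "application/protobuf", data: object | Uint8Array): Promise<object | Uint8Array> {
        // Twirp routes are conventionally POST <baseUrl>/<service>/<method>.
        const res = await fetch(`https://results.example.invalid/twirp/${service}/${method}`, {
            method: "POST",
            headers: { "Content-Type": contentType },
            body: contentType === "application/json" ? JSON.stringify(data) : (data as Uint8Array)
        });
        // The JSON client expects a plain object back; the protobuf client expects bytes.
        return contentType === "application/json" ? await res.json() : new Uint8Array(await res.arrayBuffer());
    }
};

const client = new ArtifactServiceClientJSON(rpc);
// e.g. client.ListArtifacts({ workflowRunBackendId: "...", workflowJobRunBackendId: "..." })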
508
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp.js
generated
vendored
Normal file
@ -0,0 +1,508 @@
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createArtifactServiceServer = exports.ArtifactServiceMethodList = exports.ArtifactServiceMethod = exports.ArtifactServiceClientProtobuf = exports.ArtifactServiceClientJSON = void 0;
|
||||
const twirp_ts_1 = require("twirp-ts");
|
||||
const artifact_1 = require("./artifact");
|
||||
class ArtifactServiceClientJSON {
|
||||
constructor(rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateArtifact.bind(this);
|
||||
this.FinalizeArtifact.bind(this);
|
||||
this.ListArtifacts.bind(this);
|
||||
this.GetSignedArtifactURL.bind(this);
|
||||
this.DeleteArtifact.bind(this);
|
||||
}
|
||||
CreateArtifact(request) {
|
||||
const data = artifact_1.CreateArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "CreateArtifact", "application/json", data);
|
||||
return promise.then((data) => artifact_1.CreateArtifactResponse.fromJson(data, {
|
||||
ignoreUnknownFields: true,
|
||||
}));
|
||||
}
|
||||
FinalizeArtifact(request) {
|
||||
const data = artifact_1.FinalizeArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "FinalizeArtifact", "application/json", data);
|
||||
return promise.then((data) => artifact_1.FinalizeArtifactResponse.fromJson(data, {
|
||||
ignoreUnknownFields: true,
|
||||
}));
|
||||
}
|
||||
ListArtifacts(request) {
|
||||
const data = artifact_1.ListArtifactsRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "ListArtifacts", "application/json", data);
|
||||
return promise.then((data) => artifact_1.ListArtifactsResponse.fromJson(data, { ignoreUnknownFields: true }));
|
||||
}
|
||||
GetSignedArtifactURL(request) {
|
||||
const data = artifact_1.GetSignedArtifactURLRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "GetSignedArtifactURL", "application/json", data);
|
||||
return promise.then((data) => artifact_1.GetSignedArtifactURLResponse.fromJson(data, {
|
||||
ignoreUnknownFields: true,
|
||||
}));
|
||||
}
|
||||
DeleteArtifact(request) {
|
||||
const data = artifact_1.DeleteArtifactRequest.toJson(request, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
});
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "DeleteArtifact", "application/json", data);
|
||||
return promise.then((data) => artifact_1.DeleteArtifactResponse.fromJson(data, {
|
||||
ignoreUnknownFields: true,
|
||||
}));
|
||||
}
|
||||
}
|
||||
exports.ArtifactServiceClientJSON = ArtifactServiceClientJSON;
|
||||
class ArtifactServiceClientProtobuf {
|
||||
constructor(rpc) {
|
||||
this.rpc = rpc;
|
||||
this.CreateArtifact.bind(this);
|
||||
this.FinalizeArtifact.bind(this);
|
||||
this.ListArtifacts.bind(this);
|
||||
this.GetSignedArtifactURL.bind(this);
|
||||
this.DeleteArtifact.bind(this);
|
||||
}
|
||||
CreateArtifact(request) {
|
||||
const data = artifact_1.CreateArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "CreateArtifact", "application/protobuf", data);
|
||||
return promise.then((data) => artifact_1.CreateArtifactResponse.fromBinary(data));
|
||||
}
|
||||
FinalizeArtifact(request) {
|
||||
const data = artifact_1.FinalizeArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "FinalizeArtifact", "application/protobuf", data);
|
||||
return promise.then((data) => artifact_1.FinalizeArtifactResponse.fromBinary(data));
|
||||
}
|
||||
ListArtifacts(request) {
|
||||
const data = artifact_1.ListArtifactsRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "ListArtifacts", "application/protobuf", data);
|
||||
return promise.then((data) => artifact_1.ListArtifactsResponse.fromBinary(data));
|
||||
}
|
||||
GetSignedArtifactURL(request) {
|
||||
const data = artifact_1.GetSignedArtifactURLRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "GetSignedArtifactURL", "application/protobuf", data);
|
||||
return promise.then((data) => artifact_1.GetSignedArtifactURLResponse.fromBinary(data));
|
||||
}
|
||||
DeleteArtifact(request) {
|
||||
const data = artifact_1.DeleteArtifactRequest.toBinary(request);
|
||||
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "DeleteArtifact", "application/protobuf", data);
|
||||
return promise.then((data) => artifact_1.DeleteArtifactResponse.fromBinary(data));
|
||||
}
|
||||
}
|
||||
exports.ArtifactServiceClientProtobuf = ArtifactServiceClientProtobuf;
|
||||
var ArtifactServiceMethod;
|
||||
(function (ArtifactServiceMethod) {
|
||||
ArtifactServiceMethod["CreateArtifact"] = "CreateArtifact";
|
||||
ArtifactServiceMethod["FinalizeArtifact"] = "FinalizeArtifact";
|
||||
ArtifactServiceMethod["ListArtifacts"] = "ListArtifacts";
|
||||
ArtifactServiceMethod["GetSignedArtifactURL"] = "GetSignedArtifactURL";
|
||||
ArtifactServiceMethod["DeleteArtifact"] = "DeleteArtifact";
|
||||
})(ArtifactServiceMethod || (exports.ArtifactServiceMethod = ArtifactServiceMethod = {}));
|
||||
exports.ArtifactServiceMethodList = [
|
||||
ArtifactServiceMethod.CreateArtifact,
|
||||
ArtifactServiceMethod.FinalizeArtifact,
|
||||
ArtifactServiceMethod.ListArtifacts,
|
||||
ArtifactServiceMethod.GetSignedArtifactURL,
|
||||
ArtifactServiceMethod.DeleteArtifact,
|
||||
];
|
||||
function createArtifactServiceServer(service) {
|
||||
return new twirp_ts_1.TwirpServer({
|
||||
service,
|
||||
packageName: "github.actions.results.api.v1",
|
||||
serviceName: "ArtifactService",
|
||||
methodList: exports.ArtifactServiceMethodList,
|
||||
matchRoute: matchArtifactServiceRoute,
|
||||
});
|
||||
}
|
||||
exports.createArtifactServiceServer = createArtifactServiceServer;
|
||||
function matchArtifactServiceRoute(method, events) {
|
||||
switch (method) {
|
||||
case "CreateArtifact":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "CreateArtifact" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleArtifactServiceCreateArtifactRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
case "FinalizeArtifact":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "FinalizeArtifact" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleArtifactServiceFinalizeArtifactRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
case "ListArtifacts":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "ListArtifacts" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleArtifactServiceListArtifactsRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
case "GetSignedArtifactURL":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "GetSignedArtifactURL" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleArtifactServiceGetSignedArtifactURLRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
case "DeleteArtifact":
|
||||
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
|
||||
ctx = Object.assign(Object.assign({}, ctx), { methodName: "DeleteArtifact" });
|
||||
yield events.onMatch(ctx);
|
||||
return handleArtifactServiceDeleteArtifactRequest(ctx, service, data, interceptors);
|
||||
});
|
||||
default:
|
||||
events.onNotFound();
|
||||
const msg = `no handler found`;
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleArtifactServiceCreateArtifactRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleArtifactServiceCreateArtifactJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceCreateArtifactProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleArtifactServiceFinalizeArtifactRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleArtifactServiceFinalizeArtifactJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceFinalizeArtifactProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleArtifactServiceListArtifactsRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleArtifactServiceListArtifactsJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceListArtifactsProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleArtifactServiceGetSignedArtifactURLRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleArtifactServiceGetSignedArtifactURLJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceGetSignedArtifactURLProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleArtifactServiceDeleteArtifactRequest(ctx, service, data, interceptors) {
|
||||
switch (ctx.contentType) {
|
||||
case twirp_ts_1.TwirpContentType.JSON:
|
||||
return handleArtifactServiceDeleteArtifactJSON(ctx, service, data, interceptors);
|
||||
case twirp_ts_1.TwirpContentType.Protobuf:
|
||||
return handleArtifactServiceDeleteArtifactProtobuf(ctx, service, data, interceptors);
|
||||
default:
|
||||
const msg = "unexpected Content-Type";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
|
||||
}
|
||||
}
|
||||
function handleArtifactServiceCreateArtifactJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = artifact_1.CreateArtifactRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.CreateArtifact(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.CreateArtifact(ctx, request);
|
||||
}
|
||||
return JSON.stringify(artifact_1.CreateArtifactResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceFinalizeArtifactJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = artifact_1.FinalizeArtifactRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.FinalizeArtifact(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.FinalizeArtifact(ctx, request);
|
||||
}
|
||||
return JSON.stringify(artifact_1.FinalizeArtifactResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceListArtifactsJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = artifact_1.ListArtifactsRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.ListArtifacts(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.ListArtifacts(ctx, request);
|
||||
}
|
||||
return JSON.stringify(artifact_1.ListArtifactsResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceGetSignedArtifactURLJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = artifact_1.GetSignedArtifactURLRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.GetSignedArtifactURL(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.GetSignedArtifactURL(ctx, request);
|
||||
}
|
||||
return JSON.stringify(artifact_1.GetSignedArtifactURLResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceDeleteArtifactJSON(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
const body = JSON.parse(data.toString() || "{}");
|
||||
request = artifact_1.DeleteArtifactRequest.fromJson(body, {
|
||||
ignoreUnknownFields: true,
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the json request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.DeleteArtifact(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.DeleteArtifact(ctx, request);
|
||||
}
|
||||
return JSON.stringify(artifact_1.DeleteArtifactResponse.toJson(response, {
|
||||
useProtoFieldName: true,
|
||||
emitDefaultValues: false,
|
||||
}));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceCreateArtifactProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = artifact_1.CreateArtifactRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.CreateArtifact(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.CreateArtifact(ctx, request);
|
||||
}
|
||||
return Buffer.from(artifact_1.CreateArtifactResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceFinalizeArtifactProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = artifact_1.FinalizeArtifactRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.FinalizeArtifact(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.FinalizeArtifact(ctx, request);
|
||||
}
|
||||
return Buffer.from(artifact_1.FinalizeArtifactResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceListArtifactsProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = artifact_1.ListArtifactsRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.ListArtifacts(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.ListArtifacts(ctx, request);
|
||||
}
|
||||
return Buffer.from(artifact_1.ListArtifactsResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceGetSignedArtifactURLProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = artifact_1.GetSignedArtifactURLRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.GetSignedArtifactURL(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.GetSignedArtifactURL(ctx, request);
|
||||
}
|
||||
return Buffer.from(artifact_1.GetSignedArtifactURLResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
function handleArtifactServiceDeleteArtifactProtobuf(ctx, service, data, interceptors) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let request;
|
||||
let response;
|
||||
try {
|
||||
request = artifact_1.DeleteArtifactRequest.fromBinary(data);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error) {
|
||||
const msg = "the protobuf request could not be decoded";
|
||||
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
|
||||
}
|
||||
}
|
||||
if (interceptors && interceptors.length > 0) {
|
||||
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
|
||||
response = yield interceptor(ctx, request, (ctx, inputReq) => {
|
||||
return service.DeleteArtifact(ctx, inputReq);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response = yield service.DeleteArtifact(ctx, request);
|
||||
}
|
||||
return Buffer.from(artifact_1.DeleteArtifactResponse.toBinary(response));
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=artifact.twirp.js.map
|
||||
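The generated handlers above all follow the same Twirp routing pattern: decode the request body (JSON via `fromJson` or protobuf via `fromBinary`), optionally pass it through a chain of interceptors built with `chainInterceptors`, invoke the matching service method, and serialize the result. A minimal sketch of an interceptor that fits the call shape used above is shown here; the type aliases and the timing logic are illustrative assumptions, not part of the vendored twirp-ts API.

// Local type aliases that mirror how the handlers invoke interceptor(ctx, request, next).
type Next<Req, Res> = (ctx: unknown, req: Req) => Promise<Res>;
type Interceptor<Req, Res> = (ctx: unknown, req: Req, next: Next<Req, Res>) => Promise<Res>;

// Example interceptor: measure how long each RPC takes, then forward the request unchanged.
const timingInterceptor: Interceptor<object, object> = async (ctx, req, next) => {
  const start = Date.now();
  try {
    return await next(ctx, req);
  } finally {
    console.log(`twirp call finished in ${Date.now() - start}ms`);
  }
};

An interceptor like this would be supplied in the `interceptors` array that each handler checks before falling back to calling the service method directly.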
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.twirp.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
72
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/client.d.ts
generated
vendored
Normal file
72
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/client.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
import { UploadArtifactOptions, UploadArtifactResponse, DownloadArtifactOptions, GetArtifactResponse, ListArtifactsOptions, ListArtifactsResponse, DownloadArtifactResponse, FindOptions, DeleteArtifactResponse } from './shared/interfaces';
/**
* Generic interface for the artifact client.
*/
export interface ArtifactClient {
/**
* Uploads an artifact.
*
* @param name The name of the artifact, required
* @param files A list of absolute or relative paths that denote what files should be uploaded
* @param rootDirectory An absolute or relative file path that denotes the root parent directory of the files being uploaded
* @param options Extra options for customizing the upload behavior
* @returns single UploadArtifactResponse object
*/
uploadArtifact(name: string, files: string[], rootDirectory: string, options?: UploadArtifactOptions): Promise<UploadArtifactResponse>;
/**
* Lists all artifacts that are part of the current workflow run.
* This function will return at most 1000 artifacts per workflow run.
*
* If `options.findBy` is specified, this will call the public List-Artifacts API which can list from other runs.
* https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
*
* @param options Extra options that allow for the customization of the list behavior
* @returns ListArtifactResponse object
*/
listArtifacts(options?: ListArtifactsOptions & FindOptions): Promise<ListArtifactsResponse>;
/**
* Finds an artifact by name.
* If there are multiple artifacts with the same name in the same workflow run, this will return the latest.
* If the artifact is not found, it will throw.
*
* If `options.findBy` is specified, this will use the public List Artifacts API with a name filter which can get artifacts from other runs.
* https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#list-workflow-run-artifacts
* `@actions/artifact` v2+ does not allow for creating multiple artifacts with the same name in the same workflow run.
* It is possible to have multiple artifacts with the same name in the same workflow run by using old versions of upload-artifact (v1,v2 and v3), @actions/artifact < v2 or it is a rerun.
* If there are multiple artifacts with the same name in the same workflow run this function will return the first artifact that matches the name.
*
* @param artifactName The name of the artifact to find
* @param options Extra options that allow for the customization of the get behavior
*/
getArtifact(artifactName: string, options?: FindOptions): Promise<GetArtifactResponse>;
/**
* Downloads an artifact and unzips the content.
*
* If `options.findBy` is specified, this will use the public Download Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#download-an-artifact
*
* @param artifactId The id of the artifact to download
* @param options Extra options that allow for the customization of the download behavior
* @returns single DownloadArtifactResponse object
*/
downloadArtifact(artifactId: number, options?: DownloadArtifactOptions & FindOptions): Promise<DownloadArtifactResponse>;
/**
* Delete an Artifact
*
* If `options.findBy` is specified, this will use the public Delete Artifact API https://docs.github.com/en/rest/actions/artifacts?apiVersion=2022-11-28#delete-an-artifact
*
* @param artifactName The name of the artifact to delete
* @param options Extra options that allow for the customization of the delete behavior
* @returns single DeleteArtifactResponse object
*/
deleteArtifact(artifactName: string, options?: FindOptions): Promise<DeleteArtifactResponse>;
}
/**
* The default artifact client that is used by the artifact action(s).
*/
export declare class DefaultArtifactClient implements ArtifactClient {
uploadArtifact(name: string, files: string[], rootDirectory: string, options?: UploadArtifactOptions): Promise<UploadArtifactResponse>;
downloadArtifact(artifactId: number, options?: DownloadArtifactOptions & FindOptions): Promise<DownloadArtifactResponse>;
listArtifacts(options?: ListArtifactsOptions & FindOptions): Promise<ListArtifactsResponse>;
getArtifact(artifactName: string, options?: FindOptions): Promise<GetArtifactResponse>;
deleteArtifact(artifactName: string, options?: FindOptions): Promise<DeleteArtifactResponse>;
}
|
||||
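The interface above is the public surface of `@actions/artifact` v2 as vendored here. The sketch below shows typical in-run usage, assuming the code runs inside a workflow job (so the runtime token and workspace the client needs are available) and that the package's top-level export re-exports `DefaultArtifactClient`; the artifact name and file paths are made-up examples.

import { DefaultArtifactClient } from '@actions/artifact';

async function roundTrip(): Promise<void> {
  const client = new DefaultArtifactClient();

  // Upload two (hypothetical) files rooted at the current directory.
  const upload = await client.uploadArtifact('build-output', ['dist/app.js', 'dist/app.js.map'], '.');
  console.log(`uploaded artifact id ${upload.id}`);

  // Resolve the newest artifact with that name, then download it by id into ./restored.
  const { artifact } = await client.getArtifact('build-output');
  await client.downloadArtifact(artifact.id, { path: 'restored' });
}

roundTrip().catch(err => {
  console.error(err);
  process.exit(1);
});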
144
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/client.js
generated
vendored
Normal file
144
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/client.js
generated
vendored
Normal file
|
|
@ -0,0 +1,144 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __rest = (this && this.__rest) || function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.DefaultArtifactClient = void 0;
|
||||
const core_1 = require("@actions/core");
|
||||
const config_1 = require("./shared/config");
|
||||
const upload_artifact_1 = require("./upload/upload-artifact");
|
||||
const download_artifact_1 = require("./download/download-artifact");
|
||||
const delete_artifact_1 = require("./delete/delete-artifact");
|
||||
const get_artifact_1 = require("./find/get-artifact");
|
||||
const list_artifacts_1 = require("./find/list-artifacts");
|
||||
const errors_1 = require("./shared/errors");
|
||||
/**
|
||||
* The default artifact client that is used by the artifact action(s).
|
||||
*/
|
||||
class DefaultArtifactClient {
|
||||
uploadArtifact(name, files, rootDirectory, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
if ((0, config_1.isGhes)()) {
|
||||
throw new errors_1.GHESNotSupportedError();
|
||||
}
|
||||
return (0, upload_artifact_1.uploadArtifact)(name, files, rootDirectory, options);
|
||||
}
|
||||
catch (error) {
|
||||
(0, core_1.warning)(`Artifact upload failed with error: ${error}.
|
||||
|
||||
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
|
||||
|
||||
If the error persists, please check whether Actions is operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
}
|
||||
downloadArtifact(artifactId, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
if ((0, config_1.isGhes)()) {
|
||||
throw new errors_1.GHESNotSupportedError();
|
||||
}
|
||||
if (options === null || options === void 0 ? void 0 : options.findBy) {
|
||||
const { findBy: { repositoryOwner, repositoryName, token } } = options, downloadOptions = __rest(options, ["findBy"]);
|
||||
return (0, download_artifact_1.downloadArtifactPublic)(artifactId, repositoryOwner, repositoryName, token, downloadOptions);
|
||||
}
|
||||
return (0, download_artifact_1.downloadArtifactInternal)(artifactId, options);
|
||||
}
|
||||
catch (error) {
|
||||
(0, core_1.warning)(`Download Artifact failed with error: ${error}.
|
||||
|
||||
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
|
||||
|
||||
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
}
|
||||
listArtifacts(options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
if ((0, config_1.isGhes)()) {
|
||||
throw new errors_1.GHESNotSupportedError();
|
||||
}
|
||||
if (options === null || options === void 0 ? void 0 : options.findBy) {
|
||||
const { findBy: { workflowRunId, repositoryOwner, repositoryName, token } } = options;
|
||||
return (0, list_artifacts_1.listArtifactsPublic)(workflowRunId, repositoryOwner, repositoryName, token, options === null || options === void 0 ? void 0 : options.latest);
|
||||
}
|
||||
return (0, list_artifacts_1.listArtifactsInternal)(options === null || options === void 0 ? void 0 : options.latest);
|
||||
}
|
||||
catch (error) {
|
||||
(0, core_1.warning)(`Listing Artifacts failed with error: ${error}.
|
||||
|
||||
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
|
||||
|
||||
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
}
|
||||
getArtifact(artifactName, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
if ((0, config_1.isGhes)()) {
|
||||
throw new errors_1.GHESNotSupportedError();
|
||||
}
|
||||
if (options === null || options === void 0 ? void 0 : options.findBy) {
|
||||
const { findBy: { workflowRunId, repositoryOwner, repositoryName, token } } = options;
|
||||
return (0, get_artifact_1.getArtifactPublic)(artifactName, workflowRunId, repositoryOwner, repositoryName, token);
|
||||
}
|
||||
return (0, get_artifact_1.getArtifactInternal)(artifactName);
|
||||
}
|
||||
catch (error) {
|
||||
(0, core_1.warning)(`Get Artifact failed with error: ${error}.
|
||||
|
||||
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
|
||||
|
||||
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
}
|
||||
deleteArtifact(artifactName, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
if ((0, config_1.isGhes)()) {
|
||||
throw new errors_1.GHESNotSupportedError();
|
||||
}
|
||||
if (options === null || options === void 0 ? void 0 : options.findBy) {
|
||||
const { findBy: { repositoryOwner, repositoryName, workflowRunId, token } } = options;
|
||||
return (0, delete_artifact_1.deleteArtifactPublic)(artifactName, workflowRunId, repositoryOwner, repositoryName, token);
|
||||
}
|
||||
return (0, delete_artifact_1.deleteArtifactInternal)(artifactName);
|
||||
}
|
||||
catch (error) {
|
||||
(0, core_1.warning)(`Delete Artifact failed with error: ${error}.
|
||||
|
||||
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
|
||||
|
||||
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.DefaultArtifactClient = DefaultArtifactClient;
|
||||
//# sourceMappingURL=client.js.map
|
||||
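Every method of `DefaultArtifactClient` above first rejects GHES via `isGhes()` and then branches on `options.findBy`: when it is present, the public REST implementations are used (which can reach other workflow runs); otherwise the internal Twirp backend for the current run is used. A hedged sketch of the cross-run path follows; the token, run id and repository values are placeholders, and the `findBy` field names match the destructuring visible in the code above.

import { DefaultArtifactClient } from '@actions/artifact';

async function pullFromOtherRun(): Promise<void> {
  const client = new DefaultArtifactClient();

  // Placeholder values: a token with actions:read scope and the run to read from.
  const findBy = {
    token: process.env.GITHUB_TOKEN ?? '',
    workflowRunId: 1234567890,
    repositoryOwner: 'some-owner',
    repositoryName: 'some-repo',
  };

  // List the newest artifact per name from that run, then download the first match.
  const { artifacts } = await client.listArtifacts({ latest: true, findBy });
  if (artifacts.length > 0) {
    await client.downloadArtifact(artifacts[0].id, { path: 'from-other-run', findBy });
  }
}

pullFromOtherRun().catch(console.error);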
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/client.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/client.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"client.js","sourceRoot":"","sources":["../../src/internal/client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;AAAA,wCAAqC;AACrC,4CAAsC;AAYtC,8DAAuD;AACvD,oEAGqC;AACrC,8DAGiC;AACjC,sDAA0E;AAC1E,0DAAgF;AAChF,4CAAqD;AAoFrD;;GAEG;AACH,MAAa,qBAAqB;IAC1B,cAAc,CAClB,IAAY,EACZ,KAAe,EACf,aAAqB,EACrB,OAA+B;;YAE/B,IAAI;gBACF,IAAI,IAAA,eAAM,GAAE,EAAE;oBACZ,MAAM,IAAI,8BAAqB,EAAE,CAAA;iBAClC;gBAED,OAAO,IAAA,gCAAc,EAAC,IAAI,EAAE,KAAK,EAAE,aAAa,EAAE,OAAO,CAAC,CAAA;aAC3D;YAAC,OAAO,KAAK,EAAE;gBACd,IAAA,cAAO,EACL,sCAAsC,KAAK;;;;uIAIoF,CAChI,CAAA;gBAED,MAAM,KAAK,CAAA;aACZ;QACH,CAAC;KAAA;IAEK,gBAAgB,CACpB,UAAkB,EAClB,OAA+C;;YAE/C,IAAI;gBACF,IAAI,IAAA,eAAM,GAAE,EAAE;oBACZ,MAAM,IAAI,8BAAqB,EAAE,CAAA;iBAClC;gBAED,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,MAAM,EAAE;oBACnB,MAAM,EACJ,MAAM,EAAE,EAAC,eAAe,EAAE,cAAc,EAAE,KAAK,EAAC,KAE9C,OAAO,EADN,eAAe,UAChB,OAAO,EAHL,UAGL,CAAU,CAAA;oBAEX,OAAO,IAAA,0CAAsB,EAC3B,UAAU,EACV,eAAe,EACf,cAAc,EACd,KAAK,EACL,eAAe,CAChB,CAAA;iBACF;gBAED,OAAO,IAAA,4CAAwB,EAAC,UAAU,EAAE,OAAO,CAAC,CAAA;aACrD;YAAC,OAAO,KAAK,EAAE;gBACd,IAAA,cAAO,EACL,wCAAwC,KAAK;;;;yJAIoG,CAClJ,CAAA;gBAED,MAAM,KAAK,CAAA;aACZ;QACH,CAAC;KAAA;IAEK,aAAa,CACjB,OAA4C;;YAE5C,IAAI;gBACF,IAAI,IAAA,eAAM,GAAE,EAAE;oBACZ,MAAM,IAAI,8BAAqB,EAAE,CAAA;iBAClC;gBAED,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,MAAM,EAAE;oBACnB,MAAM,EACJ,MAAM,EAAE,EAAC,aAAa,EAAE,eAAe,EAAE,cAAc,EAAE,KAAK,EAAC,EAChE,GAAG,OAAO,CAAA;oBAEX,OAAO,IAAA,oCAAmB,EACxB,aAAa,EACb,eAAe,EACf,cAAc,EACd,KAAK,EACL,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,MAAM,CAChB,CAAA;iBACF;gBAED,OAAO,IAAA,sCAAqB,EAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,MAAM,CAAC,CAAA;aAC9C;YAAC,OAAO,KAAc,EAAE;gBACvB,IAAA,cAAO,EACL,wCAAwC,KAAK;;;;yJAIoG,CAClJ,CAAA;gBAED,MAAM,KAAK,CAAA;aACZ;QACH,CAAC;KAAA;IAEK,WAAW,CACf,YAAoB,EACpB,OAAqB;;YAErB,IAAI;gBACF,IAAI,IAAA,eAAM,GAAE,EAAE;oBACZ,MAAM,IAAI,8BAAqB,EAAE,CAAA;iBAClC;gBAED,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,MAAM,EAAE;oBACnB,MAAM,EACJ,MAAM,EAAE,EAAC,aAAa,EAAE,eAAe,EAAE,cAAc,EAAE,KAAK,EAAC,EAChE,GAAG,OAAO,CAAA;oBAEX,OAAO,IAAA,gCAAiB,EACtB,YAAY,EACZ,aAAa,EACb,eAAe,EACf,cAAc,EACd,KAAK,CACN,CAAA;iBACF;gBAED,OAAO,IAAA,kCAAmB,EAAC,YAAY,CAAC,CAAA;aACzC;YAAC,OAAO,KAAc,EAAE;gBACvB,IAAA,cAAO,EACL,mCAAmC,KAAK;;;;yJAIyG,CAClJ,CAAA;gBACD,MAAM,KAAK,CAAA;aACZ;QACH,CAAC;KAAA;IAEK,cAAc,CAClB,YAAoB,EACpB,OAAqB;;YAErB,IAAI;gBACF,IAAI,IAAA,eAAM,GAAE,EAAE;oBACZ,MAAM,IAAI,8BAAqB,EAAE,CAAA;iBAClC;gBAED,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,MAAM,EAAE;oBACnB,MAAM,EACJ,MAAM,EAAE,EAAC,eAAe,EAAE,cAAc,EAAE,aAAa,EAAE,KAAK,EAAC,EAChE,GAAG,OAAO,CAAA;oBAEX,OAAO,IAAA,sCAAoB,EACzB,YAAY,EACZ,aAAa,EACb,eAAe,EACf,cAAc,EACd,KAAK,CACN,CAAA;iBACF;gBAED,OAAO,IAAA,wCAAsB,EAAC,YAAY,CAAC,CAAA;aAC5C;YAAC,OAAO,KAAK,EAAE;gBACd,IAAA,cAAO,EACL,sCAAsC,KAAK;;;;yJAIsG,CAClJ,CAAA;gBAED,MAAM,KAAK,CAAA;aACZ;QACH,CAAC;KAAA;CACF;AA5KD,sDA4KC"}
|
||||
3
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/delete/delete-artifact.d.ts
generated
vendored
Normal file
3
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/delete/delete-artifact.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
import { DeleteArtifactResponse } from '../shared/interfaces';
export declare function deleteArtifactPublic(artifactName: string, workflowRunId: number, repositoryOwner: string, repositoryName: string, token: string): Promise<DeleteArtifactResponse>;
export declare function deleteArtifactInternal(artifactName: any): Promise<DeleteArtifactResponse>;
|
||||
83
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/delete/delete-artifact.js
generated
vendored
Normal file
83
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/delete/delete-artifact.js
generated
vendored
Normal file
|
|
@ -0,0 +1,83 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.deleteArtifactInternal = exports.deleteArtifactPublic = void 0;
|
||||
const core_1 = require("@actions/core");
|
||||
const github_1 = require("@actions/github");
|
||||
const user_agent_1 = require("../shared/user-agent");
|
||||
const retry_options_1 = require("../find/retry-options");
|
||||
const utils_1 = require("@actions/github/lib/utils");
|
||||
const plugin_request_log_1 = require("@octokit/plugin-request-log");
|
||||
const plugin_retry_1 = require("@octokit/plugin-retry");
|
||||
const artifact_twirp_client_1 = require("../shared/artifact-twirp-client");
|
||||
const util_1 = require("../shared/util");
|
||||
const generated_1 = require("../../generated");
|
||||
const get_artifact_1 = require("../find/get-artifact");
|
||||
const errors_1 = require("../shared/errors");
|
||||
function deleteArtifactPublic(artifactName, workflowRunId, repositoryOwner, repositoryName, token) {
|
||||
var _a;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const [retryOpts, requestOpts] = (0, retry_options_1.getRetryOptions)(utils_1.defaults);
|
||||
const opts = {
|
||||
log: undefined,
|
||||
userAgent: (0, user_agent_1.getUserAgentString)(),
|
||||
previews: undefined,
|
||||
retry: retryOpts,
|
||||
request: requestOpts
|
||||
};
|
||||
const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog);
|
||||
const getArtifactResp = yield (0, get_artifact_1.getArtifactPublic)(artifactName, workflowRunId, repositoryOwner, repositoryName, token);
|
||||
const deleteArtifactResp = yield github.rest.actions.deleteArtifact({
|
||||
owner: repositoryOwner,
|
||||
repo: repositoryName,
|
||||
artifact_id: getArtifactResp.artifact.id
|
||||
});
|
||||
if (deleteArtifactResp.status !== 204) {
|
||||
throw new errors_1.InvalidResponseError(`Invalid response from GitHub API: ${deleteArtifactResp.status} (${(_a = deleteArtifactResp === null || deleteArtifactResp === void 0 ? void 0 : deleteArtifactResp.headers) === null || _a === void 0 ? void 0 : _a['x-github-request-id']})`);
|
||||
}
|
||||
return {
|
||||
id: getArtifactResp.artifact.id
|
||||
};
|
||||
});
|
||||
}
|
||||
exports.deleteArtifactPublic = deleteArtifactPublic;
|
||||
function deleteArtifactInternal(artifactName) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
|
||||
const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
|
||||
const listReq = {
|
||||
workflowRunBackendId,
|
||||
workflowJobRunBackendId,
|
||||
nameFilter: generated_1.StringValue.create({ value: artifactName })
|
||||
};
|
||||
const listRes = yield artifactClient.ListArtifacts(listReq);
|
||||
if (listRes.artifacts.length === 0) {
|
||||
throw new errors_1.ArtifactNotFoundError(`Artifact not found for name: ${artifactName}`);
|
||||
}
|
||||
let artifact = listRes.artifacts[0];
|
||||
if (listRes.artifacts.length > 1) {
|
||||
artifact = listRes.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0];
|
||||
(0, core_1.debug)(`More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`);
|
||||
}
|
||||
const req = {
|
||||
workflowRunBackendId: artifact.workflowRunBackendId,
|
||||
workflowJobRunBackendId: artifact.workflowJobRunBackendId,
|
||||
name: artifact.name
|
||||
};
|
||||
const res = yield artifactClient.DeleteArtifact(req);
|
||||
(0, core_1.info)(`Artifact '${artifactName}' (ID: ${res.artifactId}) deleted`);
|
||||
return {
|
||||
id: Number(res.artifactId)
|
||||
};
|
||||
});
|
||||
}
|
||||
exports.deleteArtifactInternal = deleteArtifactInternal;
|
||||
//# sourceMappingURL=delete-artifact.js.map
|
||||
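`deleteArtifactInternal` above resolves the name through `ListArtifacts` with a `nameFilter` and, if several artifacts share that name (possible with older upload-artifact versions or re-runs), deletes the newest one by `databaseId`; `deleteArtifactPublic` first looks the artifact up via `getArtifactPublic` and then calls the REST delete endpoint. A short hedged usage sketch, with an illustrative artifact name:

import { DefaultArtifactClient } from '@actions/artifact';

async function cleanUp(): Promise<void> {
  const client = new DefaultArtifactClient();
  // Deletes the newest artifact with this name in the current run;
  // an ArtifactNotFoundError is thrown when nothing matches.
  const { id } = await client.deleteArtifact('build-output');
  console.log(`deleted artifact ${id}`);
}

cleanUp().catch(console.error);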
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/delete/delete-artifact.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/delete/delete-artifact.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"delete-artifact.js","sourceRoot":"","sources":["../../../src/internal/delete/delete-artifact.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,wCAAyC;AACzC,4CAA0C;AAE1C,qDAAuD;AACvD,yDAAqD;AACrD,qDAA0E;AAC1E,oEAAsD;AACtD,wDAA2C;AAE3C,2EAA2E;AAC3E,yCAAqD;AACrD,+CAIwB;AACxB,uDAAsD;AACtD,6CAA4E;AAE5E,SAAsB,oBAAoB,CACxC,YAAoB,EACpB,aAAqB,EACrB,eAAuB,EACvB,cAAsB,EACtB,KAAa;;;QAEb,MAAM,CAAC,SAAS,EAAE,WAAW,CAAC,GAAG,IAAA,+BAAe,EAAC,gBAAoB,CAAC,CAAA;QAEtE,MAAM,IAAI,GAAmB;YAC3B,GAAG,EAAE,SAAS;YACd,SAAS,EAAE,IAAA,+BAAkB,GAAE;YAC/B,QAAQ,EAAE,SAAS;YACnB,KAAK,EAAE,SAAS;YAChB,OAAO,EAAE,WAAW;SACrB,CAAA;QAED,MAAM,MAAM,GAAG,IAAA,mBAAU,EAAC,KAAK,EAAE,IAAI,EAAE,oBAAK,EAAE,+BAAU,CAAC,CAAA;QAEzD,MAAM,eAAe,GAAG,MAAM,IAAA,gCAAiB,EAC7C,YAAY,EACZ,aAAa,EACb,eAAe,EACf,cAAc,EACd,KAAK,CACN,CAAA;QAED,MAAM,kBAAkB,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC;YAClE,KAAK,EAAE,eAAe;YACtB,IAAI,EAAE,cAAc;YACpB,WAAW,EAAE,eAAe,CAAC,QAAQ,CAAC,EAAE;SACzC,CAAC,CAAA;QAEF,IAAI,kBAAkB,CAAC,MAAM,KAAK,GAAG,EAAE;YACrC,MAAM,IAAI,6BAAoB,CAC5B,qCAAqC,kBAAkB,CAAC,MAAM,KAAK,MAAA,kBAAkB,aAAlB,kBAAkB,uBAAlB,kBAAkB,CAAE,OAAO,0CAAG,qBAAqB,CAAC,GAAG,CAC3H,CAAA;SACF;QAED,OAAO;YACL,EAAE,EAAE,eAAe,CAAC,QAAQ,CAAC,EAAE;SAChC,CAAA;;CACF;AA1CD,oDA0CC;AAED,SAAsB,sBAAsB,CAC1C,YAAY;;QAEZ,MAAM,cAAc,GAAG,IAAA,mDAA2B,GAAE,CAAA;QAEpD,MAAM,EAAC,oBAAoB,EAAE,uBAAuB,EAAC,GACnD,IAAA,6BAAsB,GAAE,CAAA;QAE1B,MAAM,OAAO,GAAyB;YACpC,oBAAoB;YACpB,uBAAuB;YACvB,UAAU,EAAE,uBAAW,CAAC,MAAM,CAAC,EAAC,KAAK,EAAE,YAAY,EAAC,CAAC;SACtD,CAAA;QAED,MAAM,OAAO,GAAG,MAAM,cAAc,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QAE3D,IAAI,OAAO,CAAC,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YAClC,MAAM,IAAI,8BAAqB,CAC7B,gCAAgC,YAAY,EAAE,CAC/C,CAAA;SACF;QAED,IAAI,QAAQ,GAAG,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;QACnC,IAAI,OAAO,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;YAChC,QAAQ,GAAG,OAAO,CAAC,SAAS,CAAC,IAAI,CAC/B,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,CACtD,CAAC,CAAC,CAAC,CAAA;YAEJ,IAAA,YAAK,EACH,yEAAyE,QAAQ,CAAC,UAAU,GAAG,CAChG,CAAA;SACF;QAED,MAAM,GAAG,GAA0B;YACjC,oBAAoB,EAAE,QAAQ,CAAC,oBAAoB;YACnD,uBAAuB,EAAE,QAAQ,CAAC,uBAAuB;YACzD,IAAI,EAAE,QAAQ,CAAC,IAAI;SACpB,CAAA;QAED,MAAM,GAAG,GAAG,MAAM,cAAc,CAAC,cAAc,CAAC,GAAG,CAAC,CAAA;QACpD,IAAA,WAAI,EAAC,aAAa,YAAY,UAAU,GAAG,CAAC,UAAU,WAAW,CAAC,CAAA;QAElE,OAAO;YACL,EAAE,EAAE,MAAM,CAAC,GAAG,CAAC,UAAU,CAAC;SAC3B,CAAA;IACH,CAAC;CAAA;AA7CD,wDA6CC"}
|
||||
4
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/download/download-artifact.d.ts
generated
vendored
Normal file
4
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/download/download-artifact.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
import { DownloadArtifactOptions, DownloadArtifactResponse } from '../shared/interfaces';
export declare function streamExtractExternal(url: string, directory: string): Promise<void>;
export declare function downloadArtifactPublic(artifactId: number, repositoryOwner: string, repositoryName: string, token: string, options?: DownloadArtifactOptions): Promise<DownloadArtifactResponse>;
export declare function downloadArtifactInternal(artifactId: number, options?: DownloadArtifactOptions): Promise<DownloadArtifactResponse>;
|
||||
205
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/download/download-artifact.js
generated
vendored
Normal file
205
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/download/download-artifact.js
generated
vendored
Normal file
|
|
@ -0,0 +1,205 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
|
||||
const promises_1 = __importDefault(require("fs/promises"));
|
||||
const github = __importStar(require("@actions/github"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const httpClient = __importStar(require("@actions/http-client"));
|
||||
const unzip_stream_1 = __importDefault(require("unzip-stream"));
|
||||
const user_agent_1 = require("../shared/user-agent");
|
||||
const config_1 = require("../shared/config");
|
||||
const artifact_twirp_client_1 = require("../shared/artifact-twirp-client");
|
||||
const generated_1 = require("../../generated");
|
||||
const util_1 = require("../shared/util");
|
||||
const errors_1 = require("../shared/errors");
|
||||
const scrubQueryParameters = (url) => {
|
||||
const parsed = new URL(url);
|
||||
parsed.search = '';
|
||||
return parsed.toString();
|
||||
};
|
||||
function exists(path) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
try {
|
||||
yield promises_1.default.access(path);
|
||||
return true;
|
||||
}
|
||||
catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return false;
|
||||
}
|
||||
else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
function streamExtract(url, directory) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let retryCount = 0;
|
||||
while (retryCount < 5) {
|
||||
try {
|
||||
yield streamExtractExternal(url, directory);
|
||||
return;
|
||||
}
|
||||
catch (error) {
|
||||
retryCount++;
|
||||
core.debug(`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`);
|
||||
// wait 5 seconds before retrying
|
||||
yield new Promise(resolve => setTimeout(resolve, 5000));
|
||||
}
|
||||
}
|
||||
throw new Error(`Artifact download failed after ${retryCount} retries.`);
|
||||
});
|
||||
}
|
||||
function streamExtractExternal(url, directory) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const client = new httpClient.HttpClient((0, user_agent_1.getUserAgentString)());
|
||||
const response = yield client.get(url);
|
||||
if (response.message.statusCode !== 200) {
|
||||
throw new Error(`Unexpected HTTP response from blob storage: ${response.message.statusCode} ${response.message.statusMessage}`);
|
||||
}
|
||||
const timeout = 30 * 1000; // 30 seconds
|
||||
return new Promise((resolve, reject) => {
|
||||
const timerFn = () => {
|
||||
response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
|
||||
};
|
||||
const timer = setTimeout(timerFn, timeout);
|
||||
response.message
|
||||
.on('data', () => {
|
||||
timer.refresh();
|
||||
})
|
||||
.on('error', (error) => {
|
||||
core.debug(`response.message: Artifact download failed: ${error.message}`);
|
||||
clearTimeout(timer);
|
||||
reject(error);
|
||||
})
|
||||
.pipe(unzip_stream_1.default.Extract({ path: directory }))
|
||||
.on('close', () => {
|
||||
clearTimeout(timer);
|
||||
resolve();
|
||||
})
|
||||
.on('error', (error) => {
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
exports.streamExtractExternal = streamExtractExternal;
|
||||
function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, token, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
|
||||
const api = github.getOctokit(token);
|
||||
core.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`);
|
||||
const { headers, status } = yield api.rest.actions.downloadArtifact({
|
||||
owner: repositoryOwner,
|
||||
repo: repositoryName,
|
||||
artifact_id: artifactId,
|
||||
archive_format: 'zip',
|
||||
request: {
|
||||
redirect: 'manual'
|
||||
}
|
||||
});
|
||||
if (status !== 302) {
|
||||
throw new Error(`Unable to download artifact. Unexpected status: ${status}`);
|
||||
}
|
||||
const { location } = headers;
|
||||
if (!location) {
|
||||
throw new Error(`Unable to redirect to artifact download url`);
|
||||
}
|
||||
core.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`);
|
||||
try {
|
||||
core.info(`Starting download of artifact to: ${downloadPath}`);
|
||||
yield streamExtract(location, downloadPath);
|
||||
core.info(`Artifact download completed successfully.`);
|
||||
}
|
||||
catch (error) {
|
||||
throw new Error(`Unable to download and extract artifact: ${error.message}`);
|
||||
}
|
||||
return { downloadPath };
|
||||
});
|
||||
}
|
||||
exports.downloadArtifactPublic = downloadArtifactPublic;
|
||||
function downloadArtifactInternal(artifactId, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
|
||||
const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
|
||||
const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
|
||||
const listReq = {
|
||||
workflowRunBackendId,
|
||||
workflowJobRunBackendId,
|
||||
idFilter: generated_1.Int64Value.create({ value: artifactId.toString() })
|
||||
};
|
||||
const { artifacts } = yield artifactClient.ListArtifacts(listReq);
|
||||
if (artifacts.length === 0) {
|
||||
throw new errors_1.ArtifactNotFoundError(`No artifacts found for ID: ${artifactId}\nAre you trying to download from a different run? Try specifying a github-token with \`actions:read\` scope.`);
|
||||
}
|
||||
if (artifacts.length > 1) {
|
||||
core.warning('Multiple artifacts found, defaulting to first.');
|
||||
}
|
||||
const signedReq = {
|
||||
workflowRunBackendId: artifacts[0].workflowRunBackendId,
|
||||
workflowJobRunBackendId: artifacts[0].workflowJobRunBackendId,
|
||||
name: artifacts[0].name
|
||||
};
|
||||
const { signedUrl } = yield artifactClient.GetSignedArtifactURL(signedReq);
|
||||
core.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`);
|
||||
try {
|
||||
core.info(`Starting download of artifact to: ${downloadPath}`);
|
||||
yield streamExtract(signedUrl, downloadPath);
|
||||
core.info(`Artifact download completed successfully.`);
|
||||
}
|
||||
catch (error) {
|
||||
throw new Error(`Unable to download and extract artifact: ${error.message}`);
|
||||
}
|
||||
return { downloadPath };
|
||||
});
|
||||
}
|
||||
exports.downloadArtifactInternal = downloadArtifactInternal;
|
||||
function resolveOrCreateDirectory(downloadPath = (0, config_1.getGitHubWorkspaceDir)()) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (!(yield exists(downloadPath))) {
|
||||
core.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`);
|
||||
yield promises_1.default.mkdir(downloadPath, { recursive: true });
|
||||
}
|
||||
else {
|
||||
core.debug(`Artifact destination folder already exists: ${downloadPath}`);
|
||||
}
|
||||
return downloadPath;
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=download-artifact.js.map
|
||||
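`downloadArtifactPublic` and `downloadArtifactInternal` above both funnel into `streamExtract`, which retries `streamExtractExternal` up to five times with a five-second pause between attempts, while the inner stream is destroyed if no data chunk arrives for 30 seconds. The generic shape of that retry loop is sketched below; the helper name and parameters are illustrative, not part of the vendored module.

// Try an async operation up to `attempts` times, pausing `delayMs` between failures,
// mirroring the loop streamExtract uses around streamExtractExternal.
async function withRetries<T>(op: () => Promise<T>, attempts = 5, delayMs = 5000): Promise<T> {
  let lastError: unknown;
  for (let i = 0; i < attempts; i++) {
    try {
      return await op();
    } catch (error) {
      lastError = error;
      await new Promise<void>(resolve => setTimeout(resolve, delayMs));
    }
  }
  throw new Error(`operation failed after ${attempts} attempts: ${String(lastError)}`);
}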
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/download/download-artifact.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/download/download-artifact.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
3
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/get-artifact.d.ts
generated
vendored
Normal file
3
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/get-artifact.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
import { GetArtifactResponse } from '../shared/interfaces';
export declare function getArtifactPublic(artifactName: string, workflowRunId: number, repositoryOwner: string, repositoryName: string, token: string): Promise<GetArtifactResponse>;
export declare function getArtifactInternal(artifactName: string): Promise<GetArtifactResponse>;
|
||||
122
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/get-artifact.js
generated
vendored
Normal file
122
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/get-artifact.js
generated
vendored
Normal file
|
|
@ -0,0 +1,122 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getArtifactInternal = exports.getArtifactPublic = void 0;
|
||||
const github_1 = require("@actions/github");
|
||||
const plugin_retry_1 = require("@octokit/plugin-retry");
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const utils_1 = require("@actions/github/lib/utils");
|
||||
const retry_options_1 = require("./retry-options");
|
||||
const plugin_request_log_1 = require("@octokit/plugin-request-log");
|
||||
const util_1 = require("../shared/util");
|
||||
const user_agent_1 = require("../shared/user-agent");
|
||||
const artifact_twirp_client_1 = require("../shared/artifact-twirp-client");
|
||||
const generated_1 = require("../../generated");
|
||||
const errors_1 = require("../shared/errors");
|
||||
function getArtifactPublic(artifactName, workflowRunId, repositoryOwner, repositoryName, token) {
|
||||
var _a;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const [retryOpts, requestOpts] = (0, retry_options_1.getRetryOptions)(utils_1.defaults);
|
||||
const opts = {
|
||||
log: undefined,
|
||||
userAgent: (0, user_agent_1.getUserAgentString)(),
|
||||
previews: undefined,
|
||||
retry: retryOpts,
|
||||
request: requestOpts
|
||||
};
|
||||
const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog);
|
||||
const getArtifactResp = yield github.request('GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts{?name}', {
|
||||
owner: repositoryOwner,
|
||||
repo: repositoryName,
|
||||
run_id: workflowRunId,
|
||||
name: artifactName
|
||||
});
|
||||
if (getArtifactResp.status !== 200) {
|
||||
throw new errors_1.InvalidResponseError(`Invalid response from GitHub API: ${getArtifactResp.status} (${(_a = getArtifactResp === null || getArtifactResp === void 0 ? void 0 : getArtifactResp.headers) === null || _a === void 0 ? void 0 : _a['x-github-request-id']})`);
|
||||
}
|
||||
if (getArtifactResp.data.artifacts.length === 0) {
|
||||
throw new errors_1.ArtifactNotFoundError(`Artifact not found for name: ${artifactName}
|
||||
Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
|
||||
For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`);
|
||||
}
|
||||
let artifact = getArtifactResp.data.artifacts[0];
|
||||
if (getArtifactResp.data.artifacts.length > 1) {
|
||||
artifact = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0];
|
||||
core.debug(`More than one artifact found for a single name, returning newest (id: ${artifact.id})`);
|
||||
}
|
||||
return {
|
||||
artifact: {
|
||||
name: artifact.name,
|
||||
id: artifact.id,
|
||||
size: artifact.size_in_bytes,
|
||||
createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
|
||||
}
|
||||
};
|
||||
});
|
||||
}
|
||||
exports.getArtifactPublic = getArtifactPublic;
|
||||
function getArtifactInternal(artifactName) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
|
||||
const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
|
||||
const req = {
|
||||
workflowRunBackendId,
|
||||
workflowJobRunBackendId,
|
||||
nameFilter: generated_1.StringValue.create({ value: artifactName })
|
||||
};
|
||||
const res = yield artifactClient.ListArtifacts(req);
|
||||
if (res.artifacts.length === 0) {
|
||||
throw new errors_1.ArtifactNotFoundError(`Artifact not found for name: ${artifactName}
|
||||
Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
|
||||
For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`);
|
||||
}
|
||||
let artifact = res.artifacts[0];
|
||||
if (res.artifacts.length > 1) {
|
||||
artifact = res.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0];
|
||||
core.debug(`More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`);
|
||||
}
|
||||
return {
|
||||
artifact: {
|
||||
name: artifact.name,
|
||||
id: Number(artifact.databaseId),
|
||||
size: Number(artifact.size),
|
||||
createdAt: artifact.createdAt
|
||||
? generated_1.Timestamp.toDate(artifact.createdAt)
|
||||
: undefined
|
||||
}
|
||||
};
|
||||
});
|
||||
}
|
||||
exports.getArtifactInternal = getArtifactInternal;
|
||||
//# sourceMappingURL=get-artifact.js.map
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/get-artifact.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/get-artifact.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"get-artifact.js","sourceRoot":"","sources":["../../../src/internal/find/get-artifact.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,4CAA0C;AAC1C,wDAA2C;AAC3C,oDAAqC;AAErC,qDAA0E;AAC1E,mDAA+C;AAC/C,oEAAsD;AAEtD,yCAAqD;AACrD,qDAAuD;AACvD,2EAA2E;AAC3E,+CAA4E;AAC5E,6CAA4E;AAE5E,SAAsB,iBAAiB,CACrC,YAAoB,EACpB,aAAqB,EACrB,eAAuB,EACvB,cAAsB,EACtB,KAAa;;;QAEb,MAAM,CAAC,SAAS,EAAE,WAAW,CAAC,GAAG,IAAA,+BAAe,EAAC,gBAAoB,CAAC,CAAA;QAEtE,MAAM,IAAI,GAAmB;YAC3B,GAAG,EAAE,SAAS;YACd,SAAS,EAAE,IAAA,+BAAkB,GAAE;YAC/B,QAAQ,EAAE,SAAS;YACnB,KAAK,EAAE,SAAS;YAChB,OAAO,EAAE,WAAW;SACrB,CAAA;QAED,MAAM,MAAM,GAAG,IAAA,mBAAU,EAAC,KAAK,EAAE,IAAI,EAAE,oBAAK,EAAE,+BAAU,CAAC,CAAA;QAEzD,MAAM,eAAe,GAAG,MAAM,MAAM,CAAC,OAAO,CAC1C,kEAAkE,EAClE;YACE,KAAK,EAAE,eAAe;YACtB,IAAI,EAAE,cAAc;YACpB,MAAM,EAAE,aAAa;YACrB,IAAI,EAAE,YAAY;SACnB,CACF,CAAA;QAED,IAAI,eAAe,CAAC,MAAM,KAAK,GAAG,EAAE;YAClC,MAAM,IAAI,6BAAoB,CAC5B,qCAAqC,eAAe,CAAC,MAAM,KAAK,MAAA,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,OAAO,0CAAG,qBAAqB,CAAC,GAAG,CACrH,CAAA;SACF;QAED,IAAI,eAAe,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YAC/C,MAAM,IAAI,8BAAqB,CAC7B,gCAAgC,YAAY;;yIAEuF,CACpI,CAAA;SACF;QAED,IAAI,QAAQ,GAAG,eAAe,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;QAChD,IAAI,eAAe,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;YAC7C,QAAQ,GAAG,eAAe,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;YACxE,IAAI,CAAC,KAAK,CACR,yEAAyE,QAAQ,CAAC,EAAE,GAAG,CACxF,CAAA;SACF;QAED,OAAO;YACL,QAAQ,EAAE;gBACR,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,EAAE,EAAE,QAAQ,CAAC,EAAE;gBACf,IAAI,EAAE,QAAQ,CAAC,aAAa;gBAC5B,SAAS,EAAE,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,SAAS;aAC3E;SACF,CAAA;;CACF;AA3DD,8CA2DC;AAED,SAAsB,mBAAmB,CACvC,YAAoB;;QAEpB,MAAM,cAAc,GAAG,IAAA,mDAA2B,GAAE,CAAA;QAEpD,MAAM,EAAC,oBAAoB,EAAE,uBAAuB,EAAC,GACnD,IAAA,6BAAsB,GAAE,CAAA;QAE1B,MAAM,GAAG,GAAyB;YAChC,oBAAoB;YACpB,uBAAuB;YACvB,UAAU,EAAE,uBAAW,CAAC,MAAM,CAAC,EAAC,KAAK,EAAE,YAAY,EAAC,CAAC;SACtD,CAAA;QAED,MAAM,GAAG,GAAG,MAAM,cAAc,CAAC,aAAa,CAAC,GAAG,CAAC,CAAA;QAEnD,IAAI,GAAG,CAAC,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YAC9B,MAAM,IAAI,8BAAqB,CAC7B,gCAAgC,YAAY;;yIAEuF,CACpI,CAAA;SACF;QAED,IAAI,QAAQ,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;QAC/B,IAAI,GAAG,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;YAC5B,QAAQ,GAAG,GAAG,CAAC,SAAS,CAAC,IAAI,CAC3B,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,CACtD,CAAC,CAAC,CAAC,CAAA;YAEJ,IAAI,CAAC,KAAK,CACR,yEAAyE,QAAQ,CAAC,UAAU,GAAG,CAChG,CAAA;SACF;QAED,OAAO;YACL,QAAQ,EAAE;gBACR,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,EAAE,EAAE,MAAM,CAAC,QAAQ,CAAC,UAAU,CAAC;gBAC/B,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC;gBAC3B,SAAS,EAAE,QAAQ,CAAC,SAAS;oBAC3B,CAAC,CAAC,qBAAS,CAAC,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC;oBACtC,CAAC,CAAC,SAAS;aACd;SACF,CAAA;IACH,CAAC;CAAA;AA7CD,kDA6CC"}
|
||||
3
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/list-artifacts.d.ts
generated
vendored
Normal file
3
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/list-artifacts.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
import { ListArtifactsResponse } from '../shared/interfaces';
export declare function listArtifactsPublic(workflowRunId: number, repositoryOwner: string, repositoryName: string, token: string, latest?: boolean): Promise<ListArtifactsResponse>;
export declare function listArtifactsInternal(latest?: boolean): Promise<ListArtifactsResponse>;
|
||||
139
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/list-artifacts.js
generated
vendored
Normal file
139
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/list-artifacts.js
generated
vendored
Normal file
|
|
@ -0,0 +1,139 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.listArtifactsInternal = exports.listArtifactsPublic = void 0;
|
||||
const core_1 = require("@actions/core");
|
||||
const github_1 = require("@actions/github");
|
||||
const user_agent_1 = require("../shared/user-agent");
|
||||
const retry_options_1 = require("./retry-options");
|
||||
const utils_1 = require("@actions/github/lib/utils");
|
||||
const plugin_request_log_1 = require("@octokit/plugin-request-log");
|
||||
const plugin_retry_1 = require("@octokit/plugin-retry");
|
||||
const artifact_twirp_client_1 = require("../shared/artifact-twirp-client");
|
||||
const util_1 = require("../shared/util");
|
||||
const generated_1 = require("../../generated");
|
||||
// Limiting to 1000 for perf reasons
|
||||
const maximumArtifactCount = 1000;
|
||||
const paginationCount = 100;
|
||||
const maxNumberOfPages = maximumArtifactCount / paginationCount;
|
||||
function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, token, latest = false) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
(0, core_1.info)(`Fetching artifact list for workflow run ${workflowRunId} in repository ${repositoryOwner}/${repositoryName}`);
|
||||
let artifacts = [];
|
||||
const [retryOpts, requestOpts] = (0, retry_options_1.getRetryOptions)(utils_1.defaults);
|
||||
const opts = {
|
||||
log: undefined,
|
||||
userAgent: (0, user_agent_1.getUserAgentString)(),
|
||||
previews: undefined,
|
||||
retry: retryOpts,
|
||||
request: requestOpts
|
||||
};
|
||||
const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog);
|
||||
let currentPageNumber = 1;
|
||||
const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({
|
||||
owner: repositoryOwner,
|
||||
repo: repositoryName,
|
||||
run_id: workflowRunId,
|
||||
per_page: paginationCount,
|
||||
page: currentPageNumber
|
||||
});
|
||||
let numberOfPages = Math.ceil(listArtifactResponse.total_count / paginationCount);
|
||||
const totalArtifactCount = listArtifactResponse.total_count;
|
||||
if (totalArtifactCount > maximumArtifactCount) {
|
||||
(0, core_1.warning)(`Workflow run ${workflowRunId} has more than 1000 artifacts. Results will be incomplete as only the first ${maximumArtifactCount} artifacts will be returned`);
|
||||
numberOfPages = maxNumberOfPages;
|
||||
}
|
||||
// Iterate over the first page
|
||||
for (const artifact of listArtifactResponse.artifacts) {
|
||||
artifacts.push({
|
||||
name: artifact.name,
|
||||
id: artifact.id,
|
||||
size: artifact.size_in_bytes,
|
||||
createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
|
||||
});
|
||||
}
|
||||
// Iterate over any remaining pages
|
||||
for (currentPageNumber; currentPageNumber < numberOfPages; currentPageNumber++) {
|
||||
currentPageNumber++;
|
||||
(0, core_1.debug)(`Fetching page ${currentPageNumber} of artifact list`);
|
||||
const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({
|
||||
owner: repositoryOwner,
|
||||
repo: repositoryName,
|
||||
run_id: workflowRunId,
|
||||
per_page: paginationCount,
|
||||
page: currentPageNumber
|
||||
});
|
||||
for (const artifact of listArtifactResponse.artifacts) {
|
||||
artifacts.push({
|
||||
name: artifact.name,
|
||||
id: artifact.id,
|
||||
size: artifact.size_in_bytes,
|
||||
createdAt: artifact.created_at
|
||||
? new Date(artifact.created_at)
|
||||
: undefined
|
||||
});
|
||||
}
|
||||
}
|
||||
if (latest) {
|
||||
artifacts = filterLatest(artifacts);
|
||||
}
|
||||
(0, core_1.info)(`Found ${artifacts.length} artifact(s)`);
|
||||
return {
|
||||
artifacts
|
||||
};
|
||||
});
|
||||
}
|
||||
exports.listArtifactsPublic = listArtifactsPublic;
|
||||
function listArtifactsInternal(latest = false) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
|
||||
const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
|
||||
const req = {
|
||||
workflowRunBackendId,
|
||||
workflowJobRunBackendId
|
||||
};
|
||||
const res = yield artifactClient.ListArtifacts(req);
|
||||
let artifacts = res.artifacts.map(artifact => ({
|
||||
name: artifact.name,
|
||||
id: Number(artifact.databaseId),
|
||||
size: Number(artifact.size),
|
||||
createdAt: artifact.createdAt
|
||||
? generated_1.Timestamp.toDate(artifact.createdAt)
|
||||
: undefined
|
||||
}));
|
||||
if (latest) {
|
||||
artifacts = filterLatest(artifacts);
|
||||
}
|
||||
(0, core_1.info)(`Found ${artifacts.length} artifact(s)`);
|
||||
return {
|
||||
artifacts
|
||||
};
|
||||
});
|
||||
}
|
||||
exports.listArtifactsInternal = listArtifactsInternal;
|
||||
/**
|
||||
* Filters a list of artifacts to only include the latest artifact for each name
|
||||
* @param artifacts The artifacts to filter
|
||||
* @returns The filtered list of artifacts
|
||||
*/
|
||||
function filterLatest(artifacts) {
|
||||
artifacts.sort((a, b) => b.id - a.id);
|
||||
const latestArtifacts = [];
|
||||
const seenArtifactNames = new Set();
|
||||
for (const artifact of artifacts) {
|
||||
if (!seenArtifactNames.has(artifact.name)) {
|
||||
latestArtifacts.push(artifact);
|
||||
seenArtifactNames.add(artifact.name);
|
||||
}
|
||||
}
|
||||
return latestArtifacts;
|
||||
}
|
||||
//# sourceMappingURL=list-artifacts.js.map
|
||||
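For orientation only (not part of the vendored files): a minimal sketch of calling the compiled listArtifactsPublic helper above from a Node script. The run id, owner, repository and token values are placeholder assumptions; latest: true applies the filterLatest de-duplication shown above.

// Illustrative sketch, not vendored code. Run id, repo and token are placeholders.
const { listArtifactsPublic } = require('@actions/artifact/lib/internal/find/list-artifacts');

async function main() {
  const { artifacts } = await listArtifactsPublic(
    12345,                        // workflowRunId (placeholder)
    'dawidd6',                    // repositoryOwner
    'action-download-artifact',   // repositoryName
    process.env.GITHUB_TOKEN,     // token with actions:read
    true                          // latest: keep newest artifact per name on reruns
  );
  for (const artifact of artifacts) {
    console.log(`${artifact.name} (#${artifact.id}, ${artifact.size} bytes)`);
  }
}

main().catch(err => {
  console.error(err);
  process.exit(1);
});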
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/list-artifacts.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/list-artifacts.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"list-artifacts.js","sourceRoot":"","sources":["../../../src/internal/find/list-artifacts.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,wCAAkD;AAClD,4CAA0C;AAE1C,qDAAuD;AACvD,mDAA+C;AAC/C,qDAA0E;AAC1E,oEAAsD;AACtD,wDAA2C;AAE3C,2EAA2E;AAC3E,yCAAqD;AACrD,+CAA+D;AAE/D,oCAAoC;AACpC,MAAM,oBAAoB,GAAG,IAAI,CAAA;AACjC,MAAM,eAAe,GAAG,GAAG,CAAA;AAC3B,MAAM,gBAAgB,GAAG,oBAAoB,GAAG,eAAe,CAAA;AAE/D,SAAsB,mBAAmB,CACvC,aAAqB,EACrB,eAAuB,EACvB,cAAsB,EACtB,KAAa,EACb,MAAM,GAAG,KAAK;;QAEd,IAAA,WAAI,EACF,2CAA2C,aAAa,kBAAkB,eAAe,IAAI,cAAc,EAAE,CAC9G,CAAA;QAED,IAAI,SAAS,GAAe,EAAE,CAAA;QAC9B,MAAM,CAAC,SAAS,EAAE,WAAW,CAAC,GAAG,IAAA,+BAAe,EAAC,gBAAoB,CAAC,CAAA;QAEtE,MAAM,IAAI,GAAmB;YAC3B,GAAG,EAAE,SAAS;YACd,SAAS,EAAE,IAAA,+BAAkB,GAAE;YAC/B,QAAQ,EAAE,SAAS;YACnB,KAAK,EAAE,SAAS;YAChB,OAAO,EAAE,WAAW;SACrB,CAAA;QAED,MAAM,MAAM,GAAG,IAAA,mBAAU,EAAC,KAAK,EAAE,IAAI,EAAE,oBAAK,EAAE,+BAAU,CAAC,CAAA;QAEzD,IAAI,iBAAiB,GAAG,CAAC,CAAA;QACzB,MAAM,EAAC,IAAI,EAAE,oBAAoB,EAAC,GAChC,MAAM,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,wBAAwB,CAAC;YACjD,KAAK,EAAE,eAAe;YACtB,IAAI,EAAE,cAAc;YACpB,MAAM,EAAE,aAAa;YACrB,QAAQ,EAAE,eAAe;YACzB,IAAI,EAAE,iBAAiB;SACxB,CAAC,CAAA;QAEJ,IAAI,aAAa,GAAG,IAAI,CAAC,IAAI,CAC3B,oBAAoB,CAAC,WAAW,GAAG,eAAe,CACnD,CAAA;QACD,MAAM,kBAAkB,GAAG,oBAAoB,CAAC,WAAW,CAAA;QAC3D,IAAI,kBAAkB,GAAG,oBAAoB,EAAE;YAC7C,IAAA,cAAO,EACL,gBAAgB,aAAa,+EAA+E,oBAAoB,6BAA6B,CAC9J,CAAA;YACD,aAAa,GAAG,gBAAgB,CAAA;SACjC;QAED,8BAA8B;QAC9B,KAAK,MAAM,QAAQ,IAAI,oBAAoB,CAAC,SAAS,EAAE;YACrD,SAAS,CAAC,IAAI,CAAC;gBACb,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,EAAE,EAAE,QAAQ,CAAC,EAAE;gBACf,IAAI,EAAE,QAAQ,CAAC,aAAa;gBAC5B,SAAS,EAAE,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,SAAS;aAC3E,CAAC,CAAA;SACH;QAED,mCAAmC;QACnC,KACE,iBAAiB,EACjB,iBAAiB,GAAG,aAAa,EACjC,iBAAiB,EAAE,EACnB;YACA,iBAAiB,EAAE,CAAA;YACnB,IAAA,YAAK,EAAC,iBAAiB,iBAAiB,mBAAmB,CAAC,CAAA;YAE5D,MAAM,EAAC,IAAI,EAAE,oBAAoB,EAAC,GAChC,MAAM,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,wBAAwB,CAAC;gBACjD,KAAK,EAAE,eAAe;gBACtB,IAAI,EAAE,cAAc;gBACpB,MAAM,EAAE,aAAa;gBACrB,QAAQ,EAAE,eAAe;gBACzB,IAAI,EAAE,iBAAiB;aACxB,CAAC,CAAA;YAEJ,KAAK,MAAM,QAAQ,IAAI,oBAAoB,CAAC,SAAS,EAAE;gBACrD,SAAS,CAAC,IAAI,CAAC;oBACb,IAAI,EAAE,QAAQ,CAAC,IAAI;oBACnB,EAAE,EAAE,QAAQ,CAAC,EAAE;oBACf,IAAI,EAAE,QAAQ,CAAC,aAAa;oBAC5B,SAAS,EAAE,QAAQ,CAAC,UAAU;wBAC5B,CAAC,CAAC,IAAI,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC;wBAC/B,CAAC,CAAC,SAAS;iBACd,CAAC,CAAA;aACH;SACF;QAED,IAAI,MAAM,EAAE;YACV,SAAS,GAAG,YAAY,CAAC,SAAS,CAAC,CAAA;SACpC;QAED,IAAA,WAAI,EAAC,SAAS,SAAS,CAAC,MAAM,cAAc,CAAC,CAAA;QAE7C,OAAO;YACL,SAAS;SACV,CAAA;IACH,CAAC;CAAA;AA9FD,kDA8FC;AAED,SAAsB,qBAAqB,CACzC,MAAM,GAAG,KAAK;;QAEd,MAAM,cAAc,GAAG,IAAA,mDAA2B,GAAE,CAAA;QAEpD,MAAM,EAAC,oBAAoB,EAAE,uBAAuB,EAAC,GACnD,IAAA,6BAAsB,GAAE,CAAA;QAE1B,MAAM,GAAG,GAAyB;YAChC,oBAAoB;YACpB,uBAAuB;SACxB,CAAA;QAED,MAAM,GAAG,GAAG,MAAM,cAAc,CAAC,aAAa,CAAC,GAAG,CAAC,CAAA;QACnD,IAAI,SAAS,GAAe,GAAG,CAAC,SAAS,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;YACzD,IAAI,EAAE,QAAQ,CAAC,IAAI;YACnB,EAAE,EAAE,MAAM,CAAC,QAAQ,CAAC,UAAU,CAAC;YAC/B,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC;YAC3B,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC3B,CAAC,CAAC,qBAAS,CAAC,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC;gBACtC,CAAC,CAAC,SAAS;SACd,CAAC,CAAC,CAAA;QAEH,IAAI,MAAM,EAAE;YACV,SAAS,GAAG,YAAY,CAAC,SAAS,CAAC,CAAA;SACpC;QAED,IAAA,WAAI,EAAC,SAAS,SAAS,CAAC,MAAM,cAAc,CAAC,CAAA;QAE7C,OAAO;YACL,SAAS;SACV,CAAA;IACH,CAAC;CAAA;AAhCD,sDAgCC;AAED;;;;GAIG;AACH,SAAS,YAAY,CAAC,SAAqB;IACzC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC,CAAA;IACrC,MAAM,eAAe,GAAe,EAAE,CAAA;IACtC,MAAM,iBAAiB,GAAG,IAAI,GAAG,EAAU,CAAA;IAC3C,K
AAK,MAAM,QAAQ,IAAI,SAAS,EAAE;QAChC,IAAI,CAAC,iBAAiB,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;YACzC,eAAe,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;YAC9B,iBAAiB,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAA;SACrC;KACF;IACD,OAAO,eAAe,CAAA;AACxB,CAAC"}
|
||||
7
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/retry-options.d.ts
generated
vendored
Normal file
7
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/retry-options.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
import { OctokitOptions } from '@octokit/core/dist-types/types';
|
||||
import { RequestRequestOptions } from '@octokit/types';
|
||||
export type RetryOptions = {
|
||||
doNotRetry?: number[];
|
||||
enabled?: boolean;
|
||||
};
|
||||
export declare function getRetryOptions(defaultOptions: OctokitOptions, retries?: number, exemptStatusCodes?: number[]): [RetryOptions, RequestRequestOptions | undefined];
|
||||
50
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/retry-options.js
generated
vendored
Normal file
50
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/retry-options.js
generated
vendored
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getRetryOptions = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
// Defaults for fetching artifacts
|
||||
const defaultMaxRetryNumber = 5;
|
||||
const defaultExemptStatusCodes = [400, 401, 403, 404, 422]; // https://github.com/octokit/plugin-retry.js/blob/9a2443746c350b3beedec35cf26e197ea318a261/src/index.ts#L14
|
||||
function getRetryOptions(defaultOptions, retries = defaultMaxRetryNumber, exemptStatusCodes = defaultExemptStatusCodes) {
|
||||
var _a;
|
||||
if (retries <= 0) {
|
||||
return [{ enabled: false }, defaultOptions.request];
|
||||
}
|
||||
const retryOptions = {
|
||||
enabled: true
|
||||
};
|
||||
if (exemptStatusCodes.length > 0) {
|
||||
retryOptions.doNotRetry = exemptStatusCodes;
|
||||
}
|
||||
// The GitHub type has some defaults for `options.request`
|
||||
// see: https://github.com/actions/toolkit/blob/4fbc5c941a57249b19562015edbd72add14be93d/packages/github/src/utils.ts#L15
|
||||
// We pass these in here so they are not overridden.
|
||||
const requestOptions = Object.assign(Object.assign({}, defaultOptions.request), { retries });
|
||||
core.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? _a : 'octokit default: [400, 401, 403, 404, 422]'})`);
|
||||
return [retryOptions, requestOptions];
|
||||
}
|
||||
exports.getRetryOptions = getRetryOptions;
|
||||
//# sourceMappingURL=retry-options.js.map
|
||||
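A minimal sketch (not part of the vendored package) of how getRetryOptions above is meant to be combined with an Octokit client, mirroring the wiring already used in list-artifacts.js earlier in this commit; the token comes from a placeholder environment variable.

// Illustrative sketch, not vendored code.
const { getOctokit } = require('@actions/github');
const { defaults } = require('@actions/github/lib/utils');
const { retry } = require('@octokit/plugin-retry');
const { requestLog } = require('@octokit/plugin-request-log');
const { getRetryOptions } = require('@actions/artifact/lib/internal/find/retry-options');

// Defaults: up to 5 retries; 400/401/403/404/422 responses are never retried.
const [retryOpts, requestOpts] = getRetryOptions(defaults);

const octokit = getOctokit(
  process.env.GITHUB_TOKEN,                   // placeholder token
  { retry: retryOpts, request: requestOpts },
  retry,
  requestLog
);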
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/retry-options.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/find/retry-options.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"retry-options.js","sourceRoot":"","sources":["../../../src/internal/find/retry-options.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AASrC,kCAAkC;AAClC,MAAM,qBAAqB,GAAG,CAAC,CAAA;AAC/B,MAAM,wBAAwB,GAAG,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAA,CAAC,4GAA4G;AAEvK,SAAgB,eAAe,CAC7B,cAA8B,EAC9B,UAAkB,qBAAqB,EACvC,oBAA8B,wBAAwB;;IAEtD,IAAI,OAAO,IAAI,CAAC,EAAE;QAChB,OAAO,CAAC,EAAC,OAAO,EAAE,KAAK,EAAC,EAAE,cAAc,CAAC,OAAO,CAAC,CAAA;KAClD;IAED,MAAM,YAAY,GAAiB;QACjC,OAAO,EAAE,IAAI;KACd,CAAA;IAED,IAAI,iBAAiB,CAAC,MAAM,GAAG,CAAC,EAAE;QAChC,YAAY,CAAC,UAAU,GAAG,iBAAiB,CAAA;KAC5C;IAED,0DAA0D;IAC1D,yHAAyH;IACzH,oDAAoD;IACpD,MAAM,cAAc,mCACf,cAAc,CAAC,OAAO,KACzB,OAAO,GACR,CAAA;IAED,IAAI,CAAC,KAAK,CACR,4CACE,cAAc,CAAC,OACjB,+BACE,MAAA,YAAY,CAAC,UAAU,mCAAI,4CAC7B,GAAG,CACJ,CAAA;IAED,OAAO,CAAC,YAAY,EAAE,cAAc,CAAC,CAAA;AACvC,CAAC;AAlCD,0CAkCC"}
|
||||
6
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/artifact-twirp-client.d.ts
generated
vendored
Normal file
6
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/artifact-twirp-client.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
import { ArtifactServiceClientJSON } from '../../generated';
|
||||
export declare function internalArtifactTwirpClient(options?: {
|
||||
maxAttempts?: number;
|
||||
retryIntervalMs?: number;
|
||||
retryMultiplier?: number;
|
||||
}): ArtifactServiceClientJSON;
|
||||
153
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/artifact-twirp-client.js
generated
vendored
Normal file
153
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/artifact-twirp-client.js
generated
vendored
Normal file
|
|
@ -0,0 +1,153 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.internalArtifactTwirpClient = void 0;
|
||||
const http_client_1 = require("@actions/http-client");
|
||||
const auth_1 = require("@actions/http-client/lib/auth");
|
||||
const core_1 = require("@actions/core");
|
||||
const generated_1 = require("../../generated");
|
||||
const config_1 = require("./config");
|
||||
const user_agent_1 = require("./user-agent");
|
||||
const errors_1 = require("./errors");
|
||||
class ArtifactHttpClient {
|
||||
constructor(userAgent, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) {
|
||||
this.maxAttempts = 5;
|
||||
this.baseRetryIntervalMilliseconds = 3000;
|
||||
this.retryMultiplier = 1.5;
|
||||
const token = (0, config_1.getRuntimeToken)();
|
||||
this.baseUrl = (0, config_1.getResultsServiceUrl)();
|
||||
if (maxAttempts) {
|
||||
this.maxAttempts = maxAttempts;
|
||||
}
|
||||
if (baseRetryIntervalMilliseconds) {
|
||||
this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds;
|
||||
}
|
||||
if (retryMultiplier) {
|
||||
this.retryMultiplier = retryMultiplier;
|
||||
}
|
||||
this.httpClient = new http_client_1.HttpClient(userAgent, [
|
||||
new auth_1.BearerCredentialHandler(token)
|
||||
]);
|
||||
}
|
||||
// This function satisfies the Rpc interface. It is compatible with the JSON
|
||||
// generated client.
|
||||
request(service, method, contentType, data) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href;
|
||||
(0, core_1.debug)(`[Request] ${method} ${url}`);
|
||||
const headers = {
|
||||
'Content-Type': contentType
|
||||
};
|
||||
try {
|
||||
const { body } = yield this.retryableRequest(() => __awaiter(this, void 0, void 0, function* () { return this.httpClient.post(url, JSON.stringify(data), headers); }));
|
||||
return body;
|
||||
}
|
||||
catch (error) {
|
||||
throw new Error(`Failed to ${method}: ${error.message}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
retryableRequest(operation) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let attempt = 0;
|
||||
let errorMessage = '';
|
||||
let rawBody = '';
|
||||
while (attempt < this.maxAttempts) {
|
||||
let isRetryable = false;
|
||||
try {
|
||||
const response = yield operation();
|
||||
const statusCode = response.message.statusCode;
|
||||
rawBody = yield response.readBody();
|
||||
(0, core_1.debug)(`[Response] - ${response.message.statusCode}`);
|
||||
(0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`);
|
||||
const body = JSON.parse(rawBody);
|
||||
(0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`);
|
||||
if (this.isSuccessStatusCode(statusCode)) {
|
||||
return { response, body };
|
||||
}
|
||||
isRetryable = this.isRetryableHttpStatusCode(statusCode);
|
||||
errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`;
|
||||
if (body.msg) {
|
||||
if (errors_1.UsageError.isUsageErrorMessage(body.msg)) {
|
||||
throw new errors_1.UsageError();
|
||||
}
|
||||
errorMessage = `${errorMessage}: ${body.msg}`;
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
if (error instanceof SyntaxError) {
|
||||
(0, core_1.debug)(`Raw Body: ${rawBody}`);
|
||||
throw error;
|
||||
}
|
||||
if (error instanceof errors_1.UsageError) {
|
||||
throw error;
|
||||
}
|
||||
if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
|
||||
throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
|
||||
}
|
||||
isRetryable = true;
|
||||
errorMessage = error.message;
|
||||
}
|
||||
if (!isRetryable) {
|
||||
throw new Error(`Received non-retryable error: ${errorMessage}`);
|
||||
}
|
||||
if (attempt + 1 === this.maxAttempts) {
|
||||
throw new Error(`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`);
|
||||
}
|
||||
const retryTimeMilliseconds = this.getExponentialRetryTimeMilliseconds(attempt);
|
||||
(0, core_1.info)(`Attempt ${attempt + 1} of ${this.maxAttempts} failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`);
|
||||
yield this.sleep(retryTimeMilliseconds);
|
||||
attempt++;
|
||||
}
|
||||
throw new Error(`Request failed`);
|
||||
});
|
||||
}
|
||||
isSuccessStatusCode(statusCode) {
|
||||
if (!statusCode)
|
||||
return false;
|
||||
return statusCode >= 200 && statusCode < 300;
|
||||
}
|
||||
isRetryableHttpStatusCode(statusCode) {
|
||||
if (!statusCode)
|
||||
return false;
|
||||
const retryableStatusCodes = [
|
||||
http_client_1.HttpCodes.BadGateway,
|
||||
http_client_1.HttpCodes.GatewayTimeout,
|
||||
http_client_1.HttpCodes.InternalServerError,
|
||||
http_client_1.HttpCodes.ServiceUnavailable,
|
||||
http_client_1.HttpCodes.TooManyRequests
|
||||
];
|
||||
return retryableStatusCodes.includes(statusCode);
|
||||
}
|
||||
sleep(milliseconds) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return new Promise(resolve => setTimeout(resolve, milliseconds));
|
||||
});
|
||||
}
|
||||
getExponentialRetryTimeMilliseconds(attempt) {
|
||||
if (attempt < 0) {
|
||||
throw new Error('attempt should be a positive integer');
|
||||
}
|
||||
if (attempt === 0) {
|
||||
return this.baseRetryIntervalMilliseconds;
|
||||
}
|
||||
const minTime = this.baseRetryIntervalMilliseconds * Math.pow(this.retryMultiplier, attempt);
|
||||
const maxTime = minTime * this.retryMultiplier;
|
||||
// returns a random number between minTime and maxTime (exclusive)
|
||||
return Math.trunc(Math.random() * (maxTime - minTime) + minTime);
|
||||
}
|
||||
}
|
||||
function internalArtifactTwirpClient(options) {
|
||||
const client = new ArtifactHttpClient((0, user_agent_1.getUserAgentString)(), options === null || options === void 0 ? void 0 : options.maxAttempts, options === null || options === void 0 ? void 0 : options.retryIntervalMs, options === null || options === void 0 ? void 0 : options.retryMultiplier);
|
||||
return new generated_1.ArtifactServiceClientJSON(client);
|
||||
}
|
||||
exports.internalArtifactTwirpClient = internalArtifactTwirpClient;
|
||||
//# sourceMappingURL=artifact-twirp-client.js.map
|
||||
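A small sketch (not part of the vendored file) of the retry timing produced by getExponentialRetryTimeMilliseconds above with its defaults (base interval 3000 ms, multiplier 1.5, 5 attempts).

// Illustrative sketch, not vendored code: the backoff windows for the defaults above.
const base = 3000;
const multiplier = 1.5;
for (let attempt = 0; attempt < 5; attempt++) {
  if (attempt === 0) {
    console.log(`attempt 0: ${base} ms`);
    continue;
  }
  const minTime = base * Math.pow(multiplier, attempt);
  const maxTime = minTime * multiplier;
  // the real client picks a random delay in [minTime, maxTime)
  console.log(`attempt ${attempt}: between ${minTime} and ${maxTime} ms`);
}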
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/artifact-twirp-client.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/artifact-twirp-client.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"artifact-twirp-client.js","sourceRoot":"","sources":["../../../src/internal/shared/artifact-twirp-client.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,sDAA8E;AAC9E,wDAAqE;AACrE,wCAAyC;AACzC,+CAAyD;AACzD,qCAA8D;AAC9D,6CAA+C;AAC/C,qCAAiD;AAYjD,MAAM,kBAAkB;IAOtB,YACE,SAAiB,EACjB,WAAoB,EACpB,6BAAsC,EACtC,eAAwB;QARlB,gBAAW,GAAG,CAAC,CAAA;QACf,kCAA6B,GAAG,IAAI,CAAA;QACpC,oBAAe,GAAG,GAAG,CAAA;QAQ3B,MAAM,KAAK,GAAG,IAAA,wBAAe,GAAE,CAAA;QAC/B,IAAI,CAAC,OAAO,GAAG,IAAA,6BAAoB,GAAE,CAAA;QACrC,IAAI,WAAW,EAAE;YACf,IAAI,CAAC,WAAW,GAAG,WAAW,CAAA;SAC/B;QACD,IAAI,6BAA6B,EAAE;YACjC,IAAI,CAAC,6BAA6B,GAAG,6BAA6B,CAAA;SACnE;QACD,IAAI,eAAe,EAAE;YACnB,IAAI,CAAC,eAAe,GAAG,eAAe,CAAA;SACvC;QAED,IAAI,CAAC,UAAU,GAAG,IAAI,wBAAU,CAAC,SAAS,EAAE;YAC1C,IAAI,8BAAuB,CAAC,KAAK,CAAC;SACnC,CAAC,CAAA;IACJ,CAAC;IAED,4EAA4E;IAC5E,yBAAyB;IACnB,OAAO,CACX,OAAe,EACf,MAAc,EACd,WAAwD,EACxD,IAAyB;;YAEzB,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,UAAU,OAAO,IAAI,MAAM,EAAE,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,IAAI,CAAA;YACrE,IAAA,YAAK,EAAC,aAAa,MAAM,IAAI,GAAG,EAAE,CAAC,CAAA;YACnC,MAAM,OAAO,GAAG;gBACd,cAAc,EAAE,WAAW;aAC5B,CAAA;YACD,IAAI;gBACF,MAAM,EAAC,IAAI,EAAC,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,GAAS,EAAE,gDACpD,OAAA,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,OAAO,CAAC,CAAA,GAAA,CACzD,CAAA;gBAED,OAAO,IAAI,CAAA;aACZ;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,aAAa,MAAM,KAAK,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACzD;QACH,CAAC;KAAA;IAEK,gBAAgB,CACpB,SAA4C;;YAE5C,IAAI,OAAO,GAAG,CAAC,CAAA;YACf,IAAI,YAAY,GAAG,EAAE,CAAA;YACrB,IAAI,OAAO,GAAG,EAAE,CAAA;YAChB,OAAO,OAAO,GAAG,IAAI,CAAC,WAAW,EAAE;gBACjC,IAAI,WAAW,GAAG,KAAK,CAAA;gBAEvB,IAAI;oBACF,MAAM,QAAQ,GAAG,MAAM,SAAS,EAAE,CAAA;oBAClC,MAAM,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAA;oBAC9C,OAAO,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBACnC,IAAA,YAAK,EAAC,gBAAgB,QAAQ,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;oBACpD,IAAA,YAAK,EAAC,YAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAA;oBACtE,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;oBAChC,IAAA,YAAK,EAAC,SAAS,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAA;oBAC/C,IAAI,IAAI,CAAC,mBAAmB,CAAC,UAAU,CAAC,EAAE;wBACxC,OAAO,EAAC,QAAQ,EAAE,IAAI,EAAC,CAAA;qBACxB;oBACD,WAAW,GAAG,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,CAAA;oBACxD,YAAY,GAAG,oBAAoB,UAAU,KAAK,QAAQ,CAAC,OAAO,CAAC,aAAa,EAAE,CAAA;oBAClF,IAAI,IAAI,CAAC,GAAG,EAAE;wBACZ,IAAI,mBAAU,CAAC,mBAAmB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;4BAC5C,MAAM,IAAI,mBAAU,EAAE,CAAA;yBACvB;wBAED,YAAY,GAAG,GAAG,YAAY,KAAK,IAAI,CAAC,GAAG,EAAE,CAAA;qBAC9C;iBACF;gBAAC,OAAO,KAAK,EAAE;oBACd,IAAI,KAAK,YAAY,WAAW,EAAE;wBAChC,IAAA,YAAK,EAAC,aAAa,OAAO,EAAE,CAAC,CAAA;wBAC7B,MAAM,KAAK,CAAA;qBACZ;oBAED,IAAI,KAAK,YAAY,mBAAU,EAAE;wBAC/B,MAAM,KAAK,CAAA;qBACZ;oBAED,IAAI,qBAAY,CAAC,kBAAkB,CAAC,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,IAAI,CAAC,EAAE;wBAChD,MAAM,IAAI,qBAAY,CAAC,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,IAAI,CAAC,CAAA;qBACpC;oBAED,WAAW,GAAG,IAAI,CAAA;oBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;iBAC7B;gBAED,IAAI,CAAC,WAAW,EAAE;oBAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,YAAY,EAAE,CAAC,CAAA;iBACjE;gBAED,IAAI,OAAO,GAAG,CAAC,KAAK,IAAI,CAAC,WAAW,EAAE;oBACpC,MAAM,IAAI,KAAK,CACb,gCAAgC,IAAI,CAAC,WAAW,cAAc,YAAY,EAAE,CAC7E,CAAA;iBACF;gBAED,MAAM,qBAAqB,GACzB,IAAI,CAAC,mCAAmC,CAAC,OAAO,CAAC,CAAA;gBACnD,IAAA,WAAI,EACF,WAAW,OAAO,GAAG,CAAC,OACpB,IAAI,CAAC,WACP,uBAAuB,YAAY,yBAAyB,qBAAqB,QAAQ,CAC1F,CAAA;gBACD,MAAM,IAAI,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAA;gBACvC,OAAO,EAAE,CAAA;aACV;YAED,MAAM,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAA;QACnC,CAAC;KAAA;IAED,mBAAmB,CAAC,UAAmB;QACrC,IAAI,CAAC,UAAU;YAAE,OAAO,KAAK,CAAA;QAC7B,OAAO,UAAU,IAAI,GAAG,IAAI,UAAU,GA
AG,GAAG,CAAA;IAC9C,CAAC;IAED,yBAAyB,CAAC,UAAmB;QAC3C,IAAI,CAAC,UAAU;YAAE,OAAO,KAAK,CAAA;QAE7B,MAAM,oBAAoB,GAAG;YAC3B,uBAAS,CAAC,UAAU;YACpB,uBAAS,CAAC,cAAc;YACxB,uBAAS,CAAC,mBAAmB;YAC7B,uBAAS,CAAC,kBAAkB;YAC5B,uBAAS,CAAC,eAAe;SAC1B,CAAA;QAED,OAAO,oBAAoB,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;IAClD,CAAC;IAEK,KAAK,CAAC,YAAoB;;YAC9B,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAA;QAClE,CAAC;KAAA;IAED,mCAAmC,CAAC,OAAe;QACjD,IAAI,OAAO,GAAG,CAAC,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAA;SACxD;QAED,IAAI,OAAO,KAAK,CAAC,EAAE;YACjB,OAAO,IAAI,CAAC,6BAA6B,CAAA;SAC1C;QAED,MAAM,OAAO,GACX,IAAI,CAAC,6BAA6B,GAAG,SAAA,IAAI,CAAC,eAAe,EAAI,OAAO,CAAA,CAAA;QACtE,MAAM,OAAO,GAAG,OAAO,GAAG,IAAI,CAAC,eAAe,CAAA;QAE9C,kEAAkE;QAClE,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,GAAG,OAAO,CAAC,CAAA;IAClE,CAAC;CACF;AAED,SAAgB,2BAA2B,CAAC,OAI3C;IACC,MAAM,MAAM,GAAG,IAAI,kBAAkB,CACnC,IAAA,+BAAkB,GAAE,EACpB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EACpB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe,EACxB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe,CACzB,CAAA;IACD,OAAO,IAAI,qCAAyB,CAAC,MAAM,CAAC,CAAA;AAC9C,CAAC;AAZD,kEAYC"}
|
||||
6
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/config.d.ts
generated
vendored
Normal file
6
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/config.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
export declare function getUploadChunkSize(): number;
|
||||
export declare function getRuntimeToken(): string;
|
||||
export declare function getResultsServiceUrl(): string;
|
||||
export declare function isGhes(): boolean;
|
||||
export declare function getGitHubWorkspaceDir(): string;
|
||||
export declare function getConcurrency(): number;
|
||||
58
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/config.js
generated
vendored
Normal file
58
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/config.js
generated
vendored
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
|
||||
const os_1 = __importDefault(require("os"));
|
||||
// Used for controlling the highWaterMark value of the zip that is being streamed
|
||||
// The same value is used as the chunk size that is use during upload to blob storage
|
||||
function getUploadChunkSize() {
|
||||
return 8 * 1024 * 1024; // 8 MB Chunks
|
||||
}
|
||||
exports.getUploadChunkSize = getUploadChunkSize;
|
||||
function getRuntimeToken() {
|
||||
const token = process.env['ACTIONS_RUNTIME_TOKEN'];
|
||||
if (!token) {
|
||||
throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable');
|
||||
}
|
||||
return token;
|
||||
}
|
||||
exports.getRuntimeToken = getRuntimeToken;
|
||||
function getResultsServiceUrl() {
|
||||
const resultsUrl = process.env['ACTIONS_RESULTS_URL'];
|
||||
if (!resultsUrl) {
|
||||
throw new Error('Unable to get the ACTIONS_RESULTS_URL env variable');
|
||||
}
|
||||
return new URL(resultsUrl).origin;
|
||||
}
|
||||
exports.getResultsServiceUrl = getResultsServiceUrl;
|
||||
function isGhes() {
|
||||
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
|
||||
const hostname = ghUrl.hostname.trimEnd().toUpperCase();
|
||||
const isGitHubHost = hostname === 'GITHUB.COM';
|
||||
const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
|
||||
return !isGitHubHost && !isGheHost;
|
||||
}
|
||||
exports.isGhes = isGhes;
|
||||
function getGitHubWorkspaceDir() {
|
||||
const ghWorkspaceDir = process.env['GITHUB_WORKSPACE'];
|
||||
if (!ghWorkspaceDir) {
|
||||
throw new Error('Unable to get the GITHUB_WORKSPACE env variable');
|
||||
}
|
||||
return ghWorkspaceDir;
|
||||
}
|
||||
exports.getGitHubWorkspaceDir = getGitHubWorkspaceDir;
|
||||
// Mimics behavior of azcopy: https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize
|
||||
// If your machine has fewer than 5 CPUs, then the value of this variable is set to 32.
|
||||
// Otherwise, the default value is equal to 16 multiplied by the number of CPUs. The maximum value of this variable is 300.
|
||||
function getConcurrency() {
|
||||
const numCPUs = os_1.default.cpus().length;
|
||||
if (numCPUs <= 4) {
|
||||
return 32;
|
||||
}
|
||||
const concurrency = 16 * numCPUs;
|
||||
return concurrency > 300 ? 300 : concurrency;
|
||||
}
|
||||
exports.getConcurrency = getConcurrency;
|
||||
//# sourceMappingURL=config.js.map
|
||||
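A tiny sketch (not part of the vendored file) of what getConcurrency above returns for a few CPU counts, following the azcopy-style rule in its comment (4 or fewer CPUs gives 32, otherwise 16 per CPU capped at 300).

// Illustrative sketch, not vendored code.
const concurrencyFor = numCPUs =>
  numCPUs <= 4 ? 32 : Math.min(16 * numCPUs, 300);

console.log(concurrencyFor(2));  // 32
console.log(concurrencyFor(8));  // 128
console.log(concurrencyFor(32)); // 300 (capped)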
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/config.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/config.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"config.js","sourceRoot":"","sources":["../../../src/internal/shared/config.ts"],"names":[],"mappings":";;;;;;AAAA,4CAAmB;AAEnB,iFAAiF;AACjF,qFAAqF;AACrF,SAAgB,kBAAkB;IAChC,OAAO,CAAC,GAAG,IAAI,GAAG,IAAI,CAAA,CAAC,cAAc;AACvC,CAAC;AAFD,gDAEC;AAED,SAAgB,eAAe;IAC7B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,EAAE;QACV,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAA;KACxE;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAND,0CAMC;AAED,SAAgB,oBAAoB;IAClC,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAA;IACrD,IAAI,CAAC,UAAU,EAAE;QACf,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAA;KACtE;IAED,OAAO,IAAI,GAAG,CAAC,UAAU,CAAC,CAAC,MAAM,CAAA;AACnC,CAAC;AAPD,oDAOC;AAED,SAAgB,MAAM;IACpB,MAAM,KAAK,GAAG,IAAI,GAAG,CACnB,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,oBAAoB,CACzD,CAAA;IAED,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,WAAW,EAAE,CAAA;IACvD,MAAM,YAAY,GAAG,QAAQ,KAAK,YAAY,CAAA;IAC9C,MAAM,SAAS,GACb,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,QAAQ,CAAC,QAAQ,CAAC,gBAAgB,CAAC,CAAA;IAEtE,OAAO,CAAC,YAAY,IAAI,CAAC,SAAS,CAAA;AACpC,CAAC;AAXD,wBAWC;AAED,SAAgB,qBAAqB;IACnC,MAAM,cAAc,GAAG,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAA;IACtD,IAAI,CAAC,cAAc,EAAE;QACnB,MAAM,IAAI,KAAK,CAAC,iDAAiD,CAAC,CAAA;KACnE;IACD,OAAO,cAAc,CAAA;AACvB,CAAC;AAND,sDAMC;AAED,gHAAgH;AAChH,uFAAuF;AACvF,2HAA2H;AAC3H,SAAgB,cAAc;IAC5B,MAAM,OAAO,GAAG,YAAE,CAAC,IAAI,EAAE,CAAC,MAAM,CAAA;IAEhC,IAAI,OAAO,IAAI,CAAC,EAAE;QAChB,OAAO,EAAE,CAAA;KACV;IAED,MAAM,WAAW,GAAG,EAAE,GAAG,OAAO,CAAA;IAChC,OAAO,WAAW,GAAG,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,CAAA;AAC9C,CAAC;AATD,wCASC"}
|
||||
22
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/errors.d.ts
generated
vendored
Normal file
22
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/errors.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
export declare class FilesNotFoundError extends Error {
|
||||
files: string[];
|
||||
constructor(files?: string[]);
|
||||
}
|
||||
export declare class InvalidResponseError extends Error {
|
||||
constructor(message: string);
|
||||
}
|
||||
export declare class ArtifactNotFoundError extends Error {
|
||||
constructor(message?: string);
|
||||
}
|
||||
export declare class GHESNotSupportedError extends Error {
|
||||
constructor(message?: string);
|
||||
}
|
||||
export declare class NetworkError extends Error {
|
||||
code: string;
|
||||
constructor(code: string);
|
||||
static isNetworkErrorCode: (code?: string) => boolean;
|
||||
}
|
||||
export declare class UsageError extends Error {
|
||||
constructor();
|
||||
static isUsageErrorMessage: (msg?: string) => boolean;
|
||||
}
|
||||
70
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/errors.js
generated
vendored
Normal file
70
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/errors.js
generated
vendored
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.UsageError = exports.NetworkError = exports.GHESNotSupportedError = exports.ArtifactNotFoundError = exports.InvalidResponseError = exports.FilesNotFoundError = void 0;
|
||||
class FilesNotFoundError extends Error {
|
||||
constructor(files = []) {
|
||||
let message = 'No files were found to upload';
|
||||
if (files.length > 0) {
|
||||
message += `: ${files.join(', ')}`;
|
||||
}
|
||||
super(message);
|
||||
this.files = files;
|
||||
this.name = 'FilesNotFoundError';
|
||||
}
|
||||
}
|
||||
exports.FilesNotFoundError = FilesNotFoundError;
|
||||
class InvalidResponseError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = 'InvalidResponseError';
|
||||
}
|
||||
}
|
||||
exports.InvalidResponseError = InvalidResponseError;
|
||||
class ArtifactNotFoundError extends Error {
|
||||
constructor(message = 'Artifact not found') {
|
||||
super(message);
|
||||
this.name = 'ArtifactNotFoundError';
|
||||
}
|
||||
}
|
||||
exports.ArtifactNotFoundError = ArtifactNotFoundError;
|
||||
class GHESNotSupportedError extends Error {
|
||||
constructor(message = '@actions/artifact v2.0.0+, upload-artifact@v4+ and download-artifact@v4+ are not currently supported on GHES.') {
|
||||
super(message);
|
||||
this.name = 'GHESNotSupportedError';
|
||||
}
|
||||
}
|
||||
exports.GHESNotSupportedError = GHESNotSupportedError;
|
||||
class NetworkError extends Error {
|
||||
constructor(code) {
|
||||
const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`;
|
||||
super(message);
|
||||
this.code = code;
|
||||
this.name = 'NetworkError';
|
||||
}
|
||||
}
|
||||
exports.NetworkError = NetworkError;
|
||||
NetworkError.isNetworkErrorCode = (code) => {
|
||||
if (!code)
|
||||
return false;
|
||||
return [
|
||||
'ECONNRESET',
|
||||
'ENOTFOUND',
|
||||
'ETIMEDOUT',
|
||||
'ECONNREFUSED',
|
||||
'EHOSTUNREACH'
|
||||
].includes(code);
|
||||
};
|
||||
class UsageError extends Error {
|
||||
constructor() {
|
||||
const message = `Artifact storage quota has been hit. Unable to upload any new artifacts. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`;
|
||||
super(message);
|
||||
this.name = 'UsageError';
|
||||
}
|
||||
}
|
||||
exports.UsageError = UsageError;
|
||||
UsageError.isUsageErrorMessage = (msg) => {
|
||||
if (!msg)
|
||||
return false;
|
||||
return msg.includes('insufficient usage');
|
||||
};
|
||||
//# sourceMappingURL=errors.js.map
|
||||
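A short sketch (not part of the vendored file) of how a caller might distinguish the error classes defined above; the describeFailure helper is hypothetical.

// Illustrative sketch, not vendored code. describeFailure is a hypothetical helper.
const {
  ArtifactNotFoundError,
  NetworkError,
  UsageError
} = require('@actions/artifact/lib/internal/shared/errors');

function describeFailure(err) {
  if (err instanceof ArtifactNotFoundError) return 'artifact missing or expired';
  if (err instanceof NetworkError) return `network failure (${err.code})`;
  if (err instanceof UsageError) return 'artifact storage quota exhausted';
  return `unexpected error: ${err.message}`;
}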
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/errors.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/errors.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"errors.js","sourceRoot":"","sources":["../../../src/internal/shared/errors.ts"],"names":[],"mappings":";;;AAAA,MAAa,kBAAmB,SAAQ,KAAK;IAG3C,YAAY,QAAkB,EAAE;QAC9B,IAAI,OAAO,GAAG,+BAA+B,CAAA;QAC7C,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;YACpB,OAAO,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAA;SACnC;QAED,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;QAClB,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAA;IAClC,CAAC;CACF;AAbD,gDAaC;AAED,MAAa,oBAAqB,SAAQ,KAAK;IAC7C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,sBAAsB,CAAA;IACpC,CAAC;CACF;AALD,oDAKC;AAED,MAAa,qBAAsB,SAAQ,KAAK;IAC9C,YAAY,OAAO,GAAG,oBAAoB;QACxC,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,uBAAuB,CAAA;IACrC,CAAC;CACF;AALD,sDAKC;AAED,MAAa,qBAAsB,SAAQ,KAAK;IAC9C,YACE,OAAO,GAAG,+GAA+G;QAEzH,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,uBAAuB,CAAA;IACrC,CAAC;CACF;AAPD,sDAOC;AAED,MAAa,YAAa,SAAQ,KAAK;IAGrC,YAAY,IAAY;QACtB,MAAM,OAAO,GAAG,2BAA2B,IAAI,kRAAkR,CAAA;QACjU,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,GAAG,cAAc,CAAA;IAC5B,CAAC;;AARH,oCAoBC;AAVQ,+BAAkB,GAAG,CAAC,IAAa,EAAW,EAAE;IACrD,IAAI,CAAC,IAAI;QAAE,OAAO,KAAK,CAAA;IACvB,OAAO;QACL,YAAY;QACZ,WAAW;QACX,WAAW;QACX,cAAc;QACd,cAAc;KACf,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAA;AAClB,CAAC,CAAA;AAGH,MAAa,UAAW,SAAQ,KAAK;IACnC;QACE,MAAM,OAAO,GAAG,gSAAgS,CAAA;QAChT,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,YAAY,CAAA;IAC1B,CAAC;;AALH,gCAWC;AAJQ,8BAAmB,GAAG,CAAC,GAAY,EAAW,EAAE;IACrD,IAAI,CAAC,GAAG;QAAE,OAAO,KAAK,CAAA;IACtB,OAAO,GAAG,CAAC,QAAQ,CAAC,oBAAoB,CAAC,CAAA;AAC3C,CAAC,CAAA"}
|
||||
145
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/interfaces.d.ts
generated
vendored
Normal file
145
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/interfaces.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,145 @@
|
|||
/**
|
||||
* Response from the server when an artifact is uploaded
|
||||
*/
|
||||
export interface UploadArtifactResponse {
|
||||
/**
|
||||
* Total size of the artifact in bytes. Not provided if no artifact was uploaded
|
||||
*/
|
||||
size?: number;
|
||||
/**
|
||||
* The id of the artifact that was created. Not provided if no artifact was uploaded
|
||||
* This ID can be used as input to other APIs to download, delete or get more information about an artifact: https://docs.github.com/en/rest/actions/artifacts
|
||||
*/
|
||||
id?: number;
|
||||
}
|
||||
/**
|
||||
* Options for uploading an artifact
|
||||
*/
|
||||
export interface UploadArtifactOptions {
|
||||
/**
|
||||
* Duration after which artifact will expire in days.
|
||||
*
|
||||
* By default artifact expires after 90 days:
|
||||
* https://docs.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts#downloading-and-deleting-artifacts-after-a-workflow-run-is-complete
|
||||
*
|
||||
* Use this option to override the default expiry.
|
||||
*
|
||||
* Min value: 1
|
||||
* Max value: 90 unless changed by repository setting
|
||||
*
|
||||
* If this is set to a greater value than the retention settings allowed, the retention on artifacts
|
||||
* will be reduced to match the max value allowed on server, and the upload process will continue. An
|
||||
* input of 0 assumes default retention setting.
|
||||
*/
|
||||
retentionDays?: number;
|
||||
/**
|
||||
* The level of compression for Zlib to be applied to the artifact archive.
|
||||
* The value can range from 0 to 9:
|
||||
* - 0: No compression
|
||||
* - 1: Best speed
|
||||
* - 6: Default compression (same as GNU Gzip)
|
||||
* - 9: Best compression
|
||||
* Higher levels will result in better compression, but will take longer to complete.
|
||||
* For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
|
||||
*/
|
||||
compressionLevel?: number;
|
||||
}
|
||||
/**
|
||||
* Response from the server when getting an artifact
|
||||
*/
|
||||
export interface GetArtifactResponse {
|
||||
/**
|
||||
* Metadata about the artifact that was found
|
||||
*/
|
||||
artifact: Artifact;
|
||||
}
|
||||
/**
|
||||
* Options for listing artifacts
|
||||
*/
|
||||
export interface ListArtifactsOptions {
|
||||
/**
|
||||
* Filter the workflow run's artifacts to the latest by name
|
||||
* In the case of reruns, this can be useful to avoid duplicates
|
||||
*/
|
||||
latest?: boolean;
|
||||
}
|
||||
/**
|
||||
* Response from the server when listing artifacts
|
||||
*/
|
||||
export interface ListArtifactsResponse {
|
||||
/**
|
||||
* A list of artifacts that were found
|
||||
*/
|
||||
artifacts: Artifact[];
|
||||
}
|
||||
/**
|
||||
* Response from the server when downloading an artifact
|
||||
*/
|
||||
export interface DownloadArtifactResponse {
|
||||
/**
|
||||
* The path where the artifact was downloaded to
|
||||
*/
|
||||
downloadPath?: string;
|
||||
}
|
||||
/**
|
||||
* Options for downloading an artifact
|
||||
*/
|
||||
export interface DownloadArtifactOptions {
|
||||
/**
|
||||
* Denotes where the artifact will be downloaded to. If not specified then the artifact is downloaded to GITHUB_WORKSPACE
|
||||
*/
|
||||
path?: string;
|
||||
}
|
||||
/**
|
||||
* An Actions Artifact
|
||||
*/
|
||||
export interface Artifact {
|
||||
/**
|
||||
* The name of the artifact
|
||||
*/
|
||||
name: string;
|
||||
/**
|
||||
* The ID of the artifact
|
||||
*/
|
||||
id: number;
|
||||
/**
|
||||
* The size of the artifact in bytes
|
||||
*/
|
||||
size: number;
|
||||
/**
|
||||
* The time when the artifact was created
|
||||
*/
|
||||
createdAt?: Date;
|
||||
}
|
||||
export interface FindOptions {
|
||||
/**
|
||||
* The criteria for finding Artifact(s) out of the scope of the current run.
|
||||
*/
|
||||
findBy?: {
|
||||
/**
|
||||
* Token with actions:read permissions
|
||||
*/
|
||||
token: string;
|
||||
/**
|
||||
* WorkflowRun of the artifact(s) to lookup
|
||||
*/
|
||||
workflowRunId: number;
|
||||
/**
|
||||
* Repository owner (eg. 'actions')
|
||||
*/
|
||||
repositoryOwner: string;
|
||||
/**
|
||||
* Repository name (eg. 'toolkit')
|
||||
*/
|
||||
repositoryName: string;
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Response from the server when deleting an artifact
|
||||
*/
|
||||
export interface DeleteArtifactResponse {
|
||||
/**
|
||||
* The id of the artifact that was deleted
|
||||
*/
|
||||
id: number;
|
||||
}
|
||||
3
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/interfaces.js
generated
vendored
Normal file
3
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/interfaces.js
generated
vendored
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
//# sourceMappingURL=interfaces.js.map
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/interfaces.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/interfaces.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../../../src/internal/shared/interfaces.ts"],"names":[],"mappings":""}
|
||||
4
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/user-agent.d.ts
generated
vendored
Normal file
4
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/user-agent.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
/**
|
||||
* Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
|
||||
*/
|
||||
export declare function getUserAgentString(): string;
|
||||
13
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/user-agent.js
generated
vendored
Normal file
13
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/user-agent.js
generated
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getUserAgentString = void 0;
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports
|
||||
const packageJson = require('../../../package.json');
|
||||
/**
|
||||
* Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package
|
||||
*/
|
||||
function getUserAgentString() {
|
||||
return `@actions/artifact-${packageJson.version}`;
|
||||
}
|
||||
exports.getUserAgentString = getUserAgentString;
|
||||
//# sourceMappingURL=user-agent.js.map
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/user-agent.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/user-agent.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"user-agent.js","sourceRoot":"","sources":["../../../src/internal/shared/user-agent.ts"],"names":[],"mappings":";;;AAAA,qGAAqG;AACrG,MAAM,WAAW,GAAG,OAAO,CAAC,uBAAuB,CAAC,CAAA;AAEpD;;GAEG;AACH,SAAgB,kBAAkB;IAChC,OAAO,qBAAqB,WAAW,CAAC,OAAO,EAAE,CAAA;AACnD,CAAC;AAFD,gDAEC"}
|
||||
5
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/util.d.ts
generated
vendored
Normal file
5
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/util.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
export interface BackendIds {
|
||||
workflowRunBackendId: string;
|
||||
workflowJobRunBackendId: string;
|
||||
}
|
||||
export declare function getBackendIdsFromToken(): BackendIds;
|
||||
81
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/util.js
generated
vendored
Normal file
81
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/util.js
generated
vendored
Normal file
|
|
@ -0,0 +1,81 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getBackendIdsFromToken = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const config_1 = require("./config");
|
||||
const jwt_decode_1 = __importDefault(require("jwt-decode"));
|
||||
const InvalidJwtError = new Error('Failed to get backend IDs: The provided JWT token is invalid and/or missing claims');
|
||||
// uses the JWT token claims to get the
|
||||
// workflow run and workflow job run backend ids
|
||||
function getBackendIdsFromToken() {
|
||||
const token = (0, config_1.getRuntimeToken)();
|
||||
const decoded = (0, jwt_decode_1.default)(token);
|
||||
if (!decoded.scp) {
|
||||
throw InvalidJwtError;
|
||||
}
|
||||
/*
|
||||
* example decoded:
|
||||
* {
|
||||
* scp: "Actions.ExampleScope Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"
|
||||
* }
|
||||
*/
|
||||
const scpParts = decoded.scp.split(' ');
|
||||
if (scpParts.length === 0) {
|
||||
throw InvalidJwtError;
|
||||
}
|
||||
/*
|
||||
* example scpParts:
|
||||
* ["Actions.ExampleScope", "Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774"]
|
||||
*/
|
||||
for (const scopes of scpParts) {
|
||||
const scopeParts = scopes.split(':');
|
||||
if ((scopeParts === null || scopeParts === void 0 ? void 0 : scopeParts[0]) !== 'Actions.Results') {
|
||||
// not the Actions.Results scope
|
||||
continue;
|
||||
}
|
||||
/*
|
||||
* example scopeParts:
|
||||
* ["Actions.Results", "ce7f54c7-61c7-4aae-887f-30da475f5f1a", "ca395085-040a-526b-2ce8-bdc85f692774"]
|
||||
*/
|
||||
if (scopeParts.length !== 3) {
|
||||
// missing expected number of claims
|
||||
throw InvalidJwtError;
|
||||
}
|
||||
const ids = {
|
||||
workflowRunBackendId: scopeParts[1],
|
||||
workflowJobRunBackendId: scopeParts[2]
|
||||
};
|
||||
core.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`);
|
||||
core.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`);
|
||||
return ids;
|
||||
}
|
||||
throw InvalidJwtError;
|
||||
}
|
||||
exports.getBackendIdsFromToken = getBackendIdsFromToken;
|
||||
//# sourceMappingURL=util.js.map
|
||||
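A small sketch (not part of the vendored file) of the scp-claim parsing that getBackendIdsFromToken above performs, using the example value from its own comments.

// Illustrative sketch, not vendored code. The scp value is the example from the comments above.
const scp = 'Actions.ExampleScope Actions.Results:ce7f54c7-61c7-4aae-887f-30da475f5f1a:ca395085-040a-526b-2ce8-bdc85f692774';

const resultsScope = scp.split(' ').find(scope => scope.startsWith('Actions.Results:'));
const [, workflowRunBackendId, workflowJobRunBackendId] = resultsScope.split(':');

console.log(workflowRunBackendId);    // ce7f54c7-61c7-4aae-887f-30da475f5f1a
console.log(workflowJobRunBackendId); // ca395085-040a-526b-2ce8-bdc85f692774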
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/util.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/shared/util.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"util.js","sourceRoot":"","sources":["../../../src/internal/shared/util.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,qCAAwC;AACxC,4DAAmC;AAWnC,MAAM,eAAe,GAAG,IAAI,KAAK,CAC/B,oFAAoF,CACrF,CAAA;AAED,uCAAuC;AACvC,gDAAgD;AAChD,SAAgB,sBAAsB;IACpC,MAAM,KAAK,GAAG,IAAA,wBAAe,GAAE,CAAA;IAC/B,MAAM,OAAO,GAAG,IAAA,oBAAU,EAAe,KAAK,CAAC,CAAA;IAC/C,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE;QAChB,MAAM,eAAe,CAAA;KACtB;IAED;;;;;OAKG;IAEH,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;IACvC,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE;QACzB,MAAM,eAAe,CAAA;KACtB;IACD;;;OAGG;IAEH,KAAK,MAAM,MAAM,IAAI,QAAQ,EAAE;QAC7B,MAAM,UAAU,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QACpC,IAAI,CAAA,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAG,CAAC,CAAC,MAAK,iBAAiB,EAAE;YACzC,gCAAgC;YAChC,SAAQ;SACT;QAED;;;WAGG;QACH,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAC3B,oCAAoC;YACpC,MAAM,eAAe,CAAA;SACtB;QAED,MAAM,GAAG,GAAG;YACV,oBAAoB,EAAE,UAAU,CAAC,CAAC,CAAC;YACnC,uBAAuB,EAAE,UAAU,CAAC,CAAC,CAAC;SACvC,CAAA;QAED,IAAI,CAAC,KAAK,CAAC,4BAA4B,GAAG,CAAC,oBAAoB,EAAE,CAAC,CAAA;QAClE,IAAI,CAAC,KAAK,CAAC,gCAAgC,GAAG,CAAC,uBAAuB,EAAE,CAAC,CAAA;QAEzE,OAAO,GAAG,CAAA;KACX;IAED,MAAM,eAAe,CAAA;AACvB,CAAC;AAnDD,wDAmDC"}
|
||||
12
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/blob-upload.d.ts
generated
vendored
Normal file
12
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/blob-upload.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
import { ZipUploadStream } from './zip';
|
||||
export interface BlobUploadResponse {
|
||||
/**
|
||||
* The total reported upload size in bytes. Empty if the upload failed
|
||||
*/
|
||||
uploadSize?: number;
|
||||
/**
|
||||
* The SHA256 hash of the uploaded file. Empty if the upload failed
|
||||
*/
|
||||
sha256Hash?: string;
|
||||
}
|
||||
export declare function uploadZipToBlobStorage(authenticatedUploadURL: string, zipUploadStream: ZipUploadStream): Promise<BlobUploadResponse>;
|
||||
87
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/blob-upload.js
generated
vendored
Normal file
87
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/blob-upload.js
generated
vendored
Normal file
|
|
@ -0,0 +1,87 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.uploadZipToBlobStorage = void 0;
|
||||
const storage_blob_1 = require("@azure/storage-blob");
|
||||
const config_1 = require("../shared/config");
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const crypto = __importStar(require("crypto"));
|
||||
const stream = __importStar(require("stream"));
|
||||
const errors_1 = require("../shared/errors");
|
||||
function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let uploadByteCount = 0;
|
||||
const maxConcurrency = (0, config_1.getConcurrency)();
|
||||
const bufferSize = (0, config_1.getUploadChunkSize)();
|
||||
const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
|
||||
const blockBlobClient = blobClient.getBlockBlobClient();
|
||||
core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
|
||||
const uploadCallback = (progress) => {
|
||||
core.info(`Uploaded bytes ${progress.loadedBytes}`);
|
||||
uploadByteCount = progress.loadedBytes;
|
||||
};
|
||||
const options = {
|
||||
blobHTTPHeaders: { blobContentType: 'zip' },
|
||||
onProgress: uploadCallback
|
||||
};
|
||||
let sha256Hash = undefined;
|
||||
const uploadStream = new stream.PassThrough();
|
||||
const hashStream = crypto.createHash('sha256');
|
||||
zipUploadStream.pipe(uploadStream); // This stream is used for the upload
|
||||
zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets uploaded; used as an integrity check
|
||||
core.info('Beginning upload of artifact content to blob storage');
|
||||
try {
|
||||
yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
|
||||
}
|
||||
catch (error) {
|
||||
if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
|
||||
throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
core.info('Finished uploading artifact content to blob storage!');
|
||||
hashStream.end();
|
||||
sha256Hash = hashStream.read();
|
||||
core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`);
|
||||
if (uploadByteCount === 0) {
|
||||
core.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
|
||||
}
|
||||
return {
|
||||
uploadSize: uploadByteCount,
|
||||
sha256Hash
|
||||
};
|
||||
});
|
||||
}
|
||||
exports.uploadZipToBlobStorage = uploadZipToBlobStorage;
|
||||
//# sourceMappingURL=blob-upload.js.map
|
||||
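A minimal sketch (not part of the vendored file) of the dual-pipe pattern used by uploadZipToBlobStorage above: one source stream feeds both the upload and a SHA256 digest. The artifact.zip input path is a placeholder.

// Illustrative sketch, not vendored code. 'artifact.zip' is a placeholder input.
const crypto = require('crypto');
const fs = require('fs');
const stream = require('stream');

const source = fs.createReadStream('artifact.zip');
const uploadStream = new stream.PassThrough(); // would be consumed by the blob upload
const hashStream = crypto.createHash('sha256');

source.pipe(uploadStream);
source.pipe(hashStream).setEncoding('hex');

uploadStream.resume(); // stand-in for the real upload draining the stream
hashStream.on('finish', () => {
  console.log(`sha256: ${hashStream.read()}`);
});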
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/blob-upload.js.map
generated
vendored
Normal file
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/blob-upload.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"blob-upload.js","sourceRoot":"","sources":["../../../src/internal/upload/blob-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,sDAA4E;AAG5E,6CAAmE;AACnE,oDAAqC;AACrC,+CAAgC;AAChC,+CAAgC;AAChC,6CAA6C;AAc7C,SAAsB,sBAAsB,CAC1C,sBAA8B,EAC9B,eAAgC;;QAEhC,IAAI,eAAe,GAAG,CAAC,CAAA;QAEvB,MAAM,cAAc,GAAG,IAAA,uBAAc,GAAE,CAAA;QACvC,MAAM,UAAU,GAAG,IAAA,2BAAkB,GAAE,CAAA;QACvC,MAAM,UAAU,GAAG,IAAI,yBAAU,CAAC,sBAAsB,CAAC,CAAA;QACzD,MAAM,eAAe,GAAG,UAAU,CAAC,kBAAkB,EAAE,CAAA;QAEvD,IAAI,CAAC,KAAK,CACR,+DAA+D,cAAc,iBAAiB,UAAU,EAAE,CAC3G,CAAA;QAED,MAAM,cAAc,GAAG,CAAC,QAA+B,EAAQ,EAAE;YAC/D,IAAI,CAAC,IAAI,CAAC,kBAAkB,QAAQ,CAAC,WAAW,EAAE,CAAC,CAAA;YACnD,eAAe,GAAG,QAAQ,CAAC,WAAW,CAAA;QACxC,CAAC,CAAA;QAED,MAAM,OAAO,GAAiC;YAC5C,eAAe,EAAE,EAAC,eAAe,EAAE,KAAK,EAAC;YACzC,UAAU,EAAE,cAAc;SAC3B,CAAA;QAED,IAAI,UAAU,GAAuB,SAAS,CAAA;QAC9C,MAAM,YAAY,GAAG,IAAI,MAAM,CAAC,WAAW,EAAE,CAAA;QAC7C,MAAM,UAAU,GAAG,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAA;QAE9C,eAAe,CAAC,IAAI,CAAC,YAAY,CAAC,CAAA,CAAC,qCAAqC;QACxE,eAAe,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,WAAW,CAAC,KAAK,CAAC,CAAA,CAAC,2FAA2F;QAE/I,IAAI,CAAC,IAAI,CAAC,sDAAsD,CAAC,CAAA;QAEjE,IAAI;YACF,MAAM,eAAe,CAAC,YAAY,CAChC,YAAY,EACZ,UAAU,EACV,cAAc,EACd,OAAO,CACR,CAAA;SACF;QAAC,OAAO,KAAK,EAAE;YACd,IAAI,qBAAY,CAAC,kBAAkB,CAAC,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,IAAI,CAAC,EAAE;gBAChD,MAAM,IAAI,qBAAY,CAAC,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,IAAI,CAAC,CAAA;aACpC;YAED,MAAM,KAAK,CAAA;SACZ;QAED,IAAI,CAAC,IAAI,CAAC,sDAAsD,CAAC,CAAA;QAEjE,UAAU,CAAC,GAAG,EAAE,CAAA;QAChB,UAAU,GAAG,UAAU,CAAC,IAAI,EAAY,CAAA;QACxC,IAAI,CAAC,IAAI,CAAC,2CAA2C,UAAU,EAAE,CAAC,CAAA;QAElE,IAAI,eAAe,KAAK,CAAC,EAAE;YACzB,IAAI,CAAC,OAAO,CACV,wEAAwE,CACzE,CAAA;SACF;QAED,OAAO;YACL,UAAU,EAAE,eAAe;YAC3B,UAAU;SACX,CAAA;IACH,CAAC;CAAA;AAjED,wDAiEC"}
|
||||
8
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/path-and-artifact-name-validation.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
/**
|
||||
* Validates the name of the artifact to check to make sure there are no illegal characters
|
||||
*/
|
||||
export declare function validateArtifactName(name: string): void;
|
||||
/**
|
||||
* Validates file paths to check for any illegal characters that can cause problems on different file systems
|
||||
*/
|
||||
export declare function validateFilePath(path: string): void;
|
||||
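A quick way to see what these validators accept and reject, assuming the module can be required directly from the package's internal path (illustrative only, not part of the vendored files):

```js
const {
  validateArtifactName,
  validateFilePath
} = require('@actions/artifact/lib/internal/upload/path-and-artifact-name-validation');

// '?' is one of the rejected characters, so this throws with a descriptive message.
try {
  validateArtifactName('my?artifact');
} catch (e) {
  console.log(e.message);
}

// Slashes are allowed in file paths (only artifact names additionally reject '/' and '\').
validateFilePath('dir/subdir/report.txt');
validateArtifactName('my-artifact'); // passes and logs "Artifact name is valid!"
```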
67
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/path-and-artifact-name-validation.js
generated
vendored
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.validateFilePath = exports.validateArtifactName = void 0;
|
||||
const core_1 = require("@actions/core");
|
||||
/**
|
||||
* Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected
|
||||
* from the server if attempted to be sent over. These characters are not allowed due to limitations with certain
|
||||
* file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an
|
||||
 * individual filesystem/platform are not supported on any filesystem/platform
|
||||
*
|
||||
* FilePaths can include characters such as \ and / which are not permitted in the artifact name alone
|
||||
*/
|
||||
const invalidArtifactFilePathCharacters = new Map([
|
||||
['"', ' Double quote "'],
|
||||
[':', ' Colon :'],
|
||||
['<', ' Less than <'],
|
||||
['>', ' Greater than >'],
|
||||
['|', ' Vertical bar |'],
|
||||
['*', ' Asterisk *'],
|
||||
['?', ' Question mark ?'],
|
||||
['\r', ' Carriage return \\r'],
|
||||
['\n', ' Line feed \\n']
|
||||
]);
|
||||
const invalidArtifactNameCharacters = new Map([
|
||||
...invalidArtifactFilePathCharacters,
|
||||
['\\', ' Backslash \\'],
|
||||
['/', ' Forward slash /']
|
||||
]);
|
||||
/**
|
||||
* Validates the name of the artifact to check to make sure there are no illegal characters
|
||||
*/
|
||||
function validateArtifactName(name) {
|
||||
if (!name) {
|
||||
throw new Error(`Provided artifact name input during validation is empty`);
|
||||
}
|
||||
for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactNameCharacters) {
|
||||
if (name.includes(invalidCharacterKey)) {
|
||||
throw new Error(`The artifact name is not valid: ${name}. Contains the following character: ${errorMessageForCharacter}
|
||||
|
||||
Invalid characters include: ${Array.from(invalidArtifactNameCharacters.values()).toString()}
|
||||
|
||||
These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`);
|
||||
}
|
||||
}
|
||||
(0, core_1.info)(`Artifact name is valid!`);
|
||||
}
|
||||
exports.validateArtifactName = validateArtifactName;
|
||||
/**
|
||||
* Validates file paths to check for any illegal characters that can cause problems on different file systems
|
||||
*/
|
||||
function validateFilePath(path) {
|
||||
if (!path) {
|
||||
throw new Error(`Provided file path input during validation is empty`);
|
||||
}
|
||||
for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) {
|
||||
if (path.includes(invalidCharacterKey)) {
|
||||
throw new Error(`The path for one of the files in artifact is not valid: ${path}. Contains the following character: ${errorMessageForCharacter}
|
||||
|
||||
Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()}
|
||||
|
||||
The following characters are not allowed in files that are uploaded due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.
|
||||
`);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.validateFilePath = validateFilePath;
|
||||
//# sourceMappingURL=path-and-artifact-name-validation.js.map
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/path-and-artifact-name-validation.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"path-and-artifact-name-validation.js","sourceRoot":"","sources":["../../../src/internal/upload/path-and-artifact-name-validation.ts"],"names":[],"mappings":";;;AAAA,wCAAkC;AAElC;;;;;;;GAOG;AACH,MAAM,iCAAiC,GAAG,IAAI,GAAG,CAAiB;IAChE,CAAC,GAAG,EAAE,iBAAiB,CAAC;IACxB,CAAC,GAAG,EAAE,UAAU,CAAC;IACjB,CAAC,GAAG,EAAE,cAAc,CAAC;IACrB,CAAC,GAAG,EAAE,iBAAiB,CAAC;IACxB,CAAC,GAAG,EAAE,iBAAiB,CAAC;IACxB,CAAC,GAAG,EAAE,aAAa,CAAC;IACpB,CAAC,GAAG,EAAE,kBAAkB,CAAC;IACzB,CAAC,IAAI,EAAE,sBAAsB,CAAC;IAC9B,CAAC,IAAI,EAAE,gBAAgB,CAAC;CACzB,CAAC,CAAA;AAEF,MAAM,6BAA6B,GAAG,IAAI,GAAG,CAAiB;IAC5D,GAAG,iCAAiC;IACpC,CAAC,IAAI,EAAE,eAAe,CAAC;IACvB,CAAC,GAAG,EAAE,kBAAkB,CAAC;CAC1B,CAAC,CAAA;AAEF;;GAEG;AACH,SAAgB,oBAAoB,CAAC,IAAY;IAC/C,IAAI,CAAC,IAAI,EAAE;QACT,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAA;KAC3E;IAED,KAAK,MAAM,CACT,mBAAmB,EACnB,wBAAwB,CACzB,IAAI,6BAA6B,EAAE;QAClC,IAAI,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EAAE;YACtC,MAAM,IAAI,KAAK,CACb,mCAAmC,IAAI,uCAAuC,wBAAwB;;8BAEhF,KAAK,CAAC,IAAI,CAC9B,6BAA6B,CAAC,MAAM,EAAE,CACvC,CAAC,QAAQ,EAAE;;mRAE+P,CAC5Q,CAAA;SACF;KACF;IAED,IAAA,WAAI,EAAC,yBAAyB,CAAC,CAAA;AACjC,CAAC;AAvBD,oDAuBC;AAED;;GAEG;AACH,SAAgB,gBAAgB,CAAC,IAAY;IAC3C,IAAI,CAAC,IAAI,EAAE;QACT,MAAM,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAA;KACvE;IAED,KAAK,MAAM,CACT,mBAAmB,EACnB,wBAAwB,CACzB,IAAI,iCAAiC,EAAE;QACtC,IAAI,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EAAE;YACtC,MAAM,IAAI,KAAK,CACb,2DAA2D,IAAI,uCAAuC,wBAAwB;;8BAExG,KAAK,CAAC,IAAI,CAC9B,iCAAiC,CAAC,MAAM,EAAE,CAC3C,CAAC,QAAQ,EAAE;;;WAGT,CACJ,CAAA;SACF;KACF;AACH,CAAC;AAtBD,4CAsBC"}
|
||||
2
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/retention.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
import { Timestamp } from '../../generated';
|
||||
export declare function getExpiration(retentionDays?: number): Timestamp | undefined;
|
||||
54
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/retention.js
generated
vendored
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getExpiration = void 0;
|
||||
const generated_1 = require("../../generated");
|
||||
const core = __importStar(require("@actions/core"));
|
||||
function getExpiration(retentionDays) {
|
||||
if (!retentionDays) {
|
||||
return undefined;
|
||||
}
|
||||
const maxRetentionDays = getRetentionDays();
|
||||
if (maxRetentionDays && maxRetentionDays < retentionDays) {
|
||||
core.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. Using ${maxRetentionDays} instead.`);
|
||||
retentionDays = maxRetentionDays;
|
||||
}
|
||||
const expirationDate = new Date();
|
||||
expirationDate.setDate(expirationDate.getDate() + retentionDays);
|
||||
return generated_1.Timestamp.fromDate(expirationDate);
|
||||
}
|
||||
exports.getExpiration = getExpiration;
|
||||
function getRetentionDays() {
|
||||
const retentionDays = process.env['GITHUB_RETENTION_DAYS'];
|
||||
if (!retentionDays) {
|
||||
return undefined;
|
||||
}
|
||||
const days = parseInt(retentionDays);
|
||||
if (isNaN(days)) {
|
||||
return undefined;
|
||||
}
|
||||
return days;
|
||||
}
|
||||
//# sourceMappingURL=retention.js.map
|
||||
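The cap applied by `getExpiration` comes from the `GITHUB_RETENTION_DAYS` environment variable, as read by `getRetentionDays` above. A small sketch of that behavior (again assuming the internal module path resolves as written):

```js
const { getExpiration } = require('@actions/artifact/lib/internal/upload/retention');

// Simulate a repository whose maximum retention is 30 days.
process.env.GITHUB_RETENTION_DAYS = '30';

// Asking for 90 days logs a warning and falls back to the 30-day maximum.
const expiresAt = getExpiration(90);
console.log(expiresAt); // Timestamp roughly 30 days from now

// With no retention requested, no expiration is attached to the artifact.
console.log(getExpiration(undefined)); // undefined
```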
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/retention.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"retention.js","sourceRoot":"","sources":["../../../src/internal/upload/retention.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,+CAAyC;AACzC,oDAAqC;AAErC,SAAgB,aAAa,CAAC,aAAsB;IAClD,IAAI,CAAC,aAAa,EAAE;QAClB,OAAO,SAAS,CAAA;KACjB;IAED,MAAM,gBAAgB,GAAG,gBAAgB,EAAE,CAAA;IAC3C,IAAI,gBAAgB,IAAI,gBAAgB,GAAG,aAAa,EAAE;QACxD,IAAI,CAAC,OAAO,CACV,wGAAwG,gBAAgB,WAAW,CACpI,CAAA;QACD,aAAa,GAAG,gBAAgB,CAAA;KACjC;IAED,MAAM,cAAc,GAAG,IAAI,IAAI,EAAE,CAAA;IACjC,cAAc,CAAC,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,GAAG,aAAa,CAAC,CAAA;IAEhE,OAAO,qBAAS,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAA;AAC3C,CAAC;AAjBD,sCAiBC;AAED,SAAS,gBAAgB;IACvB,MAAM,aAAa,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;IAC1D,IAAI,CAAC,aAAa,EAAE;QAClB,OAAO,SAAS,CAAA;KACjB;IACD,MAAM,IAAI,GAAG,QAAQ,CAAC,aAAa,CAAC,CAAA;IACpC,IAAI,KAAK,CAAC,IAAI,CAAC,EAAE;QACf,OAAO,SAAS,CAAA;KACjB;IAED,OAAO,IAAI,CAAA;AACb,CAAC"}
|
||||
2
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/upload-artifact.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
import { UploadArtifactOptions, UploadArtifactResponse } from '../shared/interfaces';
|
||||
export declare function uploadArtifact(name: string, files: string[], rootDirectory: string, options?: UploadArtifactOptions | undefined): Promise<UploadArtifactResponse>;
|
||||
103
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/upload-artifact.js
generated
vendored
Normal file
|
|
@ -0,0 +1,103 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.uploadArtifact = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const retention_1 = require("./retention");
|
||||
const path_and_artifact_name_validation_1 = require("./path-and-artifact-name-validation");
|
||||
const artifact_twirp_client_1 = require("../shared/artifact-twirp-client");
|
||||
const upload_zip_specification_1 = require("./upload-zip-specification");
|
||||
const util_1 = require("../shared/util");
|
||||
const blob_upload_1 = require("./blob-upload");
|
||||
const zip_1 = require("./zip");
|
||||
const generated_1 = require("../../generated");
|
||||
const errors_1 = require("../shared/errors");
|
||||
function uploadArtifact(name, files, rootDirectory, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
(0, path_and_artifact_name_validation_1.validateArtifactName)(name);
|
||||
(0, upload_zip_specification_1.validateRootDirectory)(rootDirectory);
|
||||
const zipSpecification = (0, upload_zip_specification_1.getUploadZipSpecification)(files, rootDirectory);
|
||||
if (zipSpecification.length === 0) {
|
||||
throw new errors_1.FilesNotFoundError(zipSpecification.flatMap(s => (s.sourcePath ? [s.sourcePath] : [])));
|
||||
}
|
||||
// get the IDs needed for the artifact creation
|
||||
const backendIds = (0, util_1.getBackendIdsFromToken)();
|
||||
// create the artifact client
|
||||
const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
|
||||
// create the artifact
|
||||
const createArtifactReq = {
|
||||
workflowRunBackendId: backendIds.workflowRunBackendId,
|
||||
workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
|
||||
name,
|
||||
version: 4
|
||||
};
|
||||
// if there is a retention period, add it to the request
|
||||
const expiresAt = (0, retention_1.getExpiration)(options === null || options === void 0 ? void 0 : options.retentionDays);
|
||||
if (expiresAt) {
|
||||
createArtifactReq.expiresAt = expiresAt;
|
||||
}
|
||||
const createArtifactResp = yield artifactClient.CreateArtifact(createArtifactReq);
|
||||
if (!createArtifactResp.ok) {
|
||||
throw new errors_1.InvalidResponseError('CreateArtifact: response from backend was not ok');
|
||||
}
|
||||
const zipUploadStream = yield (0, zip_1.createZipUploadStream)(zipSpecification, options === null || options === void 0 ? void 0 : options.compressionLevel);
|
||||
// Upload zip to blob storage
|
||||
const uploadResult = yield (0, blob_upload_1.uploadZipToBlobStorage)(createArtifactResp.signedUploadUrl, zipUploadStream);
|
||||
// finalize the artifact
|
||||
const finalizeArtifactReq = {
|
||||
workflowRunBackendId: backendIds.workflowRunBackendId,
|
||||
workflowJobRunBackendId: backendIds.workflowJobRunBackendId,
|
||||
name,
|
||||
size: uploadResult.uploadSize ? uploadResult.uploadSize.toString() : '0'
|
||||
};
|
||||
if (uploadResult.sha256Hash) {
|
||||
finalizeArtifactReq.hash = generated_1.StringValue.create({
|
||||
value: `sha256:${uploadResult.sha256Hash}`
|
||||
});
|
||||
}
|
||||
core.info(`Finalizing artifact upload`);
|
||||
const finalizeArtifactResp = yield artifactClient.FinalizeArtifact(finalizeArtifactReq);
|
||||
if (!finalizeArtifactResp.ok) {
|
||||
throw new errors_1.InvalidResponseError('FinalizeArtifact: response from backend was not ok');
|
||||
}
|
||||
const artifactId = BigInt(finalizeArtifactResp.artifactId);
|
||||
core.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`);
|
||||
return {
|
||||
size: uploadResult.uploadSize,
|
||||
id: Number(artifactId)
|
||||
};
|
||||
});
|
||||
}
|
||||
exports.uploadArtifact = uploadArtifact;
|
||||
//# sourceMappingURL=upload-artifact.js.map
|
||||
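Taken together, `uploadArtifact` validates the artifact name and root directory, builds the zip specification, streams the zip to blob storage, and finalizes the artifact with its size and SHA-256 hash. A hedged usage sketch; the paths, artifact name, and options below are examples, and in practice this only works inside a workflow run where the Actions runtime token and results-service URL are present in the environment:

```js
const { uploadArtifact } = require('@actions/artifact/lib/internal/upload/upload-artifact');

async function main() {
  // Every file must live under the root directory; paths outside it are rejected.
  const files = [
    '/home/runner/work/out/report.html',
    '/home/runner/work/out/logs/test.log'
  ];

  const { id, size } = await uploadArtifact('test-results', files, '/home/runner/work/out', {
    retentionDays: 7,    // optional, capped by GITHUB_RETENTION_DAYS
    compressionLevel: 9  // optional, passed through to the zip stream (default is 6)
  });

  console.log(`Uploaded artifact ${id} (${size} bytes)`);
}

main().catch(err => {
  console.error(err);
  process.exitCode = 1;
});
```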
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/upload-artifact.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"upload-artifact.js","sourceRoot":"","sources":["../../../src/internal/upload/upload-artifact.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AAKrC,2CAAyC;AACzC,2FAAwE;AACxE,2EAA2E;AAC3E,yEAImC;AACnC,yCAAqD;AACrD,+CAAoD;AACpD,+BAA2C;AAC3C,+CAIwB;AACxB,6CAAyE;AAEzE,SAAsB,cAAc,CAClC,IAAY,EACZ,KAAe,EACf,aAAqB,EACrB,OAA2C;;QAE3C,IAAA,wDAAoB,EAAC,IAAI,CAAC,CAAA;QAC1B,IAAA,gDAAqB,EAAC,aAAa,CAAC,CAAA;QAEpC,MAAM,gBAAgB,GAA6B,IAAA,oDAAyB,EAC1E,KAAK,EACL,aAAa,CACd,CAAA;QACD,IAAI,gBAAgB,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,MAAM,IAAI,2BAAkB,CAC1B,gBAAgB,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CACpE,CAAA;SACF;QAED,+CAA+C;QAC/C,MAAM,UAAU,GAAG,IAAA,6BAAsB,GAAE,CAAA;QAE3C,6BAA6B;QAC7B,MAAM,cAAc,GAAG,IAAA,mDAA2B,GAAE,CAAA;QAEpD,sBAAsB;QACtB,MAAM,iBAAiB,GAA0B;YAC/C,oBAAoB,EAAE,UAAU,CAAC,oBAAoB;YACrD,uBAAuB,EAAE,UAAU,CAAC,uBAAuB;YAC3D,IAAI;YACJ,OAAO,EAAE,CAAC;SACX,CAAA;QAED,wDAAwD;QACxD,MAAM,SAAS,GAAG,IAAA,yBAAa,EAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,aAAa,CAAC,CAAA;QACvD,IAAI,SAAS,EAAE;YACb,iBAAiB,CAAC,SAAS,GAAG,SAAS,CAAA;SACxC;QAED,MAAM,kBAAkB,GACtB,MAAM,cAAc,CAAC,cAAc,CAAC,iBAAiB,CAAC,CAAA;QACxD,IAAI,CAAC,kBAAkB,CAAC,EAAE,EAAE;YAC1B,MAAM,IAAI,6BAAoB,CAC5B,kDAAkD,CACnD,CAAA;SACF;QAED,MAAM,eAAe,GAAG,MAAM,IAAA,2BAAqB,EACjD,gBAAgB,EAChB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,CAC1B,CAAA;QAED,6BAA6B;QAC7B,MAAM,YAAY,GAAG,MAAM,IAAA,oCAAsB,EAC/C,kBAAkB,CAAC,eAAe,EAClC,eAAe,CAChB,CAAA;QAED,wBAAwB;QACxB,MAAM,mBAAmB,GAA4B;YACnD,oBAAoB,EAAE,UAAU,CAAC,oBAAoB;YACrD,uBAAuB,EAAE,UAAU,CAAC,uBAAuB;YAC3D,IAAI;YACJ,IAAI,EAAE,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC,YAAY,CAAC,UAAU,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,GAAG;SACzE,CAAA;QAED,IAAI,YAAY,CAAC,UAAU,EAAE;YAC3B,mBAAmB,CAAC,IAAI,GAAG,uBAAW,CAAC,MAAM,CAAC;gBAC5C,KAAK,EAAE,UAAU,YAAY,CAAC,UAAU,EAAE;aAC3C,CAAC,CAAA;SACH;QAED,IAAI,CAAC,IAAI,CAAC,4BAA4B,CAAC,CAAA;QAEvC,MAAM,oBAAoB,GACxB,MAAM,cAAc,CAAC,gBAAgB,CAAC,mBAAmB,CAAC,CAAA;QAC5D,IAAI,CAAC,oBAAoB,CAAC,EAAE,EAAE;YAC5B,MAAM,IAAI,6BAAoB,CAC5B,oDAAoD,CACrD,CAAA;SACF;QAED,MAAM,UAAU,GAAG,MAAM,CAAC,oBAAoB,CAAC,UAAU,CAAC,CAAA;QAC1D,IAAI,CAAC,IAAI,CACP,YAAY,IAAI,4CAA4C,UAAU,EAAE,CACzE,CAAA;QAED,OAAO;YACL,IAAI,EAAE,YAAY,CAAC,UAAU;YAC7B,EAAE,EAAE,MAAM,CAAC,UAAU,CAAC;SACvB,CAAA;IACH,CAAC;CAAA;AA3FD,wCA2FC"}
|
||||
21
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/upload-zip-specification.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
export interface UploadZipSpecification {
|
||||
/**
|
||||
* An absolute source path that points to a file that will be added to a zip. Null if creating a new directory
|
||||
*/
|
||||
sourcePath: string | null;
|
||||
/**
|
||||
* The destination path in a zip for a file
|
||||
*/
|
||||
destinationPath: string;
|
||||
}
|
||||
/**
|
||||
* Checks if a root directory exists and is valid
|
||||
 * @param rootDirectory an absolute root directory path common to all input files that will be trimmed from the final zip structure
|
||||
*/
|
||||
export declare function validateRootDirectory(rootDirectory: string): void;
|
||||
/**
|
||||
* Creates a specification that describes how a zip file will be created for a set of input files
|
||||
 * @param filesToZip a list of files that should be included in the zip
|
||||
 * @param rootDirectory an absolute root directory path common to all input files that will be trimmed from the final zip structure
|
||||
*/
|
||||
export declare function getUploadZipSpecification(filesToZip: string[], rootDirectory: string): UploadZipSpecification[];
|
||||
113
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/upload-zip-specification.js
generated
vendored
Normal file
|
|
@ -0,0 +1,113 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getUploadZipSpecification = exports.validateRootDirectory = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const core_1 = require("@actions/core");
|
||||
const path_1 = require("path");
|
||||
const path_and_artifact_name_validation_1 = require("./path-and-artifact-name-validation");
|
||||
/**
|
||||
* Checks if a root directory exists and is valid
|
||||
 * @param rootDirectory an absolute root directory path common to all input files that will be trimmed from the final zip structure
|
||||
*/
|
||||
function validateRootDirectory(rootDirectory) {
|
||||
if (!fs.existsSync(rootDirectory)) {
|
||||
throw new Error(`The provided rootDirectory ${rootDirectory} does not exist`);
|
||||
}
|
||||
if (!fs.statSync(rootDirectory).isDirectory()) {
|
||||
throw new Error(`The provided rootDirectory ${rootDirectory} is not a valid directory`);
|
||||
}
|
||||
(0, core_1.info)(`Root directory input is valid!`);
|
||||
}
|
||||
exports.validateRootDirectory = validateRootDirectory;
|
||||
/**
|
||||
* Creates a specification that describes how a zip file will be created for a set of input files
|
||||
 * @param filesToZip a list of files that should be included in the zip
|
||||
 * @param rootDirectory an absolute root directory path common to all input files that will be trimmed from the final zip structure
|
||||
*/
|
||||
function getUploadZipSpecification(filesToZip, rootDirectory) {
|
||||
const specification = [];
|
||||
// Normalize and resolve, this allows for either absolute or relative paths to be used
|
||||
rootDirectory = (0, path_1.normalize)(rootDirectory);
|
||||
rootDirectory = (0, path_1.resolve)(rootDirectory);
|
||||
/*
|
||||
Example
|
||||
|
||||
Input:
|
||||
rootDirectory: '/home/user/files/plz-upload'
|
||||
artifactFiles: [
|
||||
'/home/user/files/plz-upload/file1.txt',
|
||||
'/home/user/files/plz-upload/file2.txt',
|
||||
'/home/user/files/plz-upload/dir/file3.txt'
|
||||
]
|
||||
|
||||
Output:
|
||||
specifications: [
|
||||
['/home/user/files/plz-upload/file1.txt', '/file1.txt'],
|
||||
['/home/user/files/plz-upload/file2.txt', '/file2.txt'],
|
||||
['/home/user/files/plz-upload/dir/file3.txt', '/dir/file3.txt']
|
||||
]
|
||||
|
||||
The final zip that is later uploaded will look like this:
|
||||
|
||||
my-artifact.zip
|
||||
- file1.txt
|
||||
- file2.txt
|
||||
- dir/
|
||||
- file3.txt
|
||||
*/
|
||||
for (let file of filesToZip) {
|
||||
if (!fs.existsSync(file)) {
|
||||
throw new Error(`File ${file} does not exist`);
|
||||
}
|
||||
if (!fs.statSync(file).isDirectory()) {
|
||||
// Normalize and resolve, this allows for either absolute or relative paths to be used
|
||||
file = (0, path_1.normalize)(file);
|
||||
file = (0, path_1.resolve)(file);
|
||||
if (!file.startsWith(rootDirectory)) {
|
||||
throw new Error(`The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`);
|
||||
}
|
||||
// Check for forbidden characters in file paths that may cause ambiguous behavior if downloaded on different file systems
|
||||
const uploadPath = file.replace(rootDirectory, '');
|
||||
(0, path_and_artifact_name_validation_1.validateFilePath)(uploadPath);
|
||||
specification.push({
|
||||
sourcePath: file,
|
||||
destinationPath: uploadPath
|
||||
});
|
||||
}
|
||||
else {
|
||||
// Empty directory
|
||||
const directoryPath = file.replace(rootDirectory, '');
|
||||
(0, path_and_artifact_name_validation_1.validateFilePath)(directoryPath);
|
||||
specification.push({
|
||||
sourcePath: null,
|
||||
destinationPath: directoryPath
|
||||
});
|
||||
}
|
||||
}
|
||||
return specification;
|
||||
}
|
||||
exports.getUploadZipSpecification = getUploadZipSpecification;
|
||||
//# sourceMappingURL=upload-zip-specification.js.map
|
||||
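The worked example in the comment above can be reproduced directly: `getUploadZipSpecification` resolves each input path, checks that it sits under the root directory, and records a destination path with the root trimmed off. A sketch assuming the listed paths actually exist on disk:

```js
const {
  validateRootDirectory,
  getUploadZipSpecification
} = require('@actions/artifact/lib/internal/upload/upload-zip-specification');

const root = '/home/user/files/plz-upload';
validateRootDirectory(root); // throws if the directory is missing or not a directory

const spec = getUploadZipSpecification(
  ['/home/user/files/plz-upload/file1.txt', '/home/user/files/plz-upload/dir/file3.txt'],
  root
);
console.log(spec);
// [ { sourcePath: '/home/user/files/plz-upload/file1.txt', destinationPath: '/file1.txt' },
//   { sourcePath: '/home/user/files/plz-upload/dir/file3.txt', destinationPath: '/dir/file3.txt' } ]
```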
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/upload-zip-specification.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"upload-zip-specification.js","sourceRoot":"","sources":["../../../src/internal/upload/upload-zip-specification.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,wCAAkC;AAClC,+BAAuC;AACvC,2FAAoE;AAcpE;;;GAGG;AACH,SAAgB,qBAAqB,CAAC,aAAqB;IACzD,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE;QACjC,MAAM,IAAI,KAAK,CACb,8BAA8B,aAAa,iBAAiB,CAC7D,CAAA;KACF;IACD,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC,WAAW,EAAE,EAAE;QAC7C,MAAM,IAAI,KAAK,CACb,8BAA8B,aAAa,2BAA2B,CACvE,CAAA;KACF;IACD,IAAA,WAAI,EAAC,gCAAgC,CAAC,CAAA;AACxC,CAAC;AAZD,sDAYC;AAED;;;;GAIG;AACH,SAAgB,yBAAyB,CACvC,UAAoB,EACpB,aAAqB;IAErB,MAAM,aAAa,GAA6B,EAAE,CAAA;IAElD,sFAAsF;IACtF,aAAa,GAAG,IAAA,gBAAS,EAAC,aAAa,CAAC,CAAA;IACxC,aAAa,GAAG,IAAA,cAAO,EAAC,aAAa,CAAC,CAAA;IAEtC;;;;;;;;;;;;;;;;;;;;;;;;;MAyBE;IACF,KAAK,IAAI,IAAI,IAAI,UAAU,EAAE;QAC3B,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,QAAQ,IAAI,iBAAiB,CAAC,CAAA;SAC/C;QACD,IAAI,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,EAAE;YACpC,sFAAsF;YACtF,IAAI,GAAG,IAAA,gBAAS,EAAC,IAAI,CAAC,CAAA;YACtB,IAAI,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;YACpB,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE;gBACnC,MAAM,IAAI,KAAK,CACb,sBAAsB,aAAa,2CAA2C,IAAI,EAAE,CACrF,CAAA;aACF;YAED,yHAAyH;YACzH,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE,EAAE,CAAC,CAAA;YAClD,IAAA,oDAAgB,EAAC,UAAU,CAAC,CAAA;YAE5B,aAAa,CAAC,IAAI,CAAC;gBACjB,UAAU,EAAE,IAAI;gBAChB,eAAe,EAAE,UAAU;aAC5B,CAAC,CAAA;SACH;aAAM;YACL,kBAAkB;YAClB,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE,EAAE,CAAC,CAAA;YACrD,IAAA,oDAAgB,EAAC,aAAa,CAAC,CAAA;YAE/B,aAAa,CAAC,IAAI,CAAC;gBACjB,UAAU,EAAE,IAAI;gBAChB,eAAe,EAAE,aAAa;aAC/B,CAAC,CAAA;SACH;KACF;IACD,OAAO,aAAa,CAAA;AACtB,CAAC;AAtED,8DAsEC"}
|
||||
9
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/zip.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
/// <reference types="node" />
|
||||
import * as stream from 'stream';
|
||||
import { UploadZipSpecification } from './upload-zip-specification';
|
||||
export declare const DEFAULT_COMPRESSION_LEVEL = 6;
|
||||
export declare class ZipUploadStream extends stream.Transform {
|
||||
constructor(bufferSize: number);
|
||||
_transform(chunk: any, enc: any, cb: any): void;
|
||||
}
|
||||
export declare function createZipUploadStream(uploadSpecification: UploadZipSpecification[], compressionLevel?: number): Promise<ZipUploadStream>;
|
||||
113
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/zip.js
generated
vendored
Normal file
|
|
@ -0,0 +1,113 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRESSION_LEVEL = void 0;
|
||||
const stream = __importStar(require("stream"));
|
||||
const archiver = __importStar(require("archiver"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const fs_1 = require("fs");
|
||||
const config_1 = require("../shared/config");
|
||||
exports.DEFAULT_COMPRESSION_LEVEL = 6;
|
||||
// Custom stream transformer so we can set the highWaterMark property
|
||||
// See https://github.com/nodejs/node/issues/8855
|
||||
class ZipUploadStream extends stream.Transform {
|
||||
constructor(bufferSize) {
|
||||
super({
|
||||
highWaterMark: bufferSize
|
||||
});
|
||||
}
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
_transform(chunk, enc, cb) {
|
||||
cb(null, chunk);
|
||||
}
|
||||
}
|
||||
exports.ZipUploadStream = ZipUploadStream;
|
||||
function createZipUploadStream(uploadSpecification, compressionLevel = exports.DEFAULT_COMPRESSION_LEVEL) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
core.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`);
|
||||
const zip = archiver.create('zip', {
|
||||
highWaterMark: (0, config_1.getUploadChunkSize)(),
|
||||
zlib: { level: compressionLevel }
|
||||
});
|
||||
// register callbacks for various events during the zip lifecycle
|
||||
zip.on('error', zipErrorCallback);
|
||||
zip.on('warning', zipWarningCallback);
|
||||
zip.on('finish', zipFinishCallback);
|
||||
zip.on('end', zipEndCallback);
|
||||
for (const file of uploadSpecification) {
|
||||
if (file.sourcePath !== null) {
|
||||
// Add a normal file to the zip
|
||||
zip.append((0, fs_1.createReadStream)(file.sourcePath), {
|
||||
name: file.destinationPath
|
||||
});
|
||||
}
|
||||
else {
|
||||
// Add a directory to the zip
|
||||
zip.append('', { name: file.destinationPath });
|
||||
}
|
||||
}
|
||||
const bufferSize = (0, config_1.getUploadChunkSize)();
|
||||
const zipUploadStream = new ZipUploadStream(bufferSize);
|
||||
core.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`);
|
||||
core.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`);
|
||||
zip.pipe(zipUploadStream);
|
||||
zip.finalize();
|
||||
return zipUploadStream;
|
||||
});
|
||||
}
|
||||
exports.createZipUploadStream = createZipUploadStream;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const zipErrorCallback = (error) => {
|
||||
core.error('An error has occurred while creating the zip file for upload');
|
||||
core.info(error);
|
||||
throw new Error('An error has occurred during zip creation for the artifact');
|
||||
};
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const zipWarningCallback = (error) => {
|
||||
if (error.code === 'ENOENT') {
|
||||
core.warning('ENOENT warning during artifact zip creation. No such file or directory');
|
||||
core.info(error);
|
||||
}
|
||||
else {
|
||||
core.warning(`A non-blocking warning has occurred during artifact zip creation: ${error.code}`);
|
||||
core.info(error);
|
||||
}
|
||||
};
|
||||
const zipFinishCallback = () => {
|
||||
core.debug('Zip stream for upload has finished.');
|
||||
};
|
||||
const zipEndCallback = () => {
|
||||
core.debug('Zip stream for upload has ended.');
|
||||
};
|
||||
//# sourceMappingURL=zip.js.map
|
||||
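The stream returned by `createZipUploadStream` is what `uploadZipToBlobStorage` consumes, but it can be drained anywhere. A sketch that writes the archive to a local file instead of blob storage (the spec entries are examples and must point at real files):

```js
const fs = require('fs');
const { createZipUploadStream } = require('@actions/artifact/lib/internal/upload/zip');

async function main() {
  const zipStream = await createZipUploadStream(
    [
      { sourcePath: '/tmp/out/report.html', destinationPath: '/report.html' },
      { sourcePath: null, destinationPath: '/empty-dir' } // directory entries use a null sourcePath
    ],
    9 // compression level; DEFAULT_COMPRESSION_LEVEL is 6
  );

  // In the real upload path this stream is piped into uploadZipToBlobStorage instead.
  zipStream.pipe(fs.createWriteStream('/tmp/artifact.zip'));
}

main().catch(console.error);
```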
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/lib/internal/upload/zip.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"zip.js","sourceRoot":"","sources":["../../../src/internal/upload/zip.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,+CAAgC;AAChC,mDAAoC;AACpC,oDAAqC;AACrC,2BAAmC;AAEnC,6CAAmD;AAEtC,QAAA,yBAAyB,GAAG,CAAC,CAAA;AAE1C,qEAAqE;AACrE,iDAAiD;AACjD,MAAa,eAAgB,SAAQ,MAAM,CAAC,SAAS;IACnD,YAAY,UAAkB;QAC5B,KAAK,CAAC;YACJ,aAAa,EAAE,UAAU;SAC1B,CAAC,CAAA;IACJ,CAAC;IAED,8DAA8D;IAC9D,UAAU,CAAC,KAAU,EAAE,GAAQ,EAAE,EAAO;QACtC,EAAE,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;IACjB,CAAC;CACF;AAXD,0CAWC;AAED,SAAsB,qBAAqB,CACzC,mBAA6C,EAC7C,mBAA2B,iCAAyB;;QAEpD,IAAI,CAAC,KAAK,CACR,oDAAoD,gBAAgB,EAAE,CACvE,CAAA;QAED,MAAM,GAAG,GAAG,QAAQ,CAAC,MAAM,CAAC,KAAK,EAAE;YACjC,aAAa,EAAE,IAAA,2BAAkB,GAAE;YACnC,IAAI,EAAE,EAAC,KAAK,EAAE,gBAAgB,EAAC;SAChC,CAAC,CAAA;QAEF,iEAAiE;QACjE,GAAG,CAAC,EAAE,CAAC,OAAO,EAAE,gBAAgB,CAAC,CAAA;QACjC,GAAG,CAAC,EAAE,CAAC,SAAS,EAAE,kBAAkB,CAAC,CAAA;QACrC,GAAG,CAAC,EAAE,CAAC,QAAQ,EAAE,iBAAiB,CAAC,CAAA;QACnC,GAAG,CAAC,EAAE,CAAC,KAAK,EAAE,cAAc,CAAC,CAAA;QAE7B,KAAK,MAAM,IAAI,IAAI,mBAAmB,EAAE;YACtC,IAAI,IAAI,CAAC,UAAU,KAAK,IAAI,EAAE;gBAC5B,+BAA+B;gBAC/B,GAAG,CAAC,MAAM,CAAC,IAAA,qBAAgB,EAAC,IAAI,CAAC,UAAU,CAAC,EAAE;oBAC5C,IAAI,EAAE,IAAI,CAAC,eAAe;iBAC3B,CAAC,CAAA;aACH;iBAAM;gBACL,6BAA6B;gBAC7B,GAAG,CAAC,MAAM,CAAC,EAAE,EAAE,EAAC,IAAI,EAAE,IAAI,CAAC,eAAe,EAAC,CAAC,CAAA;aAC7C;SACF;QAED,MAAM,UAAU,GAAG,IAAA,2BAAkB,GAAE,CAAA;QACvC,MAAM,eAAe,GAAG,IAAI,eAAe,CAAC,UAAU,CAAC,CAAA;QAEvD,IAAI,CAAC,KAAK,CACR,kCAAkC,eAAe,CAAC,qBAAqB,EAAE,CAC1E,CAAA;QACD,IAAI,CAAC,KAAK,CACR,iCAAiC,eAAe,CAAC,qBAAqB,EAAE,CACzE,CAAA;QAED,GAAG,CAAC,IAAI,CAAC,eAAe,CAAC,CAAA;QACzB,GAAG,CAAC,QAAQ,EAAE,CAAA;QAEd,OAAO,eAAe,CAAA;IACxB,CAAC;CAAA;AA7CD,sDA6CC;AAED,8DAA8D;AAC9D,MAAM,gBAAgB,GAAG,CAAC,KAAU,EAAQ,EAAE;IAC5C,IAAI,CAAC,KAAK,CAAC,8DAA8D,CAAC,CAAA;IAC1E,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;IAEhB,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAA;AAC/E,CAAC,CAAA;AAED,8DAA8D;AAC9D,MAAM,kBAAkB,GAAG,CAAC,KAAU,EAAQ,EAAE;IAC9C,IAAI,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;QAC3B,IAAI,CAAC,OAAO,CACV,wEAAwE,CACzE,CAAA;QACD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;KACjB;SAAM;QACL,IAAI,CAAC,OAAO,CACV,qEAAqE,KAAK,CAAC,IAAI,EAAE,CAClF,CAAA;QACD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;KACjB;AACH,CAAC,CAAA;AAED,MAAM,iBAAiB,GAAG,GAAS,EAAE;IACnC,IAAI,CAAC,KAAK,CAAC,qCAAqC,CAAC,CAAA;AACnD,CAAC,CAAA;AAED,MAAM,cAAc,GAAG,GAAS,EAAE;IAChC,IAAI,CAAC,KAAK,CAAC,kCAAkC,CAAC,CAAA;AAChD,CAAC,CAAA"}
|
||||
9
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/node_modules/@actions/github/LICENSE.md
generated
vendored
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright 2019 GitHub
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
98
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/node_modules/@actions/github/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,98 @@
|
|||
# `@actions/github`
|
||||
|
||||
> A hydrated Octokit client.
|
||||
|
||||
## Usage
|
||||
|
||||
Returns an authenticated Octokit client that follows the machine [proxy settings](https://help.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners) and correctly sets GHES base urls. See https://octokit.github.io/rest.js for the API.
|
||||
|
||||
```js
|
||||
const github = require('@actions/github');
|
||||
const core = require('@actions/core');
|
||||
|
||||
async function run() {
|
||||
// This should be a token with access to your repository scoped in as a secret.
|
||||
// The YML workflow will need to set myToken with the GitHub Secret Token
|
||||
// myToken: ${{ secrets.GITHUB_TOKEN }}
|
||||
// https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token#about-the-github_token-secret
|
||||
const myToken = core.getInput('myToken');
|
||||
|
||||
const octokit = github.getOctokit(myToken)
|
||||
|
||||
// You can also pass in additional options as a second parameter to getOctokit
|
||||
// const octokit = github.getOctokit(myToken, {userAgent: "MyActionVersion1"});
|
||||
|
||||
const { data: pullRequest } = await octokit.rest.pulls.get({
|
||||
owner: 'octokit',
|
||||
repo: 'rest.js',
|
||||
pull_number: 123,
|
||||
mediaType: {
|
||||
format: 'diff'
|
||||
}
|
||||
});
|
||||
|
||||
console.log(pullRequest);
|
||||
}
|
||||
|
||||
run();
|
||||
```
|
||||
|
||||
You can also make GraphQL requests. See https://github.com/octokit/graphql.js for the API.
|
||||
|
||||
```js
|
||||
const result = await octokit.graphql(query, variables);
|
||||
```
|
||||
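For instance, a minimal query; the query text, variables, and selected fields below are only an illustration:

```js
const query = `
  query lastIssues($owner: String!, $repo: String!, $count: Int!) {
    repository(owner: $owner, name: $repo) {
      issues(last: $count) {
        nodes { title url }
      }
    }
  }
`;
const variables = {owner: 'octokit', repo: 'rest.js', count: 5};

const result = await octokit.graphql(query, variables);
console.log(result.repository.issues.nodes);
```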
|
||||
Finally, you can get the context of the current action:
|
||||
|
||||
```js
|
||||
const github = require('@actions/github');
|
||||
|
||||
const context = github.context;
|
||||
|
||||
const newIssue = await octokit.rest.issues.create({
|
||||
...context.repo,
|
||||
title: 'New issue!',
|
||||
body: 'Hello Universe!'
|
||||
});
|
||||
```
|
||||
|
||||
## Webhook payload typescript definitions
|
||||
|
||||
The npm module `@octokit/webhooks-definitions` provides type definitions for the response payloads. You can cast the payload to these types for better type information.
|
||||
|
||||
First, install the npm module `npm install @octokit/webhooks-definitions`
|
||||
|
||||
Then, assert the type based on the eventName
|
||||
```ts
|
||||
import * as core from '@actions/core'
|
||||
import * as github from '@actions/github'
|
||||
import {PushEvent} from '@octokit/webhooks-definitions/schema'
|
||||
|
||||
if (github.context.eventName === 'push') {
|
||||
const pushPayload = github.context.payload as PushEvent
|
||||
core.info(`The head commit is: ${pushPayload.head_commit}`)
|
||||
}
|
||||
```
|
||||
|
||||
## Extending the Octokit instance
|
||||
`@octokit/core` now supports the [plugin architecture](https://github.com/octokit/core.js#plugins). You can extend the GitHub instance using plugins.
|
||||
|
||||
For example, using the `@octokit/plugin-enterprise-server` you can now access enterprise admin apis on GHES instances.
|
||||
|
||||
```ts
|
||||
import { GitHub, getOctokitOptions } from '@actions/github/lib/utils'
|
||||
import { enterpriseServer220Admin } from '@octokit/plugin-enterprise-server'
|
||||
|
||||
const octokit = GitHub.plugin(enterpriseServer220Admin)
|
||||
// or override some of the default values as well
|
||||
// const octokit = GitHub.plugin(enterpriseServer220Admin).defaults({userAgent: "MyNewUserAgent"})
|
||||
|
||||
const myToken = core.getInput('myToken');
|
||||
const myOctokit = new octokit(getOctokitOptions(token))
|
||||
// Create a new user
|
||||
myOctokit.rest.enterpriseAdmin.createUser({
|
||||
login: "testuser",
|
||||
email: "testuser@test.com",
|
||||
});
|
||||
```
|
||||
32
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/node_modules/@actions/github/lib/context.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
import { WebhookPayload } from './interfaces';
|
||||
export declare class Context {
|
||||
/**
|
||||
* Webhook payload object that triggered the workflow
|
||||
*/
|
||||
payload: WebhookPayload;
|
||||
eventName: string;
|
||||
sha: string;
|
||||
ref: string;
|
||||
workflow: string;
|
||||
action: string;
|
||||
actor: string;
|
||||
job: string;
|
||||
runNumber: number;
|
||||
runId: number;
|
||||
apiUrl: string;
|
||||
serverUrl: string;
|
||||
graphqlUrl: string;
|
||||
/**
|
||||
* Hydrate the context from the environment
|
||||
*/
|
||||
constructor();
|
||||
get issue(): {
|
||||
owner: string;
|
||||
repo: string;
|
||||
number: number;
|
||||
};
|
||||
get repo(): {
|
||||
owner: string;
|
||||
repo: string;
|
||||
};
|
||||
}
|
||||
54
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/node_modules/@actions/github/lib/context.js
generated
vendored
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Context = void 0;
|
||||
const fs_1 = require("fs");
|
||||
const os_1 = require("os");
|
||||
class Context {
|
||||
/**
|
||||
* Hydrate the context from the environment
|
||||
*/
|
||||
constructor() {
|
||||
var _a, _b, _c;
|
||||
this.payload = {};
|
||||
if (process.env.GITHUB_EVENT_PATH) {
|
||||
if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
|
||||
this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
|
||||
}
|
||||
else {
|
||||
const path = process.env.GITHUB_EVENT_PATH;
|
||||
process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
|
||||
}
|
||||
}
|
||||
this.eventName = process.env.GITHUB_EVENT_NAME;
|
||||
this.sha = process.env.GITHUB_SHA;
|
||||
this.ref = process.env.GITHUB_REF;
|
||||
this.workflow = process.env.GITHUB_WORKFLOW;
|
||||
this.action = process.env.GITHUB_ACTION;
|
||||
this.actor = process.env.GITHUB_ACTOR;
|
||||
this.job = process.env.GITHUB_JOB;
|
||||
this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
|
||||
this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
|
||||
this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;
|
||||
this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;
|
||||
this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
|
||||
}
|
||||
get issue() {
|
||||
const payload = this.payload;
|
||||
return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
|
||||
}
|
||||
get repo() {
|
||||
if (process.env.GITHUB_REPOSITORY) {
|
||||
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
|
||||
return { owner, repo };
|
||||
}
|
||||
if (this.payload.repository) {
|
||||
return {
|
||||
owner: this.payload.repository.owner.login,
|
||||
repo: this.payload.repository.name
|
||||
};
|
||||
}
|
||||
throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
|
||||
}
|
||||
}
|
||||
exports.Context = Context;
|
||||
//# sourceMappingURL=context.js.map
|
||||
1
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/node_modules/@actions/github/lib/context.js.map
generated
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"context.js","sourceRoot":"","sources":["../src/context.ts"],"names":[],"mappings":";;;AAEA,2BAA2C;AAC3C,2BAAsB;AAEtB,MAAa,OAAO;IAmBlB;;OAEG;IACH;;QACE,IAAI,CAAC,OAAO,GAAG,EAAE,CAAA;QACjB,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE;YACjC,IAAI,eAAU,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;gBAC7C,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,KAAK,CACvB,iBAAY,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,EAAC,QAAQ,EAAE,MAAM,EAAC,CAAC,CAChE,CAAA;aACF;iBAAM;gBACL,MAAM,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAA;gBAC1C,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,qBAAqB,IAAI,kBAAkB,QAAG,EAAE,CAAC,CAAA;aACvE;SACF;QACD,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,iBAA2B,CAAA;QACxD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,eAAyB,CAAA;QACrD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,aAAuB,CAAA;QACjD,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,YAAsB,CAAA;QAC/C,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,UAAoB,CAAA;QAC3C,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,iBAA2B,EAAE,EAAE,CAAC,CAAA;QACtE,IAAI,CAAC,KAAK,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,aAAuB,EAAE,EAAE,CAAC,CAAA;QAC9D,IAAI,CAAC,MAAM,SAAG,OAAO,CAAC,GAAG,CAAC,cAAc,mCAAI,wBAAwB,CAAA;QACpE,IAAI,CAAC,SAAS,SAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,mCAAI,oBAAoB,CAAA;QACtE,IAAI,CAAC,UAAU,SACb,OAAO,CAAC,GAAG,CAAC,kBAAkB,mCAAI,gCAAgC,CAAA;IACtE,CAAC;IAED,IAAI,KAAK;QACP,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAA;QAE5B,uCACK,IAAI,CAAC,IAAI,KACZ,MAAM,EAAE,CAAC,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,YAAY,IAAI,OAAO,CAAC,CAAC,MAAM,IAClE;IACH,CAAC;IAED,IAAI,IAAI;QACN,IAAI,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE;YACjC,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;YAC9D,OAAO,EAAC,KAAK,EAAE,IAAI,EAAC,CAAA;SACrB;QAED,IAAI,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YAC3B,OAAO;gBACL,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK;gBAC1C,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI;aACnC,CAAA;SACF;QAED,MAAM,IAAI,KAAK,CACb,kFAAkF,CACnF,CAAA;IACH,CAAC;CACF;AA3ED,0BA2EC"}
|
||||
11
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/node_modules/@actions/github/lib/github.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
import * as Context from './context';
|
||||
import { GitHub } from './utils';
|
||||
import { OctokitOptions, OctokitPlugin } from '@octokit/core/dist-types/types';
|
||||
export declare const context: Context.Context;
|
||||
/**
|
||||
* Returns a hydrated octokit ready to use for GitHub Actions
|
||||
*
|
||||
* @param token the repo PAT or GITHUB_TOKEN
|
||||
* @param options other options to set
|
||||
*/
|
||||
export declare function getOctokit(token: string, options?: OctokitOptions, ...additionalPlugins: OctokitPlugin[]): InstanceType<typeof GitHub>;
|
||||
37
dawidd6/action-download-artifact-v3/node_modules/@actions/artifact/node_modules/@actions/github/lib/github.js
generated
vendored
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getOctokit = exports.context = void 0;
|
||||
const Context = __importStar(require("./context"));
|
||||
const utils_1 = require("./utils");
|
||||
exports.context = new Context.Context();
|
||||
/**
|
||||
* Returns a hydrated octokit ready to use for GitHub Actions
|
||||
*
|
||||
* @param token the repo PAT or GITHUB_TOKEN
|
||||
* @param options other options to set
|
||||
*/
|
||||
function getOctokit(token, options, ...additionalPlugins) {
|
||||
const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins);
|
||||
return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options));
|
||||
}
|
||||
exports.getOctokit = getOctokit;
|
||||
//# sourceMappingURL=github.js.map
|
||||
Some files were not shown because too many files have changed in this diff.