initial commit of actions
This commit is contained in:
commit
949ece5785
44660 changed files with 12034344 additions and 0 deletions
77
github/codeql-action-v2/node_modules/@actions/artifact-legacy/lib/internal/config-variables.js
generated
vendored
Normal file
77
github/codeql-action-v2/node_modules/@actions/artifact-legacy/lib/internal/config-variables.js
generated
vendored
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
"use strict";
// Mark this module as compiled-from-ESM so TypeScript's interop helpers
// treat the exports correctly when imported.
Object.defineProperty(exports, "__esModule", { value: true });
// Pre-declare every export as undefined (standard TypeScript compiler output);
// the concrete function values are assigned further down in the file.
exports.getUploadFileConcurrency = exports.getUploadChunkSize = exports.getRetryLimit = exports.getRetryMultiplier = exports.getInitialRetryIntervalInMilliseconds = exports.getDownloadFileConcurrency = exports.getRuntimeToken = exports.getRuntimeUrl = exports.getWorkFlowRunId = exports.getWorkSpaceDirectory = exports.getRetentionDays = exports.isGhes = void 0;
|
||||
// How many artifact file uploads are allowed to run at the same time.
function getUploadFileConcurrency() {
    const parallelUploads = 2;
    return parallelUploads;
}
|
||||
// Publish getUploadFileConcurrency on the CommonJS exports object.
exports.getUploadFileConcurrency = getUploadFileConcurrency;
|
||||
// Files too large for a single HTTP call are split into pieces; this is the
// size of each piece sent during upload.
function getUploadChunkSize() {
    const megabyte = 1024 * 1024;
    return 8 * megabyte; // 8 MB chunks
}
|
||||
// Publish getUploadChunkSize on the CommonJS exports object.
exports.getUploadChunkSize = getUploadChunkSize;
|
||||
// Upper bound on retry attempts before an upload or download is treated as failed.
function getRetryLimit() {
    const maxAttempts = 5;
    return maxAttempts;
}
|
||||
// Publish getRetryLimit on the CommonJS exports object.
exports.getRetryLimit = getRetryLimit;
|
||||
// Exponential-backoff factor: each successive retry waits this many times
// longer than the previous attempt did.
function getRetryMultiplier() {
    const backoffFactor = 1.5;
    return backoffFactor;
}
|
||||
// Publish getRetryMultiplier on the CommonJS exports object.
exports.getRetryMultiplier = getRetryMultiplier;
|
||||
// Wait time (in milliseconds) before the very first retry of a failed
// upload or download.
function getInitialRetryIntervalInMilliseconds() {
    const firstRetryDelayMs = 3000;
    return firstRetryDelayMs;
}
|
||||
// Publish getInitialRetryIntervalInMilliseconds on the CommonJS exports object.
exports.getInitialRetryIntervalInMilliseconds = getInitialRetryIntervalInMilliseconds;
|
||||
// How many artifact file downloads are allowed to run at the same time.
function getDownloadFileConcurrency() {
    const parallelDownloads = 2;
    return parallelDownloads;
}
|
||||
// Publish getDownloadFileConcurrency on the CommonJS exports object.
exports.getDownloadFileConcurrency = getDownloadFileConcurrency;
|
||||
// Token used to authenticate against the Actions runtime service.
// Throws when ACTIONS_RUNTIME_TOKEN is unset or empty.
function getRuntimeToken() {
    const { ACTIONS_RUNTIME_TOKEN: runtimeToken } = process.env;
    if (!runtimeToken) {
        throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable');
    }
    return runtimeToken;
}
|
||||
// Publish getRuntimeToken on the CommonJS exports object.
exports.getRuntimeToken = getRuntimeToken;
|
||||
// Base URL of the Actions runtime service.
// Throws when ACTIONS_RUNTIME_URL is unset or empty.
function getRuntimeUrl() {
    const { ACTIONS_RUNTIME_URL: serviceUrl } = process.env;
    if (!serviceUrl) {
        throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable');
    }
    return serviceUrl;
}
|
||||
// Publish getRuntimeUrl on the CommonJS exports object.
exports.getRuntimeUrl = getRuntimeUrl;
|
||||
// Identifier of the current workflow run, as a string.
// Throws when GITHUB_RUN_ID is unset or empty.
function getWorkFlowRunId() {
    const { GITHUB_RUN_ID: runId } = process.env;
    if (!runId) {
        throw new Error('Unable to get GITHUB_RUN_ID env variable');
    }
    return runId;
}
|
||||
// Publish getWorkFlowRunId on the CommonJS exports object.
exports.getWorkFlowRunId = getWorkFlowRunId;
|
||||
// Path of the runner's workspace directory for this job.
// Throws when GITHUB_WORKSPACE is unset or empty.
function getWorkSpaceDirectory() {
    const { GITHUB_WORKSPACE: workspaceDir } = process.env;
    if (!workspaceDir) {
        throw new Error('Unable to get GITHUB_WORKSPACE env variable');
    }
    return workspaceDir;
}
|
||||
// Publish getWorkSpaceDirectory on the CommonJS exports object.
exports.getWorkSpaceDirectory = getWorkSpaceDirectory;
|
||||
// Artifact retention period as provided by the runner; returns the raw
// string value, or undefined when GITHUB_RETENTION_DAYS is not set.
function getRetentionDays() {
    const { GITHUB_RETENTION_DAYS: retentionDays } = process.env;
    return retentionDays;
}
|
||||
// Publish getRetentionDays on the CommonJS exports object.
exports.getRetentionDays = getRetentionDays;
|
||||
// True when running against GitHub Enterprise Server, detected as any server
// URL whose hostname is not github.com. Falls back to https://github.com
// when GITHUB_SERVER_URL is unset.
function isGhes() {
    const serverUrl = process.env['GITHUB_SERVER_URL'] || 'https://github.com';
    const { hostname } = new URL(serverUrl);
    return hostname.toUpperCase() !== 'GITHUB.COM';
}
|
||||
// Publish isGhes on the CommonJS exports object.
exports.isGhes = isGhes;
|
||||
//# sourceMappingURL=config-variables.js.map
|
||||
Loading…
Add table
Add a link
Reference in a new issue