initial commit of actions

Dominik Polakovics 2026-01-31 18:56:04 +01:00
commit 949ece5785
44660 changed files with 12034344 additions and 0 deletions

@@ -0,0 +1,34 @@
import { DownloadOptions, UploadOptions } from './options';
export declare class ValidationError extends Error {
constructor(message: string);
}
export declare class ReserveCacheError extends Error {
constructor(message: string);
}
/**
* Checks if the Actions cache service is available
*
* @returns boolean true if the Actions cache service is available, otherwise false
*/
export declare function isFeatureAvailable(): boolean;
/**
* Restores cache from keys
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param options cache download options
* @param enableCrossOsArchive an optional boolean that, when enabled, allows a cache created on any platform to be restored on Windows
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
export declare function restoreCache(paths: string[], primaryKey: string, restoreKeys?: string[], options?: DownloadOptions, enableCrossOsArchive?: boolean): Promise<string | undefined>;
/**
* Saves a list of files with the specified key
*
* @param paths a list of file paths to be cached
* @param key an explicit key for saving the cache
* @param enableCrossOsArchive an optional boolean that, when enabled on Windows, saves a cache that can be restored on any platform
* @param options cache upload options
* @returns number the cacheId if the cache was saved successfully; throws an error if the save fails
*/
export declare function saveCache(paths: string[], key: string, options?: UploadOptions, enableCrossOsArchive?: boolean): Promise<number>;
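
A quick usage sketch based on the declarations above, assuming this is the published @actions/cache package (the paths and keys are hypothetical; error handling omitted):

import * as cache from '@actions/cache';

async function run(): Promise<void> {
    // Skip caching entirely on runners without the cache service.
    if (!cache.isFeatureAvailable()) {
        return;
    }
    const paths = ['node_modules'];         // hypothetical paths to cache
    const primaryKey = 'npm-linux-abc123';  // hypothetical exact key
    const restoreKeys = ['npm-linux-'];     // prefix fallbacks, tried in order

    const hitKey = await cache.restoreCache(paths, primaryKey, restoreKeys);
    if (hitKey !== primaryKey) {
        // Miss or prefix-only hit: rebuild, then save under the primary key.
        const cacheId = await cache.saveCache(paths, primaryKey);
        console.log(`Saved cache ${cacheId}`);
    }
}

run();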

@@ -0,0 +1,450 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0;
const core = __importStar(require("@actions/core"));
const path = __importStar(require("path"));
const utils = __importStar(require("./internal/cacheUtils"));
const cacheHttpClient = __importStar(require("./internal/cacheHttpClient"));
const cacheTwirpClient = __importStar(require("./internal/shared/cacheTwirpClient"));
const config_1 = require("./internal/config");
const tar_1 = require("./internal/tar");
const constants_1 = require("./internal/constants");
class ValidationError extends Error {
constructor(message) {
super(message);
this.name = 'ValidationError';
Object.setPrototypeOf(this, ValidationError.prototype);
}
}
exports.ValidationError = ValidationError;
class ReserveCacheError extends Error {
constructor(message) {
super(message);
this.name = 'ReserveCacheError';
Object.setPrototypeOf(this, ReserveCacheError.prototype);
}
}
exports.ReserveCacheError = ReserveCacheError;
function checkPaths(paths) {
if (!paths || paths.length === 0) {
throw new ValidationError(`Path Validation Error: At least one directory or file path is required`);
}
}
function checkKey(key) {
if (key.length > 512) {
throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`);
}
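// The key may be any string without a comma; commas are presumably reserved
// because keys are sent to the service as a comma-separated list (the
// service-side reason is not shown in this file).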
const regex = /^[^,]*$/;
if (!regex.test(key)) {
throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`);
}
}
/**
* Checks if the Actions cache service is available
*
* @returns boolean true if the Actions cache service is available, otherwise false
*/
function isFeatureAvailable() {
return !!process.env['ACTIONS_CACHE_URL'];
}
exports.isFeatureAvailable = isFeatureAvailable;
/**
* Restores cache from keys
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param options cache download options
* @param enableCrossOsArchive an optional boolean that, when enabled, allows a cache created on any platform to be restored on Windows
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core.debug(`Cache service version: ${cacheServiceVersion}`);
checkPaths(paths);
switch (cacheServiceVersion) {
case 'v2':
return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);
case 'v1':
default:
return yield restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);
}
});
}
exports.restoreCache = restoreCache;
/**
* Restores cache using the legacy Cache Service
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching.
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param options cache download options
* @param enableCrossOsArchive an optional boolean that, when enabled, allows a cache created on any platform to be restored on Windows
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
restoreKeys = restoreKeys || [];
const keys = [primaryKey, ...restoreKeys];
core.debug('Resolved Keys:');
core.debug(JSON.stringify(keys));
if (keys.length > 10) {
throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`);
}
for (const key of keys) {
checkKey(key);
}
const compressionMethod = yield utils.getCompressionMethod();
let archivePath = '';
try {
// paths are needed to compute the cache version
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod,
enableCrossOsArchive
});
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// Cache not found
return undefined;
}
if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
core.info('Lookup only - skipping download');
return cacheEntry.cacheKey;
}
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
// Download the cache from the cache entry
yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);
if (core.isDebug()) {
yield (0, tar_1.listTar)(archivePath, compressionMethod);
}
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
yield (0, tar_1.extractTar)(archivePath, compressionMethod);
core.info('Cache restored successfully');
return cacheEntry.cacheKey;
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else {
// Suppress all non-validation cache-related errors because caching should be optional
core.warning(`Failed to restore: ${error.message}`);
}
}
finally {
// Try to delete the archive to save space
try {
yield utils.unlinkFile(archivePath);
}
catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
return undefined;
});
}
/**
* Restores cache using Cache Service v2
*
* @param paths a list of file paths to restore from the cache
* @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey
* @param options cache download options
* @param enableCrossOsArchive an optional boolean that, when enabled, allows a cache created on any platform to be restored on Windows
* @returns string returns the key for the cache hit, otherwise returns undefined
*/
function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
// Override UploadOptions to force the use of Azure
options = Object.assign(Object.assign({}, options), { useAzureSdk: true });
restoreKeys = restoreKeys || [];
const keys = [primaryKey, ...restoreKeys];
core.debug('Resolved Keys:');
core.debug(JSON.stringify(keys));
if (keys.length > 10) {
throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`);
}
for (const key of keys) {
checkKey(key);
}
let archivePath = '';
try {
const twirpClient = cacheTwirpClient.internalCacheTwirpClient();
const compressionMethod = yield utils.getCompressionMethod();
const request = {
key: primaryKey,
restoreKeys,
version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive)
};
const response = yield twirpClient.GetCacheEntryDownloadURL(request);
if (!response.ok) {
core.warning(`Cache not found for keys: ${keys.join(', ')}`);
return undefined;
}
core.info(`Cache hit for: ${request.key}`);
if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
core.info('Lookup only - skipping download');
return response.matchedKey;
}
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive path: ${archivePath}`);
core.debug(`Starting download of archive to: ${archivePath}`);
yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options);
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
if (core.isDebug()) {
yield (0, tar_1.listTar)(archivePath, compressionMethod);
}
yield (0, tar_1.extractTar)(archivePath, compressionMethod);
core.info('Cache restored successfully');
return response.matchedKey;
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else {
// Suppress all non-validation cache-related errors because caching should be optional
core.warning(`Failed to restore: ${error.message}`);
}
}
finally {
try {
if (archivePath) {
yield utils.unlinkFile(archivePath);
}
}
catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
return undefined;
});
}
/**
* Saves a list of files with the specified key
*
* @param paths a list of file paths to be cached
* @param key an explicit key for saving the cache
* @param enableCrossOsArchive an optional boolean that, when enabled on Windows, saves a cache that can be restored on any platform
* @param options cache upload options
* @returns number the cacheId if the cache was saved successfully; throws an error if the save fails
*/
function saveCache(paths, key, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core.debug(`Cache service version: ${cacheServiceVersion}`);
checkPaths(paths);
checkKey(key);
switch (cacheServiceVersion) {
case 'v2':
return yield saveCacheV2(paths, key, options, enableCrossOsArchive);
case 'v1':
default:
return yield saveCacheV1(paths, key, options, enableCrossOsArchive);
}
});
}
exports.saveCache = saveCache;
/**
* Save cache using the legacy Cache Service
*
* @param paths a list of file paths to be cached
* @param key an explicit key for saving the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean that, when enabled on Windows, saves a cache that can be restored on any platform
* @returns number the cacheId if the cache was saved successfully
*/
function saveCacheV1(paths, key, options, enableCrossOsArchive = false) {
var _a, _b, _c, _d, _e;
return __awaiter(this, void 0, void 0, function* () {
const compressionMethod = yield utils.getCompressionMethod();
let cacheId = -1;
const cachePaths = yield utils.resolvePaths(paths);
core.debug('Cache Paths:');
core.debug(`${JSON.stringify(cachePaths)}`);
if (cachePaths.length === 0) {
throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);
}
const archiveFolder = yield utils.createTempDirectory();
const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
try {
yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod);
if (core.isDebug()) {
yield (0, tar_1.listTar)(archivePath, compressionMethod);
}
const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
// For GHES, this check takes place in the ReserveCache API with the enterprise file size limit
if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
core.debug('Reserving Cache');
const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
compressionMethod,
enableCrossOsArchive,
cacheSize: archiveFileSize
});
if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId;
}
else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) {
throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`);
}
else {
throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
}
core.debug(`Saving Cache (ID: ${cacheId})`);
yield cacheHttpClient.saveCache(cacheId, archivePath, '', options);
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else if (typedError.name === ReserveCacheError.name) {
core.info(`Failed to save: ${typedError.message}`);
}
else {
core.warning(`Failed to save: ${typedError.message}`);
}
}
finally {
// Try to delete the archive to save space
try {
yield utils.unlinkFile(archivePath);
}
catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
return cacheId;
});
}
/**
* Save cache using Cache Service v2
*
* @param paths a list of file paths to be cached
* @param key an explicit key for saving the cache
* @param options cache upload options
* @param enableCrossOsArchive an optional boolean that, when enabled on Windows, saves a cache that can be restored on any platform
* @returns number the cacheId if the cache was saved successfully
*/
function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
return __awaiter(this, void 0, void 0, function* () {
// Override UploadOptions to force the use of Azure
// ...options is spread first so that these specific values take precedence
// over any defaults set in UploadOptions
options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true });
const compressionMethod = yield utils.getCompressionMethod();
const twirpClient = cacheTwirpClient.internalCacheTwirpClient();
let cacheId = -1;
const cachePaths = yield utils.resolvePaths(paths);
core.debug('Cache Paths:');
core.debug(`${JSON.stringify(cachePaths)}`);
if (cachePaths.length === 0) {
throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);
}
const archiveFolder = yield utils.createTempDirectory();
const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
try {
yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod);
if (core.isDebug()) {
yield (0, tar_1.listTar)(archivePath, compressionMethod);
}
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
// For GHES, this check takes place in the ReserveCache API with the enterprise file size limit
if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
// Set the archive size in the options; it will be used to display upload progress
options.archiveSizeBytes = archiveFileSize;
core.debug('Reserving Cache');
const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive);
const request = {
key,
version
};
const response = yield twirpClient.CreateCacheEntry(request);
if (!response.ok) {
throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`);
}
core.debug(`Attempting to upload cache located at: ${archivePath}`);
yield cacheHttpClient.saveCache(cacheId, archivePath, response.signedUploadUrl, options);
const finalizeRequest = {
key,
version,
sizeBytes: `${archiveFileSize}`
};
const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest);
core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`);
if (!finalizeResponse.ok) {
throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`);
}
cacheId = parseInt(finalizeResponse.entryId);
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else if (typedError.name === ReserveCacheError.name) {
core.info(`Failed to save: ${typedError.message}`);
}
else {
core.warning(`Failed to save: ${typedError.message}`);
}
}
finally {
// Try to delete the archive to save space
try {
yield utils.unlinkFile(archivePath);
}
catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
return cacheId;
});
}
//# sourceMappingURL=cache.js.map
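
One behavior worth highlighting from the code above: the lookupOnly download option (honored by both restoreCacheV1 and restoreCacheV2) turns a restore into a pure existence check, returning the matched key without downloading or extracting anything. A minimal sketch (key and paths hypothetical):

import * as cache from '@actions/cache';

(async () => {
    // Returns the matched key, or undefined, without touching the filesystem.
    const matched = await cache.restoreCache(['node_modules'], 'npm-linux-abc123', [], {
        lookupOnly: true
    });
    console.log(matched !== undefined ? `would hit: ${matched}` : 'no cache entry');
})();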

File diff suppressed because one or more lines are too long

@@ -0,0 +1,158 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* A Timestamp represents a point in time independent of any time zone or local
* calendar, encoded as a count of seconds and fractions of seconds at
* nanosecond resolution. The count is relative to an epoch at UTC midnight on
* January 1, 1970, in the proleptic Gregorian calendar which extends the
* Gregorian calendar backwards to year one.
*
* All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
* second table is needed for interpretation, using a [24-hour linear
* smear](https://developers.google.com/time/smear).
*
* The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
* restricting to that range, we ensure that we can convert to and from [RFC
* 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
*
* # Examples
*
* Example 1: Compute Timestamp from POSIX `time()`.
*
* Timestamp timestamp;
* timestamp.set_seconds(time(NULL));
* timestamp.set_nanos(0);
*
* Example 2: Compute Timestamp from POSIX `gettimeofday()`.
*
* struct timeval tv;
* gettimeofday(&tv, NULL);
*
* Timestamp timestamp;
* timestamp.set_seconds(tv.tv_sec);
* timestamp.set_nanos(tv.tv_usec * 1000);
*
* Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
*
* FILETIME ft;
* GetSystemTimeAsFileTime(&ft);
* UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
*
* // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
* // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
* Timestamp timestamp;
* timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
* timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
*
* Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
*
* long millis = System.currentTimeMillis();
*
* Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
* .setNanos((int) ((millis % 1000) * 1000000)).build();
*
*
* Example 5: Compute Timestamp from Java `Instant.now()`.
*
* Instant now = Instant.now();
*
* Timestamp timestamp =
* Timestamp.newBuilder().setSeconds(now.getEpochSecond())
* .setNanos(now.getNano()).build();
*
*
* Example 6: Compute Timestamp from current time in Python.
*
* timestamp = Timestamp()
* timestamp.GetCurrentTime()
*
* # JSON Mapping
*
* In JSON format, the Timestamp type is encoded as a string in the
* [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
* format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
* where {year} is always expressed using four digits while {month}, {day},
* {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
* seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
* are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
* is required. A proto3 JSON serializer should always use UTC (as indicated by
* "Z") when printing the Timestamp type and a proto3 JSON parser should be
* able to accept both UTC and other timezones (as indicated by an offset).
*
* For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
* 01:30 UTC on January 15, 2017.
*
* In JavaScript, one can convert a Date object to this format using the
* standard
* [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
* method. In Python, a standard `datetime.datetime` object can be converted
* to this format using
* [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
* the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
* the Joda Time's [`ISODateTimeFormat.dateTime()`](
* http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
* ) to obtain a formatter capable of generating timestamps in this format.
*
*
*
* @generated from protobuf message google.protobuf.Timestamp
*/
export interface Timestamp {
/**
* Represents seconds of UTC time since Unix epoch
* 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
* 9999-12-31T23:59:59Z inclusive.
*
* @generated from protobuf field: int64 seconds = 1;
*/
seconds: string;
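// int64 surfaces as a string because this code was generated with the
// long_type_string parameter (see the generated wrappers.js header below),
// which keeps 64-bit values exact in JavaScript.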
/**
* Non-negative fractions of a second at nanosecond resolution. Negative
* second values with fractions must still have non-negative nanos values
* that count forward in time. Must be from 0 to 999,999,999
* inclusive.
*
* @generated from protobuf field: int32 nanos = 2;
*/
nanos: number;
}
declare class Timestamp$Type extends MessageType<Timestamp> {
constructor();
/**
* Creates a new `Timestamp` for the current time.
*/
now(): Timestamp;
/**
* Converts a `Timestamp` to a JavaScript Date.
*/
toDate(message: Timestamp): Date;
/**
* Converts a JavaScript Date to a `Timestamp`.
*/
fromDate(date: Date): Timestamp;
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonWrite(message: Timestamp, options: JsonWriteOptions): JsonValue;
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Timestamp): Timestamp;
create(value?: PartialMessage<Timestamp>): Timestamp;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Timestamp): Timestamp;
internalBinaryWrite(message: Timestamp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.Timestamp
*/
export declare const Timestamp: Timestamp$Type;
export {};
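
The Timestamp constant exported above carries the conversion helpers declared on Timestamp$Type. A small usage sketch (the import path is hypothetical):

import { Timestamp } from './google/protobuf/timestamp';

const ts = Timestamp.now();             // current time as { seconds, nanos }
const asDate = Timestamp.toDate(ts);    // back to a JavaScript Date
const roundTrip = Timestamp.fromDate(asDate);
console.log(ts.seconds, ts.nanos, roundTrip.seconds, asDate.toISOString());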

@@ -0,0 +1,136 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Timestamp = void 0;
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const runtime_6 = require("@protobuf-ts/runtime");
const runtime_7 = require("@protobuf-ts/runtime");
// @generated message type with reflection information, may provide speed optimized methods
class Timestamp$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.Timestamp", [
{ no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
]);
}
/**
* Creates a new `Timestamp` for the current time.
*/
now() {
const msg = this.create();
const ms = Date.now();
msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString();
msg.nanos = (ms % 1000) * 1000000;
return msg;
}
/**
* Converts a `Timestamp` to a JavaScript Date.
*/
toDate(message) {
return new Date(runtime_6.PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000));
}
/**
* Converts a JavaScript Date to a `Timestamp`.
*/
fromDate(date) {
const msg = this.create();
const ms = date.getTime();
msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString();
msg.nanos = (ms % 1000) * 1000000;
return msg;
}
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonWrite(message, options) {
let ms = runtime_6.PbLong.from(message.seconds).toNumber() * 1000;
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
if (message.nanos < 0)
throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative.");
let z = "Z";
if (message.nanos > 0) {
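// Left-pad nanos to 9 digits: adding 1e9 yields a 10-digit number, and
// substring(1) drops the leading "1".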
let nanosStr = (message.nanos + 1000000000).toString().substring(1);
if (nanosStr.substring(3) === "000000")
z = "." + nanosStr.substring(0, 3) + "Z";
else if (nanosStr.substring(6) === "000")
z = "." + nanosStr.substring(0, 6) + "Z";
else
z = "." + nanosStr + "Z";
}
return new Date(ms).toISOString().replace(".000Z", z);
}
/**
* In JSON format, the `Timestamp` type is encoded as a string
* in the RFC 3339 format.
*/
internalJsonRead(json, options, target) {
if (typeof json !== "string")
throw new Error("Unable to parse Timestamp from JSON " + (0, runtime_5.typeofJsonValue)(json) + ".");
let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/);
if (!matches)
throw new Error("Unable to parse Timestamp from JSON. Invalid format.");
let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z"));
if (Number.isNaN(ms))
throw new Error("Unable to parse Timestamp from JSON. Invalid value.");
if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z"))
throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive.");
if (!target)
target = this.create();
target.seconds = runtime_6.PbLong.from(ms / 1000).toString();
target.nanos = 0;
if (matches[7])
target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000);
return target;
}
create(value) {
const message = { seconds: "0", nanos: 0 };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 seconds */ 1:
message.seconds = reader.int64().toString();
break;
case /* int32 nanos */ 2:
message.nanos = reader.int32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* int64 seconds = 1; */
if (message.seconds !== "0")
writer.tag(1, runtime_1.WireType.Varint).int64(message.seconds);
/* int32 nanos = 2; */
if (message.nanos !== 0)
writer.tag(2, runtime_1.WireType.Varint).int32(message.nanos);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Timestamp
*/
exports.Timestamp = new Timestamp$Type();
//# sourceMappingURL=timestamp.js.map
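
The fractional-second handling in internalJsonWrite above trims trailing zeros in whole groups of three, so the JSON output always carries 0, 3, 6, or 9 fractional digits. A standalone sketch of that logic (not part of the library):

// Mirrors the nanos formatting in Timestamp$Type.internalJsonWrite.
function formatNanos(nanos: number): string {
    if (nanos <= 0)
        return 'Z';
    const padded = (nanos + 1000000000).toString().substring(1); // 9 digits
    if (padded.substring(3) === '000000')
        return '.' + padded.substring(0, 3) + 'Z'; // millisecond precision
    if (padded.substring(6) === '000')
        return '.' + padded.substring(0, 6) + 'Z'; // microsecond precision
    return '.' + padded + 'Z';                     // full nanosecond precision
}

console.log(formatNanos(5000000)); // ".005Z"
console.log(formatNanos(5000));    // ".000005Z"
console.log(formatNanos(5));       // ".000000005Z"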

File diff suppressed because one or more lines are too long

@@ -0,0 +1,307 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import type { JsonValue } from "@protobuf-ts/runtime";
import type { JsonReadOptions } from "@protobuf-ts/runtime";
import type { JsonWriteOptions } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* Wrapper message for `double`.
*
* The JSON representation for `DoubleValue` is JSON number.
*
* @generated from protobuf message google.protobuf.DoubleValue
*/
export interface DoubleValue {
/**
* The double value.
*
* @generated from protobuf field: double value = 1;
*/
value: number;
}
/**
* Wrapper message for `float`.
*
* The JSON representation for `FloatValue` is JSON number.
*
* @generated from protobuf message google.protobuf.FloatValue
*/
export interface FloatValue {
/**
* The float value.
*
* @generated from protobuf field: float value = 1;
*/
value: number;
}
/**
* Wrapper message for `int64`.
*
* The JSON representation for `Int64Value` is JSON string.
*
* @generated from protobuf message google.protobuf.Int64Value
*/
export interface Int64Value {
/**
* The int64 value.
*
* @generated from protobuf field: int64 value = 1;
*/
value: string;
}
/**
* Wrapper message for `uint64`.
*
* The JSON representation for `UInt64Value` is JSON string.
*
* @generated from protobuf message google.protobuf.UInt64Value
*/
export interface UInt64Value {
/**
* The uint64 value.
*
* @generated from protobuf field: uint64 value = 1;
*/
value: string;
}
/**
* Wrapper message for `int32`.
*
* The JSON representation for `Int32Value` is JSON number.
*
* @generated from protobuf message google.protobuf.Int32Value
*/
export interface Int32Value {
/**
* The int32 value.
*
* @generated from protobuf field: int32 value = 1;
*/
value: number;
}
/**
* Wrapper message for `uint32`.
*
* The JSON representation for `UInt32Value` is JSON number.
*
* @generated from protobuf message google.protobuf.UInt32Value
*/
export interface UInt32Value {
/**
* The uint32 value.
*
* @generated from protobuf field: uint32 value = 1;
*/
value: number;
}
/**
* Wrapper message for `bool`.
*
* The JSON representation for `BoolValue` is JSON `true` and `false`.
*
* @generated from protobuf message google.protobuf.BoolValue
*/
export interface BoolValue {
/**
* The bool value.
*
* @generated from protobuf field: bool value = 1;
*/
value: boolean;
}
/**
* Wrapper message for `string`.
*
* The JSON representation for `StringValue` is JSON string.
*
* @generated from protobuf message google.protobuf.StringValue
*/
export interface StringValue {
/**
* The string value.
*
* @generated from protobuf field: string value = 1;
*/
value: string;
}
/**
* Wrapper message for `bytes`.
*
* The JSON representation for `BytesValue` is JSON string.
*
* @generated from protobuf message google.protobuf.BytesValue
*/
export interface BytesValue {
/**
* The bytes value.
*
* @generated from protobuf field: bytes value = 1;
*/
value: Uint8Array;
}
declare class DoubleValue$Type extends MessageType<DoubleValue> {
constructor();
/**
* Encode `DoubleValue` to JSON number.
*/
internalJsonWrite(message: DoubleValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `DoubleValue` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: DoubleValue): DoubleValue;
create(value?: PartialMessage<DoubleValue>): DoubleValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DoubleValue): DoubleValue;
internalBinaryWrite(message: DoubleValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.DoubleValue
*/
export declare const DoubleValue: DoubleValue$Type;
declare class FloatValue$Type extends MessageType<FloatValue> {
constructor();
/**
* Encode `FloatValue` to JSON number.
*/
internalJsonWrite(message: FloatValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `FloatValue` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: FloatValue): FloatValue;
create(value?: PartialMessage<FloatValue>): FloatValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FloatValue): FloatValue;
internalBinaryWrite(message: FloatValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.FloatValue
*/
export declare const FloatValue: FloatValue$Type;
declare class Int64Value$Type extends MessageType<Int64Value> {
constructor();
/**
* Encode `Int64Value` to JSON string.
*/
internalJsonWrite(message: Int64Value, options: JsonWriteOptions): JsonValue;
/**
* Decode `Int64Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int64Value): Int64Value;
create(value?: PartialMessage<Int64Value>): Int64Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int64Value): Int64Value;
internalBinaryWrite(message: Int64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.Int64Value
*/
export declare const Int64Value: Int64Value$Type;
declare class UInt64Value$Type extends MessageType<UInt64Value> {
constructor();
/**
* Encode `UInt64Value` to JSON string.
*/
internalJsonWrite(message: UInt64Value, options: JsonWriteOptions): JsonValue;
/**
* Decode `UInt64Value` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt64Value): UInt64Value;
create(value?: PartialMessage<UInt64Value>): UInt64Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt64Value): UInt64Value;
internalBinaryWrite(message: UInt64Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt64Value
*/
export declare const UInt64Value: UInt64Value$Type;
declare class Int32Value$Type extends MessageType<Int32Value> {
constructor();
/**
* Encode `Int32Value` to JSON number.
*/
internalJsonWrite(message: Int32Value, options: JsonWriteOptions): JsonValue;
/**
* Decode `Int32Value` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: Int32Value): Int32Value;
create(value?: PartialMessage<Int32Value>): Int32Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Int32Value): Int32Value;
internalBinaryWrite(message: Int32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.Int32Value
*/
export declare const Int32Value: Int32Value$Type;
declare class UInt32Value$Type extends MessageType<UInt32Value> {
constructor();
/**
* Encode `UInt32Value` to JSON number.
*/
internalJsonWrite(message: UInt32Value, options: JsonWriteOptions): JsonValue;
/**
* Decode `UInt32Value` from JSON number.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: UInt32Value): UInt32Value;
create(value?: PartialMessage<UInt32Value>): UInt32Value;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: UInt32Value): UInt32Value;
internalBinaryWrite(message: UInt32Value, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt32Value
*/
export declare const UInt32Value: UInt32Value$Type;
declare class BoolValue$Type extends MessageType<BoolValue> {
constructor();
/**
* Encode `BoolValue` to JSON bool.
*/
internalJsonWrite(message: BoolValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `BoolValue` from JSON bool.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BoolValue): BoolValue;
create(value?: PartialMessage<BoolValue>): BoolValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BoolValue): BoolValue;
internalBinaryWrite(message: BoolValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.BoolValue
*/
export declare const BoolValue: BoolValue$Type;
declare class StringValue$Type extends MessageType<StringValue> {
constructor();
/**
* Encode `StringValue` to JSON string.
*/
internalJsonWrite(message: StringValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `StringValue` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: StringValue): StringValue;
create(value?: PartialMessage<StringValue>): StringValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StringValue): StringValue;
internalBinaryWrite(message: StringValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.StringValue
*/
export declare const StringValue: StringValue$Type;
declare class BytesValue$Type extends MessageType<BytesValue> {
constructor();
/**
* Encode `BytesValue` to JSON string.
*/
internalJsonWrite(message: BytesValue, options: JsonWriteOptions): JsonValue;
/**
* Decode `BytesValue` from JSON string.
*/
internalJsonRead(json: JsonValue, options: JsonReadOptions, target?: BytesValue): BytesValue;
create(value?: PartialMessage<BytesValue>): BytesValue;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BytesValue): BytesValue;
internalBinaryWrite(message: BytesValue, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message google.protobuf.BytesValue
*/
export declare const BytesValue: BytesValue$Type;
export {};
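
Note the asymmetry above: the 64-bit wrappers type value as string while the 32-bit ones use number, matching the long_type_string generation parameter noted in the generated wrappers.js header. A small sketch using the exported message types (import path hypothetical):

import { Int64Value, Int32Value } from './google/protobuf/wrappers';

// Values beyond Number.MAX_SAFE_INTEGER stay exact as strings.
const big = Int64Value.create({ value: '9007199254740993' });
const small = Int32Value.create({ value: 42 });
console.log(big.value, small.value);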

@@ -0,0 +1,614 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BytesValue = exports.StringValue = exports.BoolValue = exports.UInt32Value = exports.Int32Value = exports.UInt64Value = exports.Int64Value = exports.FloatValue = exports.DoubleValue = void 0;
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "google/protobuf/wrappers.proto" (package "google.protobuf", syntax proto3)
// tslint:disable
//
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//
// Wrappers for primitive (non-message) types. These types are useful
// for embedding primitives in the `google.protobuf.Any` type and for places
// where we need to distinguish between the absence of a primitive
// typed field and its default value.
//
// These wrappers have no meaningful use within repeated fields as they lack
// the ability to detect presence on individual elements.
// These wrappers have no meaningful use within a map or a oneof since
// individual entries of a map or fields of a oneof can already detect presence.
//
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const runtime_6 = require("@protobuf-ts/runtime");
const runtime_7 = require("@protobuf-ts/runtime");
// @generated message type with reflection information, may provide speed optimized methods
class DoubleValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.DoubleValue", [
{ no: 1, name: "value", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ }
]);
}
/**
* Encode `DoubleValue` to JSON number.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(2, message.value, "value", false, true);
}
/**
* Decode `DoubleValue` from JSON number.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 1, undefined, "value");
return target;
}
create(value) {
const message = { value: 0 };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* double value */ 1:
message.value = reader.double();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* double value = 1; */
if (message.value !== 0)
writer.tag(1, runtime_3.WireType.Bit64).double(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.DoubleValue
*/
exports.DoubleValue = new DoubleValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FloatValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.FloatValue", [
{ no: 1, name: "value", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ }
]);
}
/**
* Encode `FloatValue` to JSON number.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(1, message.value, "value", false, true);
}
/**
* Decode `FloatValue` from JSON number.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 1, undefined, "value");
return target;
}
create(value) {
const message = { value: 0 };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* float value */ 1:
message.value = reader.float();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* float value = 1; */
if (message.value !== 0)
writer.tag(1, runtime_3.WireType.Bit32).float(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.FloatValue
*/
exports.FloatValue = new FloatValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int64Value$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.Int64Value", [
{ no: 1, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
/**
* Encode `Int64Value` to JSON string.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(runtime_1.ScalarType.INT64, message.value, "value", false, true);
}
/**
* Decode `Int64Value` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.INT64, runtime_2.LongType.STRING, "value");
return target;
}
create(value) {
const message = { value: "0" };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 value */ 1:
message.value = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* int64 value = 1; */
if (message.value !== "0")
writer.tag(1, runtime_3.WireType.Varint).int64(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Int64Value
*/
exports.Int64Value = new Int64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt64Value$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.UInt64Value", [
{ no: 1, name: "value", kind: "scalar", T: 4 /*ScalarType.UINT64*/ }
]);
}
/**
* Encode `UInt64Value` to JSON string.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(runtime_1.ScalarType.UINT64, message.value, "value", false, true);
}
/**
* Decode `UInt64Value` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.UINT64, runtime_2.LongType.STRING, "value");
return target;
}
create(value) {
const message = { value: "0" };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* uint64 value */ 1:
message.value = reader.uint64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* uint64 value = 1; */
if (message.value !== "0")
writer.tag(1, runtime_3.WireType.Varint).uint64(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt64Value
*/
exports.UInt64Value = new UInt64Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class Int32Value$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.Int32Value", [
{ no: 1, name: "value", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
]);
}
/**
* Encode `Int32Value` to JSON number.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(5, message.value, "value", false, true);
}
/**
* Decode `Int32Value` from JSON number.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 5, undefined, "value");
return target;
}
create(value) {
const message = { value: 0 };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int32 value */ 1:
message.value = reader.int32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* int32 value = 1; */
if (message.value !== 0)
writer.tag(1, runtime_3.WireType.Varint).int32(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.Int32Value
*/
exports.Int32Value = new Int32Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class UInt32Value$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.UInt32Value", [
{ no: 1, name: "value", kind: "scalar", T: 13 /*ScalarType.UINT32*/ }
]);
}
/**
* Encode `UInt32Value` to JSON number.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(13, message.value, "value", false, true);
}
/**
* Decode `UInt32Value` from JSON number.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 13, undefined, "value");
return target;
}
create(value) {
const message = { value: 0 };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* uint32 value */ 1:
message.value = reader.uint32();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* uint32 value = 1; */
if (message.value !== 0)
writer.tag(1, runtime_3.WireType.Varint).uint32(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.UInt32Value
*/
exports.UInt32Value = new UInt32Value$Type();
// @generated message type with reflection information, may provide speed optimized methods
class BoolValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.BoolValue", [
{ no: 1, name: "value", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
]);
}
/**
* Encode `BoolValue` to JSON bool.
*/
internalJsonWrite(message, options) {
return message.value;
}
/**
* Decode `BoolValue` from JSON bool.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 8, undefined, "value");
return target;
}
create(value) {
const message = { value: false };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool value */ 1:
message.value = reader.bool();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool value = 1; */
if (message.value !== false)
writer.tag(1, runtime_3.WireType.Varint).bool(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.BoolValue
*/
exports.BoolValue = new BoolValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class StringValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.StringValue", [
{ no: 1, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
/**
* Encode `StringValue` to JSON string.
*/
internalJsonWrite(message, options) {
return message.value;
}
/**
* Decode `StringValue` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 9, undefined, "value");
return target;
}
create(value) {
const message = { value: "" };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string value */ 1:
message.value = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* string value = 1; */
if (message.value !== "")
writer.tag(1, runtime_3.WireType.LengthDelimited).string(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.StringValue
*/
exports.StringValue = new StringValue$Type();
// @generated message type with reflection information, may provide speed optimized methods
class BytesValue$Type extends runtime_7.MessageType {
constructor() {
super("google.protobuf.BytesValue", [
{ no: 1, name: "value", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
]);
}
/**
* Encode `BytesValue` to JSON string.
*/
internalJsonWrite(message, options) {
return this.refJsonWriter.scalar(12, message.value, "value", false, true);
}
/**
* Decode `BytesValue` from JSON string.
*/
internalJsonRead(json, options, target) {
if (!target)
target = this.create();
target.value = this.refJsonReader.scalar(json, 12, undefined, "value");
return target;
}
create(value) {
const message = { value: new Uint8Array(0) };
globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_5.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bytes value */ 1:
message.value = reader.bytes();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bytes value = 1; */
if (message.value.length)
writer.tag(1, runtime_3.WireType.LengthDelimited).bytes(message.value);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message google.protobuf.BytesValue
*/
exports.BytesValue = new BytesValue$Type();
//# sourceMappingURL=wrappers.js.map
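
The wrapper types above all follow the same shape: each generated $Type is a protobuf-ts MessageType, so it inherits create/toJson/fromJson/toBinary/fromBinary, and the internalJsonWrite overrides are what make a wrapper serialize to its bare JSON scalar (per the proto3 JSON mapping) rather than an object. A minimal usage sketch, assuming the file above is importable as "./wrappers" (the path is illustrative):

import { StringValue, Int32Value } from "./wrappers";

// Wrappers encode to their bare JSON scalar, not { value: ... }.
console.log(StringValue.toJson(StringValue.create({ value: "hi" }))); // "hi"
console.log(Int32Value.toJson(Int32Value.create({ value: 42 })));     // 42

// Binary round-trips go through internalBinaryWrite/internalBinaryRead.
const bytes = StringValue.toBinary(StringValue.create({ value: "hi" }));
console.log(StringValue.fromBinary(bytes).value); // "hi"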

File diff suppressed because one or more lines are too long

@ -0,0 +1,378 @@
import { ServiceType } from "@protobuf-ts/runtime-rpc";
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { CacheEntry } from "../../entities/v1/cacheentry";
import { CacheMetadata } from "../../entities/v1/cachemetadata";
/**
* @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
*/
export interface CreateCacheEntryRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 3;
*/
version: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
*/
export interface CreateCacheEntryResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* SAS URL to upload the cache archive
*
* @generated from protobuf field: string signed_upload_url = 2;
*/
signedUploadUrl: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
*/
export interface FinalizeCacheEntryUploadRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
     * Size of the cache archive in bytes
*
* @generated from protobuf field: int64 size_bytes = 3;
*/
sizeBytes: string;
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 4;
*/
version: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
*/
export interface FinalizeCacheEntryUploadResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* Cache entry database ID
*
* @generated from protobuf field: int64 entry_id = 2;
*/
entryId: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
*/
export interface GetCacheEntryDownloadURLRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Restore keys used for prefix searching
*
* @generated from protobuf field: repeated string restore_keys = 3;
*/
restoreKeys: string[];
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 4;
*/
version: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
*/
export interface GetCacheEntryDownloadURLResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* SAS URL to download the cache archive
*
* @generated from protobuf field: string signed_download_url = 2;
*/
signedDownloadUrl: string;
/**
* Key or restore key that matches the lookup
*
* @generated from protobuf field: string matched_key = 3;
*/
matchedKey: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
*/
export interface DeleteCacheEntryRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse
*/
export interface DeleteCacheEntryResponse {
/**
* @generated from protobuf field: bool ok = 1;
*/
ok: boolean;
/**
* Cache entry database ID
*
* @generated from protobuf field: int64 entry_id = 2;
*/
entryId: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesRequest
*/
export interface ListCacheEntriesRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Restore keys used for prefix searching
*
* @generated from protobuf field: repeated string restore_keys = 3;
*/
restoreKeys: string[];
}
/**
* @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesResponse
*/
export interface ListCacheEntriesResponse {
/**
* Cache entries in the defined scope
*
* @generated from protobuf field: repeated github.actions.results.entities.v1.CacheEntry entries = 1;
*/
entries: CacheEntry[];
}
/**
* @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
*/
export interface LookupCacheEntryRequest {
/**
* Scope and other metadata for the cache entry
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheMetadata metadata = 1;
*/
metadata?: CacheMetadata;
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 2;
*/
key: string;
/**
* Restore keys used for prefix searching
*
* @generated from protobuf field: repeated string restore_keys = 3;
*/
restoreKeys: string[];
/**
* Hash of the compression tool, runner OS and paths cached
*
* @generated from protobuf field: string version = 4;
*/
version: string;
}
/**
* @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
*/
export interface LookupCacheEntryResponse {
/**
     * Indicates whether the cache entry exists
*
* @generated from protobuf field: bool exists = 1;
*/
exists: boolean;
/**
* Matched cache entry metadata
*
* @generated from protobuf field: github.actions.results.entities.v1.CacheEntry entry = 2;
*/
entry?: CacheEntry;
}
declare class CreateCacheEntryRequest$Type extends MessageType<CreateCacheEntryRequest> {
constructor();
create(value?: PartialMessage<CreateCacheEntryRequest>): CreateCacheEntryRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryRequest): CreateCacheEntryRequest;
internalBinaryWrite(message: CreateCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
*/
export declare const CreateCacheEntryRequest: CreateCacheEntryRequest$Type;
declare class CreateCacheEntryResponse$Type extends MessageType<CreateCacheEntryResponse> {
constructor();
create(value?: PartialMessage<CreateCacheEntryResponse>): CreateCacheEntryResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CreateCacheEntryResponse): CreateCacheEntryResponse;
internalBinaryWrite(message: CreateCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
*/
export declare const CreateCacheEntryResponse: CreateCacheEntryResponse$Type;
declare class FinalizeCacheEntryUploadRequest$Type extends MessageType<FinalizeCacheEntryUploadRequest> {
constructor();
create(value?: PartialMessage<FinalizeCacheEntryUploadRequest>): FinalizeCacheEntryUploadRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadRequest): FinalizeCacheEntryUploadRequest;
internalBinaryWrite(message: FinalizeCacheEntryUploadRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
*/
export declare const FinalizeCacheEntryUploadRequest: FinalizeCacheEntryUploadRequest$Type;
declare class FinalizeCacheEntryUploadResponse$Type extends MessageType<FinalizeCacheEntryUploadResponse> {
constructor();
create(value?: PartialMessage<FinalizeCacheEntryUploadResponse>): FinalizeCacheEntryUploadResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeCacheEntryUploadResponse): FinalizeCacheEntryUploadResponse;
internalBinaryWrite(message: FinalizeCacheEntryUploadResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
*/
export declare const FinalizeCacheEntryUploadResponse: FinalizeCacheEntryUploadResponse$Type;
declare class GetCacheEntryDownloadURLRequest$Type extends MessageType<GetCacheEntryDownloadURLRequest> {
constructor();
create(value?: PartialMessage<GetCacheEntryDownloadURLRequest>): GetCacheEntryDownloadURLRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLRequest): GetCacheEntryDownloadURLRequest;
internalBinaryWrite(message: GetCacheEntryDownloadURLRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
*/
export declare const GetCacheEntryDownloadURLRequest: GetCacheEntryDownloadURLRequest$Type;
declare class GetCacheEntryDownloadURLResponse$Type extends MessageType<GetCacheEntryDownloadURLResponse> {
constructor();
create(value?: PartialMessage<GetCacheEntryDownloadURLResponse>): GetCacheEntryDownloadURLResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetCacheEntryDownloadURLResponse): GetCacheEntryDownloadURLResponse;
internalBinaryWrite(message: GetCacheEntryDownloadURLResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
*/
export declare const GetCacheEntryDownloadURLResponse: GetCacheEntryDownloadURLResponse$Type;
declare class DeleteCacheEntryRequest$Type extends MessageType<DeleteCacheEntryRequest> {
constructor();
create(value?: PartialMessage<DeleteCacheEntryRequest>): DeleteCacheEntryRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteCacheEntryRequest): DeleteCacheEntryRequest;
internalBinaryWrite(message: DeleteCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
*/
export declare const DeleteCacheEntryRequest: DeleteCacheEntryRequest$Type;
declare class DeleteCacheEntryResponse$Type extends MessageType<DeleteCacheEntryResponse> {
constructor();
create(value?: PartialMessage<DeleteCacheEntryResponse>): DeleteCacheEntryResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeleteCacheEntryResponse): DeleteCacheEntryResponse;
internalBinaryWrite(message: DeleteCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse
*/
export declare const DeleteCacheEntryResponse: DeleteCacheEntryResponse$Type;
declare class ListCacheEntriesRequest$Type extends MessageType<ListCacheEntriesRequest> {
constructor();
create(value?: PartialMessage<ListCacheEntriesRequest>): ListCacheEntriesRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesRequest): ListCacheEntriesRequest;
internalBinaryWrite(message: ListCacheEntriesRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesRequest
*/
export declare const ListCacheEntriesRequest: ListCacheEntriesRequest$Type;
declare class ListCacheEntriesResponse$Type extends MessageType<ListCacheEntriesResponse> {
constructor();
create(value?: PartialMessage<ListCacheEntriesResponse>): ListCacheEntriesResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesResponse): ListCacheEntriesResponse;
internalBinaryWrite(message: ListCacheEntriesResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse
*/
export declare const ListCacheEntriesResponse: ListCacheEntriesResponse$Type;
declare class LookupCacheEntryRequest$Type extends MessageType<LookupCacheEntryRequest> {
constructor();
create(value?: PartialMessage<LookupCacheEntryRequest>): LookupCacheEntryRequest;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryRequest): LookupCacheEntryRequest;
internalBinaryWrite(message: LookupCacheEntryRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
*/
export declare const LookupCacheEntryRequest: LookupCacheEntryRequest$Type;
declare class LookupCacheEntryResponse$Type extends MessageType<LookupCacheEntryResponse> {
constructor();
create(value?: PartialMessage<LookupCacheEntryResponse>): LookupCacheEntryResponse;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryResponse): LookupCacheEntryResponse;
internalBinaryWrite(message: LookupCacheEntryResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
*/
export declare const LookupCacheEntryResponse: LookupCacheEntryResponse$Type;
/**
* @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
*/
export declare const CacheService: ServiceType;
export {};
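
One detail worth flagging in the interfaces above: the int64 fields (size_bytes, entry_id) surface as TypeScript strings because the code was generated with the long_type_string option (visible in the generated header of cache.js below), which keeps 64-bit values out of JavaScript's lossy number type. A minimal sketch of building a request against these interfaces, using made-up placeholder values:

import { FinalizeCacheEntryUploadRequest } from "./cache";

const req: FinalizeCacheEntryUploadRequest = {
    key: "npm-deps-linux-x64",        // hypothetical cache key
    sizeBytes: "1073741824",          // int64 carried as a decimal string
    version: "hash-of-tool-os-paths"  // placeholder for the real hash
    // metadata is optional and omitted here
};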

@ -0,0 +1,730 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheService = exports.LookupCacheEntryResponse = exports.LookupCacheEntryRequest = exports.ListCacheEntriesResponse = exports.ListCacheEntriesRequest = exports.DeleteCacheEntryResponse = exports.DeleteCacheEntryRequest = exports.GetCacheEntryDownloadURLResponse = exports.GetCacheEntryDownloadURLRequest = exports.FinalizeCacheEntryUploadResponse = exports.FinalizeCacheEntryUploadRequest = exports.CreateCacheEntryResponse = exports.CreateCacheEntryRequest = void 0;
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/api/v1/cache.proto" (package "github.actions.results.api.v1", syntax proto3)
// tslint:disable
const runtime_rpc_1 = require("@protobuf-ts/runtime-rpc");
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const cacheentry_1 = require("../../entities/v1/cacheentry");
const cachemetadata_1 = require("../../entities/v1/cachemetadata");
// @generated message type with reflection information, may provide speed optimized methods
class CreateCacheEntryRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.CreateCacheEntryRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { key: "", version: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* string version */ 3:
message.version = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
/* string version = 3; */
if (message.version !== "")
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
*/
exports.CreateCacheEntryRequest = new CreateCacheEntryRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class CreateCacheEntryResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.CreateCacheEntryResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { ok: false, signedUploadUrl: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* string signed_upload_url */ 2:
message.signedUploadUrl = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
/* string signed_upload_url = 2; */
if (message.signedUploadUrl !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse
*/
exports.CreateCacheEntryResponse = new CreateCacheEntryResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeCacheEntryUploadRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { key: "", sizeBytes: "0", version: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* int64 size_bytes */ 3:
message.sizeBytes = reader.int64().toString();
break;
case /* string version */ 4:
message.version = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
/* int64 size_bytes = 3; */
if (message.sizeBytes !== "0")
writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
/* string version = 4; */
if (message.version !== "")
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest
*/
exports.FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeCacheEntryUploadResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value) {
const message = { ok: false, entryId: "0" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* int64 entry_id */ 2:
message.entryId = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
/* int64 entry_id = 2; */
if (message.entryId !== "0")
writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse
*/
exports.FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetCacheEntryDownloadURLRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { key: "", restoreKeys: [], version: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* repeated string restore_keys */ 3:
message.restoreKeys.push(reader.string());
break;
case /* string version */ 4:
message.version = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
/* repeated string restore_keys = 3; */
for (let i = 0; i < message.restoreKeys.length; i++)
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]);
/* string version = 4; */
if (message.version !== "")
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest
*/
exports.GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "matched_key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { ok: false, signedDownloadUrl: "", matchedKey: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* string signed_download_url */ 2:
message.signedDownloadUrl = reader.string();
break;
case /* string matched_key */ 3:
message.matchedKey = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
/* string signed_download_url = 2; */
if (message.signedDownloadUrl !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl);
/* string matched_key = 3; */
if (message.matchedKey !== "")
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse
*/
exports.GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteCacheEntryRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.DeleteCacheEntryRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { key: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
*/
exports.DeleteCacheEntryRequest = new DeleteCacheEntryRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteCacheEntryResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.DeleteCacheEntryResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value) {
const message = { ok: false, entryId: "0" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* int64 entry_id */ 2:
message.entryId = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
/* int64 entry_id = 2; */
if (message.entryId !== "0")
writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse
*/
exports.DeleteCacheEntryResponse = new DeleteCacheEntryResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class ListCacheEntriesRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.ListCacheEntriesRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { key: "", restoreKeys: [] };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* repeated string restore_keys */ 3:
message.restoreKeys.push(reader.string());
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
/* repeated string restore_keys = 3; */
for (let i = 0; i < message.restoreKeys.length; i++)
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesRequest
*/
exports.ListCacheEntriesRequest = new ListCacheEntriesRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class ListCacheEntriesResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.ListCacheEntriesResponse", [
{ no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => cacheentry_1.CacheEntry }
]);
}
create(value) {
const message = { entries: [] };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* repeated github.actions.results.entities.v1.CacheEntry entries */ 1:
message.entries.push(cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* repeated github.actions.results.entities.v1.CacheEntry entries = 1; */
for (let i = 0; i < message.entries.length; i++)
cacheentry_1.CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse
*/
exports.ListCacheEntriesResponse = new ListCacheEntriesResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class LookupCacheEntryRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.LookupCacheEntryRequest", [
{ no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata },
{ no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
{ no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { key: "", restoreKeys: [], version: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1:
message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata);
break;
case /* string key */ 2:
message.key = reader.string();
break;
case /* repeated string restore_keys */ 3:
message.restoreKeys.push(reader.string());
break;
case /* string version */ 4:
message.version = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* github.actions.results.entities.v1.CacheMetadata metadata = 1; */
if (message.metadata)
cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* string key = 2; */
if (message.key !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key);
/* repeated string restore_keys = 3; */
for (let i = 0; i < message.restoreKeys.length; i++)
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]);
/* string version = 4; */
if (message.version !== "")
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
*/
exports.LookupCacheEntryRequest = new LookupCacheEntryRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class LookupCacheEntryResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.LookupCacheEntryResponse", [
{ no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "entry", kind: "message", T: () => cacheentry_1.CacheEntry }
]);
}
create(value) {
const message = { exists: false };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool exists */ 1:
message.exists = reader.bool();
break;
case /* github.actions.results.entities.v1.CacheEntry entry */ 2:
message.entry = cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options, message.entry);
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool exists = 1; */
if (message.exists !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.exists);
/* github.actions.results.entities.v1.CacheEntry entry = 2; */
if (message.entry)
cacheentry_1.CacheEntry.internalBinaryWrite(message.entry, writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse
*/
exports.LookupCacheEntryResponse = new LookupCacheEntryResponse$Type();
/**
* @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
*/
exports.CacheService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.CacheService", [
{ name: "CreateCacheEntry", options: {}, I: exports.CreateCacheEntryRequest, O: exports.CreateCacheEntryResponse },
{ name: "FinalizeCacheEntryUpload", options: {}, I: exports.FinalizeCacheEntryUploadRequest, O: exports.FinalizeCacheEntryUploadResponse },
{ name: "GetCacheEntryDownloadURL", options: {}, I: exports.GetCacheEntryDownloadURLRequest, O: exports.GetCacheEntryDownloadURLResponse },
{ name: "DeleteCacheEntry", options: {}, I: exports.DeleteCacheEntryRequest, O: exports.DeleteCacheEntryResponse },
{ name: "ListCacheEntries", options: {}, I: exports.ListCacheEntriesRequest, O: exports.ListCacheEntriesResponse },
{ name: "LookupCacheEntry", options: {}, I: exports.LookupCacheEntryRequest, O: exports.LookupCacheEntryResponse }
]);
//# sourceMappingURL=cache.js.map
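
The ServiceType constructed above is pure metadata: an ordered list of method descriptors, each pairing a method name with its input (I) and output (O) MessageTypes, with no transport attached. A minimal sketch, again assuming the module is importable as "./cache", that walks that metadata and round-trips a request through the generated binary codec:

import { CacheService, GetCacheEntryDownloadURLRequest } from "./cache";

// Look up a method descriptor by name; m.I / m.O are the message types.
const method = CacheService.methods.find((m) => m.name === "GetCacheEntryDownloadURL")!;

const req = GetCacheEntryDownloadURLRequest.create({
    key: "npm-deps-linux-x64",         // hypothetical key
    restoreKeys: ["npm-deps-linux-"],  // prefix fallback
    version: "hash-of-tool-os-paths",  // placeholder
});
const bin = method.I.toBinary(req);        // serialize via the metadata
console.log(method.I.fromBinary(bin).key); // "npm-deps-linux-x64"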

File diff suppressed because one or more lines are too long

@ -0,0 +1,53 @@
/// <reference types="node" />
import { TwirpContext, TwirpServer } from "twirp-ts";
import { CreateCacheEntryRequest, CreateCacheEntryResponse, FinalizeCacheEntryUploadRequest, FinalizeCacheEntryUploadResponse, GetCacheEntryDownloadURLRequest, GetCacheEntryDownloadURLResponse, DeleteCacheEntryRequest, DeleteCacheEntryResponse, ListCacheEntriesRequest, ListCacheEntriesResponse, LookupCacheEntryRequest, LookupCacheEntryResponse } from "./cache";
interface Rpc {
request(service: string, method: string, contentType: "application/json" | "application/protobuf", data: object | Uint8Array): Promise<object | Uint8Array>;
}
export interface CacheServiceClient {
CreateCacheEntry(request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
FinalizeCacheEntryUpload(request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
GetCacheEntryDownloadURL(request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
DeleteCacheEntry(request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
ListCacheEntries(request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
LookupCacheEntry(request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
}
export declare class CacheServiceClientJSON implements CacheServiceClient {
private readonly rpc;
constructor(rpc: Rpc);
CreateCacheEntry(request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
FinalizeCacheEntryUpload(request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
GetCacheEntryDownloadURL(request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
DeleteCacheEntry(request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
ListCacheEntries(request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
LookupCacheEntry(request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
}
export declare class CacheServiceClientProtobuf implements CacheServiceClient {
private readonly rpc;
constructor(rpc: Rpc);
CreateCacheEntry(request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
FinalizeCacheEntryUpload(request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
GetCacheEntryDownloadURL(request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
DeleteCacheEntry(request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
ListCacheEntries(request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
LookupCacheEntry(request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
}
export interface CacheServiceTwirp<T extends TwirpContext = TwirpContext> {
CreateCacheEntry(ctx: T, request: CreateCacheEntryRequest): Promise<CreateCacheEntryResponse>;
FinalizeCacheEntryUpload(ctx: T, request: FinalizeCacheEntryUploadRequest): Promise<FinalizeCacheEntryUploadResponse>;
GetCacheEntryDownloadURL(ctx: T, request: GetCacheEntryDownloadURLRequest): Promise<GetCacheEntryDownloadURLResponse>;
DeleteCacheEntry(ctx: T, request: DeleteCacheEntryRequest): Promise<DeleteCacheEntryResponse>;
ListCacheEntries(ctx: T, request: ListCacheEntriesRequest): Promise<ListCacheEntriesResponse>;
LookupCacheEntry(ctx: T, request: LookupCacheEntryRequest): Promise<LookupCacheEntryResponse>;
}
export declare enum CacheServiceMethod {
CreateCacheEntry = "CreateCacheEntry",
FinalizeCacheEntryUpload = "FinalizeCacheEntryUpload",
GetCacheEntryDownloadURL = "GetCacheEntryDownloadURL",
DeleteCacheEntry = "DeleteCacheEntry",
ListCacheEntries = "ListCacheEntries",
LookupCacheEntry = "LookupCacheEntry"
}
export declare const CacheServiceMethodList: CacheServiceMethod[];
export declare function createCacheServiceServer<T extends TwirpContext = TwirpContext>(service: CacheServiceTwirp<T>): TwirpServer<CacheServiceTwirp<TwirpContext<import("http").IncomingMessage, import("http").ServerResponse<import("http").IncomingMessage>>>, T>;
export {};
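
The Rpc interface above is the single transport hook: every method on both client classes funnels through request(service, method, contentType, data), so callers supply the HTTP layer themselves. A minimal fetch-based sketch, assuming the conventional Twirp route of POST <base>/twirp/<service>/<method> (the routing and the base URL here are assumptions, not declared in this file):

import { CacheServiceClientJSON } from "./cache.twirp";

const rpc = {
    async request(
        service: string,
        method: string,
        contentType: "application/json" | "application/protobuf",
        data: object | Uint8Array
    ): Promise<object | Uint8Array> {
        // Hypothetical endpoint; real callers would inject their own base URL and auth.
        const res = await fetch(`https://cache.example.invalid/twirp/${service}/${method}`, {
            method: "POST",
            headers: { "Content-Type": contentType },
            body: contentType === "application/json" ? JSON.stringify(data) : (data as Uint8Array),
        });
        if (!res.ok) throw new Error(`Twirp request failed: ${res.status}`);
        return contentType === "application/json"
            ? ((await res.json()) as object)
            : new Uint8Array(await res.arrayBuffer());
    },
};

const client = new CacheServiceClientJSON(rpc);
// e.g. client.LookupCacheEntry({ key: "npm-deps-linux-x64", restoreKeys: [], version: "hash" })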

@ -0,0 +1,602 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createCacheServiceServer = exports.CacheServiceMethodList = exports.CacheServiceMethod = exports.CacheServiceClientProtobuf = exports.CacheServiceClientJSON = void 0;
const twirp_ts_1 = require("twirp-ts");
const cache_1 = require("./cache");
class CacheServiceClientJSON {
constructor(rpc) {
this.rpc = rpc;
        // Bind each method so it stays callable when passed around detached from the instance.
        this.CreateCacheEntry = this.CreateCacheEntry.bind(this);
        this.FinalizeCacheEntryUpload = this.FinalizeCacheEntryUpload.bind(this);
        this.GetCacheEntryDownloadURL = this.GetCacheEntryDownloadURL.bind(this);
        this.DeleteCacheEntry = this.DeleteCacheEntry.bind(this);
        this.ListCacheEntries = this.ListCacheEntries.bind(this);
        this.LookupCacheEntry = this.LookupCacheEntry.bind(this);
}
CreateCacheEntry(request) {
const data = cache_1.CreateCacheEntryRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/json", data);
return promise.then((data) => cache_1.CreateCacheEntryResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
FinalizeCacheEntryUpload(request) {
const data = cache_1.FinalizeCacheEntryUploadRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/json", data);
return promise.then((data) => cache_1.FinalizeCacheEntryUploadResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
GetCacheEntryDownloadURL(request) {
const data = cache_1.GetCacheEntryDownloadURLRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/json", data);
return promise.then((data) => cache_1.GetCacheEntryDownloadURLResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
DeleteCacheEntry(request) {
const data = cache_1.DeleteCacheEntryRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/json", data);
return promise.then((data) => cache_1.DeleteCacheEntryResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
ListCacheEntries(request) {
const data = cache_1.ListCacheEntriesRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/json", data);
return promise.then((data) => cache_1.ListCacheEntriesResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
LookupCacheEntry(request) {
const data = cache_1.LookupCacheEntryRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/json", data);
return promise.then((data) => cache_1.LookupCacheEntryResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
}
exports.CacheServiceClientJSON = CacheServiceClientJSON;
class CacheServiceClientProtobuf {
constructor(rpc) {
this.rpc = rpc;
        this.CreateCacheEntry = this.CreateCacheEntry.bind(this);
        this.FinalizeCacheEntryUpload = this.FinalizeCacheEntryUpload.bind(this);
        this.GetCacheEntryDownloadURL = this.GetCacheEntryDownloadURL.bind(this);
        this.DeleteCacheEntry = this.DeleteCacheEntry.bind(this);
        this.ListCacheEntries = this.ListCacheEntries.bind(this);
        this.LookupCacheEntry = this.LookupCacheEntry.bind(this);
}
CreateCacheEntry(request) {
const data = cache_1.CreateCacheEntryRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/protobuf", data);
return promise.then((data) => cache_1.CreateCacheEntryResponse.fromBinary(data));
}
FinalizeCacheEntryUpload(request) {
const data = cache_1.FinalizeCacheEntryUploadRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/protobuf", data);
return promise.then((data) => cache_1.FinalizeCacheEntryUploadResponse.fromBinary(data));
}
GetCacheEntryDownloadURL(request) {
const data = cache_1.GetCacheEntryDownloadURLRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/protobuf", data);
return promise.then((data) => cache_1.GetCacheEntryDownloadURLResponse.fromBinary(data));
}
DeleteCacheEntry(request) {
const data = cache_1.DeleteCacheEntryRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/protobuf", data);
return promise.then((data) => cache_1.DeleteCacheEntryResponse.fromBinary(data));
}
ListCacheEntries(request) {
const data = cache_1.ListCacheEntriesRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/protobuf", data);
return promise.then((data) => cache_1.ListCacheEntriesResponse.fromBinary(data));
}
LookupCacheEntry(request) {
const data = cache_1.LookupCacheEntryRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/protobuf", data);
return promise.then((data) => cache_1.LookupCacheEntryResponse.fromBinary(data));
}
}
exports.CacheServiceClientProtobuf = CacheServiceClientProtobuf;
var CacheServiceMethod;
(function (CacheServiceMethod) {
CacheServiceMethod["CreateCacheEntry"] = "CreateCacheEntry";
CacheServiceMethod["FinalizeCacheEntryUpload"] = "FinalizeCacheEntryUpload";
CacheServiceMethod["GetCacheEntryDownloadURL"] = "GetCacheEntryDownloadURL";
CacheServiceMethod["DeleteCacheEntry"] = "DeleteCacheEntry";
CacheServiceMethod["ListCacheEntries"] = "ListCacheEntries";
CacheServiceMethod["LookupCacheEntry"] = "LookupCacheEntry";
})(CacheServiceMethod || (exports.CacheServiceMethod = CacheServiceMethod = {}));
exports.CacheServiceMethodList = [
CacheServiceMethod.CreateCacheEntry,
CacheServiceMethod.FinalizeCacheEntryUpload,
CacheServiceMethod.GetCacheEntryDownloadURL,
CacheServiceMethod.DeleteCacheEntry,
CacheServiceMethod.ListCacheEntries,
CacheServiceMethod.LookupCacheEntry,
];
function createCacheServiceServer(service) {
return new twirp_ts_1.TwirpServer({
service,
packageName: "github.actions.results.api.v1",
serviceName: "CacheService",
methodList: exports.CacheServiceMethodList,
matchRoute: matchCacheServiceRoute,
});
}
exports.createCacheServiceServer = createCacheServiceServer;
function matchCacheServiceRoute(method, events) {
switch (method) {
case "CreateCacheEntry":
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
ctx = Object.assign(Object.assign({}, ctx), { methodName: "CreateCacheEntry" });
yield events.onMatch(ctx);
return handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors);
});
case "FinalizeCacheEntryUpload":
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
ctx = Object.assign(Object.assign({}, ctx), { methodName: "FinalizeCacheEntryUpload" });
yield events.onMatch(ctx);
return handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors);
});
case "GetCacheEntryDownloadURL":
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
ctx = Object.assign(Object.assign({}, ctx), { methodName: "GetCacheEntryDownloadURL" });
yield events.onMatch(ctx);
return handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors);
});
case "DeleteCacheEntry":
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
ctx = Object.assign(Object.assign({}, ctx), { methodName: "DeleteCacheEntry" });
yield events.onMatch(ctx);
return handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors);
});
case "ListCacheEntries":
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
ctx = Object.assign(Object.assign({}, ctx), { methodName: "ListCacheEntries" });
yield events.onMatch(ctx);
return handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors);
});
case "LookupCacheEntry":
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
ctx = Object.assign(Object.assign({}, ctx), { methodName: "LookupCacheEntry" });
yield events.onMatch(ctx);
return handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors);
});
default:
events.onNotFound();
            const msg = "no handler found";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors) {
switch (ctx.contentType) {
case twirp_ts_1.TwirpContentType.JSON:
return handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors);
case twirp_ts_1.TwirpContentType.Protobuf:
return handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors);
default:
const msg = "unexpected Content-Type";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors) {
switch (ctx.contentType) {
case twirp_ts_1.TwirpContentType.JSON:
return handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors);
case twirp_ts_1.TwirpContentType.Protobuf:
return handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors);
default:
const msg = "unexpected Content-Type";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors) {
switch (ctx.contentType) {
case twirp_ts_1.TwirpContentType.JSON:
return handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors);
case twirp_ts_1.TwirpContentType.Protobuf:
return handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors);
default:
const msg = "unexpected Content-Type";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors) {
switch (ctx.contentType) {
case twirp_ts_1.TwirpContentType.JSON:
return handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors);
case twirp_ts_1.TwirpContentType.Protobuf:
return handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors);
default:
const msg = "unexpected Content-Type";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors) {
switch (ctx.contentType) {
case twirp_ts_1.TwirpContentType.JSON:
return handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors);
case twirp_ts_1.TwirpContentType.Protobuf:
return handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors);
default:
const msg = "unexpected Content-Type";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors) {
switch (ctx.contentType) {
case twirp_ts_1.TwirpContentType.JSON:
return handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors);
case twirp_ts_1.TwirpContentType.Protobuf:
return handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors);
default:
const msg = "unexpected Content-Type";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
const body = JSON.parse(data.toString() || "{}");
request = cache_1.CreateCacheEntryRequest.fromJson(body, {
ignoreUnknownFields: true,
});
}
catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.CreateCacheEntry(ctx, inputReq);
});
}
else {
response = yield service.CreateCacheEntry(ctx, request);
}
return JSON.stringify(cache_1.CreateCacheEntryResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}));
});
}
function handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
const body = JSON.parse(data.toString() || "{}");
request = cache_1.FinalizeCacheEntryUploadRequest.fromJson(body, {
ignoreUnknownFields: true,
});
}
catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.FinalizeCacheEntryUpload(ctx, inputReq);
});
}
else {
response = yield service.FinalizeCacheEntryUpload(ctx, request);
}
return JSON.stringify(cache_1.FinalizeCacheEntryUploadResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}));
});
}
function handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
const body = JSON.parse(data.toString() || "{}");
request = cache_1.GetCacheEntryDownloadURLRequest.fromJson(body, {
ignoreUnknownFields: true,
});
}
catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.GetCacheEntryDownloadURL(ctx, inputReq);
});
}
else {
response = yield service.GetCacheEntryDownloadURL(ctx, request);
}
return JSON.stringify(cache_1.GetCacheEntryDownloadURLResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}));
});
}
function handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
const body = JSON.parse(data.toString() || "{}");
request = cache_1.DeleteCacheEntryRequest.fromJson(body, {
ignoreUnknownFields: true,
});
}
catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.DeleteCacheEntry(ctx, inputReq);
});
}
else {
response = yield service.DeleteCacheEntry(ctx, request);
}
return JSON.stringify(cache_1.DeleteCacheEntryResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}));
});
}
function handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
const body = JSON.parse(data.toString() || "{}");
request = cache_1.ListCacheEntriesRequest.fromJson(body, {
ignoreUnknownFields: true,
});
}
catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.ListCacheEntries(ctx, inputReq);
});
}
else {
response = yield service.ListCacheEntries(ctx, request);
}
return JSON.stringify(cache_1.ListCacheEntriesResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}));
});
}
function handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
const body = JSON.parse(data.toString() || "{}");
request = cache_1.LookupCacheEntryRequest.fromJson(body, {
ignoreUnknownFields: true,
});
}
catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.LookupCacheEntry(ctx, inputReq);
});
}
else {
response = yield service.LookupCacheEntry(ctx, request);
}
return JSON.stringify(cache_1.LookupCacheEntryResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}));
});
}
function handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
request = cache_1.CreateCacheEntryRequest.fromBinary(data);
}
catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.CreateCacheEntry(ctx, inputReq);
});
}
else {
response = yield service.CreateCacheEntry(ctx, request);
}
return Buffer.from(cache_1.CreateCacheEntryResponse.toBinary(response));
});
}
function handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
request = cache_1.FinalizeCacheEntryUploadRequest.fromBinary(data);
}
catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.FinalizeCacheEntryUpload(ctx, inputReq);
});
}
else {
response = yield service.FinalizeCacheEntryUpload(ctx, request);
}
return Buffer.from(cache_1.FinalizeCacheEntryUploadResponse.toBinary(response));
});
}
function handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
request = cache_1.GetCacheEntryDownloadURLRequest.fromBinary(data);
}
catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.GetCacheEntryDownloadURL(ctx, inputReq);
});
}
else {
response = yield service.GetCacheEntryDownloadURL(ctx, request);
}
return Buffer.from(cache_1.GetCacheEntryDownloadURLResponse.toBinary(response));
});
}
function handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
request = cache_1.DeleteCacheEntryRequest.fromBinary(data);
}
catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.DeleteCacheEntry(ctx, inputReq);
});
}
else {
response = yield service.DeleteCacheEntry(ctx, request);
}
return Buffer.from(cache_1.DeleteCacheEntryResponse.toBinary(response));
});
}
function handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
request = cache_1.ListCacheEntriesRequest.fromBinary(data);
}
catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.ListCacheEntries(ctx, inputReq);
});
}
else {
response = yield service.ListCacheEntries(ctx, request);
}
return Buffer.from(cache_1.ListCacheEntriesResponse.toBinary(response));
});
}
function handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
request = cache_1.LookupCacheEntryRequest.fromBinary(data);
}
catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.LookupCacheEntry(ctx, inputReq);
});
}
else {
response = yield service.LookupCacheEntry(ctx, request);
}
return Buffer.from(cache_1.LookupCacheEntryResponse.toBinary(response));
});
}
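// ---------------------------------------------------------------------------
// Illustration only (not part of the generated output): a minimal sketch of
// how one of the clients above is wired to a transport. The Rpc contract
// needs just a single request(service, method, contentType, data) method, so
// any HTTP stack can back it. The fetch-based transport and base URL below
// are assumptions for this sketch; the Actions runner supplies its own
// authenticated transport. The function is defined here but never invoked.
function exampleCreateCacheServiceClient(baseUrl) {
    const rpc = {
        request(service, method, contentType, data) {
            // Twirp routes are always POST <base>/<package>.<Service>/<Method>.
            return fetch(`${baseUrl}/${service}/${method}`, {
                method: "POST",
                headers: { "Content-Type": contentType },
                body: JSON.stringify(data)
            }).then(res => res.json());
        }
    };
    // The JSON client serializes requests via toJson and parses responses via
    // fromJson, so resolving with a plain object (res.json()) is sufficient.
    return new CacheServiceClientJSON(rpc);
}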
//# sourceMappingURL=cache.twirp.js.map

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,71 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { Timestamp } from "../../../google/protobuf/timestamp";
/**
* @generated from protobuf message github.actions.results.entities.v1.CacheEntry
*/
export interface CacheEntry {
/**
* An explicit key for a cache entry
*
* @generated from protobuf field: string key = 1;
*/
key: string;
/**
* SHA256 hex digest of the cache archive
*
* @generated from protobuf field: string hash = 2;
*/
hash: string;
/**
* Cache entry size in bytes
*
* @generated from protobuf field: int64 size_bytes = 3;
*/
sizeBytes: string;
/**
* Access scope
*
* @generated from protobuf field: string scope = 4;
*/
scope: string;
/**
* Version SHA256 hex digest
*
* @generated from protobuf field: string version = 5;
*/
version: string;
/**
* When the cache entry was created
*
* @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
*/
createdAt?: Timestamp;
/**
* When the cache entry was last accessed
*
* @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7;
*/
lastAccessedAt?: Timestamp;
/**
* When the cache entry is set to expire
*
* @generated from protobuf field: google.protobuf.Timestamp expires_at = 8;
*/
expiresAt?: Timestamp;
}
declare class CacheEntry$Type extends MessageType<CacheEntry> {
constructor();
create(value?: PartialMessage<CacheEntry>): CacheEntry;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheEntry): CacheEntry;
internalBinaryWrite(message: CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
*/
export declare const CacheEntry: CacheEntry$Type;
export {};

View file

@ -0,0 +1,106 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheEntry = void 0;
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const timestamp_1 = require("../../../google/protobuf/timestamp");
// @generated message type with reflection information, may provide speed optimized methods
class CacheEntry$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.entities.v1.CacheEntry", [
{ no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
{ no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp },
{ no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
]);
}
create(value) {
const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string key */ 1:
message.key = reader.string();
break;
case /* string hash */ 2:
message.hash = reader.string();
break;
case /* int64 size_bytes */ 3:
message.sizeBytes = reader.int64().toString();
break;
case /* string scope */ 4:
message.scope = reader.string();
break;
case /* string version */ 5:
message.version = reader.string();
break;
case /* google.protobuf.Timestamp created_at */ 6:
message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
break;
case /* google.protobuf.Timestamp last_accessed_at */ 7:
message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
break;
case /* google.protobuf.Timestamp expires_at */ 8:
message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* string key = 1; */
if (message.key !== "")
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key);
/* string hash = 2; */
if (message.hash !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash);
/* int64 size_bytes = 3; */
if (message.sizeBytes !== "0")
writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes);
/* string scope = 4; */
if (message.scope !== "")
writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope);
/* string version = 5; */
if (message.version !== "")
writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version);
/* google.protobuf.Timestamp created_at = 6; */
if (message.createdAt)
timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* google.protobuf.Timestamp last_accessed_at = 7; */
if (message.lastAccessedAt)
timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* google.protobuf.Timestamp expires_at = 8; */
if (message.expiresAt)
timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
            (u === true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
*/
exports.CacheEntry = new CacheEntry$Type();
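// ---------------------------------------------------------------------------
// Illustration only (not part of the generated output): a round trip through
// the create/toBinary/fromBinary methods inherited from MessageType. Note
// that the int64 size_bytes field surfaces as a decimal string on the
// JavaScript side and the Timestamp fields are optional. Values are made up;
// the function is defined here but never invoked.
function exampleCacheEntryRoundTrip() {
    const entry = exports.CacheEntry.create({
        key: "npm-deps-linux",
        sizeBytes: "1048576", // int64 maps to string in this runtime
        version: "0123abcd"
    });
    const bytes = exports.CacheEntry.toBinary(entry); // Uint8Array
    const decoded = exports.CacheEntry.fromBinary(bytes);
    return decoded.key === entry.key; // fields omitted above keep their defaults
}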
//# sourceMappingURL=cacheentry.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"cacheentry.js","sourceRoot":"","sources":["../../../../../src/generated/results/entities/v1/cacheentry.ts"],"names":[],"mappings":";;;AAKA,kDAAgD;AAGhD,kDAA2D;AAE3D,kDAA8D;AAC9D,kDAAoD;AACpD,kDAAmD;AACnD,kEAA+D;AAsD/D,2FAA2F;AAC3F,MAAM,eAAgB,SAAQ,qBAAuB;IACjD;QACI,KAAK,CAAC,+CAA+C,EAAE;YACnD,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YAClE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACnE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,oBAAoB,EAAE;YACxE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACpE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACtE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,qBAAS,EAAE;YAClE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,kBAAkB,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,qBAAS,EAAE;YACxE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,qBAAS,EAAE;SACrE,CAAC,CAAC;IACP,CAAC;IACD,MAAM,CAAC,KAAkC;QACrC,MAAM,OAAO,GAAG,EAAE,GAAG,EAAE,EAAE,EAAE,IAAI,EAAE,EAAE,EAAE,SAAS,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,EAAE,OAAO,EAAE,EAAE,EAAE,CAAC;QAC9E,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,sBAAY,EAAE,EAAE,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5F,IAAI,KAAK,KAAK,SAAS;YACnB,IAAA,gCAAsB,EAAa,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC;QAC7D,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,kBAAkB,CAAC,MAAqB,EAAE,MAAc,EAAE,OAA0B,EAAE,MAAmB;QACrG,IAAI,OAAO,GAAG,MAAM,aAAN,MAAM,cAAN,MAAM,GAAI,IAAI,CAAC,MAAM,EAAE,EAAE,GAAG,GAAG,MAAM,CAAC,GAAG,GAAG,MAAM,CAAC;QACjE,OAAO,MAAM,CAAC,GAAG,GAAG,GAAG,EAAE;YACrB,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,GAAG,MAAM,CAAC,GAAG,EAAE,CAAC;YACvC,QAAQ,OAAO,EAAE;gBACb,KAAK,gBAAgB,CAAC,CAAC;oBACnB,OAAO,CAAC,GAAG,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAC9B,MAAM;gBACV,KAAK,iBAAiB,CAAC,CAAC;oBACpB,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAC/B,MAAM;gBACV,KAAK,sBAAsB,CAAC,CAAC;oBACzB,OAAO,CAAC,SAAS,GAAG,MAAM,CAAC,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC;oBAC9C,MAAM;gBACV,KAAK,kBAAkB,CAAC,CAAC;oBACrB,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAChC,MAAM;gBACV,KAAK,oBAAoB,CAAC,CAAC;oBACvB,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAClC,MAAM;gBACV,KAAK,0CAA0C,CAAC,CAAC;oBAC7C,OAAO,CAAC,SAAS,GAAG,qBAAS,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;oBACtG,MAAM;gBACV,KAAK,gDAAgD,CAAC,CAAC;oBACnD,OAAO,CAAC,cAAc,GAAG,qBAAS,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,cAAc,CAAC,CAAC;oBAChH,MAAM;gBACV,KAAK,0CAA0C,CAAC,CAAC;oBAC7C,OAAO,CAAC,SAAS,GAAG,qBAAS,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;oBACtG,MAAM;gBACV;oBACI,IAAI,CAAC,GAAG,OAAO,CAAC,gBAAgB,CAAC;oBACjC,IAAI,CAAC,KAAK,OAAO;wBACb,MAAM,IAAI,UAAU,CAAC,KAAK,CAAC,iBAAiB,OAAO,eAAe,QAAQ,SAAS,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;oBACxG,IAAI,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC9B,IAAI,CAAC,KAAK,KAAK;wBACX,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;aACvG;SACJ;QACD,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,mBAAmB,CAAC,OAAmB,EAAE,MAAqB,EAAE,OAA2B;QACvF,qBAAqB;QACrB,IAAI,OAAO,CAAC,GAAG,KAAK,EAAE;YAClB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAChE,sBAAsB;QACtB,IAAI,OAAO,CAAC,IAAI,KAAK,EAAE;YACnB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACjE,2BAA2B;QAC3B,IAAI,OAAO,CAAC,SAAS,KAAK,GAAG;YACzB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;QAC5D,uBAAuB;QACvB,IAAI,OAAO,CAAC,KAAK,KAAK,EAAE;YACpB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QAClE,yBAAyB;QACzB,IAAI,OAAO,CAAC,OAAO,KAAK,EAAE;YACtB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QACpE,+CAA+C;QAC/C,IAAI,OAAO,CAAC,SAAS;YACjB,qBAAS,CAAC,mBAAmB,CAAC,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QACrH,qDAAqD;QACrD,IAAI,OAAO,CAAC,cAAc;YACtB,qBAAS,CAAC,mBAAmB,CAAC,OAAO,CAAC,cAAc,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QAC1H,+CAA+C;QAC/C,IAAI,OAAO,CAAC,SAAS;YACjB,qBAAS,CAAC,mBAAmB,CAAC,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QACrH,IAAI,CAAC,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACnC,IAAI,CAAC,KAAK,KAAK;YACX,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QAClF,OAAO,MAAM,CAAC;IAClB,CAAC;CACJ;AACD;;GAEG;AACU,QAAA,UAAU,GAAG,IAAI,eAAe,EAAE,CAAC"}

View file

@ -0,0 +1,35 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
import { CacheScope } from "./cachescope";
/**
* @generated from protobuf message github.actions.results.entities.v1.CacheMetadata
*/
export interface CacheMetadata {
/**
* Backend repository id
*
* @generated from protobuf field: int64 repository_id = 1;
*/
repositoryId: string;
/**
* Scopes for the cache entry
*
* @generated from protobuf field: repeated github.actions.results.entities.v1.CacheScope scope = 2;
*/
scope: CacheScope[];
}
declare class CacheMetadata$Type extends MessageType<CacheMetadata> {
constructor();
create(value?: PartialMessage<CacheMetadata>): CacheMetadata;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheMetadata): CacheMetadata;
internalBinaryWrite(message: CacheMetadata, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata
*/
export declare const CacheMetadata: CacheMetadata$Type;
export {};

View file

@ -0,0 +1,64 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheMetadata = void 0;
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
const cachescope_1 = require("./cachescope");
// @generated message type with reflection information, may provide speed optimized methods
class CacheMetadata$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.entities.v1.CacheMetadata", [
{ no: 1, name: "repository_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 2, name: "scope", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => cachescope_1.CacheScope }
]);
}
create(value) {
const message = { repositoryId: "0", scope: [] };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* int64 repository_id */ 1:
message.repositoryId = reader.int64().toString();
break;
case /* repeated github.actions.results.entities.v1.CacheScope scope */ 2:
message.scope.push(cachescope_1.CacheScope.internalBinaryRead(reader, reader.uint32(), options));
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* int64 repository_id = 1; */
if (message.repositoryId !== "0")
writer.tag(1, runtime_1.WireType.Varint).int64(message.repositoryId);
/* repeated github.actions.results.entities.v1.CacheScope scope = 2; */
for (let i = 0; i < message.scope.length; i++)
cachescope_1.CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
            (u === true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata
*/
exports.CacheMetadata = new CacheMetadata$Type();
//# sourceMappingURL=cachemetadata.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"cachemetadata.js","sourceRoot":"","sources":["../../../../../src/generated/results/entities/v1/cachemetadata.ts"],"names":[],"mappings":";;;AAKA,kDAAgD;AAGhD,kDAA2D;AAE3D,kDAA8D;AAC9D,kDAAoD;AACpD,kDAAmD;AACnD,6CAA0C;AAkB1C,2FAA2F;AAC3F,MAAM,kBAAmB,SAAQ,qBAA0B;IACvD;QACI,KAAK,CAAC,kDAAkD,EAAE;YACtD,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,eAAe,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,oBAAoB,EAAE;YAC3E,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,MAAM,EAAE,CAAC,CAAC,qBAAqB,EAAE,CAAC,EAAE,GAAG,EAAE,CAAC,uBAAU,EAAE;SAClG,CAAC,CAAC;IACP,CAAC;IACD,MAAM,CAAC,KAAqC;QACxC,MAAM,OAAO,GAAG,EAAE,YAAY,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC;QACjD,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,sBAAY,EAAE,EAAE,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5F,IAAI,KAAK,KAAK,SAAS;YACnB,IAAA,gCAAsB,EAAgB,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC;QAChE,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,kBAAkB,CAAC,MAAqB,EAAE,MAAc,EAAE,OAA0B,EAAE,MAAsB;QACxG,IAAI,OAAO,GAAG,MAAM,aAAN,MAAM,cAAN,MAAM,GAAI,IAAI,CAAC,MAAM,EAAE,EAAE,GAAG,GAAG,MAAM,CAAC,GAAG,GAAG,MAAM,CAAC;QACjE,OAAO,MAAM,CAAC,GAAG,GAAG,GAAG,EAAE;YACrB,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,GAAG,MAAM,CAAC,GAAG,EAAE,CAAC;YACvC,QAAQ,OAAO,EAAE;gBACb,KAAK,yBAAyB,CAAC,CAAC;oBAC5B,OAAO,CAAC,YAAY,GAAG,MAAM,CAAC,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC;oBACjD,MAAM;gBACV,KAAK,kEAAkE,CAAC,CAAC;oBACrE,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,uBAAU,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,EAAE,OAAO,CAAC,CAAC,CAAC;oBACpF,MAAM;gBACV;oBACI,IAAI,CAAC,GAAG,OAAO,CAAC,gBAAgB,CAAC;oBACjC,IAAI,CAAC,KAAK,OAAO;wBACb,MAAM,IAAI,UAAU,CAAC,KAAK,CAAC,iBAAiB,OAAO,eAAe,QAAQ,SAAS,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;oBACxG,IAAI,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC9B,IAAI,CAAC,KAAK,KAAK;wBACX,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;aACvG;SACJ;QACD,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,mBAAmB,CAAC,OAAsB,EAAE,MAAqB,EAAE,OAA2B;QAC1F,8BAA8B;QAC9B,IAAI,OAAO,CAAC,YAAY,KAAK,GAAG;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC/D,uEAAuE;QACvE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE;YACzC,uBAAU,CAAC,mBAAmB,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;QACrH,IAAI,CAAC,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACnC,IAAI,CAAC,KAAK,KAAK;YACX,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QAClF,OAAO,MAAM,CAAC;IAClB,CAAC;CACJ;AACD;;GAEG;AACU,QAAA,aAAa,GAAG,IAAI,kBAAkB,EAAE,CAAC"}

View file

@ -0,0 +1,34 @@
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
import type { IBinaryWriter } from "@protobuf-ts/runtime";
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
import type { IBinaryReader } from "@protobuf-ts/runtime";
import type { PartialMessage } from "@protobuf-ts/runtime";
import { MessageType } from "@protobuf-ts/runtime";
/**
* @generated from protobuf message github.actions.results.entities.v1.CacheScope
*/
export interface CacheScope {
/**
* Determines the scope of the cache entry
*
* @generated from protobuf field: string scope = 1;
*/
scope: string;
/**
* None: 0 | Read: 1 | Write: 2 | All: (1|2)
*
* @generated from protobuf field: int64 permission = 2;
*/
permission: string;
}
declare class CacheScope$Type extends MessageType<CacheScope> {
constructor();
create(value?: PartialMessage<CacheScope>): CacheScope;
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheScope): CacheScope;
internalBinaryWrite(message: CacheScope, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter;
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheScope
*/
export declare const CacheScope: CacheScope$Type;
export {};

View file

@ -0,0 +1,63 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheScope = void 0;
const runtime_1 = require("@protobuf-ts/runtime");
const runtime_2 = require("@protobuf-ts/runtime");
const runtime_3 = require("@protobuf-ts/runtime");
const runtime_4 = require("@protobuf-ts/runtime");
const runtime_5 = require("@protobuf-ts/runtime");
// @generated message type with reflection information, may provide speed optimized methods
class CacheScope$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.entities.v1.CacheScope", [
{ no: 1, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "permission", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value) {
const message = { scope: "", permission: "0" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string scope */ 1:
message.scope = reader.string();
break;
case /* int64 permission */ 2:
message.permission = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* string scope = 1; */
if (message.scope !== "")
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.scope);
/* int64 permission = 2; */
if (message.permission !== "0")
writer.tag(2, runtime_1.WireType.Varint).int64(message.permission);
let u = options.writeUnknownFields;
if (u !== false)
            (u === true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.entities.v1.CacheScope
*/
exports.CacheScope = new CacheScope$Type();
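// ---------------------------------------------------------------------------
// Illustration only (not part of the generated output): per the comment in
// the declaration file, permission is a bitmask carried as an int64 decimal
// string (None = 0, Read = 1, Write = 2, All = Read | Write = 3). A
// hypothetical consumer could therefore check write access like this:
function exampleScopeAllowsWrite(scope) {
    const WRITE = 2;
    return (parseInt(scope.permission, 10) & WRITE) === WRITE;
}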
//# sourceMappingURL=cachescope.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"cachescope.js","sourceRoot":"","sources":["../../../../../src/generated/results/entities/v1/cachescope.ts"],"names":[],"mappings":";;;AAKA,kDAAgD;AAGhD,kDAA2D;AAE3D,kDAA8D;AAC9D,kDAAoD;AACpD,kDAAmD;AAkBnD,2FAA2F;AAC3F,MAAM,eAAgB,SAAQ,qBAAuB;IACjD;QACI,KAAK,CAAC,+CAA+C,EAAE;YACnD,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,qBAAqB,EAAE;YACpE,EAAE,EAAE,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,oBAAoB,EAAE;SAC3E,CAAC,CAAC;IACP,CAAC;IACD,MAAM,CAAC,KAAkC;QACrC,MAAM,OAAO,GAAG,EAAE,KAAK,EAAE,EAAE,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC;QAC/C,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,sBAAY,EAAE,EAAE,UAAU,EAAE,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5F,IAAI,KAAK,KAAK,SAAS;YACnB,IAAA,gCAAsB,EAAa,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC;QAC7D,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,kBAAkB,CAAC,MAAqB,EAAE,MAAc,EAAE,OAA0B,EAAE,MAAmB;QACrG,IAAI,OAAO,GAAG,MAAM,aAAN,MAAM,cAAN,MAAM,GAAI,IAAI,CAAC,MAAM,EAAE,EAAE,GAAG,GAAG,MAAM,CAAC,GAAG,GAAG,MAAM,CAAC;QACjE,OAAO,MAAM,CAAC,GAAG,GAAG,GAAG,EAAE;YACrB,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,GAAG,MAAM,CAAC,GAAG,EAAE,CAAC;YACvC,QAAQ,OAAO,EAAE;gBACb,KAAK,kBAAkB,CAAC,CAAC;oBACrB,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC;oBAChC,MAAM;gBACV,KAAK,sBAAsB,CAAC,CAAC;oBACzB,OAAO,CAAC,UAAU,GAAG,MAAM,CAAC,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC;oBAC/C,MAAM;gBACV;oBACI,IAAI,CAAC,GAAG,OAAO,CAAC,gBAAgB,CAAC;oBACjC,IAAI,CAAC,KAAK,OAAO;wBACb,MAAM,IAAI,UAAU,CAAC,KAAK,CAAC,iBAAiB,OAAO,eAAe,QAAQ,SAAS,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;oBACxG,IAAI,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC9B,IAAI,CAAC,KAAK,KAAK;wBACX,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;aACvG;SACJ;QACD,OAAO,OAAO,CAAC;IACnB,CAAC;IACD,mBAAmB,CAAC,OAAmB,EAAE,MAAqB,EAAE,OAA2B;QACvF,uBAAuB;QACvB,IAAI,OAAO,CAAC,KAAK,KAAK,EAAE;YACpB,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,eAAe,CAAC,CAAC,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QAClE,2BAA2B;QAC3B,IAAI,OAAO,CAAC,UAAU,KAAK,GAAG;YAC1B,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,kBAAQ,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;QAC7D,IAAI,CAAC,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACnC,IAAI,CAAC,KAAK,KAAK;YACX,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,6BAAmB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QAClF,OAAO,MAAM,CAAC;IAClB,CAAC;CACJ;AACD;;GAEG;AACU,QAAA,UAAU,GAAG,IAAI,eAAe,EAAE,CAAC"}

View file

@ -0,0 +1,6 @@
import { ArtifactCacheEntry, InternalCacheOptions, ReserveCacheResponse, ITypedResponseWithError } from './contracts';
import { DownloadOptions, UploadOptions } from '../options';
export declare function getCacheEntry(keys: string[], paths: string[], options?: InternalCacheOptions): Promise<ArtifactCacheEntry | null>;
export declare function downloadCache(archiveLocation: string, archivePath: string, options?: DownloadOptions): Promise<void>;
export declare function reserveCache(key: string, paths: string[], options?: InternalCacheOptions): Promise<ITypedResponseWithError<ReserveCacheResponse>>;
export declare function saveCache(cacheId: number, archivePath: string, signedUploadURL?: string, options?: UploadOptions): Promise<void>;

View file

@ -0,0 +1,256 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = void 0;
const core = __importStar(require("@actions/core"));
const http_client_1 = require("@actions/http-client");
const auth_1 = require("@actions/http-client/lib/auth");
const fs = __importStar(require("fs"));
const url_1 = require("url");
const utils = __importStar(require("./cacheUtils"));
const uploadUtils_1 = require("./uploadUtils");
const downloadUtils_1 = require("./downloadUtils");
const options_1 = require("../options");
const requestUtils_1 = require("./requestUtils");
const config_1 = require("./config");
const user_agent_1 = require("./shared/user-agent");
function getCacheApiUrl(resource) {
const baseUrl = (0, config_1.getCacheServiceURL)();
if (!baseUrl) {
        throw new Error('Cache service URL not found, unable to restore cache.');
}
const url = `${baseUrl}_apis/artifactcache/${resource}`;
core.debug(`Resource Url: ${url}`);
return url;
}
function createAcceptHeader(type, apiVersion) {
return `${type};api-version=${apiVersion}`;
}
function getRequestOptions() {
const requestOptions = {
headers: {
Accept: createAcceptHeader('application/json', '6.0-preview.1')
}
};
return requestOptions;
}
function createHttpClient() {
const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '';
const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
return new http_client_1.HttpClient((0, user_agent_1.getUserAgentString)(), [bearerCredentialHandler], getRequestOptions());
}
function getCacheEntry(keys, paths, options) {
return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient();
const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
// Cache not found
if (response.statusCode === 204) {
// List cache for primary key only if cache miss occurs
if (core.isDebug()) {
yield printCachesListForDiagnostics(keys[0], httpClient, version);
}
return null;
}
if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) {
throw new Error(`Cache service responded with ${response.statusCode}`);
}
const cacheResult = response.result;
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
if (!cacheDownloadUrl) {
            // Cache archiveLocation not found. This should never happen, and hence bail out.
throw new Error('Cache not found.');
}
core.setSecret(cacheDownloadUrl);
core.debug(`Cache Result:`);
core.debug(JSON.stringify(cacheResult));
return cacheResult;
});
}
exports.getCacheEntry = getCacheEntry;
function printCachesListForDiagnostics(key, httpClient, version) {
return __awaiter(this, void 0, void 0, function* () {
const resource = `caches?key=${encodeURIComponent(key)}`;
const response = yield (0, requestUtils_1.retryTypedResponse)('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
if (response.statusCode === 200) {
const cacheListResult = response.result;
const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount;
if (totalCount && totalCount > 0) {
                core.debug(`No matching cache found for cache key '${key}', version '${version}' and scope '${process.env['GITHUB_REF']}'. One or more caches exist with a similar key, but with a different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`);
for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) {
core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`);
}
}
}
});
}
function downloadCache(archiveLocation, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
const archiveUrl = new url_1.URL(archiveLocation);
const downloadOptions = (0, options_1.getDownloadOptions)(options);
if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
if (downloadOptions.useAzureSdk) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
}
else if (downloadOptions.concurrentBlobDownloads) {
// Use concurrent implementation with HttpClient to work around blob SDK issue
yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
}
else {
// Otherwise, download using the Actions http-client.
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
}
else {
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
});
}
exports.downloadCache = downloadCache;
// Reserve Cache
function reserveCache(key, paths, options) {
return __awaiter(this, void 0, void 0, function* () {
const httpClient = createHttpClient();
const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
const reserveCacheRequest = {
key,
version,
cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize
};
const response = yield (0, requestUtils_1.retryTypedResponse)('reserveCache', () => __awaiter(this, void 0, void 0, function* () {
return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest);
}));
return response;
});
}
exports.reserveCache = reserveCache;
function getContentRange(start, end) {
    // Format: "bytes start-end/filesize"
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`;
}
function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
const additionalHeaders = {
'Content-Type': 'application/octet-stream',
'Content-Range': getContentRange(start, end)
};
const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () {
return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders);
}));
if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) {
throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`);
}
});
}
function uploadFile(httpClient, cacheId, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
// Upload Chunks
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs.openSync(archivePath, 'r');
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize);
const parallelUploads = [...new Array(concurrency).keys()];
core.debug('Awaiting all uploads');
let offset = 0;
try {
yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
while (offset < fileSize) {
const chunkSize = Math.min(fileSize - offset, maxChunkSize);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs
.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
})
.on('error', error => {
throw new Error(`Cache upload failed because file read failed with ${error.message}`);
}), start, end);
}
})));
}
finally {
fs.closeSync(fd);
}
return;
});
}
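// A note on the worker pattern above (descriptive, not upstream docs): the
// `offset` counter is shared by all workers, and each worker claims its next
// [start, end] range synchronously, with no `yield` between reading and
// advancing `offset`, which is race-free under Node's single-threaded
// execution model. For a 10485760-byte (10 MiB) archive and a 4 MiB chunk
// size, the claimed ranges are bytes 0-4194303, 4194304-8388607 and
// 8388608-10485759 (the final, short chunk).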
function commitCache(httpClient, cacheId, filesize) {
return __awaiter(this, void 0, void 0, function* () {
const commitCacheRequest = { size: filesize };
return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () {
return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
}));
});
}
function saveCache(cacheId, archivePath, signedUploadURL, options) {
return __awaiter(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
// Use Azure storage SDK to upload caches directly to Azure
if (!signedUploadURL) {
throw new Error('Azure Storage SDK can only be used when a signed URL is provided.');
}
yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options);
}
else {
const httpClient = createHttpClient();
core.debug('Upload cache');
yield uploadFile(httpClient, cacheId, archivePath, options);
// Commit Cache
            core.debug('Committing cache');
const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) {
throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
}
core.info('Cache saved successfully');
}
});
}
exports.saveCache = saveCache;
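// Illustrative end-to-end sketch (placeholders throughout; never exported or
// called): reserve an entry first, then hand the returned cacheId to
// saveCache. The key, path list and archive path are assumptions made for the
// example only.
function exampleSaveFlow() {
    return __awaiter(this, void 0, void 0, function* () {
        const reservation = yield reserveCache('npm-deps-abc123', ['node_modules'], {
            compressionMethod: 'zstd-without-long'
        });
        const cacheId = reservation.result && reservation.result.cacheId;
        if (typeof cacheId === 'number') {
            yield saveCache(cacheId, '/tmp/cache.tzst');
        }
    });
}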
//# sourceMappingURL=cacheHttpClient.js.map

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,13 @@
/// <reference types="node" />
import * as fs from 'fs';
import { CompressionMethod } from './constants';
export declare function createTempDirectory(): Promise<string>;
export declare function getArchiveFileSizeInBytes(filePath: string): number;
export declare function resolvePaths(patterns: string[]): Promise<string[]>;
export declare function unlinkFile(filePath: fs.PathLike): Promise<void>;
export declare function getCompressionMethod(): Promise<CompressionMethod>;
export declare function getCacheFileName(compressionMethod: CompressionMethod): string;
export declare function getGnuTarPathOnWindows(): Promise<string>;
export declare function assertDefined<T>(name: string, value?: T): T;
export declare function getCacheVersion(paths: string[], compressionMethod?: CompressionMethod, enableCrossOsArchive?: boolean): string;
export declare function getRuntimeToken(): string;

View file

@ -0,0 +1,216 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __asyncValues = (this && this.__asyncValues) || function (o) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var m = o[Symbol.asyncIterator], i;
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getRuntimeToken = exports.getCacheVersion = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0;
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const glob = __importStar(require("@actions/glob"));
const io = __importStar(require("@actions/io"));
const crypto = __importStar(require("crypto"));
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
const util = __importStar(require("util"));
const constants_1 = require("./constants");
const versionSalt = '1.0';
// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23
function createTempDirectory() {
return __awaiter(this, void 0, void 0, function* () {
const IS_WINDOWS = process.platform === 'win32';
let tempDirectory = process.env['RUNNER_TEMP'] || '';
if (!tempDirectory) {
let baseLocation;
if (IS_WINDOWS) {
// On Windows use the USERPROFILE env variable
baseLocation = process.env['USERPROFILE'] || 'C:\\';
}
else {
if (process.platform === 'darwin') {
baseLocation = '/Users';
}
else {
baseLocation = '/home';
}
}
tempDirectory = path.join(baseLocation, 'actions', 'temp');
}
const dest = path.join(tempDirectory, crypto.randomUUID());
yield io.mkdirP(dest);
return dest;
});
}
exports.createTempDirectory = createTempDirectory;
function getArchiveFileSizeInBytes(filePath) {
return fs.statSync(filePath).size;
}
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
function resolvePaths(patterns) {
var _a, e_1, _b, _c;
var _d;
return __awaiter(this, void 0, void 0, function* () {
const paths = [];
const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
const globber = yield glob.create(patterns.join('\n'), {
implicitDescendants: false
});
try {
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
_c = _g.value;
_e = false;
const file = _c;
const relativeFile = path
.relative(workspace, file)
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
if (relativeFile === '') {
// path.relative returns empty string if workspace and file are equal
paths.push('.');
}
else {
paths.push(`${relativeFile}`);
}
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
}
finally { if (e_1) throw e_1.error; }
}
return paths;
});
}
exports.resolvePaths = resolvePaths;
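// Example (illustrative): with GITHUB_WORKSPACE=/home/runner/work/repo/repo,
// patterns like ['node_modules', 'dist/**'] resolve to workspace-relative,
// forward-slash paths such as 'node_modules' and 'dist/index.js'; a pattern
// that matches the workspace root itself resolves to '.'.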
function unlinkFile(filePath) {
return __awaiter(this, void 0, void 0, function* () {
return util.promisify(fs.unlink)(filePath);
});
}
exports.unlinkFile = unlinkFile;
function getVersion(app, additionalArgs = []) {
return __awaiter(this, void 0, void 0, function* () {
let versionOutput = '';
additionalArgs.push('--version');
core.debug(`Checking ${app} ${additionalArgs.join(' ')}`);
try {
yield exec.exec(`${app}`, additionalArgs, {
ignoreReturnCode: true,
silent: true,
listeners: {
stdout: (data) => (versionOutput += data.toString()),
stderr: (data) => (versionOutput += data.toString())
}
});
}
catch (err) {
core.debug(err.message);
}
versionOutput = versionOutput.trim();
core.debug(versionOutput);
return versionOutput;
});
}
// Use zstandard if possible to maximize cache performance
function getCompressionMethod() {
return __awaiter(this, void 0, void 0, function* () {
const versionOutput = yield getVersion('zstd', ['--quiet']);
const version = semver.clean(versionOutput);
core.debug(`zstd version: ${version}`);
if (versionOutput === '') {
return constants_1.CompressionMethod.Gzip;
}
else {
return constants_1.CompressionMethod.ZstdWithoutLong;
}
});
}
exports.getCompressionMethod = getCompressionMethod;
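// Example outcomes (illustrative): getVersion appends '--version', so the
// probe runs `zstd --quiet --version`. Empty output (zstd missing) selects
// CompressionMethod.Gzip; any version output selects
// CompressionMethod.ZstdWithoutLong, which avoids `--long` flags for
// compatibility with zstd releases older than v1.3.2.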
function getCacheFileName(compressionMethod) {
return compressionMethod === constants_1.CompressionMethod.Gzip
? constants_1.CacheFilename.Gzip
: constants_1.CacheFilename.Zstd;
}
exports.getCacheFileName = getCacheFileName;
function getGnuTarPathOnWindows() {
return __awaiter(this, void 0, void 0, function* () {
if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion('tar');
return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
});
}
exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
function assertDefined(name, value) {
if (value === undefined) {
        throw Error(`Expected ${name} but value was undefined`);
}
return value;
}
exports.assertDefined = assertDefined;
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
    // copy the path list so mutations below don't leak back to the caller
const components = paths.slice();
    // Add the compression method to the cache version so a cache is only
    // restored with the same compression method it was saved with
if (compressionMethod) {
components.push(compressionMethod);
}
    // Mark the version as Windows-only unless cross-OS archive restore is enabled
if (process.platform === 'win32' && !enableCrossOsArchive) {
components.push('windows-only');
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt);
return crypto.createHash('sha256').update(components.join('|')).digest('hex');
}
exports.getCacheVersion = getCacheVersion;
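// Worked example (illustrative): getCacheVersion(['node_modules'],
// 'zstd-without-long') on Linux hashes the string
// 'node_modules|zstd-without-long|1.0' (paths, then the compression method,
// then the version salt, '|'-joined) with SHA-256; on Windows with
// enableCrossOsArchive left false, 'windows-only' is inserted before the salt.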
function getRuntimeToken() {
const token = process.env['ACTIONS_RUNTIME_TOKEN'];
if (!token) {
throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable');
}
return token;
}
exports.getRuntimeToken = getRuntimeToken;
//# sourceMappingURL=cacheUtils.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"cacheUtils.js","sourceRoot":"","sources":["../../src/internal/cacheUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,oDAAqC;AACrC,oDAAqC;AACrC,gDAAiC;AACjC,+CAAgC;AAChC,uCAAwB;AACxB,2CAA4B;AAC5B,+CAAgC;AAChC,2CAA4B;AAC5B,2CAIoB;AAEpB,MAAM,WAAW,GAAG,KAAK,CAAA;AAEzB,8FAA8F;AAC9F,SAAsB,mBAAmB;;QACvC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;QAE/C,IAAI,aAAa,GAAW,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;QAE5D,IAAI,CAAC,aAAa,EAAE;YAClB,IAAI,YAAoB,CAAA;YACxB,IAAI,UAAU,EAAE;gBACd,8CAA8C;gBAC9C,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,MAAM,CAAA;aACpD;iBAAM;gBACL,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;oBACjC,YAAY,GAAG,QAAQ,CAAA;iBACxB;qBAAM;oBACL,YAAY,GAAG,OAAO,CAAA;iBACvB;aACF;YACD,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,EAAE,MAAM,CAAC,CAAA;SAC3D;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;QAC1D,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QACrB,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAvBD,kDAuBC;AAED,SAAgB,yBAAyB,CAAC,QAAgB;IACxD,OAAO,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAA;AACnC,CAAC;AAFD,8DAEC;AAED,SAAsB,YAAY,CAAC,QAAkB;;;;QACnD,MAAM,KAAK,GAAa,EAAE,CAAA;QAC1B,MAAM,SAAS,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,mCAAI,OAAO,CAAC,GAAG,EAAE,CAAA;QAClE,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;YACrD,mBAAmB,EAAE,KAAK;SAC3B,CAAC,CAAA;;YAEF,KAAyB,eAAA,KAAA,cAAA,OAAO,CAAC,aAAa,EAAE,CAAA,IAAA,sDAAE;gBAAzB,cAAuB;gBAAvB,WAAuB;gBAArC,MAAM,IAAI,KAAA,CAAA;gBACnB,MAAM,YAAY,GAAG,IAAI;qBACtB,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC;qBACzB,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,CAAA;gBACjD,IAAI,CAAC,KAAK,CAAC,YAAY,YAAY,EAAE,CAAC,CAAA;gBACtC,4FAA4F;gBAC5F,IAAI,YAAY,KAAK,EAAE,EAAE;oBACvB,qEAAqE;oBACrE,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;iBAChB;qBAAM;oBACL,KAAK,CAAC,IAAI,CAAC,GAAG,YAAY,EAAE,CAAC,CAAA;iBAC9B;aACF;;;;;;;;;QAED,OAAO,KAAK,CAAA;;CACb;AAtBD,oCAsBC;AAED,SAAsB,UAAU,CAAC,QAAqB;;QACpD,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAA;IAC5C,CAAC;CAAA;AAFD,gCAEC;AAED,SAAe,UAAU,CACvB,GAAW,EACX,iBAA2B,EAAE;;QAE7B,IAAI,aAAa,GAAG,EAAE,CAAA;QACtB,cAAc,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;QAChC,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAA;QACzD,IAAI;YACF,MAAM,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,EAAE,EAAE,cAAc,EAAE;gBACxC,gBAAgB,EAAE,IAAI;gBACtB,MAAM,EAAE,IAAI;gBACZ,SAAS,EAAE;oBACT,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;oBACpE,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;iBACrE;aACF,CAAC,CAAA;SACH;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;SACxB;QAED,aAAa,GAAG,aAAa,CAAC,IAAI,EAAE,CAAA;QACpC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QACzB,OAAO,aAAa,CAAA;IACtB,CAAC;CAAA;AAED,0DAA0D;AAC1D,SAAsB,oBAAoB;;QACxC,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,MAAM,EAAE,CAAC,SAAS,CAAC,CAAC,CAAA;QAC3D,MAAM,OAAO,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QAC3C,IAAI,CAAC,KAAK,CAAC,iBAAiB,OAAO,EAAE,CAAC,CAAA;QAEtC,IAAI,aAAa,KAAK,EAAE,EAAE;YACxB,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;aAAM;YACL,OAAO,6BAAiB,CAAC,eAAe,CAAA;SACzC;IACH,CAAC;CAAA;AAVD,oDAUC;AAED,SAAgB,gBAAgB,CAAC,iBAAoC;IACnE,OAAO,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;QACjD,CAAC,CAAC,yBAAa,CAAC,IAAI;QACpB,CAAC,CAAC,yBAAa,CAAC,IAAI,CAAA;AACxB,CAAC;AAJD,4CAIC;AAED,SAAsB,sBAAsB;;QAC1C,IAAI,EAAE,CAAC,UAAU,CAAC,+BAAmB,CAAC,EAAE;YACtC,OAAO,+BAAmB,CAAA;SAC3B;QACD,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,KAAK,CAAC,CAAA;QAC7C,OAAO,aAAa,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAA;
IAC/E,CAAC;CAAA;AAND,wDAMC;AAED,SAAgB,aAAa,CAAI,IAAY,EAAE,KAAS;IACtD,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,KAAK,CAAC,YAAY,IAAI,0BAA0B,CAAC,CAAA;KACxD;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAND,sCAMC;AAED,SAAgB,eAAe,CAC7B,KAAe,EACf,iBAAqC,EACrC,oBAAoB,GAAG,KAAK;IAE5B,8BAA8B;IAC9B,MAAM,UAAU,GAAG,KAAK,CAAC,KAAK,EAAE,CAAA;IAEhC,qDAAqD;IACrD,6CAA6C;IAC7C,IAAI,iBAAiB,EAAE;QACrB,UAAU,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAA;KACnC;IAED,oEAAoE;IACpE,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,IAAI,CAAC,oBAAoB,EAAE;QACzD,UAAU,CAAC,IAAI,CAAC,cAAc,CAAC,CAAA;KAChC;IAED,uEAAuE;IACvE,UAAU,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;IAE5B,OAAO,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;AAC/E,CAAC;AAvBD,0CAuBC;AAED,SAAgB,eAAe;IAC7B,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,EAAE;QACV,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAA;KACxE;IACD,OAAO,KAAK,CAAA;AACd,CAAC;AAND,0CAMC"}

View file

@ -0,0 +1,3 @@
export declare function isGhes(): boolean;
export declare function getCacheServiceVersion(): string;
export declare function getCacheServiceURL(): string;

View file

@ -0,0 +1,37 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getCacheServiceURL = exports.getCacheServiceVersion = exports.isGhes = void 0;
function isGhes() {
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
const hostname = ghUrl.hostname.trimEnd().toUpperCase();
const isGitHubHost = hostname === 'GITHUB.COM';
const isGheHost = hostname.endsWith('.GHE.COM');
const isLocalHost = hostname.endsWith('.LOCALHOST');
return !isGitHubHost && !isGheHost && !isLocalHost;
}
exports.isGhes = isGhes;
function getCacheServiceVersion() {
// Cache service v2 is not supported on GHES. We will default to
    // cache service v1 even if the feature flag was enabled by the user.
if (isGhes())
return 'v1';
return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1';
}
exports.getCacheServiceVersion = getCacheServiceVersion;
function getCacheServiceURL() {
const version = getCacheServiceVersion();
// Based on the version of the cache service, we will determine which
// URL to use.
switch (version) {
case 'v1':
return (process.env['ACTIONS_CACHE_URL'] ||
process.env['ACTIONS_RESULTS_URL'] ||
'');
case 'v2':
return process.env['ACTIONS_RESULTS_URL'] || '';
default:
throw new Error(`Unsupported cache service version: ${version}`);
}
}
exports.getCacheServiceURL = getCacheServiceURL;
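// Example resolution (illustrative): on GHES the version is pinned to 'v1',
// so the URL comes from ACTIONS_CACHE_URL with ACTIONS_RESULTS_URL as the
// fallback; on github.com with ACTIONS_CACHE_SERVICE_V2 set, the version is
// 'v2' and only ACTIONS_RESULTS_URL is consulted.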
//# sourceMappingURL=config.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"config.js","sourceRoot":"","sources":["../../src/internal/config.ts"],"names":[],"mappings":";;;AAAA,SAAgB,MAAM;IACpB,MAAM,KAAK,GAAG,IAAI,GAAG,CACnB,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,oBAAoB,CACzD,CAAA;IAED,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,WAAW,EAAE,CAAA;IACvD,MAAM,YAAY,GAAG,QAAQ,KAAK,YAAY,CAAA;IAC9C,MAAM,SAAS,GAAG,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;IAC/C,MAAM,WAAW,GAAG,QAAQ,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAA;IAEnD,OAAO,CAAC,YAAY,IAAI,CAAC,SAAS,IAAI,CAAC,WAAW,CAAA;AACpD,CAAC;AAXD,wBAWC;AAED,SAAgB,sBAAsB;IACpC,gEAAgE;IAChE,iEAAiE;IACjE,IAAI,MAAM,EAAE;QAAE,OAAO,IAAI,CAAA;IAEzB,OAAO,OAAO,CAAC,GAAG,CAAC,0BAA0B,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAA;AAC9D,CAAC;AAND,wDAMC;AAED,SAAgB,kBAAkB;IAChC,MAAM,OAAO,GAAG,sBAAsB,EAAE,CAAA;IAExC,qEAAqE;IACrE,cAAc;IACd,QAAQ,OAAO,EAAE;QACf,KAAK,IAAI;YACP,OAAO,CACL,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC;gBAChC,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC;gBAClC,EAAE,CACH,CAAA;QACH,KAAK,IAAI;YACP,OAAO,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,IAAI,EAAE,CAAA;QACjD;YACE,MAAM,IAAI,KAAK,CAAC,sCAAsC,OAAO,EAAE,CAAC,CAAA;KACnE;AACH,CAAC;AAjBD,gDAiBC"}

View file

@ -0,0 +1,21 @@
export declare enum CacheFilename {
Gzip = "cache.tgz",
Zstd = "cache.tzst"
}
export declare enum CompressionMethod {
Gzip = "gzip",
ZstdWithoutLong = "zstd-without-long",
Zstd = "zstd"
}
export declare enum ArchiveToolType {
GNU = "gnu",
BSD = "bsd"
}
export declare const DefaultRetryAttempts = 2;
export declare const DefaultRetryDelay = 5000;
export declare const SocketTimeout = 5000;
export declare const GnuTarPathOnWindows: string;
export declare const SystemTarPathOnWindows: string;
export declare const TarFilename = "cache.tar";
export declare const ManifestFilename = "manifest.txt";
export declare const CacheFileSizeLimit: number;

View file

@ -0,0 +1,37 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheFileSizeLimit = exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0;
var CacheFilename;
(function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz";
CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
var CompressionMethod;
(function (CompressionMethod) {
CompressionMethod["Gzip"] = "gzip";
// Long range mode was added to zstd in v1.3.2.
    // This enum value covers earlier versions of zstd that do not have --long support
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
var ArchiveToolType;
(function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
// The default number of retry attempts.
exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts.
exports.DefaultRetryDelay = 5000;
// Socket timeout in milliseconds during download. If no traffic is received
// over the socket during this period, the socket is destroyed and the download
// is aborted.
exports.SocketTimeout = 5000;
// The default path of GNU tar on hosted Windows runners
exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
// The default path of BSD tar on hosted Windows runners
exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
exports.TarFilename = 'cache.tar';
exports.ManifestFilename = 'manifest.txt';
exports.CacheFileSizeLimit = 10 * Math.pow(1024, 3); // 10 GiB per repository
//# sourceMappingURL=constants.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/internal/constants.ts"],"names":[],"mappings":";;;AAAA,IAAY,aAGX;AAHD,WAAY,aAAa;IACvB,mCAAkB,CAAA;IAClB,oCAAmB,CAAA;AACrB,CAAC,EAHW,aAAa,6BAAb,aAAa,QAGxB;AAED,IAAY,iBAMX;AAND,WAAY,iBAAiB;IAC3B,kCAAa,CAAA;IACb,+CAA+C;IAC/C,6EAA6E;IAC7E,0DAAqC,CAAA;IACrC,kCAAa,CAAA;AACf,CAAC,EANW,iBAAiB,iCAAjB,iBAAiB,QAM5B;AAED,IAAY,eAGX;AAHD,WAAY,eAAe;IACzB,8BAAW,CAAA;IACX,8BAAW,CAAA;AACb,CAAC,EAHW,eAAe,+BAAf,eAAe,QAG1B;AAED,wCAAwC;AAC3B,QAAA,oBAAoB,GAAG,CAAC,CAAA;AAErC,4DAA4D;AAC/C,QAAA,iBAAiB,GAAG,IAAI,CAAA;AAErC,6EAA6E;AAC7E,+EAA+E;AAC/E,cAAc;AACD,QAAA,aAAa,GAAG,IAAI,CAAA;AAEjC,uDAAuD;AAC1C,QAAA,mBAAmB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,0BAA0B,CAAA;AAE3F,uDAAuD;AAC1C,QAAA,sBAAsB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,8BAA8B,CAAA;AAEpF,QAAA,WAAW,GAAG,WAAW,CAAA;AAEzB,QAAA,gBAAgB,GAAG,cAAc,CAAA;AAEjC,QAAA,kBAAkB,GAAG,EAAE,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA,CAAC,uBAAuB"}

View file

@ -0,0 +1,83 @@
/// <reference types="node" />
import { TransferProgressEvent } from '@azure/ms-rest-js';
import * as fs from 'fs';
import { DownloadOptions } from '../options';
/**
* Class for tracking the download state and displaying stats.
*/
export declare class DownloadProgress {
contentLength: number;
segmentIndex: number;
segmentSize: number;
segmentOffset: number;
receivedBytes: number;
startTime: number;
displayedComplete: boolean;
timeoutHandle?: ReturnType<typeof setTimeout>;
constructor(contentLength: number);
/**
* Progress to the next segment. Only call this method when the previous segment
* is complete.
*
* @param segmentSize the length of the next segment
*/
nextSegment(segmentSize: number): void;
/**
* Sets the number of bytes received for the current segment.
*
* @param receivedBytes the number of bytes received
*/
setReceivedBytes(receivedBytes: number): void;
/**
* Returns the total number of bytes transferred.
*/
getTransferredBytes(): number;
/**
* Returns true if the download is complete.
*/
isDone(): boolean;
/**
* Prints the current download stats. Once the download completes, this will print one
* last line and then stop.
*/
display(): void;
/**
* Returns a function used to handle TransferProgressEvents.
*/
onProgress(): (progress: TransferProgressEvent) => void;
/**
* Starts the timer that displays the stats.
*
* @param delayInMs the delay between each write
*/
startDisplayTimer(delayInMs?: number): void;
/**
* Stops the timer that displays the stats. As this typically indicates the download
* is complete, this will display one last line, unless the last line has already
* been written.
*/
stopDisplayTimer(): void;
}
/**
* Download the cache using the Actions toolkit http-client
*
* @param archiveLocation the URL for the cache
* @param archivePath the local path where the cache is saved
*/
export declare function downloadCacheHttpClient(archiveLocation: string, archivePath: string): Promise<void>;
/**
* Download the cache using the Actions toolkit http-client concurrently
*
* @param archiveLocation the URL for the cache
* @param archivePath the local path where the cache is saved
*/
export declare function downloadCacheHttpClientConcurrent(archiveLocation: string, archivePath: fs.PathLike, options: DownloadOptions): Promise<void>;
/**
* Download the cache using the Azure Storage SDK. Only call this method if the
* URL points to an Azure Storage endpoint.
*
* @param archiveLocation the URL for the cache
* @param archivePath the local path where the cache is saved
* @param options the download options with the defaults set
*/
export declare function downloadCacheStorageSDK(archiveLocation: string, archivePath: string, options: DownloadOptions): Promise<void>;

View file

@ -0,0 +1,378 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
const core = __importStar(require("@actions/core"));
const http_client_1 = require("@actions/http-client");
const storage_blob_1 = require("@azure/storage-blob");
const buffer = __importStar(require("buffer"));
const fs = __importStar(require("fs"));
const stream = __importStar(require("stream"));
const util = __importStar(require("util"));
const utils = __importStar(require("./cacheUtils"));
const constants_1 = require("./constants");
const requestUtils_1 = require("./requestUtils");
const abort_controller_1 = require("@azure/abort-controller");
/**
* Pipes the body of a HTTP response to a stream
*
* @param response the HTTP response
* @param output the writable stream
*/
function pipeResponseToStream(response, output) {
return __awaiter(this, void 0, void 0, function* () {
const pipeline = util.promisify(stream.pipeline);
yield pipeline(response.message, output);
});
}
/**
* Class for tracking the download state and displaying stats.
*/
class DownloadProgress {
constructor(contentLength) {
this.contentLength = contentLength;
this.segmentIndex = 0;
this.segmentSize = 0;
this.segmentOffset = 0;
this.receivedBytes = 0;
this.displayedComplete = false;
this.startTime = Date.now();
}
/**
* Progress to the next segment. Only call this method when the previous segment
* is complete.
*
* @param segmentSize the length of the next segment
*/
nextSegment(segmentSize) {
this.segmentOffset = this.segmentOffset + this.segmentSize;
this.segmentIndex = this.segmentIndex + 1;
this.segmentSize = segmentSize;
this.receivedBytes = 0;
core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`);
}
/**
* Sets the number of bytes received for the current segment.
*
* @param receivedBytes the number of bytes received
*/
setReceivedBytes(receivedBytes) {
this.receivedBytes = receivedBytes;
}
/**
* Returns the total number of bytes transferred.
*/
getTransferredBytes() {
return this.segmentOffset + this.receivedBytes;
}
/**
* Returns true if the download is complete.
*/
isDone() {
return this.getTransferredBytes() === this.contentLength;
}
/**
* Prints the current download stats. Once the download completes, this will print one
* last line and then stop.
*/
display() {
if (this.displayedComplete) {
return;
}
const transferredBytes = this.segmentOffset + this.receivedBytes;
const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
const elapsedTime = Date.now() - this.startTime;
const downloadSpeed = (transferredBytes /
(1024 * 1024) /
(elapsedTime / 1000)).toFixed(1);
        core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MB/s`);
if (this.isDone()) {
this.displayedComplete = true;
}
}
/**
* Returns a function used to handle TransferProgressEvents.
*/
onProgress() {
return (progress) => {
this.setReceivedBytes(progress.loadedBytes);
};
}
/**
* Starts the timer that displays the stats.
*
* @param delayInMs the delay between each write
*/
startDisplayTimer(delayInMs = 1000) {
const displayCallback = () => {
this.display();
if (!this.isDone()) {
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
};
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
/**
* Stops the timer that displays the stats. As this typically indicates the download
* is complete, this will display one last line, unless the last line has already
* been written.
*/
stopDisplayTimer() {
if (this.timeoutHandle) {
clearTimeout(this.timeoutHandle);
this.timeoutHandle = undefined;
}
this.display();
}
}
exports.DownloadProgress = DownloadProgress;
/**
* Download the cache using the Actions toolkit http-client
*
* @param archiveLocation the URL for the cache
* @param archivePath the local path where the cache is saved
*/
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter(this, void 0, void 0, function* () {
const writeStream = fs.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient('actions/cache');
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));
        // Abort the download if no traffic is received over the socket.
downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
downloadResponse.message.destroy();
core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
});
yield pipeResponseToStream(downloadResponse, writeStream);
// Validate download size.
const contentLengthHeader = downloadResponse.message.headers['content-length'];
if (contentLengthHeader) {
const expectedLength = parseInt(contentLengthHeader);
const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
if (actualLength !== expectedLength) {
throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
}
}
else {
core.debug('Unable to validate download, no Content-Length header');
}
});
}
exports.downloadCacheHttpClient = downloadCacheHttpClient;
/**
* Download the cache using the Actions toolkit http-client concurrently
*
* @param archiveLocation the URL for the cache
* @param archivePath the local path where the cache is saved
*/
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
socketTimeout: options.timeoutInMs,
keepAlive: true
});
try {
const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
const lengthHeader = res.message.headers['content-length'];
if (lengthHeader === undefined || lengthHeader === null) {
throw new Error('Content-Length not found on blob response');
}
const length = parseInt(lengthHeader);
if (Number.isNaN(length)) {
throw new Error(`Could not interpret Content-Length: ${length}`);
}
const downloads = [];
const blockSize = 4 * 1024 * 1024;
for (let offset = 0; offset < length; offset += blockSize) {
const count = Math.min(blockSize, length - offset);
downloads.push({
offset,
promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
})
});
}
// reverse to use .pop instead of .shift
downloads.reverse();
let actives = 0;
let bytesDownloaded = 0;
const progress = new DownloadProgress(length);
progress.startDisplayTimer();
const progressFn = progress.onProgress();
const activeDownloads = [];
let nextDownload;
const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
const segment = yield Promise.race(Object.values(activeDownloads));
yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
actives--;
delete activeDownloads[segment.offset];
bytesDownloaded += segment.count;
progressFn({ loadedBytes: bytesDownloaded });
});
while ((nextDownload = downloads.pop())) {
activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
actives++;
if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
yield waitAndWrite();
}
}
while (actives > 0) {
yield waitAndWrite();
}
}
finally {
httpClient.dispose();
yield archiveDescriptor.close();
}
});
}
exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
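// Range math used above (descriptive): blocks are 4 MiB (4194304 bytes), so a
// 10485760-byte blob is fetched as 'bytes=0-4194303', 'bytes=4194304-8388607'
// and 'bytes=8388608-10485759', with at most options.downloadConcurrency
// (default 10) requests in flight. Completed segments are written at their own
// file offset, so completion order does not matter.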
function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
return __awaiter(this, void 0, void 0, function* () {
const retries = 5;
let failures = 0;
while (true) {
try {
const timeout = 30000;
const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
if (typeof result === 'string') {
throw new Error('downloadSegmentRetry failed due to timeout');
}
return result;
}
catch (err) {
if (failures >= retries) {
throw err;
}
failures++;
}
}
});
}
function downloadSegment(httpClient, archiveLocation, offset, count) {
return __awaiter(this, void 0, void 0, function* () {
const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
return yield httpClient.get(archiveLocation, {
Range: `bytes=${offset}-${offset + count - 1}`
});
}));
if (!partRes.readBodyBuffer) {
throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
}
return {
offset,
count,
buffer: yield partRes.readBodyBuffer()
};
});
}
/**
* Download the cache using the Azure Storage SDK. Only call this method if the
* URL points to an Azure Storage endpoint.
*
* @param archiveLocation the URL for the cache
* @param archivePath the local path where the cache is saved
* @param options the download options with the defaults set
*/
function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, {
retryOptions: {
// Override the timeout used when downloading each 4 MB chunk
// The default is 2 min / MB, which is way too slow
tryTimeoutInMs: options.timeoutInMs
}
});
const properties = yield client.getProperties();
const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1;
if (contentLength < 0) {
// We should never hit this condition, but just in case fall back to downloading the
// file as one large stream
core.debug('Unable to determine content length, downloading file with http-client...');
yield downloadCacheHttpClient(archiveLocation, archivePath);
}
else {
// Use downloadToBuffer for faster downloads, since internally it splits the
// file into 4 MB chunks which can then be parallelized and retried independently
//
// If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB
// on 64-bit systems), split the download into multiple segments
            // ~2 GB = 2147483647 bytes; beyond this, buffer allocation throws an
            // out-of-range error, so the segment size is capped accordingly.
            // The segment size is set to 128 MB (134217728 bytes) so each segment
            // completes faster and failures surface sooner.
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs.openSync(archivePath, 'w');
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
const abortSignal = controller.signal;
while (!downloadProgress.isDone()) {
const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
downloadProgress.nextSegment(segmentSize);
const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
abortSignal,
concurrency: options.downloadConcurrency,
onProgress: downloadProgress.onProgress()
}));
if (result === 'timeout') {
controller.abort();
throw new Error('Aborting cache download as the download time exceeded the timeout.');
}
else if (Buffer.isBuffer(result)) {
fs.writeFileSync(fd, result);
}
}
}
finally {
downloadProgress.stopDisplayTimer();
fs.closeSync(fd);
}
}
});
}
exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
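// Segment math used above (descriptive): with maxSegmentSize capped at 128 MB
// (134217728 bytes), a 314572800-byte (300 MiB) blob downloads as segments of
// 134217728, 134217728 and 46137344 bytes, and each segment gets its own
// options.segmentTimeoutInMs budget (default 3600000 ms) before the abort
// path fires.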
const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
let timeoutHandle;
const timeoutPromise = new Promise(resolve => {
timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
});
return Promise.race([promise, timeoutPromise]).then(result => {
clearTimeout(timeoutHandle);
return result;
});
});
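// Usage note (descriptive): promiseWithTimeout resolves with the sentinel
// string 'timeout' instead of rejecting, so callers must inspect the result,
// e.g. `if (result === 'timeout') { abort and retry }`. The losing promise is
// not cancelled here; callers abort it themselves, as the AbortController in
// downloadCacheStorageSDK above does.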
//# sourceMappingURL=downloadUtils.js.map

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,8 @@
import { HttpClientResponse } from '@actions/http-client';
import { ITypedResponseWithError } from './contracts';
export declare function isSuccessStatusCode(statusCode?: number): boolean;
export declare function isServerErrorStatusCode(statusCode?: number): boolean;
export declare function isRetryableStatusCode(statusCode?: number): boolean;
export declare function retry<T>(name: string, method: () => Promise<T>, getStatusCode: (arg0: T) => number | undefined, maxAttempts?: number, delay?: number, onError?: ((arg0: Error) => T | undefined) | undefined): Promise<T>;
export declare function retryTypedResponse<T>(name: string, method: () => Promise<ITypedResponseWithError<T>>, maxAttempts?: number, delay?: number): Promise<ITypedResponseWithError<T>>;
export declare function retryHttpClientResponse(name: string, method: () => Promise<HttpClientResponse>, maxAttempts?: number, delay?: number): Promise<HttpClientResponse>;

View file

@ -0,0 +1,137 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.retryHttpClientResponse = exports.retryTypedResponse = exports.retry = exports.isRetryableStatusCode = exports.isServerErrorStatusCode = exports.isSuccessStatusCode = void 0;
const core = __importStar(require("@actions/core"));
const http_client_1 = require("@actions/http-client");
const constants_1 = require("./constants");
function isSuccessStatusCode(statusCode) {
if (!statusCode) {
return false;
}
return statusCode >= 200 && statusCode < 300;
}
exports.isSuccessStatusCode = isSuccessStatusCode;
function isServerErrorStatusCode(statusCode) {
if (!statusCode) {
return true;
}
return statusCode >= 500;
}
exports.isServerErrorStatusCode = isServerErrorStatusCode;
function isRetryableStatusCode(statusCode) {
if (!statusCode) {
return false;
}
const retryableStatusCodes = [
http_client_1.HttpCodes.BadGateway,
http_client_1.HttpCodes.ServiceUnavailable,
http_client_1.HttpCodes.GatewayTimeout
];
return retryableStatusCodes.includes(statusCode);
}
exports.isRetryableStatusCode = isRetryableStatusCode;
function sleep(milliseconds) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise(resolve => setTimeout(resolve, milliseconds));
});
}
function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) {
return __awaiter(this, void 0, void 0, function* () {
let errorMessage = '';
let attempt = 1;
while (attempt <= maxAttempts) {
let response = undefined;
let statusCode = undefined;
let isRetryable = false;
try {
response = yield method();
}
catch (error) {
if (onError) {
response = onError(error);
}
isRetryable = true;
errorMessage = error.message;
}
if (response) {
statusCode = getStatusCode(response);
if (!isServerErrorStatusCode(statusCode)) {
return response;
}
}
if (statusCode) {
isRetryable = isRetryableStatusCode(statusCode);
errorMessage = `Cache service responded with ${statusCode}`;
}
core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
if (!isRetryable) {
core.debug(`${name} - Error is not retryable`);
break;
}
yield sleep(delay);
attempt++;
}
throw Error(`${name} failed: ${errorMessage}`);
});
}
exports.retry = retry;
function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) {
return __awaiter(this, void 0, void 0, function* () {
return yield retry(name, method, (response) => response.statusCode, maxAttempts, delay,
// If the error object contains the statusCode property, extract it and return
        // a TypedResponse<T> so it can be processed by the retry logic.
(error) => {
if (error instanceof http_client_1.HttpClientError) {
return {
statusCode: error.statusCode,
result: null,
headers: {},
error
};
}
else {
return undefined;
}
});
});
}
exports.retryTypedResponse = retryTypedResponse;
function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) {
return __awaiter(this, void 0, void 0, function* () {
return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay);
});
}
exports.retryHttpClientResponse = retryHttpClientResponse;
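// Illustrative usage (placeholder name and URL; never exported or called):
// wrap any HttpClient call so retryable 5xx responses (502/503/504) and thrown
// transport errors are retried with the defaults of 2 attempts and a 5000 ms
// delay.
function exampleRetryUsage(httpClient) {
    return __awaiter(this, void 0, void 0, function* () {
        return yield retryHttpClientResponse('exampleDownload', () => httpClient.get('https://example.invalid/cache.tzst'));
    });
}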
//# sourceMappingURL=requestUtils.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"requestUtils.js","sourceRoot":"","sources":["../../src/internal/requestUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,sDAI6B;AAC7B,2CAAmE;AAGnE,SAAgB,mBAAmB,CAAC,UAAmB;IACrD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAA;KACb;IACD,OAAO,UAAU,IAAI,GAAG,IAAI,UAAU,GAAG,GAAG,CAAA;AAC9C,CAAC;AALD,kDAKC;AAED,SAAgB,uBAAuB,CAAC,UAAmB;IACzD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,IAAI,CAAA;KACZ;IACD,OAAO,UAAU,IAAI,GAAG,CAAA;AAC1B,CAAC;AALD,0DAKC;AAED,SAAgB,qBAAqB,CAAC,UAAmB;IACvD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAA;KACb;IACD,MAAM,oBAAoB,GAAG;QAC3B,uBAAS,CAAC,UAAU;QACpB,uBAAS,CAAC,kBAAkB;QAC5B,uBAAS,CAAC,cAAc;KACzB,CAAA;IACD,OAAO,oBAAoB,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;AAClD,CAAC;AAVD,sDAUC;AAED,SAAe,KAAK,CAAC,YAAoB;;QACvC,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAA;IAClE,CAAC;CAAA;AAED,SAAsB,KAAK,CACzB,IAAY,EACZ,MAAwB,EACxB,aAA8C,EAC9C,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB,EACzB,UAAwD,SAAS;;QAEjE,IAAI,YAAY,GAAG,EAAE,CAAA;QACrB,IAAI,OAAO,GAAG,CAAC,CAAA;QAEf,OAAO,OAAO,IAAI,WAAW,EAAE;YAC7B,IAAI,QAAQ,GAAkB,SAAS,CAAA;YACvC,IAAI,UAAU,GAAuB,SAAS,CAAA;YAC9C,IAAI,WAAW,GAAG,KAAK,CAAA;YAEvB,IAAI;gBACF,QAAQ,GAAG,MAAM,MAAM,EAAE,CAAA;aAC1B;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,OAAO,EAAE;oBACX,QAAQ,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;iBAC1B;gBAED,WAAW,GAAG,IAAI,CAAA;gBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;aAC7B;YAED,IAAI,QAAQ,EAAE;gBACZ,UAAU,GAAG,aAAa,CAAC,QAAQ,CAAC,CAAA;gBAEpC,IAAI,CAAC,uBAAuB,CAAC,UAAU,CAAC,EAAE;oBACxC,OAAO,QAAQ,CAAA;iBAChB;aACF;YAED,IAAI,UAAU,EAAE;gBACd,WAAW,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAA;gBAC/C,YAAY,GAAG,gCAAgC,UAAU,EAAE,CAAA;aAC5D;YAED,IAAI,CAAC,KAAK,CACR,GAAG,IAAI,cAAc,OAAO,OAAO,WAAW,uBAAuB,YAAY,EAAE,CACpF,CAAA;YAED,IAAI,CAAC,WAAW,EAAE;gBAChB,IAAI,CAAC,KAAK,CAAC,GAAG,IAAI,2BAA2B,CAAC,CAAA;gBAC9C,MAAK;aACN;YAED,MAAM,KAAK,CAAC,KAAK,CAAC,CAAA;YAClB,OAAO,EAAE,CAAA;SACV;QAED,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,YAAY,EAAE,CAAC,CAAA;IAChD,CAAC;CAAA;AAtDD,sBAsDC;AAED,SAAsB,kBAAkB,CACtC,IAAY,EACZ,MAAiD,EACjD,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB;;QAEzB,OAAO,MAAM,KAAK,CAChB,IAAI,EACJ,MAAM,EACN,CAAC,QAAoC,EAAE,EAAE,CAAC,QAAQ,CAAC,UAAU,EAC7D,WAAW,EACX,KAAK;QACL,8EAA8E;QAC9E,iEAAiE;QACjE,CAAC,KAAY,EAAE,EAAE;YACf,IAAI,KAAK,YAAY,6BAAe,EAAE;gBACpC,OAAO;oBACL,UAAU,EAAE,KAAK,CAAC,UAAU;oBAC5B,MAAM,EAAE,IAAI;oBACZ,OAAO,EAAE,EAAE;oBACX,KAAK;iBACN,CAAA;aACF;iBAAM;gBACL,OAAO,SAAS,CAAA;aACjB;QACH,CAAC,CACF,CAAA;IACH,CAAC;CAAA;AA3BD,gDA2BC;AAED,SAAsB,uBAAuB,CAC3C,IAAY,EACZ,MAAyC,EACzC,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB;;QAEzB,OAAO,MAAM,KAAK,CAChB,IAAI,EACJ,MAAM,EACN,CAAC,QAA4B,EAAE,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,EAC7D,WAAW,EACX,KAAK,CACN,CAAA;IACH,CAAC;CAAA;AAbD,0DAaC"}

View file

@ -0,0 +1,6 @@
import { CacheServiceClientJSON } from '../../generated/results/api/v1/cache.twirp';
export declare function internalCacheTwirpClient(options?: {
maxAttempts?: number;
retryIntervalMs?: number;
retryMultiplier?: number;
}): CacheServiceClientJSON;

View file

@ -0,0 +1,160 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.internalCacheTwirpClient = void 0;
const core_1 = require("@actions/core");
const user_agent_1 = require("./user-agent");
const errors_1 = require("./errors");
const config_1 = require("../config");
const cacheUtils_1 = require("../cacheUtils");
const auth_1 = require("@actions/http-client/lib/auth");
const http_client_1 = require("@actions/http-client");
const cache_twirp_1 = require("../../generated/results/api/v1/cache.twirp");
/**
* This class is a wrapper around the CacheServiceClientJSON class generated by Twirp.
*
* It adds retry logic to the request method, which is not present in the generated client.
*
* This class is used to interact with cache service v2.
*/
class CacheServiceClient {
constructor(userAgent, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) {
this.maxAttempts = 5;
this.baseRetryIntervalMilliseconds = 3000;
this.retryMultiplier = 1.5;
const token = (0, cacheUtils_1.getRuntimeToken)();
this.baseUrl = (0, config_1.getCacheServiceURL)();
if (maxAttempts) {
this.maxAttempts = maxAttempts;
}
if (baseRetryIntervalMilliseconds) {
this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds;
}
if (retryMultiplier) {
this.retryMultiplier = retryMultiplier;
}
this.httpClient = new http_client_1.HttpClient(userAgent, [
new auth_1.BearerCredentialHandler(token)
]);
}
    // This function satisfies the Rpc interface. It is compatible with the
    // JSON generated client.
request(service, method, contentType, data) {
return __awaiter(this, void 0, void 0, function* () {
const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href;
(0, core_1.debug)(`[Request] ${method} ${url}`);
const headers = {
'Content-Type': contentType
};
try {
const { body } = yield this.retryableRequest(() => __awaiter(this, void 0, void 0, function* () { return this.httpClient.post(url, JSON.stringify(data), headers); }));
return body;
}
catch (error) {
throw new Error(`Failed to ${method}: ${error.message}`);
}
});
}
retryableRequest(operation) {
return __awaiter(this, void 0, void 0, function* () {
let attempt = 0;
let errorMessage = '';
let rawBody = '';
while (attempt < this.maxAttempts) {
let isRetryable = false;
try {
const response = yield operation();
const statusCode = response.message.statusCode;
rawBody = yield response.readBody();
(0, core_1.debug)(`[Response] - ${response.message.statusCode}`);
(0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`);
const body = JSON.parse(rawBody);
(0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`);
if (this.isSuccessStatusCode(statusCode)) {
return { response, body };
}
isRetryable = this.isRetryableHttpStatusCode(statusCode);
errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`;
if (body.msg) {
if (errors_1.UsageError.isUsageErrorMessage(body.msg)) {
throw new errors_1.UsageError();
}
errorMessage = `${errorMessage}: ${body.msg}`;
}
}
catch (error) {
if (error instanceof SyntaxError) {
(0, core_1.debug)(`Raw Body: ${rawBody}`);
}
if (error instanceof errors_1.UsageError) {
throw error;
}
if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
}
isRetryable = true;
errorMessage = error.message;
}
if (!isRetryable) {
throw new Error(`Received non-retryable error: ${errorMessage}`);
}
if (attempt + 1 === this.maxAttempts) {
throw new Error(`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`);
}
const retryTimeMilliseconds = this.getExponentialRetryTimeMilliseconds(attempt);
(0, core_1.info)(`Attempt ${attempt + 1} of ${this.maxAttempts} failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`);
yield this.sleep(retryTimeMilliseconds);
attempt++;
}
throw new Error(`Request failed`);
});
}
isSuccessStatusCode(statusCode) {
if (!statusCode)
return false;
return statusCode >= 200 && statusCode < 300;
}
isRetryableHttpStatusCode(statusCode) {
if (!statusCode)
return false;
const retryableStatusCodes = [
http_client_1.HttpCodes.BadGateway,
http_client_1.HttpCodes.GatewayTimeout,
http_client_1.HttpCodes.InternalServerError,
http_client_1.HttpCodes.ServiceUnavailable,
http_client_1.HttpCodes.TooManyRequests
];
return retryableStatusCodes.includes(statusCode);
}
sleep(milliseconds) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise(resolve => setTimeout(resolve, milliseconds));
});
}
getExponentialRetryTimeMilliseconds(attempt) {
if (attempt < 0) {
            throw new Error('attempt should be a non-negative integer');
}
if (attempt === 0) {
return this.baseRetryIntervalMilliseconds;
}
const minTime = this.baseRetryIntervalMilliseconds * Math.pow(this.retryMultiplier, attempt);
const maxTime = minTime * this.retryMultiplier;
        // returns a random number between minTime (inclusive) and maxTime (exclusive)
return Math.trunc(Math.random() * (maxTime - minTime) + minTime);
}
}
function internalCacheTwirpClient(options) {
const client = new CacheServiceClient((0, user_agent_1.getUserAgentString)(), options === null || options === void 0 ? void 0 : options.maxAttempts, options === null || options === void 0 ? void 0 : options.retryIntervalMs, options === null || options === void 0 ? void 0 : options.retryMultiplier);
return new cache_twirp_1.CacheServiceClientJSON(client);
}
exports.internalCacheTwirpClient = internalCacheTwirpClient;
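// Backoff math used by the client above (descriptive): with the defaults of a
// 3000 ms base and a 1.5 multiplier, attempt 0 waits exactly 3000 ms, attempt
// 1 waits a random value in [4500, 6750) ms, attempt 2 in [6750, 10125) ms,
// and so on, up to maxAttempts (default 5).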
//# sourceMappingURL=cacheTwirpClient.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"cacheTwirpClient.js","sourceRoot":"","sources":["../../../src/internal/shared/cacheTwirpClient.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,wCAAyC;AACzC,6CAA+C;AAC/C,qCAAiD;AACjD,sCAA4C;AAC5C,8CAA6C;AAC7C,wDAAqE;AACrE,sDAA8E;AAC9E,4EAAiF;AAYjF;;;;;;GAMG;AACH,MAAM,kBAAkB;IAOtB,YACE,SAAiB,EACjB,WAAoB,EACpB,6BAAsC,EACtC,eAAwB;QARlB,gBAAW,GAAG,CAAC,CAAA;QACf,kCAA6B,GAAG,IAAI,CAAA;QACpC,oBAAe,GAAG,GAAG,CAAA;QAQ3B,MAAM,KAAK,GAAG,IAAA,4BAAe,GAAE,CAAA;QAC/B,IAAI,CAAC,OAAO,GAAG,IAAA,2BAAkB,GAAE,CAAA;QACnC,IAAI,WAAW,EAAE;YACf,IAAI,CAAC,WAAW,GAAG,WAAW,CAAA;SAC/B;QACD,IAAI,6BAA6B,EAAE;YACjC,IAAI,CAAC,6BAA6B,GAAG,6BAA6B,CAAA;SACnE;QACD,IAAI,eAAe,EAAE;YACnB,IAAI,CAAC,eAAe,GAAG,eAAe,CAAA;SACvC;QAED,IAAI,CAAC,UAAU,GAAG,IAAI,wBAAU,CAAC,SAAS,EAAE;YAC1C,IAAI,8BAAuB,CAAC,KAAK,CAAC;SACnC,CAAC,CAAA;IACJ,CAAC;IAED,4EAA4E;IAC5E,yBAAyB;IACnB,OAAO,CACX,OAAe,EACf,MAAc,EACd,WAAwD,EACxD,IAAyB;;YAEzB,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,UAAU,OAAO,IAAI,MAAM,EAAE,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,IAAI,CAAA;YACrE,IAAA,YAAK,EAAC,aAAa,MAAM,IAAI,GAAG,EAAE,CAAC,CAAA;YACnC,MAAM,OAAO,GAAG;gBACd,cAAc,EAAE,WAAW;aAC5B,CAAA;YACD,IAAI;gBACF,MAAM,EAAC,IAAI,EAAC,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,GAAS,EAAE,gDACpD,OAAA,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,OAAO,CAAC,CAAA,GAAA,CACzD,CAAA;gBAED,OAAO,IAAI,CAAA;aACZ;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,aAAa,MAAM,KAAK,KAAK,CAAC,OAAO,EAAE,CAAC,CAAA;aACzD;QACH,CAAC;KAAA;IAEK,gBAAgB,CACpB,SAA4C;;YAE5C,IAAI,OAAO,GAAG,CAAC,CAAA;YACf,IAAI,YAAY,GAAG,EAAE,CAAA;YACrB,IAAI,OAAO,GAAG,EAAE,CAAA;YAChB,OAAO,OAAO,GAAG,IAAI,CAAC,WAAW,EAAE;gBACjC,IAAI,WAAW,GAAG,KAAK,CAAA;gBAEvB,IAAI;oBACF,MAAM,QAAQ,GAAG,MAAM,SAAS,EAAE,CAAA;oBAClC,MAAM,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAA;oBAC9C,OAAO,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAA;oBACnC,IAAA,YAAK,EAAC,gBAAgB,QAAQ,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;oBACpD,IAAA,YAAK,EAAC,YAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAA;oBACtE,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;oBAChC,IAAA,YAAK,EAAC,SAAS,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAA;oBAC/C,IAAI,IAAI,CAAC,mBAAmB,CAAC,UAAU,CAAC,EAAE;wBACxC,OAAO,EAAC,QAAQ,EAAE,IAAI,EAAC,CAAA;qBACxB;oBACD,WAAW,GAAG,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,CAAA;oBACxD,YAAY,GAAG,oBAAoB,UAAU,KAAK,QAAQ,CAAC,OAAO,CAAC,aAAa,EAAE,CAAA;oBAClF,IAAI,IAAI,CAAC,GAAG,EAAE;wBACZ,IAAI,mBAAU,CAAC,mBAAmB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;4BAC5C,MAAM,IAAI,mBAAU,EAAE,CAAA;yBACvB;wBAED,YAAY,GAAG,GAAG,YAAY,KAAK,IAAI,CAAC,GAAG,EAAE,CAAA;qBAC9C;iBACF;gBAAC,OAAO,KAAK,EAAE;oBACd,IAAI,KAAK,YAAY,WAAW,EAAE;wBAChC,IAAA,YAAK,EAAC,aAAa,OAAO,EAAE,CAAC,CAAA;qBAC9B;oBAED,IAAI,KAAK,YAAY,mBAAU,EAAE;wBAC/B,MAAM,KAAK,CAAA;qBACZ;oBAED,IAAI,qBAAY,CAAC,kBAAkB,CAAC,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,IAAI,CAAC,EAAE;wBAChD,MAAM,IAAI,qBAAY,CAAC,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,IAAI,CAAC,CAAA;qBACpC;oBAED,WAAW,GAAG,IAAI,CAAA;oBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;iBAC7B;gBAED,IAAI,CAAC,WAAW,EAAE;oBAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,YAAY,EAAE,CAAC,CAAA;iBACjE;gBAED,IAAI,OAAO,GAAG,CAAC,KAAK,IAAI,CAAC,WAAW,EAAE;oBACpC,MAAM,IAAI,KAAK,CACb,gCAAgC,IAAI,CAAC,WAAW,cAAc,YAAY,EAAE,CAC7E,CAAA;iBACF;gBAED,MAAM,qBAAqB,GACzB,IAAI,CAAC,mCAAmC,CAAC,OAAO,CAAC,CAAA;gBACnD,IAAA,WAAI,EACF,WAAW,OAAO,GAAG,CAAC,OACpB,IAAI,CAAC,WACP,uBAAuB,YAAY,yBAAyB,qBAAqB,QAAQ,CAC1F,CAAA;gBACD,MAAM,IAAI,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAA;gBACvC,OAAO,EAAE,CAAA;aACV;YAED,MAAM,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAA;QACnC,CAAC;KAAA;IAED,mBAAmB,CAAC,UAAmB;QACrC,IAAI,CAAC,UAAU;YAAE,OAAO,KAAK,CAAA;QAC7B,OAAO,UAAU,IAAI,GAAG,IAAI,UAAU,GAAG,
GAAG,CAAA;IAC9C,CAAC;IAED,yBAAyB,CAAC,UAAmB;QAC3C,IAAI,CAAC,UAAU;YAAE,OAAO,KAAK,CAAA;QAE7B,MAAM,oBAAoB,GAAG;YAC3B,uBAAS,CAAC,UAAU;YACpB,uBAAS,CAAC,cAAc;YACxB,uBAAS,CAAC,mBAAmB;YAC7B,uBAAS,CAAC,kBAAkB;YAC5B,uBAAS,CAAC,eAAe;SAC1B,CAAA;QAED,OAAO,oBAAoB,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;IAClD,CAAC;IAEK,KAAK,CAAC,YAAoB;;YAC9B,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAA;QAClE,CAAC;KAAA;IAED,mCAAmC,CAAC,OAAe;QACjD,IAAI,OAAO,GAAG,CAAC,EAAE;YACf,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAA;SACxD;QAED,IAAI,OAAO,KAAK,CAAC,EAAE;YACjB,OAAO,IAAI,CAAC,6BAA6B,CAAA;SAC1C;QAED,MAAM,OAAO,GACX,IAAI,CAAC,6BAA6B,GAAG,SAAA,IAAI,CAAC,eAAe,EAAI,OAAO,CAAA,CAAA;QACtE,MAAM,OAAO,GAAG,OAAO,GAAG,IAAI,CAAC,eAAe,CAAA;QAE9C,kEAAkE;QAClE,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,GAAG,OAAO,CAAC,CAAA;IAClE,CAAC;CACF;AAED,SAAgB,wBAAwB,CAAC,OAIxC;IACC,MAAM,MAAM,GAAG,IAAI,kBAAkB,CACnC,IAAA,+BAAkB,GAAE,EACpB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EACpB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe,EACxB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe,CACzB,CAAA;IACD,OAAO,IAAI,oCAAsB,CAAC,MAAM,CAAC,CAAA;AAC3C,CAAC;AAZD,4DAYC"}

View file

@ -0,0 +1,22 @@
export declare class FilesNotFoundError extends Error {
files: string[];
constructor(files?: string[]);
}
export declare class InvalidResponseError extends Error {
constructor(message: string);
}
export declare class CacheNotFoundError extends Error {
constructor(message?: string);
}
export declare class GHESNotSupportedError extends Error {
constructor(message?: string);
}
export declare class NetworkError extends Error {
code: string;
constructor(code: string);
static isNetworkErrorCode: (code?: string) => boolean;
}
export declare class UsageError extends Error {
constructor();
static isUsageErrorMessage: (msg?: string) => boolean;
}
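
A minimal consumer-side sketch of telling these error classes apart when a cache call fails; the import paths and the surrounding workflow code are assumptions for illustration, not part of this package's documented surface.

import {saveCache} from '@actions/cache' // entry point assumed
import {NetworkError, UsageError} from '@actions/cache/lib/internal/shared/errors' // path assumed

async function trySaveCache(paths: string[], key: string): Promise<void> {
  try {
    await saveCache(paths, key)
  } catch (error) {
    if (error instanceof NetworkError) {
      // error.code carries the socket-level code, e.g. 'ECONNRESET'
      console.warn(`Transient network failure (${error.code}); cache not saved`)
    } else if (error instanceof UsageError) {
      console.warn('Cache storage quota reached; skipping save')
    } else {
      throw error
    }
  }
}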

View file

@ -0,0 +1,70 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.UsageError = exports.NetworkError = exports.GHESNotSupportedError = exports.CacheNotFoundError = exports.InvalidResponseError = exports.FilesNotFoundError = void 0;
class FilesNotFoundError extends Error {
constructor(files = []) {
let message = 'No files were found to upload';
if (files.length > 0) {
message += `: ${files.join(', ')}`;
}
super(message);
this.files = files;
this.name = 'FilesNotFoundError';
}
}
exports.FilesNotFoundError = FilesNotFoundError;
class InvalidResponseError extends Error {
constructor(message) {
super(message);
this.name = 'InvalidResponseError';
}
}
exports.InvalidResponseError = InvalidResponseError;
class CacheNotFoundError extends Error {
constructor(message = 'Cache not found') {
super(message);
this.name = 'CacheNotFoundError';
}
}
exports.CacheNotFoundError = CacheNotFoundError;
class GHESNotSupportedError extends Error {
constructor(message = '@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.') {
super(message);
this.name = 'GHESNotSupportedError';
}
}
exports.GHESNotSupportedError = GHESNotSupportedError;
class NetworkError extends Error {
constructor(code) {
const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`;
super(message);
this.code = code;
this.name = 'NetworkError';
}
}
exports.NetworkError = NetworkError;
NetworkError.isNetworkErrorCode = (code) => {
if (!code)
return false;
return [
'ECONNRESET',
'ENOTFOUND',
'ETIMEDOUT',
'ECONNREFUSED',
'EHOSTUNREACH'
].includes(code);
};
class UsageError extends Error {
constructor() {
const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`;
super(message);
this.name = 'UsageError';
}
}
exports.UsageError = UsageError;
UsageError.isUsageErrorMessage = (msg) => {
if (!msg)
return false;
return msg.includes('insufficient usage');
};
//# sourceMappingURL=errors.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"errors.js","sourceRoot":"","sources":["../../../src/internal/shared/errors.ts"],"names":[],"mappings":";;;AAAA,MAAa,kBAAmB,SAAQ,KAAK;IAG3C,YAAY,QAAkB,EAAE;QAC9B,IAAI,OAAO,GAAG,+BAA+B,CAAA;QAC7C,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;YACpB,OAAO,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAA;SACnC;QAED,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;QAClB,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAA;IAClC,CAAC;CACF;AAbD,gDAaC;AAED,MAAa,oBAAqB,SAAQ,KAAK;IAC7C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,sBAAsB,CAAA;IACpC,CAAC;CACF;AALD,oDAKC;AAED,MAAa,kBAAmB,SAAQ,KAAK;IAC3C,YAAY,OAAO,GAAG,iBAAiB;QACrC,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAA;IAClC,CAAC;CACF;AALD,gDAKC;AAED,MAAa,qBAAsB,SAAQ,KAAK;IAC9C,YACE,OAAO,GAAG,mHAAmH;QAE7H,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,uBAAuB,CAAA;IACrC,CAAC;CACF;AAPD,sDAOC;AAED,MAAa,YAAa,SAAQ,KAAK;IAGrC,YAAY,IAAY;QACtB,MAAM,OAAO,GAAG,2BAA2B,IAAI,kRAAkR,CAAA;QACjU,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,GAAG,cAAc,CAAA;IAC5B,CAAC;;AARH,oCAoBC;AAVQ,+BAAkB,GAAG,CAAC,IAAa,EAAW,EAAE;IACrD,IAAI,CAAC,IAAI;QAAE,OAAO,KAAK,CAAA;IACvB,OAAO;QACL,YAAY;QACZ,WAAW;QACX,WAAW;QACX,cAAc;QACd,cAAc;KACf,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAA;AAClB,CAAC,CAAA;AAGH,MAAa,UAAW,SAAQ,KAAK;IACnC;QACE,MAAM,OAAO,GAAG,iSAAiS,CAAA;QACjT,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,YAAY,CAAA;IAC1B,CAAC;;AALH,gCAWC;AAJQ,8BAAmB,GAAG,CAAC,GAAY,EAAW,EAAE;IACrD,IAAI,CAAC,GAAG;QAAE,OAAO,KAAK,CAAA;IACtB,OAAO,GAAG,CAAC,QAAQ,CAAC,oBAAoB,CAAC,CAAA;AAC3C,CAAC,CAAA"}

View file

@ -0,0 +1,4 @@
/**
* Ensure that this User-Agent string is used in all HTTP calls so that telemetry can be attributed to the correct version of this package
*/
export declare function getUserAgentString(): string;
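
A short sketch of the intended use, assuming '@actions/http-client' as the HTTP layer (an assumption; any client that accepts a User-Agent string works the same way):

import {HttpClient} from '@actions/http-client' // assumed HTTP layer
import {getUserAgentString} from './user-agent'

// Every request made through this client then reports e.g. '@actions/cache-4.0.0',
// so server-side telemetry can be broken down by package version.
const client = new HttpClient(getUserAgentString())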

View file

@ -0,0 +1,13 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getUserAgentString = void 0;
// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports
const packageJson = require('../../../package.json');
/**
* Ensure that this User-Agent string is used in all HTTP calls so that telemetry can be attributed to the correct version of this package
*/
function getUserAgentString() {
return `@actions/cache-${packageJson.version}`;
}
exports.getUserAgentString = getUserAgentString;
//# sourceMappingURL=user-agent.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"user-agent.js","sourceRoot":"","sources":["../../../src/internal/shared/user-agent.ts"],"names":[],"mappings":";;;AAAA,qGAAqG;AACrG,MAAM,WAAW,GAAG,OAAO,CAAC,uBAAuB,CAAC,CAAA;AAEpD;;GAEG;AACH,SAAgB,kBAAkB;IAChC,OAAO,kBAAkB,WAAW,CAAC,OAAO,EAAE,CAAA;AAChD,CAAC;AAFD,gDAEC"}

View file

@ -0,0 +1,4 @@
import { CompressionMethod } from './constants';
export declare function listTar(archivePath: string, compressionMethod: CompressionMethod): Promise<void>;
export declare function extractTar(archivePath: string, compressionMethod: CompressionMethod): Promise<void>;
export declare function createTar(archiveFolder: string, sourceDirectories: string[], compressionMethod: CompressionMethod): Promise<void>;
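
A hedged round-trip sketch of these three functions; the archive folder, the cached path, and the 'cache.tzst' file name (what the zstd compression method is expected to produce) are illustrative assumptions.

import {createTar, extractTar, listTar} from './tar'
import {CompressionMethod} from './constants'

async function roundTrip(): Promise<void> {
  const method = CompressionMethod.Zstd
  // createTar writes the source list to manifest.txt inside the archive
  // folder, then shells out to tar plus the compression program.
  await createTar('/tmp/cache-work', ['/home/runner/work/repo/node_modules'], method)
  await listTar('/tmp/cache-work/cache.tzst', method) // prints the archive entries
  await extractTar('/tmp/cache-work/cache.tzst', method) // restores under GITHUB_WORKSPACE
}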

View file

@ -0,0 +1,272 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createTar = exports.extractTar = exports.listTar = void 0;
const exec_1 = require("@actions/exec");
const io = __importStar(require("@actions/io"));
const fs_1 = require("fs");
const path = __importStar(require("path"));
const utils = __importStar(require("./cacheUtils"));
const constants_1 = require("./constants");
const IS_WINDOWS = process.platform === 'win32';
// Returns tar path and type: BSD or GNU
function getTarPath() {
return __awaiter(this, void 0, void 0, function* () {
switch (process.platform) {
case 'win32': {
const gnuTar = yield utils.getGnuTarPathOnWindows();
const systemTar = constants_1.SystemTarPathOnWindows;
if (gnuTar) {
// Use GNU tar as the default on Windows
return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
}
else if ((0, fs_1.existsSync)(systemTar)) {
return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
}
break;
}
case 'darwin': {
const gnuTar = yield io.which('gtar', false);
if (gnuTar) {
// fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
}
else {
return {
path: yield io.which('tar', true),
type: constants_1.ArchiveToolType.BSD
};
}
}
default:
break;
}
// Default assumption is that GNU tar is present in the PATH
return {
path: yield io.which('tar', true),
type: constants_1.ArchiveToolType.GNU
};
});
}
// Returns the tar arguments for the given tar path, compression method, operation type and OS
function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
return __awaiter(this, void 0, void 0, function* () {
const args = [`"${tarPath.path}"`];
const cacheFileName = utils.getCacheFileName(compressionMethod);
const tarFile = 'cache.tar';
const workingDirectory = getWorkingDirectory();
// Specific args for BSD tar on Windows as a workaround
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
// Method specific args
switch (type) {
case 'create':
args.push('--posix', '-cf', BSD_TAR_ZSTD
? tarFile
: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
? tarFile
: cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
break;
case 'extract':
args.push('-xf', BSD_TAR_ZSTD
? tarFile
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
break;
case 'list':
args.push('-tf', BSD_TAR_ZSTD
? tarFile
: archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
break;
}
// Platform specific args
if (tarPath.type === constants_1.ArchiveToolType.GNU) {
switch (process.platform) {
case 'win32':
args.push('--force-local');
break;
case 'darwin':
args.push('--delay-directory-restore');
break;
}
}
return args;
});
}
// Returns commands to run tar and compression program
function getCommands(compressionMethod, type, archivePath = '') {
return __awaiter(this, void 0, void 0, function* () {
let args;
const tarPath = yield getTarPath();
const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
const compressionArgs = type !== 'create'
? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
: yield getCompressionProgram(tarPath, compressionMethod);
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
if (BSD_TAR_ZSTD && type !== 'create') {
args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')];
}
else {
args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')];
}
if (BSD_TAR_ZSTD) {
return args;
}
return [args.join(' ')];
});
}
function getWorkingDirectory() {
var _a;
return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
}
// Common function for extractTar and listTar to get the decompression program
function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
return __awaiter(this, void 0, void 0, function* () {
// -d: Decompress.
// unzstd is equivalent to 'zstd -d'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return BSD_TAR_ZSTD
? [
'zstd -d --long=30 --force -o',
constants_1.TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
: [
'--use-compress-program',
IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
];
case constants_1.CompressionMethod.ZstdWithoutLong:
return BSD_TAR_ZSTD
? [
'zstd -d --force -o',
constants_1.TarFilename,
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
]
: ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
default:
return ['-z'];
}
});
}
// Used for creating the archive
// -T#: Compress using # working threads. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
// Long range mode was added to zstd in the v1.3.2 release, so we do not use --long with older versions of zstd.
function getCompressionProgram(tarPath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
const cacheFileName = utils.getCacheFileName(compressionMethod);
const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
compressionMethod !== constants_1.CompressionMethod.Gzip &&
IS_WINDOWS;
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return BSD_TAR_ZSTD
? [
'zstd -T0 --long=30 --force -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
constants_1.TarFilename
]
: [
'--use-compress-program',
IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
];
case constants_1.CompressionMethod.ZstdWithoutLong:
return BSD_TAR_ZSTD
? [
'zstd -T0 --force -o',
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
constants_1.TarFilename
]
: ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
default:
return ['-z'];
}
});
}
// Executes all commands as separate processes
function execCommands(commands, cwd) {
return __awaiter(this, void 0, void 0, function* () {
for (const command of commands) {
try {
yield (0, exec_1.exec)(command, undefined, {
cwd,
env: Object.assign(Object.assign({}, process.env), { MSYS: 'winsymlinks:nativestrict' })
});
}
catch (error) {
throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
}
}
});
}
// List the contents of a tar
function listTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
const commands = yield getCommands(compressionMethod, 'list', archivePath);
yield execCommands(commands);
});
}
exports.listTar = listTar;
// Extract a tar
function extractTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory();
yield io.mkdirP(workingDirectory);
const commands = yield getCommands(compressionMethod, 'extract', archivePath);
yield execCommands(commands);
});
}
exports.extractTar = extractTar;
// Create a tar
function createTar(archiveFolder, sourceDirectories, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
// Write source directories to manifest.txt to avoid command length limits
(0, fs_1.writeFileSync)(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
const commands = yield getCommands(compressionMethod, 'create');
yield execCommands(commands, archiveFolder);
});
}
exports.createTar = createTar;
//# sourceMappingURL=tar.js.map
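
To make the BSD-tar-on-Windows branch above concrete: with zstd and BSD tar, getCommands returns two separate command strings instead of a single pipeline, tar first for 'create' and the decompressor first for 'extract'. Roughly, with illustrative tool paths and directories:

// 'create' on Windows with BSD tar + zstd: archive first, compress second.
const createCommands = [
  '"tar" --posix -cf cache.tar --exclude cache.tar -P -C D:/a/repo --files-from manifest.txt',
  'zstd -T0 --long=30 --force -o cache.tzst cache.tar'
]

// 'extract' runs the two steps in the opposite order: decompress, then untar.
const extractCommands = [
  'zstd -d --long=30 --force -o cache.tar D:/a/_temp/cache.tzst',
  '"tar" -xf cache.tar -P -C D:/a/repo'
]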

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,60 @@
import { BlobUploadCommonResponse } from '@azure/storage-blob';
import { TransferProgressEvent } from '@azure/ms-rest-js';
import { UploadOptions } from '../options';
/**
* Class for tracking the upload state and displaying stats.
*/
export declare class UploadProgress {
contentLength: number;
sentBytes: number;
startTime: number;
displayedComplete: boolean;
timeoutHandle?: ReturnType<typeof setTimeout>;
constructor(contentLength: number);
/**
* Sets the number of bytes sent
*
* @param sentBytes the number of bytes sent
*/
setSentBytes(sentBytes: number): void;
/**
* Returns the total number of bytes transferred.
*/
getTransferredBytes(): number;
/**
* Returns true if the upload is complete.
*/
isDone(): boolean;
/**
* Prints the current upload stats. Once the upload completes, this will print one
* last line and then stop.
*/
display(): void;
/**
* Returns a function used to handle TransferProgressEvents.
*/
onProgress(): (progress: TransferProgressEvent) => void;
/**
* Starts the timer that displays the stats.
*
* @param delayInMs the delay between each write
*/
startDisplayTimer(delayInMs?: number): void;
/**
* Stops the timer that displays the stats. As this typically indicates the upload
* is complete, this will display one last line, unless the last line has already
* been written.
*/
stopDisplayTimer(): void;
}
/**
* Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
* This function will display progress information to the console. Concurrency of the
* upload is determined by the calling functions.
*
* @param signedUploadURL the SAS-signed URL of the destination blob
* @param archivePath path to the cache archive on disk
* @param options optional upload options (chunk size, concurrency, archive size)
* @returns the BlobUploadCommonResponse returned by the Azure SDK
*/
export declare function uploadCacheArchiveSDK(signedUploadURL: string, archivePath: string, options?: UploadOptions): Promise<BlobUploadCommonResponse>;
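
A usage sketch under stated assumptions: the signed URL comes from the cache service elsewhere, the archive already exists on disk, and the option values simply mirror the package defaults.

import {statSync} from 'fs'
import {uploadCacheArchiveSDK} from './uploadUtils'

async function uploadArchive(signedUploadURL: string, archivePath: string): Promise<void> {
  const response = await uploadCacheArchiveSDK(signedUploadURL, archivePath, {
    archiveSizeBytes: statSync(archivePath).size, // drives the progress display
    uploadChunkSize: 32 * 1024 * 1024, // 32 MiB blocks
    uploadConcurrency: 4
  })
  console.log(`Upload completed with status ${response._response.status}`)
}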

View file

@ -0,0 +1,167 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.uploadCacheArchiveSDK = exports.UploadProgress = void 0;
const core = __importStar(require("@actions/core"));
const storage_blob_1 = require("@azure/storage-blob");
const errors_1 = require("./shared/errors");
/**
* Class for tracking the upload state and displaying stats.
*/
class UploadProgress {
constructor(contentLength) {
this.contentLength = contentLength;
this.sentBytes = 0;
this.displayedComplete = false;
this.startTime = Date.now();
}
/**
* Sets the number of bytes sent
*
* @param sentBytes the number of bytes sent
*/
setSentBytes(sentBytes) {
this.sentBytes = sentBytes;
}
/**
* Returns the total number of bytes transferred.
*/
getTransferredBytes() {
return this.sentBytes;
}
/**
* Returns true if the upload is complete.
*/
isDone() {
return this.getTransferredBytes() === this.contentLength;
}
/**
* Prints the current upload stats. Once the upload completes, this will print one
* last line and then stop.
*/
display() {
if (this.displayedComplete) {
return;
}
const transferredBytes = this.sentBytes;
const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
const elapsedTime = Date.now() - this.startTime;
const uploadSpeed = (transferredBytes /
(1024 * 1024) /
(elapsedTime / 1000)).toFixed(1);
core.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`);
if (this.isDone()) {
this.displayedComplete = true;
}
}
/**
* Returns a function used to handle TransferProgressEvents.
*/
onProgress() {
return (progress) => {
this.setSentBytes(progress.loadedBytes);
};
}
/**
* Starts the timer that displays the stats.
*
* @param delayInMs the delay between each write
*/
startDisplayTimer(delayInMs = 1000) {
const displayCallback = () => {
this.display();
if (!this.isDone()) {
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
};
this.timeoutHandle = setTimeout(displayCallback, delayInMs);
}
/**
* Stops the timer that displays the stats. As this typically indicates the upload
* is complete, this will display one last line, unless the last line has already
* been written.
*/
stopDisplayTimer() {
if (this.timeoutHandle) {
clearTimeout(this.timeoutHandle);
this.timeoutHandle = undefined;
}
this.display();
}
}
exports.UploadProgress = UploadProgress;
/**
* Uploads a cache archive directly to Azure Blob Storage using the Azure SDK.
* This function will display progress information to the console. Concurrency of the
* upload is determined by the calling functions.
*
* @param signedUploadURL the SAS-signed URL of the destination blob
* @param archivePath path to the cache archive on disk
* @param options optional upload options (chunk size, concurrency, archive size)
* @returns the BlobUploadCommonResponse returned by the Azure SDK
*/
function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const blobClient = new storage_blob_1.BlobClient(signedUploadURL);
const blockBlobClient = blobClient.getBlockBlobClient();
const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0);
// Specify data transfer options
const uploadOptions = {
blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize,
concurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency,
maxSingleShotSize: 128 * 1024 * 1024,
onProgress: uploadProgress.onProgress()
};
try {
uploadProgress.startDisplayTimer();
core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`);
const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions);
// TODO: better management of non-retryable errors
if (response._response.status >= 400) {
throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`);
}
return response;
}
catch (error) {
core.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`);
throw error;
}
finally {
uploadProgress.stopDisplayTimer();
}
});
}
exports.uploadCacheArchiveSDK = uploadCacheArchiveSDK;
//# sourceMappingURL=uploadUtils.js.map

View file

@ -0,0 +1 @@
{"version":3,"file":"uploadUtils.js","sourceRoot":"","sources":["../../src/internal/uploadUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,sDAK4B;AAE5B,4CAAoD;AAGpD;;GAEG;AACH,MAAa,cAAc;IAOzB,YAAY,aAAqB;QAC/B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAA;QAClC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAA;QAClB,IAAI,CAAC,iBAAiB,GAAG,KAAK,CAAA;QAC9B,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;IAC7B,CAAC;IAED;;;;OAIG;IACH,YAAY,CAAC,SAAiB;QAC5B,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;IAC5B,CAAC;IAED;;OAEG;IACH,mBAAmB;QACjB,OAAO,IAAI,CAAC,SAAS,CAAA;IACvB,CAAC;IAED;;OAEG;IACH,MAAM;QACJ,OAAO,IAAI,CAAC,mBAAmB,EAAE,KAAK,IAAI,CAAC,aAAa,CAAA;IAC1D,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,OAAM;SACP;QAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,SAAS,CAAA;QACvC,MAAM,UAAU,GAAG,CAAC,GAAG,GAAG,CAAC,gBAAgB,GAAG,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,OAAO,CACxE,CAAC,CACF,CAAA;QACD,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,SAAS,CAAA;QAC/C,MAAM,WAAW,GAAG,CAClB,gBAAgB;YAChB,CAAC,IAAI,GAAG,IAAI,CAAC;YACb,CAAC,WAAW,GAAG,IAAI,CAAC,CACrB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;QAEZ,IAAI,CAAC,IAAI,CACP,QAAQ,gBAAgB,OAAO,IAAI,CAAC,aAAa,KAAK,UAAU,OAAO,WAAW,UAAU,CAC7F,CAAA;QAED,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE;YACjB,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAA;SAC9B;IACH,CAAC;IAED;;OAEG;IACH,UAAU;QACR,OAAO,CAAC,QAA+B,EAAE,EAAE;YACzC,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAA;QACzC,CAAC,CAAA;IACH,CAAC;IAED;;;;OAIG;IACH,iBAAiB,CAAC,SAAS,GAAG,IAAI;QAChC,MAAM,eAAe,GAAG,GAAS,EAAE;YACjC,IAAI,CAAC,OAAO,EAAE,CAAA;YAEd,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;gBAClB,IAAI,CAAC,aAAa,GAAG,UAAU,CAAC,eAAe,EAAE,SAAS,CAAC,CAAA;aAC5D;QACH,CAAC,CAAA;QAED,IAAI,CAAC,aAAa,GAAG,UAAU,CAAC,eAAe,EAAE,SAAS,CAAC,CAAA;IAC7D,CAAC;IAED;;;;OAIG;IACH,gBAAgB;QACd,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,YAAY,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;YAChC,IAAI,CAAC,aAAa,GAAG,SAAS,CAAA;SAC/B;QAED,IAAI,CAAC,OAAO,EAAE,CAAA;IAChB,CAAC;CACF;AAzGD,wCAyGC;AAED;;;;;;;;;GASG;AACH,SAAsB,qBAAqB,CACzC,eAAuB,EACvB,WAAmB,EACnB,OAAuB;;;QAEvB,MAAM,UAAU,GAAe,IAAI,yBAAU,CAAC,eAAe,CAAC,CAAA;QAC9D,MAAM,eAAe,GAAoB,UAAU,CAAC,kBAAkB,EAAE,CAAA;QACxE,MAAM,cAAc,GAAG,IAAI,cAAc,CAAC,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,mCAAI,CAAC,CAAC,CAAA;QAEzE,gCAAgC;QAChC,MAAM,aAAa,GAAmC;YACpD,SAAS,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,eAAe;YACnC,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,iBAAiB;YACvC,iBAAiB,EAAE,GAAG,GAAG,IAAI,GAAG,IAAI;YACpC,UAAU,EAAE,cAAc,CAAC,UAAU,EAAE;SACxC,CAAA;QAED,IAAI;YACF,cAAc,CAAC,iBAAiB,EAAE,CAAA;YAElC,IAAI,CAAC,KAAK,CACR,eAAe,UAAU,CAAC,IAAI,IAAI,UAAU,CAAC,WAAW,IAAI,UAAU,CAAC,aAAa,EAAE,CACvF,CAAA;YAED,MAAM,QAAQ,GAAG,MAAM,eAAe,CAAC,UAAU,CAC/C,WAAW,EACX,aAAa,CACd,CAAA;YAED,kDAAkD;YAClD,IAAI,QAAQ,CAAC,SAAS,CAAC,MAAM,IAAI,GAAG,EAAE;gBACpC,MAAM,IAAI,6BAAoB,CAC5B,yDAAyD,QAAQ,CAAC,SAAS,CAAC,MAAM,EAAE,CACrF,CAAA;aACF;YAED,OAAO,QAAQ,CAAA;SAChB;QAAC,OAAO,KAAK,EAAE;YACd,IAAI,CAAC,OAAO,CACV,kEAAkE,KAAK,CAAC,OAAO,EAAE,CAClF,CAAA;YACD,MAAM,KAAK,CAAA;SACZ;gBAAS;YACR,cAAc,CAAC,gBAAgB,EAAE,CAAA;SAClC;;CACF;AA7CD,sDA6CC"}

View file

@ -0,0 +1,87 @@
/**
* Options to control cache upload
*/
export interface UploadOptions {
/**
* Indicates whether to use the Azure Blob SDK to upload caches
* to Azure Blob Storage to improve reliability and
* performance
*
* @default false
*/
useAzureSdk?: boolean;
/**
* Number of parallel cache uploads
*
* @default 4
*/
uploadConcurrency?: number;
/**
* Maximum chunk size in bytes for cache upload
*
* @default 32MB
*/
uploadChunkSize?: number;
/**
* Archive size in bytes
*/
archiveSizeBytes?: number;
}
/**
* Options to control cache download
*/
export interface DownloadOptions {
/**
* Indicates whether to use the Azure Blob SDK to download caches
* that are stored on Azure Blob Storage to improve reliability and
* performance
*
* @default false
*/
useAzureSdk?: boolean;
/**
* Number of parallel downloads (this option only applies when using
* the Azure SDK)
*
* @default 8
*/
downloadConcurrency?: number;
/**
* Indicates whether to use Actions HttpClient with concurrency
* for Azure Blob Storage
*/
concurrentBlobDownloads?: boolean;
/**
* Maximum time for each download request, in milliseconds (this
* option only applies when using the Azure SDK)
*
* @default 30000
*/
timeoutInMs?: number;
/**
* Time after which a segment download should be aborted if stuck
*
* @default 600000
*/
segmentTimeoutInMs?: number;
/**
* Whether to skip downloading the cache entry.
* If lookupOnly is set to true, the restore function will only check if
* a matching cache entry exists and return the cache key if it does.
*
* @default false
*/
lookupOnly?: boolean;
}
/**
* Returns a copy of the upload options with defaults filled in.
*
* @param copy the original upload options
*/
export declare function getUploadOptions(copy?: UploadOptions): UploadOptions;
/**
* Returns a copy of the download options with defaults filled in.
*
* @param copy the original download options
*/
export declare function getDownloadOptions(copy?: DownloadOptions): DownloadOptions;
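
A consumer-side sketch of the lookupOnly flag, which probes for a matching entry without downloading it; restoreCache is declared at the package root, and the paths and key here are assumptions.

import {restoreCache} from '@actions/cache' // entry point assumed

async function cacheExists(key: string): Promise<boolean> {
  // With lookupOnly set, restoreCache only checks for a hit and returns the
  // matched key; nothing is downloaded or unpacked.
  const hit = await restoreCache(['node_modules'], key, [], {lookupOnly: true})
  return hit !== undefined
}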

View file

@ -0,0 +1,117 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getDownloadOptions = exports.getUploadOptions = void 0;
const core = __importStar(require("@actions/core"));
/**
* Returns a copy of the upload options with defaults filled in.
*
* @param copy the original upload options
*/
function getUploadOptions(copy) {
// Defaults if not overridden
const result = {
useAzureSdk: false,
uploadConcurrency: 4,
uploadChunkSize: 32 * 1024 * 1024
};
if (copy) {
if (typeof copy.useAzureSdk === 'boolean') {
result.useAzureSdk = copy.useAzureSdk;
}
if (typeof copy.uploadConcurrency === 'number') {
result.uploadConcurrency = copy.uploadConcurrency;
}
if (typeof copy.uploadChunkSize === 'number') {
result.uploadChunkSize = copy.uploadChunkSize;
}
}
/**
* Add env var overrides
*/
// Cap the uploadConcurrency at 32
result.uploadConcurrency = !isNaN(Number(process.env['CACHE_UPLOAD_CONCURRENCY']))
? Math.min(32, Number(process.env['CACHE_UPLOAD_CONCURRENCY']))
: result.uploadConcurrency;
// Cap the uploadChunkSize at 128MiB
result.uploadChunkSize = !isNaN(Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']))
? Math.min(128 * 1024 * 1024, Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']) * 1024 * 1024)
: result.uploadChunkSize;
core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
core.debug(`Upload concurrency: ${result.uploadConcurrency}`);
core.debug(`Upload chunk size: ${result.uploadChunkSize}`);
return result;
}
exports.getUploadOptions = getUploadOptions;
/**
* Returns a copy of the download options with defaults filled in.
*
* @param copy the original download options
*/
function getDownloadOptions(copy) {
const result = {
useAzureSdk: false,
concurrentBlobDownloads: true,
downloadConcurrency: 8,
timeoutInMs: 30000,
segmentTimeoutInMs: 600000,
lookupOnly: false
};
if (copy) {
if (typeof copy.useAzureSdk === 'boolean') {
result.useAzureSdk = copy.useAzureSdk;
}
if (typeof copy.concurrentBlobDownloads === 'boolean') {
result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
}
if (typeof copy.downloadConcurrency === 'number') {
result.downloadConcurrency = copy.downloadConcurrency;
}
if (typeof copy.timeoutInMs === 'number') {
result.timeoutInMs = copy.timeoutInMs;
}
if (typeof copy.segmentTimeoutInMs === 'number') {
result.segmentTimeoutInMs = copy.segmentTimeoutInMs;
}
if (typeof copy.lookupOnly === 'boolean') {
result.lookupOnly = copy.lookupOnly;
}
}
const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'];
if (segmentDownloadTimeoutMins &&
!isNaN(Number(segmentDownloadTimeoutMins)) &&
isFinite(Number(segmentDownloadTimeoutMins))) {
result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000;
}
core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
core.debug(`Download concurrency: ${result.downloadConcurrency}`);
core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`);
core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
core.debug(`Lookup only: ${result.lookupOnly}`);
return result;
}
exports.getDownloadOptions = getDownloadOptions;
//# sourceMappingURL=options.js.map
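
The env-var overrides above are easy to misread on units, so a small sketch: CACHE_UPLOAD_CHUNK_SIZE is read as MiB (it is multiplied by 1024 * 1024) and capped at 128 MiB, CACHE_UPLOAD_CONCURRENCY is capped at 32, and SEGMENT_DOWNLOAD_TIMEOUT_MINS is in minutes.

import {getUploadOptions, getDownloadOptions} from './options'

process.env['CACHE_UPLOAD_CONCURRENCY'] = '64'
process.env['CACHE_UPLOAD_CHUNK_SIZE'] = '64'
process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'] = '10'

const up = getUploadOptions()
// up.uploadConcurrency === 32 (capped), up.uploadChunkSize === 67108864 (64 MiB)

const down = getDownloadOptions()
// down.segmentTimeoutInMs === 600000 (10 minutes * 60 * 1000)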

View file

@ -0,0 +1 @@
{"version":3,"file":"options.js","sourceRoot":"","sources":["../src/options.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AAoFrC;;;;GAIG;AACH,SAAgB,gBAAgB,CAAC,IAAoB;IACnD,4BAA4B;IAC5B,MAAM,MAAM,GAAkB;QAC5B,WAAW,EAAE,KAAK;QAClB,iBAAiB,EAAE,CAAC;QACpB,eAAe,EAAE,EAAE,GAAG,IAAI,GAAG,IAAI;KAClC,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE;YACzC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,iBAAiB,KAAK,QAAQ,EAAE;YAC9C,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAA;SAClD;QAED,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,QAAQ,EAAE;YAC5C,MAAM,CAAC,eAAe,GAAG,IAAI,CAAC,eAAe,CAAA;SAC9C;KACF;IAED;;OAEG;IACH,kCAAkC;IAClC,MAAM,CAAC,iBAAiB,GAAG,CAAC,KAAK,CAC/B,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,0BAA0B,CAAC,CAAC,CAChD;QACC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,EAAE,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,0BAA0B,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,MAAM,CAAC,iBAAiB,CAAA;IAC5B,oCAAoC;IACpC,MAAM,CAAC,eAAe,GAAG,CAAC,KAAK,CAC7B,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,yBAAyB,CAAC,CAAC,CAC/C;QACC,CAAC,CAAC,IAAI,CAAC,GAAG,CACN,GAAG,GAAG,IAAI,GAAG,IAAI,EACjB,MAAM,CAAC,OAAO,CAAC,GAAG,CAAC,yBAAyB,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAC7D;QACH,CAAC,CAAC,MAAM,CAAC,eAAe,CAAA;IAE1B,IAAI,CAAC,KAAK,CAAC,kBAAkB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,CAAC,uBAAuB,MAAM,CAAC,iBAAiB,EAAE,CAAC,CAAA;IAC7D,IAAI,CAAC,KAAK,CAAC,sBAAsB,MAAM,CAAC,eAAe,EAAE,CAAC,CAAA;IAE1D,OAAO,MAAM,CAAA;AACf,CAAC;AA9CD,4CA8CC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,IAAsB;IACvD,MAAM,MAAM,GAAoB;QAC9B,WAAW,EAAE,KAAK;QAClB,uBAAuB,EAAE,IAAI;QAC7B,mBAAmB,EAAE,CAAC;QACtB,WAAW,EAAE,KAAK;QAClB,kBAAkB,EAAE,MAAM;QAC1B,UAAU,EAAE,KAAK;KAClB,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE;YACzC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,uBAAuB,KAAK,SAAS,EAAE;YACrD,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAA;SAC9D;QAED,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,QAAQ,EAAE;YAChD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;SACtD;QAED,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,QAAQ,EAAE;YACxC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,kBAAkB,KAAK,QAAQ,EAAE;YAC/C,MAAM,CAAC,kBAAkB,GAAG,IAAI,CAAC,kBAAkB,CAAA;SACpD;QAED,IAAI,OAAO,IAAI,CAAC,UAAU,KAAK,SAAS,EAAE;YACxC,MAAM,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA;SACpC;KACF;IACD,MAAM,0BAA0B,GAC9B,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,CAAA;IAE9C,IACE,0BAA0B;QAC1B,CAAC,KAAK,CAAC,MAAM,CAAC,0BAA0B,CAAC,CAAC;QAC1C,QAAQ,CAAC,MAAM,CAAC,0BAA0B,CAAC,CAAC,EAC5C;QACA,MAAM,CAAC,kBAAkB,GAAG,MAAM,CAAC,0BAA0B,CAAC,GAAG,EAAE,GAAG,IAAI,CAAA;KAC3E;IACD,IAAI,CAAC,KAAK,CAAC,kBAAkB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,mBAAmB,EAAE,CAAC,CAAA;IACjE,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IACzD,IAAI,CAAC,KAAK,CACR,gDAAgD,OAAO,CAAC,GAAG,CAAC,+BAA+B,CAAC,EAAE,CAC/F,CAAA;IACD,IAAI,CAAC,KAAK,CAAC,kCAAkC,MAAM,CAAC,kBAAkB,EAAE,CAAC,CAAA;IACzE,IAAI,CAAC,KAAK,CAAC,gBAAgB,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;IAE/C,OAAO,MAAM,CAAA;AACf,CAAC;AAvDD,gDAuDC"}