Slightly improve docs
parent f6987ea139
commit 3312b3ab47
@@ -62,9 +62,12 @@ sensible defaults.
 
 # Determines whether the cache should be saved.
 # If `false`, the cache is only restored.
-# Useful for jobs where the matrix is additive e.g. additional Cargo features.
+# Useful for jobs where the matrix is additive e.g. additional Cargo features,
+# or when only runs from `master` should be saved to the cache.
 # default: "true"
 save-if: ""
+# To only cache runs from `master`:
+save-if: ${{ github.ref == 'refs/heads/master' }}
 
 # Specifies what to use as the backend providing cache
 # Can be set to either "github" or "buildjet"
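As an aside, the `save-if` input documented above is consumed in src/save.ts further down in this diff; a minimal TypeScript sketch of that gating (simplified, not the full file):

import * as core from "@actions/core";

// Sketch of how `save-if` gates the post-job save step (restore is unaffected).
// Any value other than "true" (case-insensitive) skips saving the cache.
function shouldSaveCache(): boolean {
  const save = core.getInput("save-if").toLowerCase() || "true";
  return save === "true";
}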
@@ -90,7 +93,8 @@ repositories with only a `Cargo.toml` file have limited benefits, as cargo will
 _always_ use the most up-to-date dependency versions, which may not be cached.
 
 Usage with Stable Rust is most effective, as a cache is tied to the Rust version.
-Using it with Nightly Rust is less effective as it will throw away the cache every day.
+Using it with Nightly Rust is less effective as it will throw away the cache every day,
+unless a specific nightly build is being pinned.
 
 ## Cache Details
 
@@ -106,6 +110,7 @@ This cache is automatically keyed by:
 - the value of some compiler-specific environment variables (eg. RUSTFLAGS, etc), and
 - a hash of all `Cargo.lock` / `Cargo.toml` files found anywhere in the repository (if present).
 - a hash of all `rust-toolchain` / `rust-toolchain.toml` files in the root of the repository (if present).
+- a hash of all `.cargo/config.toml` files in the root of the repository (if present).
 
 An additional input `key` can be provided if the builtin keys are not sufficient.
 
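To illustrate the keying described above, here is a rough TypeScript sketch of how manifest and lockfile contents feed a single hash (condensed from the src/config.ts code later in this diff; the sha1 choice and the helper name are assumptions):

import * as glob from "@actions/glob";
import crypto from "crypto";
import fs from "fs/promises";
import * as toml from "toml";

// Simplified sketch: hash every Cargo.toml / Cargo.lock found in the repo
// into a single hex digest that becomes part of the cache key.
async function hashKeyFiles(root: string): Promise<string> {
  const hasher = crypto.createHash("sha1");
  const globber = await glob.create(`${root}/**/Cargo.toml\n${root}/**/Cargo.lock`);
  for (const file of (await globber.glob()).sort()) {
    const parsed = toml.parse(await fs.readFile(file, { encoding: "utf8" }));
    hasher.update(JSON.stringify(parsed));
  }
  return hasher.digest("hex");
}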
dist/restore/index.js
@@ -66788,8 +66788,6 @@ var external_path_ = __nccwpck_require__(1017);
 var external_path_default = /*#__PURE__*/__nccwpck_require__.n(external_path_);
 // EXTERNAL MODULE: ./node_modules/@actions/glob/lib/glob.js
 var glob = __nccwpck_require__(8090);
-// EXTERNAL MODULE: ./node_modules/toml/index.js
-var toml = __nccwpck_require__(4920);
 // EXTERNAL MODULE: external "crypto"
 var external_crypto_ = __nccwpck_require__(6113);
 var external_crypto_default = /*#__PURE__*/__nccwpck_require__.n(external_crypto_);
@@ -66799,12 +66797,14 @@ var promises_default = /*#__PURE__*/__nccwpck_require__.n(promises_);
 // EXTERNAL MODULE: external "os"
 var external_os_ = __nccwpck_require__(2037);
 var external_os_default = /*#__PURE__*/__nccwpck_require__.n(external_os_);
-// EXTERNAL MODULE: ./node_modules/@actions/buildjet-cache/lib/cache.js
-var cache = __nccwpck_require__(7551);
-// EXTERNAL MODULE: ./node_modules/@actions/cache/lib/cache.js
-var lib_cache = __nccwpck_require__(7799);
+// EXTERNAL MODULE: ./node_modules/toml/index.js
+var toml = __nccwpck_require__(4920);
 // EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js
 var exec = __nccwpck_require__(1514);
+// EXTERNAL MODULE: ./node_modules/@actions/buildjet-cache/lib/cache.js
+var lib_cache = __nccwpck_require__(7551);
+// EXTERNAL MODULE: ./node_modules/@actions/cache/lib/cache.js
+var cache_lib_cache = __nccwpck_require__(7799);
 ;// CONCATENATED MODULE: ./src/utils.ts
 
 
@@ -66846,18 +66846,16 @@ async function getCmdOutput(cmd, args = [], options = {}) {
 }
 return stdout;
 }
-function getCacheHandler() {
+function getCacheProvider() {
 const cacheProvider = lib_core.getInput("cache-provider");
-switch (cacheProvider) {
-case "github":
-lib_core.info("Using Github Cache.");
-return lib_cache;
-case "buildjet":
-lib_core.info("Using Buildjet Cache.");
-return cache;
-default:
-throw new Error("Only currently support github and buildjet caches");
+const cache = cacheProvider === "github" ? cache_lib_cache : cacheProvider === "buildjet" ? lib_cache : undefined;
+if (!cache) {
+throw new Error(`The \`cache-provider\` \`{cacheProvider}\` is not valid.`);
+}
+return {
+name: cacheProvider,
+cache: cache,
+};
 }
 
 ;// CONCATENATED MODULE: ./src/workspace.ts
@@ -67007,7 +67005,7 @@ class CacheConfig {
 const cargo_manifests = sort_and_uniq(await globFiles(`${root}/**/Cargo.toml`));
 for (const cargo_manifest of cargo_manifests) {
 try {
-const content = await promises_default().readFile(cargo_manifest, { encoding: 'utf8' });
+const content = await promises_default().readFile(cargo_manifest, { encoding: "utf8" });
 const parsed = toml.parse(content);
 if ("package" in parsed) {
 const pack = parsed.package;
@@ -67024,21 +67022,22 @@ class CacheConfig {
 for (const key of Object.keys(deps)) {
 const dep = deps[key];
 if ("path" in dep) {
-dep.version = '0.0.0';
+dep.version = "0.0.0";
 }
 }
 }
 hasher.update(JSON.stringify(parsed));
 parsedKeyFiles.push(cargo_manifest);
 }
-catch (_e) { // Fallback to caching them as regular file
+catch (_e) {
+// Fallback to caching them as regular file
 keyFiles.push(cargo_manifest);
 }
 }
 const cargo_locks = sort_and_uniq(await globFiles(`${root}/**/Cargo.lock`));
 for (const cargo_lock of cargo_locks) {
 try {
-const content = await promises_default().readFile(cargo_lock, { encoding: 'utf8' });
+const content = await promises_default().readFile(cargo_lock, { encoding: "utf8" });
 const parsed = toml.parse(content);
 if (parsed.version !== 3 || !("package" in parsed)) {
 // Fallback to caching them as regular file since this action
@@ -67054,7 +67053,8 @@ class CacheConfig {
 hasher.update(JSON.stringify(packages));
 parsedKeyFiles.push(cargo_lock);
 }
-catch (_e) { // Fallback to caching them as regular file
+catch (_e) {
+// Fallback to caching them as regular file
 keyFiles.push(cargo_lock);
 }
 }
@@ -67098,15 +67098,16 @@ class CacheConfig {
 }
 const self = new CacheConfig();
 Object.assign(self, JSON.parse(source));
-self.workspaces = self.workspaces
-.map((w) => new Workspace(w.root, w.target));
+self.workspaces = self.workspaces.map((w) => new Workspace(w.root, w.target));
 return self;
 }
 /**
 * Prints the configuration to the action log.
 */
-printInfo() {
+printInfo(cacheProvider) {
 lib_core.startGroup("Cache Configuration");
+lib_core.info(`Cache Provider:`);
+lib_core.info(` ${cacheProvider.name}`);
 lib_core.info(`Workspaces:`);
 for (const workspace of this.workspaces) {
 lib_core.info(` ${workspace.root}`);
@@ -67173,7 +67174,7 @@ async function globFiles(pattern) {
 // fs.statSync resolve the symbolic link and returns stat for the
 // file it pointed to, so isFile would make sure the resolved
 // file is actually a regular file.
-return (await globber.glob()).filter(file => external_fs_default().statSync(file).isFile());
+return (await globber.glob()).filter((file) => external_fs_default().statSync(file).isFile());
 }
 function sort_and_uniq(a) {
 return a
@@ -67475,8 +67476,8 @@ process.on("uncaughtException", (e) => {
 }
 });
 async function run() {
-const cache = getCacheHandler();
-if (!cache.isFeatureAvailable()) {
+const cacheProvider = getCacheProvider();
+if (!cacheProvider.cache.isFeatureAvailable()) {
 setCacheHitOutput(false);
 return;
 }
@@ -67488,14 +67489,14 @@ async function run() {
 lib_core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure);
 lib_core.exportVariable("CARGO_INCREMENTAL", 0);
 const config = await CacheConfig["new"]();
-config.printInfo();
+config.printInfo(cacheProvider);
 lib_core.info("");
 lib_core.info(`... Restoring cache ...`);
 const key = config.cacheKey;
 // Pass a copy of cachePaths to avoid mutating the original array as reported by:
 // https://github.com/actions/toolkit/pull/1378
 // TODO: remove this once the underlying bug is fixed.
-const restoreKey = await cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey]);
+const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey]);
 if (restoreKey) {
 const match = restoreKey === key;
 lib_core.info(`Restored from cache key "${restoreKey}" full match: ${match}.`);
dist/save/index.js
@@ -66790,8 +66790,6 @@ var external_path_ = __nccwpck_require__(1017);
 var external_path_default = /*#__PURE__*/__nccwpck_require__.n(external_path_);
 // EXTERNAL MODULE: ./node_modules/@actions/glob/lib/glob.js
 var glob = __nccwpck_require__(8090);
-// EXTERNAL MODULE: ./node_modules/toml/index.js
-var toml = __nccwpck_require__(4920);
 // EXTERNAL MODULE: external "crypto"
 var external_crypto_ = __nccwpck_require__(6113);
 var external_crypto_default = /*#__PURE__*/__nccwpck_require__.n(external_crypto_);
@@ -66801,10 +66799,12 @@ var promises_default = /*#__PURE__*/__nccwpck_require__.n(promises_);
 // EXTERNAL MODULE: external "os"
 var external_os_ = __nccwpck_require__(2037);
 var external_os_default = /*#__PURE__*/__nccwpck_require__.n(external_os_);
+// EXTERNAL MODULE: ./node_modules/toml/index.js
+var toml = __nccwpck_require__(4920);
 // EXTERNAL MODULE: ./node_modules/@actions/buildjet-cache/lib/cache.js
-var cache = __nccwpck_require__(7551);
+var lib_cache = __nccwpck_require__(7551);
 // EXTERNAL MODULE: ./node_modules/@actions/cache/lib/cache.js
-var lib_cache = __nccwpck_require__(7799);
+var cache_lib_cache = __nccwpck_require__(7799);
 ;// CONCATENATED MODULE: ./src/utils.ts
 
 
@@ -66846,18 +66846,16 @@ async function getCmdOutput(cmd, args = [], options = {}) {
 }
 return stdout;
 }
-function getCacheHandler() {
+function getCacheProvider() {
 const cacheProvider = core.getInput("cache-provider");
-switch (cacheProvider) {
-case "github":
-core.info("Using Github Cache.");
-return lib_cache;
-case "buildjet":
-core.info("Using Buildjet Cache.");
-return cache;
-default:
-throw new Error("Only currently support github and buildjet caches");
+const cache = cacheProvider === "github" ? cache_lib_cache : cacheProvider === "buildjet" ? lib_cache : undefined;
+if (!cache) {
+throw new Error(`The \`cache-provider\` \`{cacheProvider}\` is not valid.`);
+}
+return {
+name: cacheProvider,
+cache: cache,
+};
 }
 
 ;// CONCATENATED MODULE: ./src/workspace.ts
@@ -67007,7 +67005,7 @@ class CacheConfig {
 const cargo_manifests = sort_and_uniq(await globFiles(`${root}/**/Cargo.toml`));
 for (const cargo_manifest of cargo_manifests) {
 try {
-const content = await promises_default().readFile(cargo_manifest, { encoding: 'utf8' });
+const content = await promises_default().readFile(cargo_manifest, { encoding: "utf8" });
 const parsed = toml.parse(content);
 if ("package" in parsed) {
 const pack = parsed.package;
@@ -67024,21 +67022,22 @@ class CacheConfig {
 for (const key of Object.keys(deps)) {
 const dep = deps[key];
 if ("path" in dep) {
-dep.version = '0.0.0';
+dep.version = "0.0.0";
 }
 }
 }
 hasher.update(JSON.stringify(parsed));
 parsedKeyFiles.push(cargo_manifest);
 }
-catch (_e) { // Fallback to caching them as regular file
+catch (_e) {
+// Fallback to caching them as regular file
 keyFiles.push(cargo_manifest);
 }
 }
 const cargo_locks = sort_and_uniq(await globFiles(`${root}/**/Cargo.lock`));
 for (const cargo_lock of cargo_locks) {
 try {
-const content = await promises_default().readFile(cargo_lock, { encoding: 'utf8' });
+const content = await promises_default().readFile(cargo_lock, { encoding: "utf8" });
 const parsed = toml.parse(content);
 if (parsed.version !== 3 || !("package" in parsed)) {
 // Fallback to caching them as regular file since this action
@@ -67054,7 +67053,8 @@ class CacheConfig {
 hasher.update(JSON.stringify(packages));
 parsedKeyFiles.push(cargo_lock);
 }
-catch (_e) { // Fallback to caching them as regular file
+catch (_e) {
+// Fallback to caching them as regular file
 keyFiles.push(cargo_lock);
 }
 }
@@ -67098,15 +67098,16 @@ class CacheConfig {
 }
 const self = new CacheConfig();
 Object.assign(self, JSON.parse(source));
-self.workspaces = self.workspaces
-.map((w) => new Workspace(w.root, w.target));
+self.workspaces = self.workspaces.map((w) => new Workspace(w.root, w.target));
 return self;
 }
 /**
 * Prints the configuration to the action log.
 */
-printInfo() {
+printInfo(cacheProvider) {
 core.startGroup("Cache Configuration");
+core.info(`Cache Provider:`);
+core.info(` ${cacheProvider.name}`);
 core.info(`Workspaces:`);
 for (const workspace of this.workspaces) {
 core.info(` ${workspace.root}`);
@@ -67173,7 +67174,7 @@ async function globFiles(pattern) {
 // fs.statSync resolve the symbolic link and returns stat for the
 // file it pointed to, so isFile would make sure the resolved
 // file is actually a regular file.
-return (await globber.glob()).filter(file => external_fs_default().statSync(file).isFile());
+return (await globber.glob()).filter((file) => external_fs_default().statSync(file).isFile());
 }
 function sort_and_uniq(a) {
 return a
@@ -67476,9 +67477,9 @@ process.on("uncaughtException", (e) => {
 }
 });
 async function run() {
-const cache = getCacheHandler();
+const cacheProvider = getCacheProvider();
 const save = core.getInput("save-if").toLowerCase() || "true";
-if (!(cache.isFeatureAvailable() && save === "true")) {
+if (!(cacheProvider.cache.isFeatureAvailable() && save === "true")) {
 return;
 }
 try {
@@ -67487,7 +67488,7 @@ async function run() {
 return;
 }
 const config = CacheConfig.fromState();
-config.printInfo();
+config.printInfo(cacheProvider);
 core.info("");
 // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
 await macOsWorkaround();
@@ -67505,7 +67506,7 @@ async function run() {
 }
 try {
 const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
-core.info(`... Cleaning cargo registry cache-all-crates: ${crates} ...`);
+core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`);
 await cleanRegistry(allPackages, crates !== "true");
 }
 catch (e) {
@@ -67529,7 +67530,7 @@ async function run() {
 // Pass a copy of cachePaths to avoid mutating the original array as reported by:
 // https://github.com/actions/toolkit/pull/1378
 // TODO: remove this once the underlying bug is fixed.
-await cache.saveCache(config.cachePaths.slice(), config.cacheKey);
+await cacheProvider.cache.saveCache(config.cachePaths.slice(), config.cacheKey);
 }
 catch (e) {
 reportError(e);
src/config.ts
@@ -1,15 +1,15 @@
 import * as core from "@actions/core";
 import * as glob from "@actions/glob";
-import * as toml from "toml";
 import crypto from "crypto";
 import fs from "fs";
 import fs_promises from "fs/promises";
 import os from "os";
 import path from "path";
+import * as toml from "toml";
 
-import { getCmdOutput } from "./utils";
-import { Workspace } from "./workspace";
 import { getCargoBins } from "./cleanup";
+import { CacheProvider, getCmdOutput } from "./utils";
+import { Workspace } from "./workspace";
 
 const HOME = os.homedir();
 export const CARGO_HOME = process.env.CARGO_HOME || path.join(HOME, ".cargo");
@@ -146,7 +146,7 @@ export class CacheConfig {
 
 for (const cargo_manifest of cargo_manifests) {
 try {
-const content = await fs_promises.readFile(cargo_manifest, { encoding: 'utf8' });
+const content = await fs_promises.readFile(cargo_manifest, { encoding: "utf8" });
 const parsed = toml.parse(content);
 
 if ("package" in parsed) {
@@ -167,7 +167,7 @@ export class CacheConfig {
 const dep = deps[key];
 
 if ("path" in dep) {
-dep.version = '0.0.0'
+dep.version = "0.0.0";
 }
 }
 }
@@ -175,7 +175,8 @@ export class CacheConfig {
 hasher.update(JSON.stringify(parsed));
 
 parsedKeyFiles.push(cargo_manifest);
-} catch (_e) { // Fallback to caching them as regular file
+} catch (_e) {
+// Fallback to caching them as regular file
 keyFiles.push(cargo_manifest);
 }
 }
@@ -184,7 +185,7 @@ export class CacheConfig {
 
 for (const cargo_lock of cargo_locks) {
 try {
-const content = await fs_promises.readFile(cargo_lock, { encoding: 'utf8' });
+const content = await fs_promises.readFile(cargo_lock, { encoding: "utf8" });
 const parsed = toml.parse(content);
 
 if (parsed.version !== 3 || !("package" in parsed)) {
@@ -197,13 +198,14 @@ export class CacheConfig {
 // Package without `[[package]].source` and `[[package]].checksum`
 // are the one with `path = "..."` to crates within the workspace.
 const packages = parsed.package.filter((p: any) => {
-"source" in p || "checksum" in p
+"source" in p || "checksum" in p;
 });
 
 hasher.update(JSON.stringify(packages));
 
 parsedKeyFiles.push(cargo_lock);
-} catch (_e) { // Fallback to caching them as regular file
+} catch (_e) {
+// Fallback to caching them as regular file
 keyFiles.push(cargo_lock);
 }
 }
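The filter above is meant to keep only lockfile packages that come from an external source, since workspace-local `path` dependencies have neither `source` nor `checksum` and should not churn the key. A standalone sketch of that idea, written with an expression-bodied arrow so the predicate returns its boolean; the `LockPackage` shape is illustrative, not the action's actual type:

// Hypothetical minimal shape of a Cargo.lock [[package]] entry (illustrative only).
interface LockPackage {
  name: string;
  version: string;
  source?: string;
  checksum?: string;
}

// Keep only packages that originate from a registry or git source;
// local `path` dependencies carry neither `source` nor `checksum`.
function externalPackages(packages: LockPackage[]): LockPackage[] {
  return packages.filter((p) => "source" in p || "checksum" in p);
}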
@@ -257,8 +259,7 @@ export class CacheConfig {
 
 const self = new CacheConfig();
 Object.assign(self, JSON.parse(source));
-self.workspaces = self.workspaces
-.map((w: any) => new Workspace(w.root, w.target));
+self.workspaces = self.workspaces.map((w: any) => new Workspace(w.root, w.target));
 
 return self;
 }
@@ -266,8 +267,10 @@ export class CacheConfig {
 /**
 * Prints the configuration to the action log.
 */
-printInfo() {
+printInfo(cacheProvider: CacheProvider) {
 core.startGroup("Cache Configuration");
+core.info(`Cache Provider:`);
+core.info(` ${cacheProvider.name}`);
 core.info(`Workspaces:`);
 for (const workspace of this.workspaces) {
 core.info(` ${workspace.root}`);
@@ -345,25 +348,22 @@ async function globFiles(pattern: string): Promise<string[]> {
 // fs.statSync resolve the symbolic link and returns stat for the
 // file it pointed to, so isFile would make sure the resolved
 // file is actually a regular file.
-return (await globber.glob()).filter(file => fs.statSync(file).isFile());
+return (await globber.glob()).filter((file) => fs.statSync(file).isFile());
 }
 
 function sort_and_uniq(a: string[]) {
-return a
-.sort((a, b) => a.localeCompare(b))
-.reduce(
-(accumulator: string[], currentValue: string) => {
-const len = accumulator.length;
-// If accumulator is empty or its last element != currentValue
-// Since array is already sorted, elements with the same value
-// are grouped together to be continugous in space.
-//
-// If currentValue != last element, then it must be unique.
-if (len == 0 || accumulator[len - 1].localeCompare(currentValue) != 0) {
-accumulator.push(currentValue);
-}
-return accumulator;
-},
-[]
-);
+return a
+.sort((a, b) => a.localeCompare(b))
+.reduce((accumulator: string[], currentValue: string) => {
+const len = accumulator.length;
+// If accumulator is empty or its last element != currentValue
+// Since array is already sorted, elements with the same value
+// are grouped together to be continugous in space.
+//
+// If currentValue != last element, then it must be unique.
+if (len == 0 || accumulator[len - 1].localeCompare(currentValue) != 0) {
+accumulator.push(currentValue);
+}
+return accumulator;
+}, []);
 }
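For reference, a behaviorally equivalent standalone sketch of what sort_and_uniq produces (the sample paths are made up):

// Equivalent standalone helper: sort lexicographically, then drop duplicates.
function sortAndUniq(a: string[]): string[] {
  return [...new Set(a)].sort((x, y) => x.localeCompare(y));
}

console.log(sortAndUniq(["b/Cargo.toml", "a/Cargo.toml", "b/Cargo.toml"]));
// -> [ 'a/Cargo.toml', 'b/Cargo.toml' ]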
src/restore.ts
@@ -2,7 +2,7 @@ import * as core from "@actions/core";
 
 import { cleanTargetDir } from "./cleanup";
 import { CacheConfig } from "./config";
-import { getCacheHandler, reportError } from "./utils";
+import { getCacheProvider, reportError } from "./utils";
 
 process.on("uncaughtException", (e) => {
 core.error(e.message);
@@ -12,9 +12,9 @@ process.on("uncaughtException", (e) => {
 });
 
 async function run() {
-const cache = getCacheHandler();
+const cacheProvider = getCacheProvider();
 
-if (!cache.isFeatureAvailable()) {
+if (!cacheProvider.cache.isFeatureAvailable()) {
 setCacheHitOutput(false);
 return;
 }
@@ -28,7 +28,7 @@ async function run() {
 core.exportVariable("CARGO_INCREMENTAL", 0);
 
 const config = await CacheConfig.new();
-config.printInfo();
+config.printInfo(cacheProvider);
 core.info("");
 
 core.info(`... Restoring cache ...`);
@@ -36,7 +36,7 @@ async function run() {
 // Pass a copy of cachePaths to avoid mutating the original array as reported by:
 // https://github.com/actions/toolkit/pull/1378
 // TODO: remove this once the underlying bug is fixed.
-const restoreKey = await cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey]);
+const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey]);
 if (restoreKey) {
 const match = restoreKey === key;
 core.info(`Restored from cache key "${restoreKey}" full match: ${match}.`);
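Putting the restore-side changes together, the entrypoint now reads roughly like this sketch (condensed from the hunks above; error handling and the cache-hit output are omitted):

import * as core from "@actions/core";
import { CacheConfig } from "./config";
import { getCacheProvider } from "./utils";

// Condensed restore flow: pick the provider once, then use provider.cache everywhere.
async function restoreSketch(): Promise<void> {
  const cacheProvider = getCacheProvider();
  if (!cacheProvider.cache.isFeatureAvailable()) {
    return;
  }

  const config = await CacheConfig.new();
  config.printInfo(cacheProvider);

  const key = config.cacheKey;
  // Pass a copy of cachePaths so the toolkit cannot mutate the original array.
  const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey]);
  core.info(`Restored from cache key "${restoreKey}" full match: ${restoreKey === key}.`);
}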
src/save.ts
@@ -3,7 +3,7 @@ import * as exec from "@actions/exec";
 
 import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
 import { CacheConfig, isCacheUpToDate } from "./config";
-import { getCacheHandler, reportError } from "./utils";
+import { getCacheProvider, reportError } from "./utils";
 
 process.on("uncaughtException", (e) => {
 core.error(e.message);
@@ -13,11 +13,11 @@ process.on("uncaughtException", (e) => {
 });
 
 async function run() {
-const cache = getCacheHandler();
+const cacheProvider = getCacheProvider();
 
 const save = core.getInput("save-if").toLowerCase() || "true";
 
-if (!(cache.isFeatureAvailable() && save === "true")) {
+if (!(cacheProvider.cache.isFeatureAvailable() && save === "true")) {
 return;
 }
 
@@ -28,7 +28,7 @@ async function run() {
 }
 
 const config = CacheConfig.fromState();
-config.printInfo();
+config.printInfo(cacheProvider);
 core.info("");
 
 // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
@@ -48,7 +48,7 @@ async function run() {
 
 try {
 const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
-core.info(`... Cleaning cargo registry cache-all-crates: ${crates} ...`);
+core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`);
 await cleanRegistry(allPackages, crates !== "true");
 } catch (e) {
 core.debug(`${(e as any).stack}`);
@@ -72,7 +72,7 @@ async function run() {
 // Pass a copy of cachePaths to avoid mutating the original array as reported by:
 // https://github.com/actions/toolkit/pull/1378
 // TODO: remove this once the underlying bug is fixed.
-await cache.saveCache(config.cachePaths.slice(), config.cacheKey);
+await cacheProvider.cache.saveCache(config.cachePaths.slice(), config.cacheKey);
 } catch (e) {
 reportError(e);
 }
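The `.slice()` in the call above exists because the cache toolkit can mutate the path array it receives (actions/toolkit#1378). A tiny self-contained sketch of the defensive-copy idea; `saveSomething` is a hypothetical stand-in for the toolkit call:

// Hypothetical consumer that sorts (i.e. mutates) the array it is given.
function saveSomething(paths: string[]): void {
  paths.sort();
}

const cachePaths = ["target", "~/.cargo/registry"];
// Passing a copy keeps the caller's ordering intact even if the callee mutates it.
saveSomething(cachePaths.slice());
console.log(cachePaths); // still ["target", "~/.cargo/registry"]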
src/utils.ts
@@ -1,7 +1,7 @@
+import * as buildjetCache from "@actions/buildjet-cache";
+import * as ghCache from "@actions/cache";
 import * as core from "@actions/core";
 import * as exec from "@actions/exec";
-import * as buildjetCache from "@actions/buildjet-cache";
-import * as ghCache from "@actions/cache";
 
 export function reportError(e: any) {
 const { commandFailed } = e;
@@ -43,16 +43,21 @@ export async function getCmdOutput(
 return stdout;
 }
 
-export function getCacheHandler() {
-const cacheProvider = core.getInput("cache-provider");
-switch (cacheProvider) {
-case "github":
-core.info("Using Github Cache.");
-return ghCache;
-case "buildjet":
-core.info("Using Buildjet Cache.");
-return buildjetCache;
-default:
-throw new Error("Only currently support github and buildjet caches");
-}
+export interface CacheProvider {
+name: string;
+cache: typeof ghCache;
+}
+
+export function getCacheProvider(): CacheProvider {
+const cacheProvider = core.getInput("cache-provider");
+const cache = cacheProvider === "github" ? ghCache : cacheProvider === "buildjet" ? buildjetCache : undefined;
+
+if (!cache) {
+throw new Error(`The \`cache-provider\` \`{cacheProvider}\` is not valid.`);
+}
+
+return {
+name: cacheProvider,
+cache: cache,
+};
 }
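For callers, the new helper replaces the old switch with a small object that bundles the provider name and its cache module; a minimal usage sketch (grounded in the restore and save entrypoints in this diff):

import * as core from "@actions/core";
import { getCacheProvider } from "./utils";

// The returned object carries both the provider name (for logging)
// and the cache implementation (GitHub or BuildJet) behind one interface.
const cacheProvider = getCacheProvider();
core.info(`Using the "${cacheProvider.name}" cache provider.`);

if (cacheProvider.cache.isFeatureAvailable()) {
  core.info("Cache service is available.");
  // restoreCache / saveCache are then called via cacheProvider.cache, e.g.:
  // await cacheProvider.cache.saveCache(paths.slice(), key);
}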