fix: cache key stability (#142)
Ensure consistency of the main and post configuration by storing it in, and restoring it from, the action state, which in turn ensures cache key stability. Also: * Fixed some typos. * Use core.error for logging errors. * Fix inverted condition on cache-all-crates. Reverts: #138. Fixes #140.
This commit is contained in:
parent
060bda31e0
commit
ad97570a01
|
@ -1,5 +1,9 @@
|
|||
# Changelog
|
||||
|
||||
## 2.3.1
|
||||
|
||||
- Fix cache key stability.
|
||||
|
||||
## 2.3.0
|
||||
|
||||
- Add `cache-all-crates` option, which enables caching of crates installed by workflows.
|
||||
|
|
|
@ -101,7 +101,6 @@ This cache is automatically keyed by:
|
|||
- the value of some compiler-specific environment variables (eg. RUSTFLAGS, etc), and
|
||||
- a hash of all `Cargo.lock` / `Cargo.toml` files found anywhere in the repository (if present).
|
||||
- a hash of all `rust-toolchain` / `rust-toolchain.toml` files in the root of the repository (if present).
|
||||
- a hash of installed packages as generated by `cargo install --list`.
|
||||
|
||||
An additional input `key` can be provided if the builtin keys are not sufficient.
|
||||
|
||||
|
@ -137,7 +136,7 @@ otherwise corrupt the cache on macOS builds.
|
|||
This specialized cache action is built on top of the upstream cache action
|
||||
maintained by GitHub. The same restrictions and limits apply, which are
|
||||
documented here:
|
||||
https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows
|
||||
[Caching dependencies to speed up workflows](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows)
|
||||
|
||||
In particular, caches are currently limited to 10 GB in total and exceeding that
|
||||
limit will cause eviction of older caches.
|
||||
|
|
|
@ -59977,8 +59977,8 @@ async function getCmdOutput(cmd, args = [], options = {}) {
|
|||
});
|
||||
}
|
||||
catch (e) {
|
||||
lib_core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`);
|
||||
lib_core.info(`[warning] ${stderr}`);
|
||||
lib_core.error(`Command failed: ${cmd} ${args.join(" ")}`);
|
||||
lib_core.error(stderr);
|
||||
throw e;
|
||||
}
|
||||
return stdout;
|
||||
|
@ -60024,12 +60024,10 @@ class Workspace {
|
|||
|
||||
|
||||
|
||||
|
||||
const HOME = external_os_default().homedir();
|
||||
const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo");
|
||||
const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH";
|
||||
const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES";
|
||||
const config_STATE_BINS = "RUST_CACHE_BINS";
|
||||
const STATE_KEY = "RUST_CACHE_KEY";
|
||||
const STATE_CONFIG = "RUST_CACHE_CONFIG";
|
||||
class CacheConfig {
|
||||
constructor() {
|
||||
/** All the paths we want to cache */
|
||||
|
@ -60040,6 +60038,8 @@ class CacheConfig {
|
|||
this.restoreKey = "";
|
||||
/** The workspace configurations */
|
||||
this.workspaces = [];
|
||||
/** The cargo binaries present during main step */
|
||||
this.cargoBins = [];
|
||||
/** The prefix portion of the cache key */
|
||||
this.keyPrefix = "";
|
||||
/** The rust version considered for the cache key */
|
||||
|
@ -60103,20 +60103,11 @@ class CacheConfig {
|
|||
}
|
||||
}
|
||||
self.keyEnvs = keyEnvs;
|
||||
// Installed packages and their versions are also considered for the key.
|
||||
const packages = await getPackages();
|
||||
hasher.update(packages);
|
||||
key += `-${hasher.digest("hex")}`;
|
||||
self.restoreKey = key;
|
||||
// Construct the lockfiles portion of the key:
|
||||
// This considers all the files found via globbing for various manifests
|
||||
// and lockfiles.
|
||||
// This part is computed in the "pre"/"restore" part of the job and persisted
|
||||
// into the `state`. That state is loaded in the "post"/"save" part of the
|
||||
// job so we have consistent values even though the "main" actions run
|
||||
// might create/overwrite lockfiles.
|
||||
let lockHash = lib_core.getState(STATE_LOCKFILE_HASH);
|
||||
let keyFiles = JSON.parse(lib_core.getState(STATE_LOCKFILES) || "[]");
|
||||
// Constructs the workspace config and paths to restore:
|
||||
// The workspaces are given using a `$workspace -> $target` syntax.
|
||||
const workspaces = [];
|
||||
|
@ -60128,24 +60119,20 @@ class CacheConfig {
|
|||
workspaces.push(new Workspace(root, target));
|
||||
}
|
||||
self.workspaces = workspaces;
|
||||
if (!lockHash) {
|
||||
keyFiles = keyFiles.concat(await globFiles("rust-toolchain\nrust-toolchain.toml"));
|
||||
for (const workspace of workspaces) {
|
||||
const root = workspace.root;
|
||||
keyFiles.push(...(await globFiles(`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
|
||||
}
|
||||
keyFiles = keyFiles.filter(file => !external_fs_default().statSync(file).isDirectory());
|
||||
keyFiles.sort((a, b) => a.localeCompare(b));
|
||||
hasher = external_crypto_default().createHash("sha1");
|
||||
for (const file of keyFiles) {
|
||||
for await (const chunk of external_fs_default().createReadStream(file)) {
|
||||
hasher.update(chunk);
|
||||
}
|
||||
}
|
||||
lockHash = hasher.digest("hex");
|
||||
lib_core.saveState(STATE_LOCKFILE_HASH, lockHash);
|
||||
lib_core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles));
|
||||
let keyFiles = await globFiles("rust-toolchain\nrust-toolchain.toml");
|
||||
for (const workspace of workspaces) {
|
||||
const root = workspace.root;
|
||||
keyFiles.push(...(await globFiles(`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
|
||||
}
|
||||
keyFiles = keyFiles.filter(file => !external_fs_default().statSync(file).isDirectory());
|
||||
keyFiles.sort((a, b) => a.localeCompare(b));
|
||||
hasher = external_crypto_default().createHash("sha1");
|
||||
for (const file of keyFiles) {
|
||||
for await (const chunk of external_fs_default().createReadStream(file)) {
|
||||
hasher.update(chunk);
|
||||
}
|
||||
}
|
||||
let lockHash = hasher.digest("hex");
|
||||
self.keyFiles = keyFiles;
|
||||
key += `-${lockHash}`;
|
||||
self.cacheKey = key;
|
||||
|
@ -60158,8 +60145,32 @@ class CacheConfig {
|
|||
for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) {
|
||||
self.cachePaths.push(dir);
|
||||
}
|
||||
const bins = await getCargoBins();
|
||||
self.cargoBins = Array.from(bins.values());
|
||||
return self;
|
||||
}
|
||||
/**
|
||||
* Reads and returns the cache config from the action `state`.
|
||||
*
|
||||
* @throws {Error} if the state is not present.
|
||||
* @returns {CacheConfig} the configuration.
|
||||
* @see {@link CacheConfig#saveState}
|
||||
* @see {@link CacheConfig#new}
|
||||
*/
|
||||
static fromState() {
|
||||
const source = lib_core.getState(STATE_CONFIG);
|
||||
if (!source) {
|
||||
throw new Error("Cache configuration not found in state");
|
||||
}
|
||||
const self = new CacheConfig();
|
||||
Object.assign(self, JSON.parse(source));
|
||||
self.workspaces = self.workspaces
|
||||
.map((w) => new Workspace(w.root, w.target));
|
||||
return self;
|
||||
}
|
||||
/**
|
||||
* Prints the configuration to the action log.
|
||||
*/
|
||||
printInfo() {
|
||||
lib_core.startGroup("Cache Configuration");
|
||||
lib_core.info(`Workspaces:`);
|
||||
|
@ -60187,6 +60198,21 @@ class CacheConfig {
|
|||
}
|
||||
lib_core.endGroup();
|
||||
}
|
||||
/**
|
||||
* Saves the configuration to the state store.
|
||||
* This is used to restore the configuration in the post action.
|
||||
*/
|
||||
saveState() {
|
||||
lib_core.saveState(STATE_CONFIG, this);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Checks if the cache is up to date.
|
||||
*
|
||||
* @returns `true` if the cache is up to date, `false` otherwise.
|
||||
*/
|
||||
function isCacheUpToDate() {
|
||||
return core.getState(STATE_CONFIG) === "";
|
||||
}
|
||||
async function getRustVersion() {
|
||||
const stdout = await getCmdOutput("rustc", ["-vV"]);
|
||||
|
@ -60197,11 +60223,6 @@ async function getRustVersion() {
|
|||
.filter((s) => s.length === 2);
|
||||
return Object.fromEntries(splits);
|
||||
}
|
||||
async function getPackages() {
|
||||
let stdout = await getCmdOutput("cargo", ["install", "--list"]);
|
||||
// Make OS independent.
|
||||
return stdout.split(/[\n\r]+/).join("\n");
|
||||
}
|
||||
async function globFiles(pattern) {
|
||||
const globber = await glob.create(pattern, {
|
||||
followSymbolicLinks: false,
|
||||
|
@ -60269,9 +60290,14 @@ async function getCargoBins() {
|
|||
catch { }
|
||||
return bins;
|
||||
}
|
||||
async function cleanBin() {
|
||||
/**
|
||||
* Clean the cargo bin directory, removing the binaries that existed
|
||||
* when the action started, as they were not created by the build.
|
||||
*
|
||||
* @param oldBins The binaries that existed when the action started.
|
||||
*/
|
||||
async function cleanBin(oldBins) {
|
||||
const bins = await getCargoBins();
|
||||
const oldBins = JSON.parse(core.getState(STATE_BINS));
|
||||
for (const bin of oldBins) {
|
||||
bins.delete(bin);
|
||||
}
|
||||
|
@ -60439,9 +60465,9 @@ async function exists(path) {
|
|||
|
||||
|
||||
process.on("uncaughtException", (e) => {
|
||||
lib_core.info(`[warning] ${e.message}`);
|
||||
lib_core.error(e.message);
|
||||
if (e.stack) {
|
||||
lib_core.info(e.stack);
|
||||
lib_core.error(e.stack);
|
||||
}
|
||||
});
|
||||
async function run() {
|
||||
|
@ -60459,8 +60485,6 @@ async function run() {
|
|||
const config = await CacheConfig["new"]();
|
||||
config.printInfo();
|
||||
lib_core.info("");
|
||||
const bins = await getCargoBins();
|
||||
lib_core.saveState(config_STATE_BINS, JSON.stringify([...bins]));
|
||||
lib_core.info(`... Restoring cache ...`);
|
||||
const key = config.cacheKey;
|
||||
// Pass a copy of cachePaths to avoid mutating the original array as reported by:
|
||||
|
@ -60468,9 +60492,9 @@ async function run() {
|
|||
// TODO: remove this once the underlying bug is fixed.
|
||||
const restoreKey = await cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey]);
|
||||
if (restoreKey) {
|
||||
lib_core.info(`Restored from cache key "${restoreKey}".`);
|
||||
lib_core.saveState(STATE_KEY, restoreKey);
|
||||
if (restoreKey !== key) {
|
||||
const match = restoreKey === key;
|
||||
lib_core.info(`Restored from cache key "${restoreKey}" full match: ${match}.`);
|
||||
if (!match) {
|
||||
// pre-clean the target directory on cache mismatch
|
||||
for (const workspace of config.workspaces) {
|
||||
try {
|
||||
|
@ -60478,17 +60502,20 @@ async function run() {
|
|||
}
|
||||
catch { }
|
||||
}
|
||||
// We restored the cache but it is not a full match.
|
||||
config.saveState();
|
||||
}
|
||||
setCacheHitOutput(restoreKey === key);
|
||||
setCacheHitOutput(match);
|
||||
}
|
||||
else {
|
||||
lib_core.info("No cache found.");
|
||||
config.saveState();
|
||||
setCacheHitOutput(false);
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
setCacheHitOutput(false);
|
||||
lib_core.info(`[warning] ${e.stack}`);
|
||||
lib_core.error(`${e.stack}`);
|
||||
}
|
||||
}
|
||||
function setCacheHitOutput(cacheHit) {
|
||||
|
|
|
@ -59977,8 +59977,8 @@ async function getCmdOutput(cmd, args = [], options = {}) {
|
|||
});
|
||||
}
|
||||
catch (e) {
|
||||
core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`);
|
||||
core.info(`[warning] ${stderr}`);
|
||||
core.error(`Command failed: ${cmd} ${args.join(" ")}`);
|
||||
core.error(stderr);
|
||||
throw e;
|
||||
}
|
||||
return stdout;
|
||||
|
@ -60024,12 +60024,10 @@ class Workspace {
|
|||
|
||||
|
||||
|
||||
|
||||
const HOME = external_os_default().homedir();
|
||||
const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo");
|
||||
const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH";
|
||||
const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES";
|
||||
const STATE_BINS = "RUST_CACHE_BINS";
|
||||
const STATE_KEY = "RUST_CACHE_KEY";
|
||||
const STATE_CONFIG = "RUST_CACHE_CONFIG";
|
||||
class CacheConfig {
|
||||
constructor() {
|
||||
/** All the paths we want to cache */
|
||||
|
@ -60040,6 +60038,8 @@ class CacheConfig {
|
|||
this.restoreKey = "";
|
||||
/** The workspace configurations */
|
||||
this.workspaces = [];
|
||||
/** The cargo binaries present during main step */
|
||||
this.cargoBins = [];
|
||||
/** The prefix portion of the cache key */
|
||||
this.keyPrefix = "";
|
||||
/** The rust version considered for the cache key */
|
||||
|
@ -60103,20 +60103,11 @@ class CacheConfig {
|
|||
}
|
||||
}
|
||||
self.keyEnvs = keyEnvs;
|
||||
// Installed packages and their versions are also considered for the key.
|
||||
const packages = await getPackages();
|
||||
hasher.update(packages);
|
||||
key += `-${hasher.digest("hex")}`;
|
||||
self.restoreKey = key;
|
||||
// Construct the lockfiles portion of the key:
|
||||
// This considers all the files found via globbing for various manifests
|
||||
// and lockfiles.
|
||||
// This part is computed in the "pre"/"restore" part of the job and persisted
|
||||
// into the `state`. That state is loaded in the "post"/"save" part of the
|
||||
// job so we have consistent values even though the "main" actions run
|
||||
// might create/overwrite lockfiles.
|
||||
let lockHash = core.getState(STATE_LOCKFILE_HASH);
|
||||
let keyFiles = JSON.parse(core.getState(STATE_LOCKFILES) || "[]");
|
||||
// Constructs the workspace config and paths to restore:
|
||||
// The workspaces are given using a `$workspace -> $target` syntax.
|
||||
const workspaces = [];
|
||||
|
@ -60128,24 +60119,20 @@ class CacheConfig {
|
|||
workspaces.push(new Workspace(root, target));
|
||||
}
|
||||
self.workspaces = workspaces;
|
||||
if (!lockHash) {
|
||||
keyFiles = keyFiles.concat(await globFiles("rust-toolchain\nrust-toolchain.toml"));
|
||||
for (const workspace of workspaces) {
|
||||
const root = workspace.root;
|
||||
keyFiles.push(...(await globFiles(`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
|
||||
}
|
||||
keyFiles = keyFiles.filter(file => !external_fs_default().statSync(file).isDirectory());
|
||||
keyFiles.sort((a, b) => a.localeCompare(b));
|
||||
hasher = external_crypto_default().createHash("sha1");
|
||||
for (const file of keyFiles) {
|
||||
for await (const chunk of external_fs_default().createReadStream(file)) {
|
||||
hasher.update(chunk);
|
||||
}
|
||||
}
|
||||
lockHash = hasher.digest("hex");
|
||||
core.saveState(STATE_LOCKFILE_HASH, lockHash);
|
||||
core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles));
|
||||
let keyFiles = await globFiles("rust-toolchain\nrust-toolchain.toml");
|
||||
for (const workspace of workspaces) {
|
||||
const root = workspace.root;
|
||||
keyFiles.push(...(await globFiles(`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
|
||||
}
|
||||
keyFiles = keyFiles.filter(file => !external_fs_default().statSync(file).isDirectory());
|
||||
keyFiles.sort((a, b) => a.localeCompare(b));
|
||||
hasher = external_crypto_default().createHash("sha1");
|
||||
for (const file of keyFiles) {
|
||||
for await (const chunk of external_fs_default().createReadStream(file)) {
|
||||
hasher.update(chunk);
|
||||
}
|
||||
}
|
||||
let lockHash = hasher.digest("hex");
|
||||
self.keyFiles = keyFiles;
|
||||
key += `-${lockHash}`;
|
||||
self.cacheKey = key;
|
||||
|
@ -60158,8 +60145,32 @@ class CacheConfig {
|
|||
for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) {
|
||||
self.cachePaths.push(dir);
|
||||
}
|
||||
const bins = await getCargoBins();
|
||||
self.cargoBins = Array.from(bins.values());
|
||||
return self;
|
||||
}
|
||||
/**
|
||||
* Reads and returns the cache config from the action `state`.
|
||||
*
|
||||
* @throws {Error} if the state is not present.
|
||||
* @returns {CacheConfig} the configuration.
|
||||
* @see {@link CacheConfig#saveState}
|
||||
* @see {@link CacheConfig#new}
|
||||
*/
|
||||
static fromState() {
|
||||
const source = core.getState(STATE_CONFIG);
|
||||
if (!source) {
|
||||
throw new Error("Cache configuration not found in state");
|
||||
}
|
||||
const self = new CacheConfig();
|
||||
Object.assign(self, JSON.parse(source));
|
||||
self.workspaces = self.workspaces
|
||||
.map((w) => new Workspace(w.root, w.target));
|
||||
return self;
|
||||
}
|
||||
/**
|
||||
* Prints the configuration to the action log.
|
||||
*/
|
||||
printInfo() {
|
||||
core.startGroup("Cache Configuration");
|
||||
core.info(`Workspaces:`);
|
||||
|
@ -60187,6 +60198,21 @@ class CacheConfig {
|
|||
}
|
||||
core.endGroup();
|
||||
}
|
||||
/**
|
||||
* Saves the configuration to the state store.
|
||||
* This is used to restore the configuration in the post action.
|
||||
*/
|
||||
saveState() {
|
||||
core.saveState(STATE_CONFIG, this);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Checks if the cache is up to date.
|
||||
*
|
||||
* @returns `true` if the cache is up to date, `false` otherwise.
|
||||
*/
|
||||
function isCacheUpToDate() {
|
||||
return core.getState(STATE_CONFIG) === "";
|
||||
}
|
||||
async function getRustVersion() {
|
||||
const stdout = await getCmdOutput("rustc", ["-vV"]);
|
||||
|
@ -60197,11 +60223,6 @@ async function getRustVersion() {
|
|||
.filter((s) => s.length === 2);
|
||||
return Object.fromEntries(splits);
|
||||
}
|
||||
async function getPackages() {
|
||||
let stdout = await getCmdOutput("cargo", ["install", "--list"]);
|
||||
// Make OS independent.
|
||||
return stdout.split(/[\n\r]+/).join("\n");
|
||||
}
|
||||
async function globFiles(pattern) {
|
||||
const globber = await glob.create(pattern, {
|
||||
followSymbolicLinks: false,
|
||||
|
@ -60269,9 +60290,14 @@ async function getCargoBins() {
|
|||
catch { }
|
||||
return bins;
|
||||
}
|
||||
async function cleanBin() {
|
||||
/**
|
||||
* Clean the cargo bin directory, removing the binaries that existed
|
||||
* when the action started, as they were not created by the build.
|
||||
*
|
||||
* @param oldBins The binaries that existed when the action started.
|
||||
*/
|
||||
async function cleanBin(oldBins) {
|
||||
const bins = await getCargoBins();
|
||||
const oldBins = JSON.parse(core.getState(STATE_BINS));
|
||||
for (const bin of oldBins) {
|
||||
bins.delete(bin);
|
||||
}
|
||||
|
@ -60440,9 +60466,9 @@ async function exists(path) {
|
|||
|
||||
|
||||
process.on("uncaughtException", (e) => {
|
||||
core.info(`[warning] ${e.message}`);
|
||||
core.error(e.message);
|
||||
if (e.stack) {
|
||||
core.info(e.stack);
|
||||
core.error(e.stack);
|
||||
}
|
||||
});
|
||||
async function run() {
|
||||
|
@ -60451,13 +60477,13 @@ async function run() {
|
|||
return;
|
||||
}
|
||||
try {
|
||||
const config = await CacheConfig["new"]();
|
||||
config.printInfo();
|
||||
core.info("");
|
||||
if (core.getState(STATE_KEY) === config.cacheKey) {
|
||||
if (isCacheUpToDate()) {
|
||||
core.info(`Cache up-to-date.`);
|
||||
return;
|
||||
}
|
||||
const config = CacheConfig.fromState();
|
||||
config.printInfo();
|
||||
core.info("");
|
||||
// TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
|
||||
await macOsWorkaround();
|
||||
const allPackages = [];
|
||||
|
@ -60473,16 +60499,16 @@ async function run() {
|
|||
}
|
||||
}
|
||||
try {
|
||||
const creates = core.getInput("cache-all-crates").toLowerCase() || "false";
|
||||
core.info(`... Cleaning cargo registry cache-all-crates: ${creates} ...`);
|
||||
await cleanRegistry(allPackages, creates === "true");
|
||||
const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
|
||||
core.info(`... Cleaning cargo registry cache-all-crates: ${crates} ...`);
|
||||
await cleanRegistry(allPackages, crates !== "true");
|
||||
}
|
||||
catch (e) {
|
||||
core.error(`${e.stack}`);
|
||||
}
|
||||
try {
|
||||
core.info(`... Cleaning cargo/bin ...`);
|
||||
await cleanBin();
|
||||
await cleanBin(config.cargoBins);
|
||||
}
|
||||
catch (e) {
|
||||
core.error(`${e.stack}`);
|
||||
|
|
|
@ -3,7 +3,7 @@ import * as io from "@actions/io";
|
|||
import fs from "fs";
|
||||
import path from "path";
|
||||
|
||||
import { CARGO_HOME, STATE_BINS } from "./config";
|
||||
import { CARGO_HOME } from "./config";
|
||||
import { Packages } from "./workspace";
|
||||
|
||||
export async function cleanTargetDir(targetDir: string, packages: Packages, checkTimestamp = false) {
|
||||
|
@ -69,9 +69,14 @@ export async function getCargoBins(): Promise<Set<string>> {
|
|||
return bins;
|
||||
}
|
||||
|
||||
export async function cleanBin() {
|
||||
/**
|
||||
* Clean the cargo bin directory, removing the binaries that existed
|
||||
* when the action started, as they were not created by the build.
|
||||
*
|
||||
* @param oldBins The binaries that existed when the action started.
|
||||
*/
|
||||
export async function cleanBin(oldBins: Array<string>) {
|
||||
const bins = await getCargoBins();
|
||||
const oldBins = JSON.parse(core.getState(STATE_BINS));
|
||||
|
||||
for (const bin of oldBins) {
|
||||
bins.delete(bin);
|
||||
|
@ -186,10 +191,10 @@ const ONE_WEEK = 7 * 24 * 3600 * 1000;
|
|||
|
||||
/**
|
||||
* Removes all files or directories in `dirName` matching some criteria.
|
||||
*
|
||||
*
|
||||
* When the `checkTimestamp` flag is set, this will also remove anything older
|
||||
* than one week.
|
||||
*
|
||||
*
|
||||
* Otherwise, it will remove everything that does not match any string in the
|
||||
* `keepPrefix` set.
|
||||
* The matching strips any trailing `-$hash` suffix.
|
||||
|
|
112
src/config.ts
112
src/config.ts
|
@ -7,14 +7,12 @@ import path from "path";
|
|||
|
||||
import { getCmdOutput } from "./utils";
|
||||
import { Workspace } from "./workspace";
|
||||
import { getCargoBins } from "./cleanup";
|
||||
|
||||
const HOME = os.homedir();
|
||||
export const CARGO_HOME = process.env.CARGO_HOME || path.join(HOME, ".cargo");
|
||||
|
||||
const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH";
|
||||
const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES";
|
||||
export const STATE_BINS = "RUST_CACHE_BINS";
|
||||
export const STATE_KEY = "RUST_CACHE_KEY";
|
||||
const STATE_CONFIG = "RUST_CACHE_CONFIG";
|
||||
|
||||
export class CacheConfig {
|
||||
/** All the paths we want to cache */
|
||||
|
@ -27,6 +25,9 @@ export class CacheConfig {
|
|||
/** The workspace configurations */
|
||||
public workspaces: Array<Workspace> = [];
|
||||
|
||||
/** The cargo binaries present during main step */
|
||||
public cargoBins: Array<string> = [];
|
||||
|
||||
/** The prefix portion of the cache key */
|
||||
private keyPrefix = "";
|
||||
/** The rust version considered for the cache key */
|
||||
|
@ -104,10 +105,6 @@ export class CacheConfig {
|
|||
|
||||
self.keyEnvs = keyEnvs;
|
||||
|
||||
// Installed packages and their versions are also considered for the key.
|
||||
const packages = await getPackages();
|
||||
hasher.update(packages);
|
||||
|
||||
key += `-${hasher.digest("hex")}`;
|
||||
|
||||
self.restoreKey = key;
|
||||
|
@ -115,13 +112,6 @@ export class CacheConfig {
|
|||
// Construct the lockfiles portion of the key:
|
||||
// This considers all the files found via globbing for various manifests
|
||||
// and lockfiles.
|
||||
// This part is computed in the "pre"/"restore" part of the job and persisted
|
||||
// into the `state`. That state is loaded in the "post"/"save" part of the
|
||||
// job so we have consistent values even though the "main" actions run
|
||||
// might create/overwrite lockfiles.
|
||||
|
||||
let lockHash = core.getState(STATE_LOCKFILE_HASH);
|
||||
let keyFiles: Array<string> = JSON.parse(core.getState(STATE_LOCKFILES) || "[]");
|
||||
|
||||
// Constructs the workspace config and paths to restore:
|
||||
// The workspaces are given using a `$workspace -> $target` syntax.
|
||||
|
@ -136,30 +126,25 @@ export class CacheConfig {
|
|||
}
|
||||
self.workspaces = workspaces;
|
||||
|
||||
if (!lockHash) {
|
||||
keyFiles = keyFiles.concat(await globFiles("rust-toolchain\nrust-toolchain.toml"));
|
||||
for (const workspace of workspaces) {
|
||||
const root = workspace.root;
|
||||
keyFiles.push(
|
||||
...(await globFiles(
|
||||
`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`,
|
||||
)),
|
||||
);
|
||||
}
|
||||
keyFiles = keyFiles.filter(file => !fs.statSync(file).isDirectory());
|
||||
keyFiles.sort((a, b) => a.localeCompare(b));
|
||||
|
||||
hasher = crypto.createHash("sha1");
|
||||
for (const file of keyFiles) {
|
||||
for await (const chunk of fs.createReadStream(file)) {
|
||||
hasher.update(chunk);
|
||||
}
|
||||
}
|
||||
lockHash = hasher.digest("hex");
|
||||
|
||||
core.saveState(STATE_LOCKFILE_HASH, lockHash);
|
||||
core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles));
|
||||
let keyFiles = await globFiles("rust-toolchain\nrust-toolchain.toml");
|
||||
for (const workspace of workspaces) {
|
||||
const root = workspace.root;
|
||||
keyFiles.push(
|
||||
...(await globFiles(
|
||||
`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`,
|
||||
)),
|
||||
);
|
||||
}
|
||||
keyFiles = keyFiles.filter(file => !fs.statSync(file).isDirectory());
|
||||
keyFiles.sort((a, b) => a.localeCompare(b));
|
||||
|
||||
hasher = crypto.createHash("sha1");
|
||||
for (const file of keyFiles) {
|
||||
for await (const chunk of fs.createReadStream(file)) {
|
||||
hasher.update(chunk);
|
||||
}
|
||||
}
|
||||
let lockHash = hasher.digest("hex");
|
||||
|
||||
self.keyFiles = keyFiles;
|
||||
|
||||
|
@ -177,9 +162,37 @@ export class CacheConfig {
|
|||
self.cachePaths.push(dir);
|
||||
}
|
||||
|
||||
const bins = await getCargoBins();
|
||||
self.cargoBins = Array.from(bins.values());
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads and returns the cache config from the action `state`.
|
||||
*
|
||||
* @throws {Error} if the state is not present.
|
||||
* @returns {CacheConfig} the configuration.
|
||||
* @see {@link CacheConfig#saveState}
|
||||
* @see {@link CacheConfig#new}
|
||||
*/
|
||||
static fromState(): CacheConfig {
|
||||
const source = core.getState(STATE_CONFIG);
|
||||
if (!source) {
|
||||
throw new Error("Cache configuration not found in state");
|
||||
}
|
||||
|
||||
const self = new CacheConfig();
|
||||
Object.assign(self, JSON.parse(source));
|
||||
self.workspaces = self.workspaces
|
||||
.map((w: any) => new Workspace(w.root, w.target));
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prints the configuration to the action log.
|
||||
*/
|
||||
printInfo() {
|
||||
core.startGroup("Cache Configuration");
|
||||
core.info(`Workspaces:`);
|
||||
|
@ -207,6 +220,23 @@ export class CacheConfig {
|
|||
}
|
||||
core.endGroup();
|
||||
}
|
||||
|
||||
/**
|
||||
* Saves the configuration to the state store.
|
||||
* This is used to restore the configuration in the post action.
|
||||
*/
|
||||
saveState() {
|
||||
core.saveState(STATE_CONFIG, this);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the cache is up to date.
|
||||
*
|
||||
* @returns `true` if the cache is up to date, `false` otherwise.
|
||||
*/
|
||||
export function isCacheUpToDate(): boolean {
|
||||
return core.getState(STATE_CONFIG) === "";
|
||||
}
|
||||
|
||||
interface RustVersion {
|
||||
|
@ -225,12 +255,6 @@ async function getRustVersion(): Promise<RustVersion> {
|
|||
return Object.fromEntries(splits);
|
||||
}
|
||||
|
||||
async function getPackages(): Promise<string> {
|
||||
let stdout = await getCmdOutput("cargo", ["install", "--list"]);
|
||||
// Make OS independent.
|
||||
return stdout.split(/[\n\r]+/).join("\n");
|
||||
}
|
||||
|
||||
async function globFiles(pattern: string): Promise<string[]> {
|
||||
const globber = await glob.create(pattern, {
|
||||
followSymbolicLinks: false,
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
import * as cache from "@actions/cache";
|
||||
import * as core from "@actions/core";
|
||||
|
||||
import { cleanTargetDir, getCargoBins } from "./cleanup";
|
||||
import { CacheConfig, STATE_BINS, STATE_KEY } from "./config";
|
||||
import { cleanTargetDir } from "./cleanup";
|
||||
import { CacheConfig } from "./config";
|
||||
|
||||
process.on("uncaughtException", (e) => {
|
||||
core.info(`[warning] ${e.message}`);
|
||||
core.error(e.message);
|
||||
if (e.stack) {
|
||||
core.info(e.stack);
|
||||
core.error(e.stack);
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -29,9 +29,6 @@ async function run() {
|
|||
config.printInfo();
|
||||
core.info("");
|
||||
|
||||
const bins = await getCargoBins();
|
||||
core.saveState(STATE_BINS, JSON.stringify([...bins]));
|
||||
|
||||
core.info(`... Restoring cache ...`);
|
||||
const key = config.cacheKey;
|
||||
// Pass a copy of cachePaths to avoid mutating the original array as reported by:
|
||||
|
@ -39,28 +36,31 @@ async function run() {
|
|||
// TODO: remove this once the underlying bug is fixed.
|
||||
const restoreKey = await cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey]);
|
||||
if (restoreKey) {
|
||||
core.info(`Restored from cache key "${restoreKey}".`);
|
||||
core.saveState(STATE_KEY, restoreKey);
|
||||
|
||||
if (restoreKey !== key) {
|
||||
const match = restoreKey === key;
|
||||
core.info(`Restored from cache key "${restoreKey}" full match: ${match}.`);
|
||||
if (!match) {
|
||||
// pre-clean the target directory on cache mismatch
|
||||
for (const workspace of config.workspaces) {
|
||||
try {
|
||||
await cleanTargetDir(workspace.target, [], true);
|
||||
} catch {}
|
||||
}
|
||||
|
||||
// We restored the cache but it is not a full match.
|
||||
config.saveState();
|
||||
}
|
||||
|
||||
setCacheHitOutput(restoreKey === key);
|
||||
setCacheHitOutput(match);
|
||||
} else {
|
||||
core.info("No cache found.");
|
||||
config.saveState();
|
||||
|
||||
setCacheHitOutput(false);
|
||||
}
|
||||
} catch (e) {
|
||||
setCacheHitOutput(false);
|
||||
|
||||
core.info(`[warning] ${(e as any).stack}`);
|
||||
core.error(`${(e as any).stack}`);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
24
src/save.ts
24
src/save.ts
|
@ -3,12 +3,12 @@ import * as core from "@actions/core";
|
|||
import * as exec from "@actions/exec";
|
||||
|
||||
import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
|
||||
import { CacheConfig, STATE_KEY } from "./config";
|
||||
import { CacheConfig, isCacheUpToDate } from "./config";
|
||||
|
||||
process.on("uncaughtException", (e) => {
|
||||
core.info(`[warning] ${e.message}`);
|
||||
core.error(e.message);
|
||||
if (e.stack) {
|
||||
core.info(e.stack);
|
||||
core.error(e.stack);
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -20,15 +20,15 @@ async function run() {
|
|||
}
|
||||
|
||||
try {
|
||||
const config = await CacheConfig.new();
|
||||
config.printInfo();
|
||||
core.info("");
|
||||
|
||||
if (core.getState(STATE_KEY) === config.cacheKey) {
|
||||
if (isCacheUpToDate()) {
|
||||
core.info(`Cache up-to-date.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const config = CacheConfig.fromState();
|
||||
config.printInfo();
|
||||
core.info("");
|
||||
|
||||
// TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
|
||||
await macOsWorkaround();
|
||||
|
||||
|
@ -45,16 +45,16 @@ async function run() {
|
|||
}
|
||||
|
||||
try {
|
||||
const creates = core.getInput("cache-all-crates").toLowerCase() || "false";
|
||||
core.info(`... Cleaning cargo registry cache-all-crates: ${creates} ...`);
|
||||
await cleanRegistry(allPackages, creates === "true");
|
||||
const crates = core.getInput("cache-all-crates").toLowerCase() || "false"
|
||||
core.info(`... Cleaning cargo registry cache-all-crates: ${crates} ...`);
|
||||
await cleanRegistry(allPackages, crates !== "true");
|
||||
} catch (e) {
|
||||
core.error(`${(e as any).stack}`);
|
||||
}
|
||||
|
||||
try {
|
||||
core.info(`... Cleaning cargo/bin ...`);
|
||||
await cleanBin();
|
||||
await cleanBin(config.cargoBins);
|
||||
} catch (e) {
|
||||
core.error(`${(e as any).stack}`);
|
||||
}
|
||||
|
|
|
@ -22,8 +22,8 @@ export async function getCmdOutput(
|
|||
...options,
|
||||
});
|
||||
} catch (e) {
|
||||
core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`);
|
||||
core.info(`[warning] ${stderr}`);
|
||||
core.error(`Command failed: ${cmd} ${args.join(" ")}`);
|
||||
core.error(stderr);
|
||||
throw e;
|
||||
}
|
||||
return stdout;
|
||||
|
|
Loading…
Reference in New Issue