rewrite it all

Arpad Borsos 2022-07-09 15:19:29 +02:00
parent 5df06440c6
commit 6ed4c28a7c
8 changed files with 527 additions and 466 deletions

action.yml

@@ -1,6 +1,6 @@
name: "Rust Cache"
description: "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults."
author: "Arpad Borsos <arpad.borsos@googlemail.com>"
author: "Arpad Borsos <swatinem@swatinem.de>"
inputs:
key:
description: "An additional key for the cache"
@@ -11,14 +11,8 @@ inputs:
envVars:
description: "Additional environment variables to include in the cache key, separeted by spaces"
required: false
working-directory:
description: "The working directory this action should operate in"
required: false
target-dir:
description: "The target dir that should be cleaned and persisted, defaults to `./target`"
required: false
workspace-paths:
description: "Paths to multiple Cargo workspaces, separated by newlines"
workspaces:
description: "Paths to multiple Cargo workspaces and their target directories, separated by newlines"
required: false
cache-on-failure:
description: "Cache even if the build fails. Defaults to false"
@@ -27,7 +21,7 @@ outputs:
cache-hit:
description: "A boolean value that indicates an exact match was found"
runs:
using: "node12"
using: "node16"
main: "dist/restore/index.js"
post: "dist/save/index.js"
post-if: "success() || env.CACHE_ON_FAILURE == 'true'"

src/cleanup.ts Normal file (158 additions)

@@ -0,0 +1,158 @@
import * as core from "@actions/core";
import * as io from "@actions/io";
import fs from "fs";
import path from "path";
import { CacheConfig, STATE_BINS } from "./config";
import { Packages } from "./workspace";
export async function cleanTargetDir(targetDir: string, packages: Packages) {
await fs.promises.unlink(path.join(targetDir, "./.rustc_info.json"));
// TODO: remove all unknown files, clean all directories like profiles
try {
await cleanProfileTarget(path.join(targetDir, "debug"), packages);
} catch {}
try {
await cleanProfileTarget(path.join(targetDir, "release"), packages);
} catch {}
}
async function cleanProfileTarget(profileDir: string, packages: Packages) {
await io.rmRF(path.join(profileDir, "./examples"));
await io.rmRF(path.join(profileDir, "./incremental"));
let dir: fs.Dir;
// remove all *files* from the profile directory
dir = await fs.promises.opendir(profileDir);
for await (const dirent of dir) {
if (dirent.isFile()) {
await rm(dir.path, dirent);
}
}
const keepPkg = new Set(packages.map((p) => p.name));
await rmExcept(path.join(profileDir, "./build"), keepPkg);
await rmExcept(path.join(profileDir, "./.fingerprint"), keepPkg);
const keepDeps = new Set(
packages.flatMap((p) => {
const names = [];
for (const n of [p.name, ...p.targets]) {
const name = n.replace(/-/g, "_");
names.push(name, `lib${name}`);
}
return names;
}),
);
await rmExcept(path.join(profileDir, "./deps"), keepDeps);
}
export async function cleanBin(config: CacheConfig) {
const bins = await config.getCargoBins();
const oldBins = JSON.parse(core.getState(STATE_BINS));
for (const bin of oldBins) {
bins.delete(bin);
}
const dir = await fs.promises.opendir(path.join(config.cargoHome, "bin"));
for await (const dirent of dir) {
if (dirent.isFile() && !bins.has(dirent.name)) {
await rm(dir.path, dirent);
}
}
}
export async function cleanRegistry(config: CacheConfig, registryName: string, packages: Packages) {
await io.rmRF(path.join(config.cargoIndex, registryName, ".cache"));
const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
const dir = await fs.promises.opendir(path.join(config.cargoCache, registryName));
for await (const dirent of dir) {
if (dirent.isFile() && !pkgSet.has(dirent.name)) {
await rm(dir.path, dirent);
}
}
}
export async function cleanGit(config: CacheConfig, packages: Packages) {
const coPath = path.join(config.cargoGit, "checkouts");
const dbPath = path.join(config.cargoGit, "db");
const repos = new Map<string, Set<string>>();
for (const p of packages) {
if (!p.path.startsWith(coPath)) {
continue;
}
const [repo, ref] = p.path.slice(coPath.length + 1).split(path.sep);
const refs = repos.get(repo);
if (refs) {
refs.add(ref);
} else {
repos.set(repo, new Set([ref]));
}
}
// we have to keep both the clone and the checkout; removing either will
// trigger a rebuild
let dir: fs.Dir;
// clean the db
dir = await fs.promises.opendir(dbPath);
for await (const dirent of dir) {
if (!repos.has(dirent.name)) {
await rm(dir.path, dirent);
}
}
// clean the checkouts
dir = await fs.promises.opendir(coPath);
for await (const dirent of dir) {
const refs = repos.get(dirent.name);
if (!refs) {
await rm(dir.path, dirent);
continue;
}
if (!dirent.isDirectory()) {
continue;
}
const refsDir = await fs.promises.opendir(path.join(dir.path, dirent.name));
for await (const dirent of refsDir) {
if (!refs.has(dirent.name)) {
await rm(refsDir.path, dirent);
}
}
}
}
const ONE_WEEK = 7 * 24 * 3600 * 1000;
export async function rmExcept(dirName: string, keepPrefix: Set<string>) {
const dir = await fs.promises.opendir(dirName);
for await (const dirent of dir) {
let name = dirent.name;
const idx = name.lastIndexOf("-");
if (idx !== -1) {
name = name.slice(0, idx);
}
const fileName = path.join(dir.path, dirent.name);
const { mtime } = await fs.promises.stat(fileName);
// we don't really know how long a file stays relevant, so also remove anything untouched for over a week
if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > ONE_WEEK) {
await rm(dir.path, dirent);
}
}
}
export async function rm(parent: string, dirent: fs.Dirent) {
try {
const fileName = path.join(parent, dirent.name);
core.debug(`deleting "${fileName}"`);
if (dirent.isFile()) {
await fs.promises.unlink(fileName);
} else if (dirent.isDirectory()) {
await io.rmRF(fileName);
}
} catch {}
}
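
Cargo writes build artifacts as `name-hash`, which is why `rmExcept` strips everything after the last `-` before matching against the keep set. A minimal sketch of the resulting behavior, with hypothetical file names:

// Given deps/ containing serde-1a2b3c.d, libserde-1a2b3c.rlib and stale-9f8e7d.d,
// and a keep set of {"serde", "libserde"}, this removes stale-9f8e7d.d (prefix not
// kept) as well as any kept file whose mtime is older than ONE_WEEK.
await rmExcept(path.join(profileDir, "deps"), new Set(["serde", "libserde"]));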

src/common.ts

@@ -1,323 +0,0 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as glob from "@actions/glob";
import * as io from "@actions/io";
import crypto from "crypto";
import fs from "fs";
import os from "os";
import path from "path";
process.on("uncaughtException", (e) => {
core.info(`[warning] ${e.message}`);
if (e.stack) {
core.info(e.stack);
}
});
const cwd = core.getInput("working-directory");
// Read each line of workspace-paths as a unique path
// TODO: this could be read from .cargo config file directly
const workspacePathsInput = core.getInput("workspace-paths") || "./";
const workspacePaths = workspacePathsInput.trim().split("\n");
if (cwd) {
process.chdir(cwd);
}
export const stateBins = "RUST_CACHE_BINS";
export const stateKey = "RUST_CACHE_KEY";
const stateHash = "RUST_CACHE_HASH";
const home = os.homedir();
const cargoHome = process.env.CARGO_HOME || path.join(home, ".cargo");
export const paths = {
cargoHome,
index: path.join(cargoHome, "registry/index"),
cache: path.join(cargoHome, "registry/cache"),
git: path.join(cargoHome, "git"),
workspaces: workspacePaths,
};
interface CacheConfig {
// A list of common paths needing caching
paths: Array<string>;
key: string;
restoreKeys: Array<string>;
// A list of one or more workspace directories
workspaces: Array<string>;
}
const RefKey = "GITHUB_REF";
export function isValidEvent(): boolean {
return RefKey in process.env && Boolean(process.env[RefKey]);
}
export async function getCacheConfig(): Promise<CacheConfig> {
let lockHash = core.getState(stateHash);
if (!lockHash) {
lockHash = await getLockfileHash();
core.saveState(stateHash, lockHash);
}
let key = `v0-rust-`;
const sharedKey = core.getInput("sharedKey");
if (sharedKey) {
key += `${sharedKey}-`;
} else {
const inputKey = core.getInput("key");
if (inputKey) {
key += `${inputKey}-`;
}
const job = process.env.GITHUB_JOB;
if (job) {
key += `${job}-`;
}
}
const extraEnvKeys = core.getInput("envVars").split(/\s+/);
key += `${getEnvKey(extraEnvKeys)}-`;
key += await getRustKey();
return {
paths: [
path.join(cargoHome, "bin"),
path.join(cargoHome, ".crates2.json"),
path.join(cargoHome, ".crates.toml"),
paths.git,
paths.cache,
paths.index,
],
key: `${key}-${lockHash}`,
restoreKeys: [key],
workspaces: paths.workspaces,
};
}
export async function getCargoBins(): Promise<Set<string>> {
try {
const { installs }: { installs: { [key: string]: { bins: Array<string> } } } = JSON.parse(
await fs.promises.readFile(path.join(paths.cargoHome, ".crates2.json"), "utf8"),
);
const bins = new Set<string>();
for (const pkg of Object.values(installs)) {
for (const bin of pkg.bins) {
bins.add(bin);
}
}
return bins;
} catch {
return new Set<string>();
}
}
/**
* Create a key hash, generated from environment variables.
*
* The available environment variables are filtered by a set of defaults that are common for Rust
* projects and should apply to almost all runs, as they modify the Rustc compiler's, Clippy's and
* other tools' behavior.
*
* @param extraKeys additional user-provided keys that are added to the default list. These are
* treated as regular expressions ({@link RegExp}), and will each be surrounded by a `^` and `$`,
* to make sure they are matched against the whole env var name.
* @returns An SHA-1 hash over all the environment variable values whose names were not filtered
* out. The hash is returned as a hex string, **reduced to half its length**.
*/
function getEnvKey(extraKeys: string[]): string {
const hasher = crypto.createHash("sha1");
const defaultValidKeys = [
/^CARGO_.+$/,
/^CC_.+$/,
/^CXX_.+$/,
/^RUSTC_.+$/,
/^RUSTC$/,
/^RUSTDOC$/,
/^RUSTDOCFLAGS$/,
/^RUSTFLAGS$/,
/^RUSTFMT$/,
];
// Combine default key filters with user-provided ones.
const keyFilter = defaultValidKeys.concat(extraKeys.map((key) => new RegExp(`^${key}$`)));
for (const [key, value] of Object.entries(process.env)) {
if (keyFilter.some((re) => re.test(key)) && value) {
hasher.update(`${key}=${value}`);
}
}
return hasher.digest("hex").slice(0, 20);
}
async function getRustKey(): Promise<string> {
const rustc = await getRustVersion();
return `${rustc.release}-${rustc.host}-${rustc["commit-hash"].slice(0, 12)}`;
}
interface RustVersion {
host: string;
release: string;
"commit-hash": string;
}
async function getRustVersion(): Promise<RustVersion> {
const stdout = await getCmdOutput("rustc", ["-vV"]);
let splits = stdout
.split(/[\n\r]+/)
.filter(Boolean)
.map((s) => s.split(":").map((s) => s.trim()))
.filter((s) => s.length === 2);
return Object.fromEntries(splits);
}
export async function getCmdOutput(
cmd: string,
args: Array<string> = [],
options: exec.ExecOptions = {},
): Promise<string> {
let stdout = "";
await exec.exec(cmd, args, {
silent: true,
listeners: {
stdout(data) {
stdout += data.toString();
},
},
...options,
});
return stdout;
}
async function getLockfileHash(): Promise<string> {
const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", {
followSymbolicLinks: false,
});
const files = await globber.glob();
core.debug("Lockfile Hash includes: " + JSON.stringify(files));
files.sort((a, b) => a.localeCompare(b));
const hasher = crypto.createHash("sha1");
for (const file of files) {
for await (const chunk of fs.createReadStream(file)) {
hasher.update(chunk);
}
}
return hasher.digest("hex").slice(0, 20);
}
export interface PackageDefinition {
name: string;
version: string;
path: string;
targets: Array<string>;
}
export type Packages = Array<PackageDefinition>;
interface Meta {
packages: Array<{
name: string;
version: string;
manifest_path: string;
targets: Array<{ kind: Array<string>; name: string }>;
}>;
}
export async function getPackages(workspacePaths: Array<string>): Promise<Packages> {
const cwd = process.cwd();
let allPackages: Packages = [];
for (const workspacePath of workspacePaths) {
process.chdir(workspacePath);
const meta: Meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"]));
const workspacePackages = meta.packages
.filter((p) => !p.manifest_path.startsWith(cwd))
.map((p) => {
const targets = p.targets.filter((t) => t.kind[0] === "lib").map((t) => t.name);
return { name: p.name, version: p.version, targets, path: path.dirname(p.manifest_path) };
});
allPackages = allPackages.concat(workspacePackages);
}
process.chdir(cwd);
return allPackages;
}
export async function cleanTarget(targetDir: string, packages: Packages) {
await fs.promises.unlink(path.join(targetDir, "./.rustc_info.json"));
await cleanProfileTarget(targetDir, packages, "debug");
await cleanProfileTarget(targetDir, packages, "release");
}
async function cleanProfileTarget(targetDir: string, packages: Packages, profile: string) {
try {
await fs.promises.access(path.join(targetDir, profile));
} catch {
return;
}
await io.rmRF(path.join(targetDir, profile, "./examples"));
await io.rmRF(path.join(targetDir, profile, "./incremental"));
let dir: fs.Dir;
// remove all *files* from the profile directory
dir = await fs.promises.opendir(path.join(targetDir, profile));
for await (const dirent of dir) {
if (dirent.isFile()) {
await rm(dir.path, dirent);
}
}
const keepPkg = new Set(packages.map((p) => p.name));
await rmExcept(path.join(targetDir, profile, "./build"), keepPkg);
await rmExcept(path.join(targetDir, profile, "./.fingerprint"), keepPkg);
const keepDeps = new Set(
packages.flatMap((p) => {
const names = [];
for (const n of [p.name, ...p.targets]) {
const name = n.replace(/-/g, "_");
names.push(name, `lib${name}`);
}
return names;
}),
);
await rmExcept(path.join(targetDir, profile, "./deps"), keepDeps);
}
const oneWeek = 7 * 24 * 3600 * 1000;
export async function rmExcept(dirName: string, keepPrefix: Set<string>) {
const dir = await fs.promises.opendir(dirName);
for await (const dirent of dir) {
let name = dirent.name;
const idx = name.lastIndexOf("-");
if (idx !== -1) {
name = name.slice(0, idx);
}
const fileName = path.join(dir.path, dirent.name);
const { mtime } = await fs.promises.stat(fileName);
// we don't really know how long a file stays relevant, so also remove anything untouched for over a week
if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > oneWeek) {
await rm(dir.path, dirent);
}
}
}
export async function rm(parent: string, dirent: fs.Dirent) {
try {
const fileName = path.join(parent, dirent.name);
core.debug(`deleting "${fileName}"`);
if (dirent.isFile()) {
await fs.promises.unlink(fileName);
} else if (dirent.isDirectory()) {
await io.rmRF(fileName);
}
} catch { }
}

src/config.ts Normal file (238 additions)

@@ -0,0 +1,238 @@
import * as core from "@actions/core";
import * as glob from "@actions/glob";
import crypto from "crypto";
import fs from "fs";
import os from "os";
import path from "path";
import { getCmdOutput } from "./utils";
import { Workspace } from "./workspace";
const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH";
const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES";
export const STATE_BINS = "RUST_CACHE_BINS";
export const STATE_KEY = "RUST_CACHE_KEY";
export class CacheConfig {
/** All the paths we want to cache */
public cachePaths: Array<string> = [];
/** The primary cache key */
public cacheKey = "";
/** The secondary (restore) key that only contains the prefix and environment */
public restoreKey = "";
/** The `~/.cargo` directory */
public cargoHome = "";
/** The cargo registry index directory */
public cargoIndex = "";
/** The cargo registry cache directory */
public cargoCache = "";
/** The cargo git checkouts directory */
public cargoGit = "";
/** The workspace configurations */
public workspaces: Array<Workspace> = [];
/** The prefix portion of the cache key */
private keyPrefix = "";
/** The rust version considered for the cache key */
private keyRust = "";
/** The environment variables considered for the cache key */
private keyEnvs: Array<string> = [];
/** The files considered for the cache key */
private keyFiles: Array<string> = [];
private constructor() {}
/**
* Constructs a [`CacheConfig`] with all the paths and keys.
*
* This will read the action `input`s, and read and persist `state` as necessary.
*/
static async new(): Promise<CacheConfig> {
const self = new CacheConfig();
// Construct key prefix:
// This uses either the `sharedKey` input,
// or the `key` input combined with the `job` key.
let key = `v0-rust`;
const sharedKey = core.getInput("sharedKey");
if (sharedKey) {
key += `-${sharedKey}`;
} else {
const inputKey = core.getInput("key");
if (inputKey) {
key += `-${inputKey}`;
}
const job = process.env.GITHUB_JOB;
if (job) {
key += `-${job}`;
}
}
self.keyPrefix = key;
// Construct environment portion of the key:
// This consists of a hash that considers the rust version
// as well as all the environment variables as given by a default list
// and the `envVars` input.
// The env vars are sorted, matched by prefix and hashed into the
// resulting environment hash.
let hasher = crypto.createHash("sha1");
const rustVersion = await getRustVersion();
let keyRust = `${rustVersion.release} ${rustVersion.host}`;
hasher.update(keyRust);
hasher.update(rustVersion["commit-hash"]);
keyRust += ` (${rustVersion["commit-hash"]})`;
self.keyRust = keyRust;
// these prefixes should cover most of the compiler / rust / cargo keys
const envPrefixes = ["CARGO", "CC", "CXX", "CMAKE", "RUST"];
envPrefixes.push(...core.getInput("envVars").split(/\s+/));
// sort the available env vars so we have a more stable hash
const keyEnvs = [];
const envKeys = Object.keys(process.env);
envKeys.sort((a, b) => a.localeCompare(b));
for (const key of envKeys) {
const value = process.env[key];
if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) {
hasher.update(`${key}=${value}`);
keyEnvs.push(key);
}
}
self.keyEnvs = keyEnvs;
key += `-${hasher.digest("hex")}`;
self.restoreKey = key;
// Construct the lockfiles portion of the key:
// This considers all the files found via globbing for various manifests
// and lockfiles.
// This part is computed in the "pre"/"restore" part of the job and persisted
// into the `state`. That state is loaded in the "post"/"save" part of the
// job so we have consistent values even though the "main" actions run
// might create/overwrite lockfiles.
let lockHash = core.getState(STATE_LOCKFILE_HASH);
let keyFiles: Array<string> = JSON.parse(core.getState(STATE_LOCKFILES) || "[]");
if (!lockHash) {
const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", {
followSymbolicLinks: false,
});
keyFiles = await globber.glob();
keyFiles.sort((a, b) => a.localeCompare(b));
hasher = crypto.createHash("sha1");
for (const file of keyFiles) {
for await (const chunk of fs.createReadStream(file)) {
hasher.update(chunk);
}
}
lockHash = hasher.digest("hex");
core.saveState(STATE_LOCKFILE_HASH, lockHash);
core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles));
}
self.keyFiles = keyFiles;
key += `-${lockHash}`;
self.cacheKey = key;
// Constructs some generic paths, workspace config and paths to restore:
// The workspaces are given using a `$workspace -> $target` syntax.
const home = os.homedir();
const cargoHome = process.env.CARGO_HOME || path.join(home, ".cargo");
self.cargoHome = cargoHome;
self.cargoIndex = path.join(cargoHome, "registry/index");
self.cargoCache = path.join(cargoHome, "registry/cache");
self.cargoGit = path.join(cargoHome, "git");
const workspaces: Array<Workspace> = [];
const workspacesInput = core.getInput("workspaces") || ".";
for (const workspace of workspacesInput.trim().split("\n")) {
let [root, target = "target"] = workspace.split(" -> ");
root = path.resolve(root);
target = path.join(root, target);
workspaces.push(new Workspace(root, target));
}
self.workspaces = workspaces;
self.cachePaths = [
path.join(cargoHome, "bin"),
path.join(cargoHome, ".crates2.json"),
path.join(cargoHome, ".crates.toml"),
self.cargoIndex,
self.cargoCache,
self.cargoGit,
...workspaces.map((ws) => ws.target),
];
return self;
}
printInfo() {
core.info(`Cache Paths:`);
for (const path of this.cachePaths) {
core.info(` ${path}`);
}
core.info(`Restore Key:`);
core.info(` ${this.restoreKey}`);
core.info(`Cache Key:`);
core.info(` ${this.cacheKey}`);
core.info(`.. Prefix:`);
core.info(` - ${this.keyPrefix}`);
core.info(`.. Environment considered:`);
core.info(` - Rust Version: ${this.keyRust}`);
for (const env of this.keyEnvs) {
core.info(` - ${env}`);
}
core.info(`.. Lockfiles considered:`);
for (const file of this.keyFiles) {
core.info(` - ${file}`);
}
core.info(`Workspaces configured:`);
for (const workspace of this.workspaces) {
core.info(` ${workspace.root}`);
}
}
public async getCargoBins(): Promise<Set<string>> {
const bins = new Set<string>();
try {
const { installs }: { installs: { [key: string]: { bins: Array<string> } } } = JSON.parse(
await fs.promises.readFile(path.join(this.cargoHome, ".crates2.json"), "utf8"),
);
for (const pkg of Object.values(installs)) {
for (const bin of pkg.bins) {
bins.add(bin);
}
}
} catch {}
return bins;
}
}
interface RustVersion {
host: string;
release: string;
"commit-hash": string;
}
async function getRustVersion(): Promise<RustVersion> {
const stdout = await getCmdOutput("rustc", ["-vV"]);
let splits = stdout
.split(/[\n\r]+/)
.filter(Boolean)
.map((s) => s.split(":").map((s) => s.trim()))
.filter((s) => s.length === 2);
return Object.fromEntries(splits);
}
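
To make the `$workspace -> $target` syntax concrete, a small sketch (hypothetical paths) of the resolution performed in `CacheConfig.new()` above:

// Mirrors the parsing loop above; paths are hypothetical.
const input = "crates/app -> build\ncrates/lib";
for (const entry of input.trim().split("\n")) {
let [root, target = "target"] = entry.split(" -> ");
root = path.resolve(root); // e.g. /repo/crates/app
target = path.join(root, target); // e.g. /repo/crates/app/build
// the second entry falls back to /repo/crates/lib/target
}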

src/restore.ts

@@ -1,7 +1,15 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import path from "path";
import { cleanTarget, getCacheConfig, getCargoBins, getPackages, stateBins, stateKey } from "./common";
import { cleanTargetDir } from "./cleanup";
import { CacheConfig, STATE_BINS, STATE_KEY } from "./config";
process.on("uncaughtException", (e) => {
core.info(`[warning] ${e.message}`);
if (e.stack) {
core.info(e.stack);
}
});
async function run() {
if (!cache.isFeatureAvailable()) {
@@ -17,28 +25,27 @@ async function run() {
core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure);
core.exportVariable("CARGO_INCREMENTAL", 0);
const { paths, key, restoreKeys, workspaces } = await getCacheConfig();
const restorePaths = paths.concat(workspaces);
const config = await CacheConfig.new();
const bins = await getCargoBins();
core.saveState(stateBins, JSON.stringify([...bins]));
const bins = await config.getCargoBins();
core.saveState(STATE_BINS, JSON.stringify([...bins]));
core.info(`Restoring paths:\n ${restorePaths.join("\n ")}`);
core.info(`In directory:\n ${process.cwd()}`);
core.info(`Using keys:\n ${[key, ...restoreKeys].join("\n ")}`);
const restoreKey = await cache.restoreCache(restorePaths, key, restoreKeys);
core.info(`# Restoring cache`);
config.printInfo();
const key = config.cacheKey;
const restoreKey = await cache.restoreCache(config.cachePaths, key, [config.restoreKey]);
if (restoreKey) {
core.info(`Restored from cache key "${restoreKey}".`);
core.saveState(stateKey, restoreKey);
core.saveState(STATE_KEY, restoreKey);
if (restoreKey !== key) {
// pre-clean the target directory on cache mismatch
const packages = await getPackages(workspaces);
core.info("Restoring the following repository packages: " + JSON.stringify(packages));
for (const workspace of config.workspaces) {
const packages = await workspace.getPackages();
for (const workspace of workspaces) {
const target = path.join(workspace, "target");
await cleanTarget(target, packages);
try {
await cleanTargetDir(workspace.target, packages);
} catch {}
}
}

src/save.ts

@@ -2,20 +2,17 @@ import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as glob from "@actions/glob";
import * as io from "@actions/io";
import fs from "fs";
import path from "path";
import {
cleanTarget,
getCacheConfig,
getCargoBins,
getPackages,
Packages,
paths,
rm,
stateBins,
stateKey,
} from "./common";
import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
import { CacheConfig, STATE_KEY } from "./config";
process.on("uncaughtException", (e) => {
core.info(`[warning] ${e.message}`);
if (e.stack) {
core.info(e.stack);
}
});
async function run() {
if (!cache.isFeatureAvailable()) {
@@ -23,10 +20,9 @@
}
try {
const { paths, workspaces, key } = await getCacheConfig();
const savePaths = paths.concat(workspaces);
const config = await CacheConfig.new();
if (core.getState(stateKey) === key) {
if (core.getState(STATE_KEY) === config.cacheKey) {
core.info(`Cache up-to-date.`);
return;
}
@@ -34,44 +30,42 @@
// TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
await macOsWorkaround();
const registryName = await getRegistryName();
const packages = await getPackages(workspaces);
core.info("Detected repository packages to cache: " + JSON.stringify(packages));
const registryName = await getRegistryName(config);
const allPackages = [];
for (const workspace of config.workspaces) {
const packages = await workspace.getPackages();
allPackages.push(...packages);
try {
await cleanTargetDir(workspace.target, packages);
} catch (e) {
core.info(`[warning] ${(e as any).stack}`);
}
}
if (registryName) {
try {
await cleanRegistry(registryName, packages);
await cleanRegistry(config, registryName, allPackages);
} catch (e) {
core.info(`[warning] ${(e as any).stack}`);
}
}
try {
await cleanBin();
await cleanBin(config);
} catch (e) {
core.info(`[warning] ${(e as any).stack}`);
}
try {
await cleanGit(packages);
await cleanGit(config, allPackages);
} catch (e) {
core.info(`[warning] ${(e as any).stack}`);
}
for (const workspace of workspaces) {
const target = path.join(workspace, "target");
try {
await cleanTarget(target, packages);
}
catch (e) {
core.info(`[warning] ${(e as any).stack}`);
}
}
core.info(`Saving paths:\n ${savePaths.join("\n ")}`);
core.info(`In directory:\n ${process.cwd()}`);
core.info(`Using key:\n ${key}`);
await cache.saveCache(savePaths, key);
core.info(`# Saving cache`);
config.printInfo();
await cache.saveCache(config.cachePaths, config.cacheKey);
} catch (e) {
core.info(`[warning] ${(e as any).stack}`);
}
@@ -79,8 +73,8 @@ async function run() {
run();
async function getRegistryName(): Promise<string | null> {
const globber = await glob.create(`${paths.index}/**/.last-updated`, { followSymbolicLinks: false });
async function getRegistryName(config: CacheConfig): Promise<string | null> {
const globber = await glob.create(`${config.cargoIndex}/**/.last-updated`, { followSymbolicLinks: false });
const files = await globber.glob();
if (files.length > 1) {
core.warning(`got multiple registries: "${files.join('", "')}"`);
@@ -93,88 +87,10 @@ async function getRegistryName(): Promise<string | null> {
return path.basename(path.dirname(first));
}
async function cleanBin() {
const bins = await getCargoBins();
const oldBins = JSON.parse(core.getState(stateBins));
for (const bin of oldBins) {
bins.delete(bin);
}
const dir = await fs.promises.opendir(path.join(paths.cargoHome, "bin"));
for await (const dirent of dir) {
if (dirent.isFile() && !bins.has(dirent.name)) {
await rm(dir.path, dirent);
}
}
}
async function cleanRegistry(registryName: string, packages: Packages) {
await io.rmRF(path.join(paths.index, registryName, ".cache"));
const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
const dir = await fs.promises.opendir(path.join(paths.cache, registryName));
for await (const dirent of dir) {
if (dirent.isFile() && !pkgSet.has(dirent.name)) {
await rm(dir.path, dirent);
}
}
}
async function cleanGit(packages: Packages) {
const coPath = path.join(paths.git, "checkouts");
const dbPath = path.join(paths.git, "db");
const repos = new Map<string, Set<string>>();
for (const p of packages) {
if (!p.path.startsWith(coPath)) {
continue;
}
const [repo, ref] = p.path.slice(coPath.length + 1).split(path.sep);
const refs = repos.get(repo);
if (refs) {
refs.add(ref);
} else {
repos.set(repo, new Set([ref]));
}
}
// we have to keep both the clone and the checkout; removing either will
// trigger a rebuild
let dir: fs.Dir;
// clean the db
dir = await fs.promises.opendir(dbPath);
for await (const dirent of dir) {
if (!repos.has(dirent.name)) {
await rm(dir.path, dirent);
}
}
// clean the checkouts
dir = await fs.promises.opendir(coPath);
for await (const dirent of dir) {
const refs = repos.get(dirent.name);
if (!refs) {
await rm(dir.path, dirent);
continue;
}
if (!dirent.isDirectory()) {
continue;
}
const refsDir = await fs.promises.opendir(path.join(dir.path, dirent.name));
for await (const dirent of refsDir) {
if (!refs.has(dirent.name)) {
await rm(refsDir.path, dirent);
}
}
}
}
async function macOsWorkaround() {
try {
// Workaround for https://github.com/actions/cache/issues/403
// Also see https://github.com/rust-lang/cargo/issues/8603
await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true });
} catch { }
} catch {}
}

src/utils.ts Normal file (29 additions)

@@ -0,0 +1,29 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
export async function getCmdOutput(
cmd: string,
args: Array<string> = [],
options: exec.ExecOptions = {},
): Promise<string> {
let stdout = "";
let stderr = "";
try {
await exec.exec(cmd, args, {
silent: true,
listeners: {
stdout(data) {
stdout += data.toString();
},
stderr(data) {
stderr += data.toString();
},
},
...options,
});
} catch (e) {
core.error(stderr);
throw e;
}
return stdout;
}
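
Call sites elsewhere in this commit use it to capture command output while still surfacing stderr through `core.error` on failure, e.g.:

// Matches how src/workspace.ts invokes cargo:
const metadataJson = await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"]);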

src/workspace.ts Normal file (42 additions)

@@ -0,0 +1,42 @@
import path from "path";
import { getCmdOutput } from "./utils";
export class Workspace {
constructor(public root: string, public target: string) {}
public async getPackages(): Promise<Packages> {
let packages: Packages = [];
try {
const meta: Meta = JSON.parse(
await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"]),
);
for (const pkg of meta.packages) {
if (!pkg.manifest_path.startsWith(this.root)) {
continue;
}
const targets = pkg.targets.filter((t) => t.kind[0] === "lib").map((t) => t.name);
packages.push({ name: pkg.name, version: pkg.version, targets, path: path.dirname(pkg.manifest_path) });
}
} catch {}
return packages;
}
}
export interface PackageDefinition {
name: string;
version: string;
path: string;
targets: Array<string>;
}
export type Packages = Array<PackageDefinition>;
interface Meta {
packages: Array<{
name: string;
version: string;
manifest_path: string;
targets: Array<{ kind: Array<string>; name: string }>;
}>;
}