avoid calling cargo metadata on pre-cleanup
parent 19c46583c5
commit 3f2513fdf4
@@ -1,5 +1,9 @@
 # Changelog
 
+## 2.0.2
+
+- Avoid calling `cargo metadata` on pre-cleanup.
+
 ## 2.0.1
 
 - Primarily just updating dependencies to fix GitHub deprecation notices.
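To make the changelog entry concrete: before this commit, the pre-cleanup pass called `Workspace.getPackages()` (which shells out to `cargo metadata`) just to prune the target directory; after it, the pre-clean passes an empty package list and lets the timestamp check in `rmExcept` decide what to delete, as the hunks below show. The sketch that follows restates that control-flow change in isolation; `preCleanBefore`, `preCleanAfter`, and the stubbed `cleanTargetDir` are illustrative stand-ins, not the action's actual exports.

// Illustrative stand-ins for the action's types; the real package entries
// carry more fields (version, targets, ...).
interface Package {
    name: string;
}

interface Workspace {
    target: string;
    getPackages(): Promise<Package[]>; // runs `cargo metadata` under the hood
}

// Stub: the real cleanTargetDir removes build artifacts not owned by `packages`
// and, with checkTimestamp set, anything older than one week.
async function cleanTargetDir(targetDir: string, packages: Package[], checkTimestamp: boolean): Promise<void> {
    console.log(`clean ${targetDir}: keep ${packages.length} packages, checkTimestamp=${checkTimestamp}`);
}

// Before: every cache mismatch paid for a `cargo metadata` invocation.
async function preCleanBefore(workspaces: Workspace[]): Promise<void> {
    for (const workspace of workspaces) {
        try {
            const packages = await workspace.getPackages();
            await cleanTargetDir(workspace.target, packages, true);
        } catch {}
    }
}

// After: pass an empty package list and rely on the timestamp check instead.
async function preCleanAfter(workspaces: Workspace[]): Promise<void> {
    for (const workspace of workspaces) {
        try {
            await cleanTargetDir(workspace.target, [], true);
        } catch {}
    }
}

The trade-off is that a pre-clean with no package information can only fall back to coarse criteria, which is exactly what the one-week timestamp check in `rmExcept` provides.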
@@ -64476,6 +64476,7 @@ async function getCmdOutput(cmd, args = [], options = {}) {
 ;// CONCATENATED MODULE: ./src/workspace.ts
+
 
 
 const SAVE_TARGETS = new Set(["lib", "proc-macro"]);
 class Workspace {
     constructor(root, target) {
@@ -64485,9 +64486,11 @@ class Workspace {
     async getPackages() {
         let packages = [];
         try {
+            lib_core.debug(`collecting metadata for "${this.root}"`);
             const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], {
                 cwd: this.root,
             }));
+            lib_core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
             for (const pkg of meta.packages) {
                 if (pkg.manifest_path.startsWith(this.root)) {
                     continue;
@@ -64840,30 +64843,34 @@ async function cleanGit(packages) {
 }
 const ONE_WEEK = 7 * 24 * 3600 * 1000;
 /**
- * Removes all files or directories in `dirName`, except the ones matching
- * any string in the `keepPrefix` set.
- *
- * The matching strips and trailing `-$hash` suffix.
+ * Removes all files or directories in `dirName` matching some criteria.
+ *
+ * When the `checkTimestamp` flag is set, this will also remove anything older
+ * than one week.
+ *
+ * Otherwise, it will remove everything that does not match any string in the
+ * `keepPrefix` set.
+ * The matching strips and trailing `-$hash` suffix.
  */
 async function rmExcept(dirName, keepPrefix, checkTimestamp = false) {
     const dir = await external_fs_default().promises.opendir(dirName);
     for await (const dirent of dir) {
+        if (checkTimestamp) {
+            const fileName = external_path_default().join(dir.path, dirent.name);
+            const { mtime } = await external_fs_default().promises.stat(fileName);
+            const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
+            if (isOutdated) {
+                await rm(dir.path, dirent);
+            }
+            return;
+        }
         let name = dirent.name;
         // strip the trailing hash
         const idx = name.lastIndexOf("-");
         if (idx !== -1) {
             name = name.slice(0, idx);
         }
-        let isOutdated = false;
-        if (checkTimestamp) {
-            const fileName = external_path_default().join(dir.path, dirent.name);
-            const { mtime } = await external_fs_default().promises.stat(fileName);
-            isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
-        }
-        if (!keepPrefix.has(name) || isOutdated) {
+        if (!keepPrefix.has(name)) {
            await rm(dir.path, dirent);
         }
     }
@@ -64933,8 +64940,7 @@ async function run() {
         // pre-clean the target directory on cache mismatch
         for (const workspace of config.workspaces) {
             try {
-                const packages = await workspace.getPackages();
-                await cleanTargetDir(workspace.target, packages, true);
+                await cleanTargetDir(workspace.target, [], true);
             }
             catch { }
         }
@@ -64476,6 +64476,7 @@ async function getCmdOutput(cmd, args = [], options = {}) {
 ;// CONCATENATED MODULE: ./src/workspace.ts
+
 
 
 const SAVE_TARGETS = new Set(["lib", "proc-macro"]);
 class Workspace {
     constructor(root, target) {
@@ -64485,9 +64486,11 @@ class Workspace {
     async getPackages() {
         let packages = [];
         try {
+            core.debug(`collecting metadata for "${this.root}"`);
             const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], {
                 cwd: this.root,
             }));
+            core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
             for (const pkg of meta.packages) {
                 if (pkg.manifest_path.startsWith(this.root)) {
                     continue;
@@ -64840,30 +64843,34 @@ async function cleanGit(packages) {
 }
 const ONE_WEEK = 7 * 24 * 3600 * 1000;
 /**
- * Removes all files or directories in `dirName`, except the ones matching
- * any string in the `keepPrefix` set.
- *
- * The matching strips and trailing `-$hash` suffix.
+ * Removes all files or directories in `dirName` matching some criteria.
+ *
+ * When the `checkTimestamp` flag is set, this will also remove anything older
+ * than one week.
+ *
+ * Otherwise, it will remove everything that does not match any string in the
+ * `keepPrefix` set.
+ * The matching strips and trailing `-$hash` suffix.
  */
 async function rmExcept(dirName, keepPrefix, checkTimestamp = false) {
     const dir = await external_fs_default().promises.opendir(dirName);
     for await (const dirent of dir) {
+        if (checkTimestamp) {
+            const fileName = external_path_default().join(dir.path, dirent.name);
+            const { mtime } = await external_fs_default().promises.stat(fileName);
+            const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
+            if (isOutdated) {
+                await rm(dir.path, dirent);
+            }
+            return;
+        }
         let name = dirent.name;
         // strip the trailing hash
         const idx = name.lastIndexOf("-");
         if (idx !== -1) {
             name = name.slice(0, idx);
         }
-        let isOutdated = false;
-        if (checkTimestamp) {
-            const fileName = external_path_default().join(dir.path, dirent.name);
-            const { mtime } = await external_fs_default().promises.stat(fileName);
-            isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
-        }
-        if (!keepPrefix.has(name) || isOutdated) {
+        if (!keepPrefix.has(name)) {
            await rm(dir.path, dirent);
         }
     }
@@ -180,17 +180,29 @@ export async function cleanGit(packages: Packages) {
 const ONE_WEEK = 7 * 24 * 3600 * 1000;
 
 /**
- * Removes all files or directories in `dirName`, except the ones matching
- * any string in the `keepPrefix` set.
- *
- * The matching strips and trailing `-$hash` suffix.
- *
+ * Removes all files or directories in `dirName` matching some criteria.
+ *
+ * When the `checkTimestamp` flag is set, this will also remove anything older
+ * than one week.
+ *
+ * Otherwise, it will remove everything that does not match any string in the
+ * `keepPrefix` set.
+ * The matching strips and trailing `-$hash` suffix.
  */
 async function rmExcept(dirName: string, keepPrefix: Set<string>, checkTimestamp = false) {
     const dir = await fs.promises.opendir(dirName);
     for await (const dirent of dir) {
+        if (checkTimestamp) {
+            const fileName = path.join(dir.path, dirent.name);
+            const { mtime } = await fs.promises.stat(fileName);
+            const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
+
+            if (isOutdated) {
+                await rm(dir.path, dirent);
+            }
+            return;
+        }
+
         let name = dirent.name;
 
         // strip the trailing hash
@@ -199,14 +211,7 @@ async function rmExcept(dirName: string, keepPrefix: Set<string>, checkTimestamp
             name = name.slice(0, idx);
         }
 
-        let isOutdated = false;
-        if (checkTimestamp) {
-            const fileName = path.join(dir.path, dirent.name);
-            const { mtime } = await fs.promises.stat(fileName);
-            isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
-        }
-
-        if (!keepPrefix.has(name) || isOutdated) {
+        if (!keepPrefix.has(name)) {
             await rm(dir.path, dirent);
         }
     }
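The patched `rmExcept` above now has two modes: with `checkTimestamp` set it prunes purely by modification time (anything older than one week is removed), and without it, it keeps only entries whose name, after stripping a trailing `-$hash` suffix, appears in `keepPrefix`. The sketch below re-derives just that per-entry decision; `stripHashSuffix`, `isStale`, and `shouldRemove` are hypothetical helper names used for illustration, not functions from the action.

import fs from "fs";
import path from "path";

const ONE_WEEK = 7 * 24 * 3600 * 1000;

// "serde_json-0ab1cd2ef3456789" -> "serde_json"; names without a dash pass through.
function stripHashSuffix(name: string): string {
    const idx = name.lastIndexOf("-");
    return idx !== -1 ? name.slice(0, idx) : name;
}

// True if the entry's mtime is more than one week in the past.
async function isStale(dirName: string, entryName: string): Promise<boolean> {
    const { mtime } = await fs.promises.stat(path.join(dirName, entryName));
    return Date.now() - mtime.getTime() > ONE_WEEK;
}

// Per-entry decision mirroring the two modes of the patched rmExcept:
// timestamp-only pruning vs. prefix-based pruning.
async function shouldRemove(
    dirName: string,
    entryName: string,
    keepPrefix: Set<string>,
    checkTimestamp: boolean,
): Promise<boolean> {
    if (checkTimestamp) {
        return isStale(dirName, entryName);
    }
    return !keepPrefix.has(stripHashSuffix(entryName));
}

For example, `shouldRemove(dir, "serde-abc123", new Set(["serde"]), false)` resolves to false because the stripped name is kept, while the same entry in timestamp mode is removed once its mtime is more than a week old.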
@@ -43,9 +43,7 @@ async function run() {
         // pre-clean the target directory on cache mismatch
         for (const workspace of config.workspaces) {
             try {
-                const packages = await workspace.getPackages();
-
-                await cleanTargetDir(workspace.target, packages, true);
+                await cleanTargetDir(workspace.target, [], true);
             } catch {}
         }
     }
@@ -1,3 +1,4 @@
+import * as core from "@actions/core";
 import path from "path";
 
 import { getCmdOutput } from "./utils";
@@ -10,11 +11,13 @@ export class Workspace
     public async getPackages(): Promise<Packages> {
         let packages: Packages = [];
         try {
+            core.debug(`collecting metadata for "${this.root}"`);
             const meta: Meta = JSON.parse(
                 await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], {
                     cwd: this.root,
                 }),
             );
+            core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
             for (const pkg of meta.packages) {
                 if (pkg.manifest_path.startsWith(this.root)) {
                     continue;
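For reference, `getPackages` parses the JSON emitted by `cargo metadata --all-features --format-version 1` and skips every package whose `manifest_path` lies under the workspace root, so only packages outside the workspace (external dependencies) flow into the rest of the loop. A rough, self-contained sketch of the same idea follows; `listExternalPackages` and the trimmed-down `Meta`/`MetaPackage` shapes are illustrative (the action's own `Meta` type and `getCmdOutput` helper differ in detail), though the listed JSON fields do exist in real `cargo metadata` output.

import { execFile } from "child_process";
import { promisify } from "util";

const execFileAsync = promisify(execFile);

// Trimmed-down view of `cargo metadata --format-version 1` output;
// the real JSON carries many more fields per package.
interface MetaPackage {
    name: string;
    version: string;
    manifest_path: string;
    targets: { kind: string[]; name: string }[];
}

interface Meta {
    packages: MetaPackage[];
}

// Same idea as Workspace.getPackages: ask cargo for all packages, then drop
// the ones whose manifest lives inside the workspace root, keeping only
// external dependencies.
async function listExternalPackages(root: string): Promise<MetaPackage[]> {
    const { stdout } = await execFileAsync(
        "cargo",
        ["metadata", "--all-features", "--format-version", "1"],
        { cwd: root, maxBuffer: 64 * 1024 * 1024 }, // metadata JSON can be large
    );
    const meta: Meta = JSON.parse(stdout);
    return meta.packages.filter((pkg) => !pkg.manifest_path.startsWith(root));
}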