-rw-r--r-- | .github/mtime_cache/action.js  | 215
-rw-r--r-- | .github/mtime_cache/action.yml |  10
-rw-r--r-- | .github/workflows/ci.yml       |  20
-rw-r--r-- | Cargo.toml                     |   2
-rwxr-xr-x | tools/lint.js                  |   1
5 files changed, 246 insertions, 2 deletions
diff --git a/.github/mtime_cache/action.js b/.github/mtime_cache/action.js
new file mode 100644
index 000000000..ffc7caa7e
--- /dev/null
+++ b/.github/mtime_cache/action.js
@@ -0,0 +1,215 @@
+// This file contains the implementation of a Github Action. Github uses
+// Node.js v12.x to run actions, so this is Node code and not Deno code.
+
+const { spawn } = require("child_process");
+const { dirname, resolve } = require("path");
+const { StringDecoder } = require("string_decoder");
+const { promisify } = require("util");
+
+const fs = require("fs");
+const utimes = promisify(fs.utimes);
+const mkdir = promisify(fs.mkdir);
+const readFile = promisify(fs.readFile);
+const writeFile = promisify(fs.writeFile);
+
+process.on("unhandledRejection", abort);
+main().catch(abort);
+
+async function main() {
+  const startTime = getTime();
+
+  const checkCleanPromise = checkClean();
+
+  const cacheFile = getCacheFile();
+  const oldCache = await loadCache(cacheFile);
+  const newCache = Object.create(null);
+
+  await checkCleanPromise;
+
+  const counters = {
+    restored: 0,
+    added: 0,
+    stale: 0,
+    invalid: 0,
+  };
+
+  for await (const { key, path } of ls()) {
+    let mtime = oldCache[key];
+    if (mtime === undefined) {
+      mtime = startTime;
+      counters.added++;
+    } else if (!mtime || mtime > startTime) {
+      mtime = startTime;
+      counters.invalid++;
+    } else {
+      counters.restored++;
+    }
+
+    await utimes(path, startTime, mtime);
+    newCache[key] = mtime;
+  }
+
+  for (const key of Object.keys(oldCache)) {
+    if (!(key in newCache)) counters.stale++;
+  }
+
+  await saveCache(cacheFile, newCache);
+
+  const stats = {
+    ...counters,
+    "cache file": cacheFile,
+    "time spent": (getTime() - startTime).toFixed(3) + "s",
+  };
+  console.log(
+    [
+      "mtime cache statistics",
+      ...Object.entries(stats).map(([k, v]) => `* ${k}: ${v}`),
+    ].join("\n"),
+  );
+}
+
+function abort(err) {
+  console.error(err);
+  process.exit(1);
+}
+
+function getTime() {
+  return Date.now() / 1000;
+}
+
+function getCacheFile() {
+  const cachePath = process.env["INPUT_CACHE-PATH"];
+  if (cachePath == null) {
+    throw new Error("required input 'cache_path' not provided");
+  }
+
+  const cacheFile = resolve(cachePath, ".mtime-cache-db.json");
+  return cacheFile;
+}
+
+async function loadCache(cacheFile) {
+  try {
+    const json = await readFile(cacheFile, { encoding: "utf8" });
+    return JSON.parse(json);
+  } catch (err) {
+    if (err.code !== "ENOENT") {
+      console.warn(`failed to load mtime cache from '${cacheFile}': ${err}`);
+    }
+    return Object.create(null);
+  }
+}
+
+async function saveCache(cacheFile, cacheData) {
+  const cacheDir = dirname(cacheFile);
+  await mkdir(cacheDir, { recursive: true });
+
+  const json = JSON.stringify(cacheData, null, 2);
+  await writeFile(cacheFile, json, { encoding: "utf8" });
+}
+
+async function checkClean() {
+  let output = run(
+    "git",
+    [
+      "status",
+      "--porcelain=v1",
+      "--ignore-submodules=untracked",
+      "--untracked-files=no",
+    ],
+    { stdio: ["ignore", "pipe", "inherit"] },
+  );
+  output = decode(output, "utf8");
+  output = split(output, "\n");
+  output = filter(output, Boolean);
+  output = await collect(output);
+
+  if (output.length > 0) {
+    throw new Error(
+      ["git work dir dirty", ...output.map((f) => `  ${f}`)].join("\n"),
+    );
+  }
+}
+
+async function* ls(dir = "") {
+  let output = run(
+    "git",
+    ["-C", dir || ".", "ls-files", "--stage", "--eol", "--full-name", "-z"],
+    { stdio: ["ignore", "pipe", "inherit"] },
+  );
+  output = decode(output, "utf8");
+  output = split(output, "\0");
+  output = filter(output, Boolean);
+
+  for await (const entry of output) {
+    const pat =
+      /^(?<mode>\d{6}) (?<hash>[0-9a-f]{40}) 0\t(?<eol>[^\t]*?)[ ]*\t(?<name>.*)$/;
+    const { mode, hash, eol, name } = pat.exec(entry).groups;
+    const path = dir ? `${dir}/${name}` : name;
+
+    switch (mode) {
+      case "120000": // Symbolic link.
+        break;
+      case "160000": // Git submodule.
+        yield* ls(path);
+        break;
+      default: {
+        // Regular file.
+        const key = [mode, hash, eol, path].join("\0");
+        yield { key, path };
+      }
+    }
+  }
+}
+
+async function* run(cmd, args, options) {
+  const child = spawn(cmd, args, options);
+
+  const promise = new Promise((resolve, reject) => {
+    child.on("close", (code, signal) => {
+      if (code === 0 && signal === null) {
+        resolve();
+      } else {
+        const command = [cmd, ...args].join(" ");
+        const how = signal === null ? `exit code ${code}` : `signal ${signal}`;
+        const error = new Error(`Command '${command}' failed: ${how}`);
+        reject(error);
+      }
+    });
+    child.on("error", reject);
+  });
+
+  yield* child.stdout;
+  await promise;
+}
+
+async function collect(stream) {
+  const array = [];
+  for await (const item of stream) {
+    array.push(item);
+  }
+  return array;
+}
+
+async function* decode(stream, encoding) {
+  const decoder = new StringDecoder(encoding);
+  for await (const chunk of stream) {
+    yield decoder.write(chunk);
+  }
+  yield decoder.end();
+}
+
+async function* filter(stream, fn) {
+  for await (const item of stream) {
+    if (fn(item)) yield item;
+  }
+}
+
+async function* split(stream, separator) {
+  let buf = "";
+  for await (const chunk of stream) {
+    const parts = (buf + chunk).split(separator);
+    buf = parts.pop();
+    yield* parts.values();
+  }
+  yield buf;
+}
diff --git a/.github/mtime_cache/action.yml b/.github/mtime_cache/action.yml
new file mode 100644
index 000000000..20e7b251f
--- /dev/null
+++ b/.github/mtime_cache/action.yml
@@ -0,0 +1,10 @@
+name: mtime cache
+description:
+  Preserve last-modified timestamps by storing them in the Github Actions cache
+inputs:
+  cache-path:
+    description: Path where the mtime cache database should be located
+    required: true
+runs:
+  main: action.js
+  using: node12
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a341c4a59..1ad068ed5 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -44,7 +44,6 @@ jobs:
         'refs/heads/main' && !startsWith(github.ref, 'refs/tags/')) }}
 
     env:
-      CARGO_INCREMENTAL: 0
       RUST_BACKTRACE: full
       CARGO_TERM_COLOR: always
 
@@ -59,7 +58,7 @@ jobs:
          # other commits have landed it will become impossible to rebuild if
          # the checkout is too shallow.
          fetch-depth: 5
-          submodules: true
+          submodules: recursive
 
       - name: Create source tarballs (release, linux)
         if: |
@@ -168,6 +167,23 @@ jobs:
          brew install gnu-tar
          echo "/usr/local/opt/gnu-tar/libexec/gnubin" >> $GITHUB_PATH
 
+      - name: Cache
+        uses: actions/cache@v2
+        with:
+          path: |
+            ~/.cargo/git
+            ~/.cargo/registry
+            ./target
+          key:
+            ${{ matrix.os }}-${{ matrix.kind }}-${{ hashFiles('Cargo.lock') }}
+          restore-keys: |
+            ${{ matrix.os }}-${{ matrix.kind }}-
+
+      - name: Apply and update mtime cache
+        uses: ./.github/mtime_cache
+        with:
+          cache-path: ./target
+
       - name: test_format.js
         if: matrix.kind == 'lint'
         run: deno run --unstable --allow-write --allow-read --allow-run ./tools/format.js --check
diff --git a/Cargo.toml b/Cargo.toml
index 4d7c8be95..d99f66fd3 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -23,11 +23,13 @@ exclude = [
 
 [profile.release]
 codegen-units = 1
+incremental = true
 lto = true
 opt-level = 'z' # Optimize for size
 
 [profile.bench]
 codegen-units = 1
+incremental = true
 lto = true
 opt-level = 'z' # Optimize for size
 
diff --git a/tools/lint.js b/tools/lint.js
index 7e2e57d2c..299632d90 100755
--- a/tools/lint.js
+++ b/tools/lint.js
@@ -16,6 +16,7 @@ async function dlint() {
   const sourceFiles = await getSources(ROOT_PATH, [
     "*.js",
     "*.ts",
+    ":!:.github/mtime_cache/action.js",
     ":!:cli/tests/swc_syntax_error.ts",
     ":!:cli/tests/038_checkjs.js",
     ":!:cli/tests/error_008_checkjs.js",
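
Note on the key derivation in ls(): the generator parses the output of `git ls-files --stage --eol --full-name -z` with a regular expression and turns each entry into a cache key. The sketch below is not part of the commit; it runs the same regex from action.js against one hypothetical ls-files entry (the mode, object hash, and eol/attr padding are invented for illustration) so the key format is easier to see.

// Minimal Node sketch, assuming a made-up `git ls-files --stage --eol` entry.
const pat =
  /^(?<mode>\d{6}) (?<hash>[0-9a-f]{40}) 0\t(?<eol>[^\t]*?)[ ]*\t(?<name>.*)$/;

const entry =
  "100644 9e8b75b8cd0ba4d4fbb9f2c6b3a0f1c2d3e4f5a6 0\ti/lf    w/lf    attr/  \tREADME.md";

const { mode, hash, eol, name } = pat.exec(entry).groups;

// The cache key combines everything that should invalidate a stored mtime:
// file mode, blob hash, eol/attr info, and the repo-relative path.
const key = [mode, hash, eol, name].join("\0");
console.log({ mode, hash, eol, name, key });

Because the blob hash is part of the key, a file whose contents change no longer matches its old entry, so the action assigns it a fresh timestamp instead of restoring a stale one, which is what keeps the cached incremental cargo builds correct.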