From dde0ee9f483f4836f5445d99a699c151caf4f65d Mon Sep 17 00:00:00 2001 From: usmanyunusov Date: Wed, 27 Apr 2022 19:36:29 +0300 Subject: [PATCH] Refactoring --- .github/workflows/testing.yml | 42 +- README.md | 15 +- bench/running-time/index.js | 34 +- bench/size/index.js | 2 +- lib/bin.js | 51 +- lib/cmd-runner.js | 130 ----- lib/colors.js | 29 ++ lib/config.js | 126 ++++- lib/debug.js | 11 + lib/error.js | 6 + lib/errors.js | 23 - lib/executor.js | 447 +++++++++--------- lib/git-workflow.js | 104 ---- lib/git.js | 283 ++++------- lib/{glob-to-regex.js => glob.js} | 98 ++-- lib/index.js | 135 ++++-- lib/log.js | 81 ++++ lib/renderer.js | 163 ------- lib/reporter.js | 29 -- lib/runner.js | 355 +++++++++----- lib/spinner.js | 227 +++++++++ lib/tasks.js | 57 +++ lib/utils.js | 106 ++--- package.json | 20 +- pnpm-lock.yaml | 52 +- test/git.test.js | 247 ---------- test/integration/allow-empty.test.js | 73 +++ test/integration/base.test.js | 148 ++++++ test/integration/binary-files.test.js | 41 ++ test/integration/diff-options.test.js | 67 +++ test/integration/file-resurrection.test.js | 95 ++++ test/integration/files-outside-cwd.test.js | 58 +++ test/integration/fixtures/configs.js | 2 + test/integration/fixtures/files.js | 13 + test/integration/git-amend.test.js | 49 ++ test/integration/git-submodules.test.js | 62 +++ test/integration/git-worktree.test.js | 53 +++ test/integration/merge-conflict.test.js | 149 ++++++ .../integration/multiple-config-files.test.js | 77 +++ test/integration/not-git-dir.test.js | 36 ++ test/integration/parent-globs.test.js | 48 ++ .../partially-staged-changes.test.js | 123 +++++ test/integration/unstaged-options.test.js | 50 ++ test/integration/untracked-files.test.js | 65 +++ .../integration/utils/file-system-test-rig.js | 51 ++ test/integration/utils/test-rig.js | 64 +++ test/{ => old-unit}/cmd-runner.test.js | 0 test/{ => old-unit}/errors.test.js | 0 test/{ => old-unit}/git-workflow.test.js | 0 test/{ => old-unit}/index.test.js | 0 test/{ => old-unit}/renderer.test.js | 3 +- test/{ => old-unit}/reporter.test.js | 0 test/{ => old-unit}/runner.test.js | 0 test/{ => unit}/config.test.js | 89 +--- .../fixtures/config/cjs-in-js/nano-staged.js | 0 .../fixtures/config/cjs/nano-staged.cjs | 0 .../fixtures/config/esm-in-js/nano-staged.js | 0 .../fixtures/config/json/nano-staged.json | 0 .../fixtures/config/mjs/nano-staged.mjs | 0 .../fixtures/config/no-ext/.nanostagedrc | 0 .../fixtures/config/test-project/dir/index.js | 0 .../fixtures/config/test-project/index.js | 0 .../fixtures/config/test-project/package.json | 0 test/{ => unit}/fixtures/simple/.gitignore | 0 test/unit/git.test.js | 210 ++++++++ test/{ => unit}/glob-to-regex.test.js | 8 +- test/unit/utils.test.js | 28 ++ test/{ => unit}/utils/index.js | 0 test/utils.test.js | 164 ------- 69 files changed, 2974 insertions(+), 1725 deletions(-) mode change 100644 => 100755 lib/bin.js delete mode 100644 lib/cmd-runner.js create mode 100644 lib/colors.js create mode 100644 lib/debug.js create mode 100644 lib/error.js delete mode 100644 lib/errors.js delete mode 100644 lib/git-workflow.js rename lib/{glob-to-regex.js => glob.js} (75%) create mode 100644 lib/log.js delete mode 100644 lib/renderer.js delete mode 100644 lib/reporter.js create mode 100644 lib/spinner.js create mode 100644 lib/tasks.js delete mode 100644 test/git.test.js create mode 100644 test/integration/allow-empty.test.js create mode 100644 test/integration/base.test.js create mode 100644 test/integration/binary-files.test.js create mode 100644 
test/integration/diff-options.test.js create mode 100644 test/integration/file-resurrection.test.js create mode 100644 test/integration/files-outside-cwd.test.js create mode 100644 test/integration/fixtures/configs.js create mode 100644 test/integration/fixtures/files.js create mode 100644 test/integration/git-amend.test.js create mode 100644 test/integration/git-submodules.test.js create mode 100644 test/integration/git-worktree.test.js create mode 100644 test/integration/merge-conflict.test.js create mode 100644 test/integration/multiple-config-files.test.js create mode 100644 test/integration/not-git-dir.test.js create mode 100644 test/integration/parent-globs.test.js create mode 100644 test/integration/partially-staged-changes.test.js create mode 100644 test/integration/unstaged-options.test.js create mode 100644 test/integration/untracked-files.test.js create mode 100644 test/integration/utils/file-system-test-rig.js create mode 100644 test/integration/utils/test-rig.js rename test/{ => old-unit}/cmd-runner.test.js (100%) rename test/{ => old-unit}/errors.test.js (100%) rename test/{ => old-unit}/git-workflow.test.js (100%) rename test/{ => old-unit}/index.test.js (100%) rename test/{ => old-unit}/renderer.test.js (97%) rename test/{ => old-unit}/reporter.test.js (100%) rename test/{ => old-unit}/runner.test.js (100%) rename test/{ => unit}/config.test.js (56%) rename test/{ => unit}/fixtures/config/cjs-in-js/nano-staged.js (100%) rename test/{ => unit}/fixtures/config/cjs/nano-staged.cjs (100%) rename test/{ => unit}/fixtures/config/esm-in-js/nano-staged.js (100%) rename test/{ => unit}/fixtures/config/json/nano-staged.json (100%) rename test/{ => unit}/fixtures/config/mjs/nano-staged.mjs (100%) rename test/{ => unit}/fixtures/config/no-ext/.nanostagedrc (100%) rename test/{ => unit}/fixtures/config/test-project/dir/index.js (100%) rename test/{ => unit}/fixtures/config/test-project/index.js (100%) rename test/{ => unit}/fixtures/config/test-project/package.json (100%) rename test/{ => unit}/fixtures/simple/.gitignore (100%) create mode 100644 test/unit/git.test.js rename test/{ => unit}/glob-to-regex.test.js (98%) create mode 100644 test/unit/utils.test.js rename test/{ => unit}/utils/index.js (100%) delete mode 100644 test/utils.test.js diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 045455f..ad82b9d 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -5,27 +5,27 @@ on: pull_request: branches: [master] jobs: - full: - name: Node.js 17 Full - runs-on: ubuntu-latest - steps: - - name: Checkout the repository - uses: actions/checkout@v2 - - name: Install pnpm - uses: pnpm/action-setup@v2 - with: - version: 6.32.9 - - name: Install Node.js - uses: actions/setup-node@v2 - with: - node-version: 17 - cache: pnpm - - name: Install dependencies - run: pnpm install --frozen-lockfile - - name: Run tests - run: pnpm test - env: - FORCE_COLOR: 2 + # full: + # name: Node.js 17 Full + # runs-on: ubuntu-latest + # steps: + # - name: Checkout the repository + # uses: actions/checkout@v2 + # - name: Install pnpm + # uses: pnpm/action-setup@v2 + # with: + # version: 6.32.9 + # - name: Install Node.js + # uses: actions/setup-node@v2 + # with: + # node-version: 17 + # cache: pnpm + # - name: Install dependencies + # run: pnpm install --frozen-lockfile + # - name: Run tests + # run: pnpm test + # env: + # FORCE_COLOR: 2 short: runs-on: ${{ matrix.os }} strategy: diff --git a/README.md b/README.md index 2948cbf..93eb245 100644 --- a/README.md 
+++ b/README.md @@ -6,9 +6,10 @@ ## Features -- 📦 **Small**: [47kB](https://packagephobia.com/result?p=nano-staged) (142x+ lighter than **lint-staged**). -- 🥇 **Single dependency** ([`picocolors`](https://github.com/alexeyraspopov/picocolors)). +- 💨 **Dependency-free** +- 🤏 **Small**: [48kB](https://packagephobia.com/result?p=nano-staged) (142x+ lighter than **lint-staged**). - ☯️ **Support multiple file states like staged, unstaged, last-commit, changed etc** +- 💪 **Multi configuration** (useful for monorepos) ## Benchmarks @@ -16,8 +17,8 @@ Benchmarks running time for 10 file: ```diff $ node bench/running-time/index.js -- lint-staged 1.394 ms -+ nano-staged 0.968 ms +- lint-staged 0.439 ms ++ nano-staged 0.257 ms ``` The space in node_modules including sub-dependencies: @@ -25,8 +26,8 @@ The space in node_modules including sub-dependencies: ```diff $ node bench/size/index.js Data from packagephobia.com -- lint-staged 6688 kB -+ nano-staged 47 kB +- lint-staged 6707 kB ++ nano-staged 48 kB ``` The performance results were generated on a MBP Late 2013, 2.3 GHz Intel Core i7 by running `npm run bench` in the library folder. See [bench/running-time/index.js](https://github.com/usmanyunusov/nano-staged/blob/master/bench/running-time/index.js) @@ -40,7 +41,9 @@ The performance results were generated on a MBP Late 2013, 2.3 GHz Intel Core i7 ```terminal npm install --save-dev nano-staged ``` + or + ```terminal yarn add nano-staged -D ``` diff --git a/bench/running-time/index.js b/bench/running-time/index.js index 3a04823..90d120a 100644 --- a/bench/running-time/index.js +++ b/bench/running-time/index.js @@ -1,13 +1,11 @@ -import { execFile } from 'child_process' import { resolve, dirname } from 'path' import { fileURLToPath } from 'url' -import { promisify } from 'util' import { nanoid } from 'nanoid' import fs from 'fs-extra' -import { createGit } from '../../lib/git.js' +import { create_git } from '../../lib/git.js' +import { executor } from '../../lib/executor.js' -let spawn = promisify(execFile) let currentDir = dirname(fileURLToPath(import.meta.url)) let cwd = resolve(currentDir, `nano-staged-${nanoid()}`) let runners = ['lint-staged', 'nano-staged'] @@ -22,7 +20,7 @@ async function appendFile(filename, content, dir = cwd) { } async function execGit(args) { - let git = createGit(cwd) + let git = create_git(cwd) await git.exec(args, { cwd }) } @@ -33,7 +31,7 @@ async function initGitRepo() { await appendFile('README.md', '# Test\n') await appendFile('.gitignore', `node_modules/\n`) await execGit(['add', 'README.md']) - await execGit(['commit', '-m initial commit']) + await execGit(['commit', '-m', '"initial commit"']) } async function initProject() { @@ -41,20 +39,24 @@ async function initProject() { 'package.json', `{ "lint-staged": { - "*.js": "prettier --write", - "*.css": "prettier --write" + "*.js": "echo test", + "*.css": "echo test" }, "nano-staged": { - "*.js": "prettier --write", - "*.css": "prettier --write" + "*.js": "echo test", + "*.css": "echo test" } }` ) - await spawn('yarn', ['add', 'lint-staged'], { cwd }) - await spawn('yarn', ['add', resolve(cwd, '../../../../nano-staged')], { + await executor('pnpm', ['add', 'lint-staged'], { cwd, }) + + await executor('pnpm', ['add', resolve(cwd, '../../../../nano-staged')], { + cwd, + }) + await appendFile('a.js', 'var test = {};') await appendFile('b.js', 'var test = {};') await appendFile('c.js', 'var test = {};') @@ -82,7 +84,13 @@ function showTime(name) { async function run() { for (let runner of runners) { before = 
performance.now() - await spawn(`./node_modules/.bin/${runner}`, { cwd }) + + try { + await executor(`./node_modules/.bin/${runner}`, [], { cwd }) + } catch (error) { + console.log(error) + } + showTime(runner) } } diff --git a/bench/size/index.js b/bench/size/index.js index c4a7819..c764cf5 100644 --- a/bench/size/index.js +++ b/bench/size/index.js @@ -1,7 +1,7 @@ #!/usr/bin/env node import { get } from 'https' -import c from 'picocolors' +import c from '../../lib/colors.js' async function getJSON(url) { return new Promise((resolve) => { diff --git a/lib/bin.js b/lib/bin.js old mode 100644 new mode 100755 index 0701d23..f61344e --- a/lib/bin.js +++ b/lib/bin.js @@ -1,36 +1,51 @@ #!/usr/bin/env node -import nanoStaged from './index.js' -import * as utils from './utils.js' +import { is_color_support } from './colors.js' +import { create_debug } from './debug.js' +import nano_staged from './index.js' -const FORCE_COLOR_LEVEL = utils.getForceColorLevel() - -if (FORCE_COLOR_LEVEL) { - process.env.FORCE_COLOR = FORCE_COLOR_LEVEL.toString() +if (is_color_support) { + process.env.FORCE_COLOR = '1' } process.on('SIGINT', () => {}) +const debug = create_debug('nano-staged:bin') + function run() { - let options = {} + const args = process.argv.reduce((prev, arg) => [...prev, ...arg.split('=')], []) + const opts = {} - for (let i = 2; i < process.argv.length; i++) { - let arg = process.argv[i] + for (let i = 2; i < args.length; i++) { + let arg = args[i] - if (arg === '-c' || arg === '--config') { - options.config = process.argv[++i] - } else if (arg === '-u' || arg === '--unstaged') { - options.unstaged = true + if (arg === '-u' || arg === '--unstaged') { + opts.unstaged = true + } else if (arg === '-d' || arg === '--debug') { + process.env.NS_DEBUG = true } else if (arg === '--allow-empty') { - options.allowEmpty = true + opts.allow_empty = true + } else if (arg === '--shell') { + opts.shell = true + } else if (arg === '--cwd') { + opts.unstaged = args[++i] + } else if (arg === '-c' || arg === '--config') { + opts.config_path = args[++i] + } else if (arg === '--max-arg-length') { + opts.max_arg_length = Number(args[++i]) + } else if (arg === '--diff-filter') { + opts.diff_filter = args[++i] } else if (arg === '--diff') { - options.diff = [] - } else if (options.diff && options.diff.length !== 2) { - options.diff.push(process.argv[i]) + opts.diff = [] + } else if (opts.diff && opts.diff.length !== 2) { + opts.diff.push(args[i]) } } - return nanoStaged(options) + debug('Options parsed from CLI:', opts) + debug('FORCE_COLOR:', process.env.FORCE_COLOR) + + return nano_staged(opts) } run().catch(() => { diff --git a/lib/cmd-runner.js b/lib/cmd-runner.js deleted file mode 100644 index ac9a917..0000000 --- a/lib/cmd-runner.js +++ /dev/null @@ -1,130 +0,0 @@ -import { normalize, relative, resolve, isAbsolute } from 'path' -import c from 'picocolors' - -import { globToRegex } from './glob-to-regex.js' -import { stringArgvToArray } from './utils.js' -import { TaskRunnerError } from './errors.js' -import { executor } from './executor.js' -import { toArray } from './utils.js' - -export function createCmdRunner({ - cwd = process.cwd(), - type = 'staged', - rootPath = '', - config = {}, - files = [], -} = {}) { - const runner = { - async generateCmdTasks() { - const cmdTasks = [] - - for (const [pattern, cmds] of Object.entries(config)) { - const matches = globToRegex(pattern, { extended: true, globstar: pattern.includes('/') }) - const isFn = typeof cmds === 'function' - const task_files = [] - const tasks = [] 
- - for (let file of files) { - file = normalize(relative(cwd, normalize(resolve(rootPath, file)))).replace(/\\/g, '/') - - if (!pattern.startsWith('../') && (file.startsWith('..') || isAbsolute(file))) { - continue - } - - if (matches.regex.test(file)) { - task_files.push(resolve(cwd, file)) - } - } - - const file_count = task_files.length - const commands = toArray(isFn ? await cmds({ filenames: task_files, type }) : cmds) - const suffix = file_count ? file_count + (file_count > 1 ? ' files' : ' file') : 'no files' - - for (const command of commands) { - const [cmd, ...args] = stringArgvToArray(command) - - if (file_count) { - tasks.push({ - title: command, - run: async () => - executor(cmd, isFn ? args : args.concat(task_files), { - cwd: rootPath, - }), - pattern, - }) - } - } - - cmdTasks.push({ - title: pattern + c.dim(` - ${suffix}`), - file_count, - tasks, - }) - } - - return cmdTasks - }, - - async run(parentTask) { - const errors = [] - - try { - await Promise.all( - parentTask.tasks.map(async (task) => { - task.parent = parentTask - - try { - if (task.file_count) { - task.state = 'run' - await runner.runTask(task) - task.state = 'done' - } else { - task.state = 'warn' - } - } catch (err) { - task.state = 'fail' - errors.push(...err) - } - }) - ) - - if (errors.length) { - throw new TaskRunnerError(errors.join('\n\n')) - } - } catch (err) { - throw err - } - }, - - async runTask(parentTask) { - let skipped = false - let errors = [] - - for (const task of parentTask.tasks) { - task.parent = parentTask - - try { - if (skipped) { - task.state = 'warn' - continue - } - - task.state = 'run' - await task.run() - task.state = 'done' - } catch (error) { - skipped = true - task.title = c.red(task.title) - task.state = 'fail' - errors.push(`${c.red(task.pattern)} ${c.dim('>')} ${task.title}:\n` + error.trim()) - } - } - - if (errors.length) { - throw errors - } - }, - } - - return runner -} diff --git a/lib/colors.js b/lib/colors.js new file mode 100644 index 0000000..d94fac0 --- /dev/null +++ b/lib/colors.js @@ -0,0 +1,29 @@ +import * as tty from 'tty' + +export const is_color_support = + !('NO_COLOR' in process.env || process.argv.includes('--no-color')) && + ('FORCE_COLOR' in process.env || + process.argv.includes('--color') || + process.platform === 'win32' || + (tty.isatty(1) && process.env.TERM !== 'dumb') || + 'CI' in process.env) + +const format = (start, end) => (input) => { + const open = `\x1b[${start}m` + const close = `\x1b[${end}m` + const string = '' + input + const regex = new RegExp(`\\x1b\\[${end}m`, 'g') + + return is_color_support ? 
open + string.replace(regex, open) + close : String(string) +} + +export default { + inverse: format(7, 27), + yellow: format(93, 39), + green: format(92, 39), + cyan: format(96, 39), + gray: format(90, 39), + bold: format(1, 22), + red: format(91, 39), + dim: format(2, 22), +} diff --git a/lib/config.js b/lib/config.js index d08c569..a80bfb1 100644 --- a/lib/config.js +++ b/lib/config.js @@ -1,6 +1,9 @@ -import { resolve, parse } from 'path' -import { pathToFileURL } from 'url' -import fs from 'fs' +import * as url from 'url' +import * as p from 'path' +import * as fs from 'fs' + +import { create_debug } from './debug.js' +import { normalize } from './utils.js' const places = [ `.nano-staged.js`, @@ -15,44 +18,53 @@ const places = [ 'package.json', ] -async function readConfig(path) { +const debug = create_debug('nano-staged:config') + +async function read(path) { if (fs.existsSync(path) && fs.lstatSync(path).isFile()) { - const { ext, name } = parse(path) + const { ext, name } = p.parse(path) if (ext === '.json' || name === '.nanostagedrc') { - const config = JSON.parse(fs.readFileSync(path, 'utf-8')) + const config = JSON.parse(fs.readFileSync(path, { encoding: 'utf-8' })) return name === 'package' ? config['nano-staged'] : config } if (ext === '.js' || ext === '.mjs' || ext === '.cjs') { - const { default: config } = await import(pathToFileURL(path)) + const config = await import(url.pathToFileURL(path)).then((module) => module.default) return typeof config === 'function' ? { '*': config } : config } } } -export async function getConfig(cwd = process.cwd(), config = undefined) { - try { - if (config) { - return typeof config === 'string' ? await readConfig(resolve(config)) : config - } +async function find(cwd = process.cwd()) { + debug('Searching for configuration from `%s`...', cwd) - let up = resolve(cwd) + let up = p.resolve(cwd) + let path + try { do { cwd = up for (const place of places) { - config = await readConfig(resolve(cwd, place)) - if (config) return config + path = normalize(p.join(cwd, place)) + const config = await read(path) + + if (config) { + debug('Successfully loaded config from `%s`:\n%O', path, config) + return { path, config } + } } - up = resolve(cwd, '..') + up = p.resolve(cwd, '..') } while (up !== cwd) } catch { - return undefined + debug('Failed to load configuration `%s`', path) + return { path, config: null } } + + return { path, config: null } } -export function validConfig(config) { +export function validate(config) { return !!( config && Object.keys(config).length && @@ -64,7 +76,83 @@ export function validConfig(config) { (typeof config[key] === 'string' || typeof config[key] === 'function' || (Array.isArray(config[key]) && - config[key].every((cmd) => cmd && typeof cmd === 'string'))) + config[key].every( + (cmd) => (cmd && typeof cmd === 'string') || typeof cmd === 'function' + ))) ) ) } + +export async function search({ cwd, search_dirs, config_path, config_obj }) { + if (config_obj) { + debug('Using single direct configuration object...') + return { '': config_obj } + } + + if (config_path) { + debug('Using single configuration path...') + const config = await read(config_path) + return config ? { [config_path]: config } : {} + } + + const configs = {} + const passible_configs = await Promise.all(search_dirs.map((dir) => find(dir))) + const sorted_configs = passible_configs.sort((a, b) => + a.path.split('/').length > b.path.split('/').length ? 
-1 : 1 + ) + + debug('Found possible config files:\n', sorted_configs) + + for (const { config, path } of sorted_configs) { + if (path.startsWith(normalize(cwd))) { + configs[path] = config + } + } + + if (Object.keys(configs).length === 0) { + const { config, path } = await find(cwd) + + if (config) { + configs[config] = path + } + } + + return configs +} + +export function group_files({ configs, files, is_single }) { + const files_set = new Set(files) + const files_by_config = {} + + for (const [path, config] of Object.entries(configs)) { + if (is_single) { + files_by_config[path] = { config, files } + break + } + + const dir = normalize(p.dirname(path)) + + const include_all_files = Object.keys(config).some((glob) => glob.startsWith('..')) + const scoped_files = new Set(include_all_files ? files_set : undefined) + + if (!include_all_files) { + for (const file of files_set) { + const rel = p.relative(dir, file) + + if (rel && !rel.startsWith('..') && !p.isAbsolute(rel)) { + scoped_files.add(file) + } + } + } + + for (const file of scoped_files) { + files_set.delete(file) + } + + files_by_config[path] = { config, files: Array.from(scoped_files) } + } + + debug('Files by config:\n', files_by_config) + + return files_by_config +} diff --git a/lib/debug.js b/lib/debug.js new file mode 100644 index 0000000..1247c7e --- /dev/null +++ b/lib/debug.js @@ -0,0 +1,11 @@ +import * as util from 'util' +import c from './colors.js' + +export const create_debug = + (name) => + (...args) => { + if ('NS_DEBUG' in process.env) { + args[0] = c.inverse(c.bold(c.green(` ${name.toUpperCase()} `))) + ' ' + args[0] + process.stderr.write(util.format(...args) + '\n') + } + } diff --git a/lib/error.js b/lib/error.js new file mode 100644 index 0000000..ad2b683 --- /dev/null +++ b/lib/error.js @@ -0,0 +1,6 @@ +export class NanoStagedError extends Error { + constructor(event) { + super(event.reason) + this.event = event + } +} diff --git a/lib/errors.js b/lib/errors.js deleted file mode 100644 index 5240231..0000000 --- a/lib/errors.js +++ /dev/null @@ -1,23 +0,0 @@ -const MESSAGES = { - noConfig: () => 'Create Nano Staged config.', - noFileConfig: (path) => `Nano Staged config file *${path}* is not found.`, - invalidConfig: () => 'Nano Staged config invalid.', - noGitRepo: () => 'Nano Staged didn’t find git directory.', - noFiles: (type) => `No ${type} files found.`, - noMatchingFiles: () => 'No files match any configured task.', -} - -export class NanoStagedError extends Error { - constructor(type, ...args) { - super(MESSAGES[type](...args)) - this.name = 'NanoStagedError' - this.type = type - } -} - -export class TaskRunnerError extends Error { - constructor(errors) { - super(errors) - this.name = 'TaskRunnerError' - } -} diff --git a/lib/executor.js b/lib/executor.js index 8d81900..67e8a61 100644 --- a/lib/executor.js +++ b/lib/executor.js @@ -1,321 +1,314 @@ -/* c8 ignore start */ import { spawn } from 'child_process' -import { promises as fs } from 'fs' -import path from 'path' +import * as p from 'path' +import * as fs from 'fs' + +import { normalize } from './utils.js' -const IS_WINDOWS = process.platform === 'win32' -const ENV_PATH_KEY = getPathKey(process.env) -const RE_EXECUTABLE = /\.(?:com|exe)$/i -const RE_META_CHARS = /([()\][%!^"`<>&|;, *?])/g const RE_IS_CMD_SHIM = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i +const RE_META_CHARS = /([()\][%!^"`<>&|;, *?])/g +const RE_EXECUTABLE = /\.(?:com|exe)$/i +const RE_SHEBANG = /^#!(.*)/ +const SPACES_REGEXP = / +/g + +const IS_WINDOWS = process.platform === 
'win32' +const MAX_BUFFER = 1000 * 1000 * 100 + +function esc_cmd(str) { + return str.replace(RE_META_CHARS, '^$1') +} + +function esc_arg(str, double = false) { + str = `"` + `${str}`.replace(/(\\*)"/g, '$1$1\\"').replace(/(\\*)$/, '$1$1') + `"` + str = esc_cmd(str) -function escapeCommand(arg) { - return arg.replace(RE_META_CHARS, '^$1') + return double ? esc_cmd(str) : str } -function escapeArgument(arg, doubleEscapeMetaChars = false) { - arg = `"` + `${arg}`.replace(/(\\*)"/g, '$1$1\\"').replace(/(\\*)$/, '$1$1') + `"` - arg = arg.replace(RE_META_CHARS, '^$1') +function is_exe(path, opts) { + let stat = fs.statSync(path) + let exts = opts.path_ext !== undefined ? opts.path_ext : process.env.PATHEXT - if (doubleEscapeMetaChars) { - arg = arg.replace(RE_META_CHARS, '^$1') + if (!stat.isSymbolicLink() && !stat.isFile()) return false + if (!exts) return true + + exts = exts.split(';') + if (exts.indexOf('') !== -1) return true + + for (const ext of exts) { + const $ext = ext.toLowerCase() + if ($ext && path.substr(-$ext.length).toLowerCase() === $ext) { + return true + } } - return arg + return false } -function getSpawnArgs(cmd, args) { +function get_path_key(env = process.env, path_key = 'PATH') { if (IS_WINDOWS) { - if (isCmdFile(cmd)) { - let line = `/D /S /C "${escapeCommand(cmd)}` - for (const arg of args) { - line += ' ' - line += escapeArgument(arg, RE_IS_CMD_SHIM.test(cmd)) - } - line += '"' + path_key = 'Path' - return [line] + for (const key in env) { + if (key.toLowerCase() === 'path') { + return key + } } } - return args + return path_key } -function endsWith(str, end) { - return str.endsWith(end) -} +function get_env_path(cwd) { + let entries = [] + let cur = cwd -function isCmdFile(cmd) { - let upperCMD = cmd.toUpperCase() - return endsWith(upperCMD, '.CMD') || endsWith(upperCMD, '.BAT') -} - -function getSpawnFileName(cmd) { - if (IS_WINDOWS) { - if (isCmdFile(cmd)) { - return process.env['COMSPEC'] || 'cmd.exe' + while (true) { + entries.push(p.join(cur, 'node_modules', '.bin')) + const parent = p.dirname(cur) + if (parent === cur) { + break } + cur = parent } - return cmd + entries.push(process.env.PATH) + return entries.join(p.delimiter) } -async function getPrefix(root) { - let original = (root = path.resolve(root)) +function get_path_info(cmd, opts) { + const colon = IS_WINDOWS ? ';' : ':' + + const path_env = + cmd.match(/\//) || (IS_WINDOWS && cmd.match(/\\/)) + ? [''] + : [ + ...(IS_WINDOWS ? [process.cwd()] : []), + ...(opts.path || process.env.PATH || '').split(colon), + ] + const path_ext_exe = IS_WINDOWS + ? opts.path_ext || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM' + : '' + const path_ext = IS_WINDOWS ? 
path_ext_exe.split(colon) : [''] - while (path.basename(root) === 'node_modules') { - root = path.dirname(root) + if (IS_WINDOWS) { + if (cmd.indexOf('.') !== -1 && path_ext[0] !== '') path_ext.unshift('') } - if (original !== root) { - return Promise.resolve(root) - } else { - return Promise.resolve(getPrefixFromTree(root)) + return { + path_ext_exe, + path_env, + path_ext, } } -function getPrefixFromTree(current) { - if (isRooted(current)) { - return false - } else { - return Promise.all([ - fs.stat(path.join(current, 'package.json')).catch(() => ''), - fs.stat(path.join(current, 'node_modules')).catch(() => ''), - ]).then(([hasPkg, hasModules]) => { - if (hasPkg || hasModules) { - return current - } else { - return getPrefixFromTree(path.dirname(current)) - } - }) - } -} +const which_sync = (cmd, opts = {}) => { + const { path_ext_exe, path_env, path_ext } = get_path_info(cmd, opts) -function getPathKey(env = process.env) { - let pathKey = 'PATH' + for (const env of path_env) { + const path_part = /^".*"$/.test(env) ? env.slice(1, -1) : env - if (IS_WINDOWS) { - pathKey = 'Path' + const p_cmd = p.join(path_part, cmd) + const pp = !path_part && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + p_cmd : p_cmd - for (const key in env) { - if (key.toLowerCase() === 'path') { - pathKey = key - } + for (const ext of path_ext) { + const cur = pp + ext + + try { + if (is_exe(cur, { path_ext: path_ext_exe })) { + return cur + } + } catch {} } } - return pathKey + throw new Error(`not found: ${cmd}`) } -function isRooted(p) { - p = normalizeSeparators(p) +function resolve_cmd_attemp(parsed, without_path_ext) { + const env = parsed.opts.env || process.env + const cwd = process.cwd() + const has_custom_cwd = parsed.opts.cwd != null + const should_switch_cwd = has_custom_cwd && process.chdir !== undefined && !process.chdir.disabled + const path_key = get_path_key(env) - if (IS_WINDOWS) { - return p.match(/^[a-z]+:[/\\]?$/i) + if (should_switch_cwd) { + try { + process.chdir(parsed.opts.cwd) + } catch {} } - return p === '/' -} + let resolved -async function tryGetExecutablePath(filePath, extensions) { - let stats = undefined try { - stats = await fs.stat(filePath) - } catch (err) { - if (err.code !== 'ENOENT') { - console.log( - `Unexpected error attempting to determine if executable file exists '${filePath}': ${err}` - ) + resolved = which_sync(parsed.cmd, { + path: env[path_key], + path_ext: without_path_ext ? p.delimiter : undefined, + }) + } catch { + } finally { + if (should_switch_cwd) { + process.chdir(cwd) } } - if (stats && stats.isFile()) { - if (IS_WINDOWS) { - const upperExt = path.extname(filePath).toUpperCase() - if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { - return filePath - } - } else { - if (isUnixExecutable(stats)) { - return filePath - } - } + + if (resolved) { + resolved = p.resolve(has_custom_cwd ? 
parsed.opts.cwd : '', resolved) } - const originalFilePath = filePath - for (const extension of extensions) { - filePath = originalFilePath + extension + return resolved +} - stats = undefined - try { - stats = await fs.stat(filePath) - } catch (err) { - if (err.code !== 'ENOENT') { - console.log( - `Unexpected error attempting to determine if executable file exists '${filePath}': ${err}` - ) - } - } +function resolve_cmd(parsed) { + return resolve_cmd_attemp(parsed) || resolve_cmd_attemp(parsed, true) +} - if (stats && stats.isFile()) { - if (IS_WINDOWS) { - try { - const directory = path.dirname(filePath) - const upperName = path.basename(filePath).toUpperCase() - for (const actualName of await fs.readdir(directory)) { - if (upperName === actualName.toUpperCase()) { - filePath = path.join(directory, actualName) - break - } - } - } catch (err) { - console.log( - `Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}` - ) - } +function shebang_cmd(str = '') { + const match = str.match(RE_SHEBANG) - return filePath - } else { - if (isUnixExecutable(stats)) { - return filePath - } - } - } + if (!match) { + return null } - return '' -} + const [path, argument] = match[0].replace(/#! ?/, '').split(' ') + const binary = path.split('/').pop() -function normalizeSeparators(p = '') { - return IS_WINDOWS ? p.replace(/\//g, '\\').replace(/\\\\+/g, '\\') : p.replace(/\/\/+/g, '/') + return binary === 'env' ? argument : argument ? `${binary} ${argument}` : binary } -function isUnixExecutable(stats) { - return ( - (stats.mode & 1) > 0 || - ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || - ((stats.mode & 64) > 0 && stats.uid === process.getuid()) - ) -} +function shebang_read(cmd) { + const size = 150 + const buffer = Buffer.alloc(size) -async function findInPath(tool) { - let extensions = [] - let directories = [] - let matches = [] + let fd - if (IS_WINDOWS && process.env['PATHEXT']) { - for (let extension of process.env['PATHEXT'].split(path.delimiter)) { - if (extension) { - extensions.push(extension) - } - } - } + try { + fd = fs.openSync(cmd, 'r') + fs.readSync(fd, buffer, 0, size, 0) + fs.closeSync(fd) + } catch {} - if (isRooted(tool)) { - let filePath = await tryGetExecutablePath(tool, extensions) + return shebang_cmd(buffer.toString()) +} - if (filePath) { - return [filePath] - } +function shebang_detect(parsed) { + parsed.file = resolve_cmd(parsed) - return [] - } + const shebang = parsed.file && shebang_read(parsed.file) - if (tool.includes(path.sep)) { - return [] + if (shebang) { + parsed.args.unshift(parsed.file) + parsed.cmd = shebang + return resolve_cmd(parsed) } - if (process.env[ENV_PATH_KEY]) { - for (let p of process.env[ENV_PATH_KEY].split(path.delimiter)) { - if (p) { - directories.push(p) - } - } - } + return parsed.file +} - for (let directory of directories) { - let filePath = await tryGetExecutablePath(path.join(directory, tool), extensions) +export const parse_cmd = (cmd) => { + const tokens = [] - if (filePath) { - matches.push(filePath) + for (const token of cmd.trim().split(SPACES_REGEXP)) { + const prev_token = tokens[tokens.length - 1] + if (prev_token && prev_token.endsWith('\\')) { + tokens[tokens.length - 1] = `${prev_token.slice(0, -1)} ${token}` + } else { + tokens.push(token) } } - return matches + return tokens } -async function which(tool, check) { - if (!tool) { - throw `'tool' is required` - } - - if (check) { - let result = await which(tool, false) +export function executor(cmd, args = [], opts = {}) { + let 
resolve, reject + let promise = new ProcessPromise((...args) => ([resolve, reject] = args)) - if (!result) { - throw `${tool} does not exist` - } - - return result + promise.ctx = { + cmd, + args, + opts, + resolve, + reject, } - let matches = await findInPath(tool) + setImmediate(() => promise.run()) - if (matches && matches.length > 0) { - return matches[0] - } + return promise +} - return '' +export function executor_cmds(cmd, opts) { + const [file, ...args] = parse_cmd(cmd) + return executor(file, args, opts) } -export async function executor(cmd, args = [], opts = {}) { - let prefix = await getPrefix(process.cwd()) +class ProcessPromise extends Promise { + get parsed() { + const { cmd, args, opts } = this.ctx + const $parsed = { cmd, args, opts, file: undefined } - if (prefix) { - let local = path.join(prefix, 'node_modules', '.bin') - process.env[ENV_PATH_KEY] = `${local}${path.delimiter}${process.env.PATH}` - } + if (IS_WINDOWS && !opts.shell) { + const cmd_file = shebang_detect($parsed) + + if (!RE_EXECUTABLE.test(cmd_file)) { + $parsed.cmd = esc_cmd(normalize($parsed.cmd)) + $parsed.args = $parsed.args.map((arg) => esc_arg(arg, RE_IS_CMD_SHIM.test(cmd_file))) + $parsed.args = ['/d', '/s', '/c', `"${[$parsed.cmd, ...$parsed.args].join(' ')}"`] + $parsed.cmd = process.env.comspec || 'cmd.exe' + $parsed.opts.windowsVerbatimArguments = true + } + } - let commandFile = await which(cmd, true) + $parsed.opts = { + maxBuffer: MAX_BUFFER, + encoding: 'utf8', + buffer: true, + ...$parsed.opts, + env: { + ...$parsed.opts.env, + ...process.env, + PATH: get_env_path($parsed.opts.cwd), + }, + } + + if (IS_WINDOWS && p.basename($parsed.cmd, '.exe') === 'cmd') { + $parsed.args.unshift('/q') + } - if (IS_WINDOWS && !RE_EXECUTABLE.test(commandFile)) { - cmd = getSpawnFileName(commandFile) - args = getSpawnArgs(commandFile, args) - opts.windowsVerbatimArguments = true + return $parsed } - let child = spawn(cmd, args, { - ...opts, - env: { - ...process.env, - ...opts.env, - }, - }) + output(str) { + const LF = typeof str === 'string' ? '\n' : '\n'.charCodeAt() + const CR = typeof str === 'string' ? 
'\r' : '\r'.charCodeAt() - let output = '' + if (str[str.length - 1] === LF) str = str.slice(0, -1) + if (str[str.length - 1] === CR) str = str.slice(0, -1) - if (child.stdout) { - child.stdout.on('data', (data) => { - output += data - }) + return str } - if (child.stderr) { - child.stderr.on('data', (data) => { - output += data - }) - } + run() { + const { resolve, reject } = this.ctx + const { cmd, args, opts } = this.parsed - return new Promise((resolve, reject) => { - child.on('error', reject) + let child = spawn(cmd, args, opts) + let combined = '' + let on_stdout = (data) => (combined += data) + let on_stderr = (data) => (combined += data) + + child.on('error', () => reject(this.output(combined))) child.on('close', (code) => { + const out = this.output(combined) + if (code === 0) { - resolve(output) + resolve(out) } else { - reject(output) + reject(out) } }) - }) -} -/* c8 ignore end */ + child.stdout.on('data', on_stdout) + child.stderr.on('data', on_stderr) + } +} diff --git a/lib/git-workflow.js b/lib/git-workflow.js deleted file mode 100644 index 7462153..0000000 --- a/lib/git-workflow.js +++ /dev/null @@ -1,104 +0,0 @@ -import fs from 'fs' -import { resolve } from 'path' - -import { createGit } from './git.js' - -export function createGitWorkflow({ allowEmpty = false, dotPath = '', rootPath = '' } = {}) { - const git = createGit(rootPath) - const patch = { - unstaged: resolve(dotPath, './nano-staged_partial.patch'), - original: resolve(dotPath, './nano-staged.patch'), - } - - const workflow = { - hasPatch(path = '') { - let has = false - - if (path) { - try { - let buffer = fs.readFileSync(path) - has = buffer && buffer.toString() - } catch { - has = false - } - } - - return Boolean(has) - }, - - async backupOriginalState() { - try { - await git.diff(patch.original) - } catch (e) { - throw e - } - }, - - async backupUnstagedFiles(files = []) { - if (files.length) { - try { - await git.diff(patch.unstaged, files) - await git.checkout(files) - } catch (e) { - throw e - } - } - }, - - async applyModifications(files = []) { - if (files.length) { - try { - if (!(await git.exec(['diff', 'HEAD'])) && !allowEmpty) { - throw 'Prevented an empty git commit!' - } - - await git.add(files) - } catch (e) { - throw e - } - } - }, - - async restoreUnstagedFiles(files = []) { - if (files.length) { - try { - await git.apply(patch.unstaged) - } catch { - try { - await git.apply(patch.unstaged, true) - } catch { - throw 'Merge conflict!!! Unstaged changes not restored.' 
- } - } - } - }, - - async restoreOriginalState() { - try { - await git.checkout('.') - - if (workflow.hasPatch(patch.original)) { - await git.apply(patch.original) - } - } catch (e) { - throw e - } - }, - - async cleanUp() { - try { - if (workflow.hasPatch(patch.original)) { - fs.unlinkSync(patch.original) - } - - if (workflow.hasPatch(patch.unstaged)) { - fs.unlinkSync(patch.unstaged) - } - } catch (e) { - throw e - } - }, - } - - return workflow -} diff --git a/lib/git.js b/lib/git.js index 43cc863..9267582 100644 --- a/lib/git.js +++ b/lib/git.js @@ -1,20 +1,11 @@ -import { join, normalize, resolve } from 'path' -import fs from 'fs' +import * as p from 'path' +import * as fs from 'fs' +import { to_array, normalize } from './utils.js' +import { create_debug } from './debug.js' import { executor } from './executor.js' -import { toArray } from './utils.js' - -const ADDED = 'A'.charCodeAt(0) -const COPIED = 'C'.charCodeAt(0) -const DELETED = 'D'.charCodeAt(0) -const MODIFIED = 'M'.charCodeAt(0) -const RENAMED = 'R'.charCodeAt(0) -const SPACE = ' '.charCodeAt(0) - -export const STAGED_CODE = 1 << 0 -export const CHANGED_CODE = 1 << 1 -export const DELETED_CODE = 1 << 2 +const NO_SUBMODULE_RECURSE = ['-c', 'submodule.recurse=false'] const APPLY_ARGS = ['-v', '--whitespace=nowarn', '--recount', '--unidiff-zero'] const DIFF_ARGS = [ '--binary', @@ -27,120 +18,26 @@ const DIFF_ARGS = [ '--submodule=short', ] -function group(entries = []) { - const deleted = [] - const changed = [] - const working = [] - - for (let { path, type, rename } of entries) { - path = rename || path - - if (!working.includes(path)) { - if (type === CHANGED_CODE) { - changed.push(path) - } - - if (type === DELETED_CODE) { - deleted.push(path) - } +const renamed_path = (path) => (/\x00/.test(path) ? path.split(/\x00/) : [undefined, path]) +const debug = create_debug('nano-staged:git') - working.push(path) - } - } - - return { working, deleted, changed } -} - -export function createGit(cwd = process.cwd()) { +export function create_git(cwd = process.cwd()) { const git = { - cwd, - async exec(args = [], opts = {}) { + debug('Running git command', args) + try { - return await executor('git', args, { + return await executor('git', NO_SUBMODULE_RECURSE.concat(args), { ...opts, - cwd: opts.cwd || git.cwd, + cwd: opts.cwd || cwd, }) } catch (e) { throw e } }, - async diff(fileName, files = [], opts = {}) { - const args = ['diff', ...DIFF_ARGS, '--output', fileName] - - if (files.length) { - args.push('--') - args.push(...files) - } - - await git.exec(args, opts) - }, - - async diffFileName(ref1, ref2, opts = {}) { - const args = ['diff', '--name-only', '--no-ext-diff', '--diff-filter=ACMR', '-z'] - - if (ref1) { - args.push(ref1) - } - if (ref2) { - args.push(ref2) - } - - try { - return await git.exec([...args, '--'], opts) - } catch { - return '' - } - }, - - async apply(patch, allowConflicts = false, opts = {}) { - const args = ['apply', ...APPLY_ARGS] - - if (allowConflicts) { - args.push('-3') - } - - if (patch) { - args.push(patch) - } - - await git.exec(args, opts) - }, - - async getGitPaths(opts = {}) { - const paths = { - root: null, - dot: null, - } - - delete process.env.GIT_DIR - delete process.env.GIT_WORK_TREE - - try { - const line = await git.exec(['rev-parse', '--show-toplevel'], opts) - const git_path = line ? 
normalize(line.trimLeft().replace(/[\r\n]+$/, '')) : '' - const git_config_path = normalize(fs.realpathSync(join(git_path, '.git'))) - - if (git_path) { - paths.root = git_path - paths.dot = git_config_path - } - - if (fs.lstatSync(git_config_path).isFile()) { - const file = fs.readFileSync(git_config_path, 'utf-8').toString() - const path = resolve(git_path, file.replace(/^gitdir: /, '')).trim() - paths.dot = path - } - - return paths - } catch { - return paths - } - }, - async add(paths, opts = {}) { - paths = toArray(paths) + paths = to_array(paths) if (paths.length) { const args = ['add', '-A', '--', ...paths] @@ -149,7 +46,7 @@ export function createGit(cwd = process.cwd()) { }, async checkout(paths, opts = {}) { - paths = toArray(paths) + paths = to_array(paths) if (paths.length) { const args = ['checkout', '-q', '--force', '--', ...paths] @@ -160,106 +57,110 @@ export function createGit(cwd = process.cwd()) { async status(opts = {}) { const env = { GIT_OPTIONAL_LOCKS: '0' } const args = ['status', '-z', '-u'] - const result = [] try { - const raw = await git.exec(args, { env, ...opts }) - - let i = 0 - let lastIndex - - while (i < raw.length) { - if (i + 4 >= raw.length) { - return [] - } - - const entry = { - x: raw.charCodeAt(i++), - y: raw.charCodeAt(i++), - path: '', - rename: undefined, - } - - i++ - - if (entry.x === RENAMED || entry.x === COPIED) { - lastIndex = raw.indexOf('\0', i) - - if (!~lastIndex) { - return [] - } - - entry.rename = raw.substring(i, lastIndex) - i = lastIndex + 1 - } + return await git.exec(args, { ...opts, env: { ...opts.env, env } }) + } catch { + return '' + } + }, - lastIndex = raw.indexOf('\0', i) + async diff_name([ref1, ref2], opts = {}) { + const args = ['diff', '--name-only', '--no-ext-diff', '-z'] - if (!~lastIndex) { - return [] - } + if (opts.staged) { + args.push('--staged') + } - entry.path = raw.substring(i, lastIndex) + if (opts.filter != null && opts.filter.length > 0) { + args.push(`--diff-filter=${opts.filter.trim()}`) + } - if (entry.path[entry.path.length - 1] !== '/') { - result.push(entry) - } + if (ref1) args.push(ref1) + if (ref2) args.push(ref2) - i = lastIndex + 1 - } + try { + const raw = await git.exec(args, opts) + const files = raw ? raw.replace(/\u0000$/, '').split('\u0000') : [] - return result + return files.map((file) => normalize(p.resolve(opts.cwd || cwd, file))) } catch { return [] } }, - async changedFiles(refs = [], opts = {}) { - const [ref1, ref2] = refs - const lines = await git.diffFileName(ref1, ref2, opts) - const files = lines ? 
lines.replace(/\u0000$/, '').split('\u0000') : [] - const result = files.map((path) => ({ type: CHANGED_CODE, path, rename: undefined })) + async diff_patch(file_name, files = [], opts = {}) { + const args = ['diff', ...DIFF_ARGS, '--output', file_name] - return group(result) + if (files.length) { + args.push('--', ...files) + } + + await git.exec(args, opts) }, - async stagedFiles(opts = {}) { - const entries = await git.status(opts) - const result = [] + async apply(patch, allowConflicts = false, opts = {}) { + const args = ['apply', ...APPLY_ARGS] - for (const entry of entries) { - const { x, y } = entry + if (allowConflicts) args.push('-3') + if (patch) args.push(patch) - if (x === ADDED || x === MODIFIED || x === RENAMED || x === COPIED) { - if (y === ADDED || y === COPIED || y === MODIFIED || y === RENAMED) { - entry.type = CHANGED_CODE - } else if (y === DELETED) { - entry.type = DELETED_CODE - } else { - entry.type = STAGED_CODE - } + await git.exec(args, opts) + }, - result.push(entry) - } + async paths(opts = {}) { + const paths = { + root: null, + dot: null, } - return group(result) - }, + delete process.env.GIT_DIR + delete process.env.GIT_WORK_TREE - async unstagedFiles(opts = {}) { - const entries = await git.status(opts) - const result = [] + try { + const cd_up = await git.exec(['rev-parse', '--show-cdup'], opts) + const git_root = cd_up + ? normalize(p.resolve(opts.cwd || cwd, cd_up.trim())) + : opts.cwd || cwd + const git_config_path = normalize(fs.realpathSync(p.join(git_root, '.git'))) + + git_root && (paths.root = git_root) + git_config_path && (paths.dot = git_config_path) + + if (paths.dot && fs.lstatSync(paths.dot).isFile()) { + const file = fs.readFileSync(paths.dot, 'utf-8').toString() + const path = p.resolve(git_root, file.replace(/^gitdir: /, '')).trim() + paths.dot = path + } - for (const entry of entries) { - const { y } = entry + debug('Resolved git directory to be `%s`', paths.root) + debug('Resolved git config directory to be `%s`', paths.dot) - if (y !== SPACE && y !== DELETED) { - entry.type = CHANGED_CODE - result.push(entry) - } + return paths + } catch (err) { + debug('Failed to resolve git repo with error:', err) + return paths } + }, + + async status_name(filtered = () => true, opts = {}) { + const status = await git.status(opts) + const result = status + .split(/\x00(?=[ AMDRCU?!]{2} |$)/) + .filter(Boolean) + .map((line) => { + const [to, from] = renamed_path(line.substring(3)) + const [x, y] = line.substring(0, 2) + const name = normalize(p.resolve(opts.cwd || cwd, to || from)) + + return { x, y, name } + }) + .filter(filtered) + .map(({ name }) => name) + + debug('Found status filtered files', result) - return group(result) + return result }, } diff --git a/lib/glob-to-regex.js b/lib/glob.js similarity index 75% rename from lib/glob-to-regex.js rename to lib/glob.js index 3a4bcda..dc7a602 100644 --- a/lib/glob-to-regex.js +++ b/lib/glob.js @@ -22,16 +22,18 @@ const AT = '@'.charCodeAt(0) const GLOBSTAR = `((?:[^/]*(?:/|$))*)` const WILDCARD = `([^/]*)` -export function globToRegex(glob, opts = {}) { +export function globrex(glob, opts = {}) { let { extended = false, globstar = false, flags = '' } = opts - - let inRange = false - let inGroup = false - let stack = [] - let regex = '' - let pos = 0 - - let code, next + let code, + next, + pos = 0, + regex = '', + stack = [], + inGroup = false, + inRange = false, + add = (str) => { + regex += str + } while (pos < glob.length) { code = glob.charCodeAt(pos) @@ -42,54 +44,54 @@ export function 
globToRegex(glob, opts = {}) { case CARRET: case EQUALS: case POINT: { - regex += `\\${glob[pos]}` + add(`\\${glob[pos]}`) break } case SLASH: { - regex += `\\${glob[pos]}` + add(`\\${glob[pos]}`) if (glob.charCodeAt(pos + 1) === SLASH) { - regex += '?' + add('?') } break } case OPEN_PARENTHESES: { if (stack.length) { - regex += glob[pos] + add(glob[pos]) break } - regex += `\\${glob[pos]}` + add(`\\${glob[pos]}`) break } case CLOSE_PARENTHESES: { if (stack.length) { - regex += glob[pos] + add(glob[pos]) let type = stack.pop() if (type === '@') { - regex += '{1}' + add('{1}') } else if (type === '!') { - regex += '([^/]*)' + add('([^/]*)') } else { - regex += type + add(type) } break } - regex += `\\${glob[pos]}` + add(`\\${glob[pos]}`) break } case PIPE: { if (stack.length) { - regex += glob[pos] + add(glob[pos]) break } - regex += `\\${glob[pos]}` + add(`\\${glob[pos]}`) break } @@ -99,7 +101,7 @@ export function globToRegex(glob, opts = {}) { break } - regex += `\\${glob[pos]}` + add(`\\${glob[pos]}`) break } @@ -113,21 +115,21 @@ export function globToRegex(glob, opts = {}) { case BANG: { if (extended) { if (inRange) { - regex += `^` + add(`^`) break } if (glob.charCodeAt(pos + 1) === OPEN_PARENTHESES) { stack.push(glob[pos]) - regex += `(?!` + add(`(?!`) pos++ break } - regex += `\\${glob[pos]}` + add(`\\${glob[pos]}`) break } - regex += `\\${glob[pos]}` + add(`\\${glob[pos]}`) break } @@ -136,12 +138,12 @@ export function globToRegex(glob, opts = {}) { if (glob.charCodeAt(pos + 1) === OPEN_PARENTHESES) { stack.push(glob[pos]) } else { - regex += `.` + add(`.`) } break } - regex += `\\${glob[pos]}` + add(`\\${glob[pos]}`) break } @@ -151,11 +153,11 @@ export function globToRegex(glob, opts = {}) { let value = glob.slice(pos + 2, next) if (value === 'alnum') { - regex += `(\\w|\\d)` + add(`(\\w|\\d)`) } else if (value === 'space') { - regex += `\\s` + add(`\\s`) } else if (value === 'digit') { - regex += `\\d` + add(`\\d`) } pos = next + 1 @@ -164,54 +166,54 @@ export function globToRegex(glob, opts = {}) { if (extended) { inRange = true - regex += glob[pos] + add(glob[pos]) break } - regex += `\\${glob[pos]}` + add(`\\${glob[pos]}`) break } case CLOSE_SQUARE: { if (extended) { inRange = false - regex += glob[pos] + add(glob[pos]) break } - regex += `\\${glob[pos]}` + add(`\\${glob[pos]}`) break } case OPEN_CURLY: { if (extended) { inGroup = true - regex += `(` + add(`(`) break } - regex += `\\${glob[pos]}` + add(`\\${glob[pos]}`) break } case CLOSE_CURLY: { if (extended) { inGroup = false - regex += `)` + add(`)`) break } - regex += `\\${glob[pos]}` + add(`\\${glob[pos]}`) break } case COMA: { if (inGroup) { - regex += `|` + add(`|`) break } - regex += `\\${glob[pos]}` + add(`\\${glob[pos]}`) break } @@ -230,17 +232,17 @@ export function globToRegex(glob, opts = {}) { let nextChar = glob[pos + 1] if (!globstar) { - regex += `.*` + add(`.*`) } else { - let isGlobstar = + let is_globstar = starCount > 1 && (prevChar === '/' || prevChar === undefined) && (nextChar === '/' || nextChar === undefined) - if (isGlobstar) { - regex += GLOBSTAR + if (is_globstar) { + add(GLOBSTAR) pos++ } else { - regex += WILDCARD + add(WILDCARD) } } @@ -248,7 +250,7 @@ export function globToRegex(glob, opts = {}) { } default: { - regex += glob[pos] + add(glob[pos]) break } } @@ -260,5 +262,5 @@ export function globToRegex(glob, opts = {}) { regex = `^${regex}$` } - return { regex: new RegExp(regex, flags) } + return new RegExp(regex, flags) } diff --git a/lib/index.js b/lib/index.js index f24a3fa..7f57f55 
100644 --- a/lib/index.js +++ b/lib/index.js @@ -1,67 +1,120 @@ -import { getConfig, validConfig } from './config.js' -import { createReporter } from './reporter.js' -import { NanoStagedError } from './errors.js' -import { createRunner } from './runner.js' -import { createGit } from './git.js' -import { toArray } from './utils.js' +import * as p from 'path' + +import { NanoStagedError } from './error.js' +import { create_runner } from './runner.js' +import { create_debug } from './debug.js' +import { create_tasks } from './tasks.js' +import { create_git } from './git.js' +import { to_array } from './utils.js' +import * as config from './config.js' +import { log } from './log.js' +import c from './colors.js' + +const DIVIDER = c.red(c.dim('⎯'.repeat(process.stdout.columns || 30))) +const MAX_CLI_LENGTH = (() => { + if (process.platform === 'darwin') return 262144 + if (process.platform === 'win32') return 8191 + return 131072 +})() + +const debug = create_debug('nano-staged:index') export default async function (options) { const opts = { + max_arg_length: MAX_CLI_LENGTH / 2, stream: process.stderr, - cwd: process.cwd(), - allowEmpty: false, - config: undefined, + allow_empty: false, unstaged: false, - diff: false, + shell: false, ...options, } - const reporter = createReporter(opts.stream) - const git = createGit(opts.cwd) + debug('Running all scripts with options `%s`', opts) + + opts.cwd_is_explicit = !!opts.cwd + opts.cwd = opts.cwd_is_explicit ? p.resolve(opts.cwd) : process.cwd() + debug('Using working directory `%s`', opts.cwd) try { - const config = await getConfig(opts.cwd, opts.config) - const git_paths = await git.getGitPaths() + const type = opts.unstaged ? 'unstaged' : opts.diff ? 'diff' : 'staged' + const git = create_git(opts.cwd) + const git_paths = await git.paths() - if (!config) { - if (typeof opts.config === 'string') { - throw new NanoStagedError('noFileConfig', opts.config) - } else { - throw new NanoStagedError('noConfig') - } + if (!git_paths.root) { + throw new NanoStagedError({ type: 'failure', reason: 'no-git-repo' }) } - if (!validConfig(config)) { - throw new NanoStagedError('invalidConfig') + const files = opts.unstaged + ? await git.status_name(({ y }) => !'D '.includes(y), { + cwd: git_paths.root, + }) + : await git.diff_name(opts.diff || [], { + staged: !opts.diff, + filter: opts.diff_filter || 'ACMR', + cwd: git_paths.root, + }) + + debug(`Loaded list of ${type} files in git:\n%O`, files) + + if (!files.length) { + return log({ type: 'info', detail: 'no-files', runner_type: type }) } - if (!git_paths.root) { - throw new NanoStagedError('noGitRepo') + const configs = await config.search({ + search_dirs: [...files.reduce((set, file) => set.add(p.dirname(file)), new Set())], + config_path: opts.config_path, + config_obj: opts.config, + cwd: opts.cwd, + }) + + if (Object.keys(configs).length === 0) { + throw new NanoStagedError( + opts.config_path + ? 
{ type: 'failure', reason: 'no-path-config', path: opts.config_path } + : { type: 'failure', reason: 'no-config' } + ) } - let files, type - - if (opts.unstaged) { - files = await git.unstagedFiles({ cwd: git_paths.root }) - type = 'unstaged' - } else if (opts.diff && Array.isArray(opts.diff)) { - files = await git.changedFiles(opts.diff, { cwd: git_paths.root }) - type = 'diff' - } else { - files = await git.stagedFiles({ cwd: git_paths.root }) - type = 'staged' + for (const [path, current_config] of Object.entries(configs)) { + if (!config.validate(current_config)) { + throw new NanoStagedError({ type: 'failure', reason: 'invalid-config', path }) + } } - if (!files.working.length) { - reporter.error(new NanoStagedError('noFiles', type)) - return + const files_by_config = config.group_files({ + is_single: opts.config || opts.config_path !== undefined, + configs, + files, + }) + + const { config_tasks, matched_files } = create_tasks({ ...opts, files_by_config }) + + if (!config_tasks.some(({ tasks }) => tasks.some(({ files }) => files.length > 0))) { + return log({ type: 'info', detail: 'no-matching-files' }) } - await createRunner({ ...opts, config, git_paths, files, type }).run() + await create_runner({ + ...opts, + matched_files, + config_tasks, + git_paths, + type, + }).run() + + debug('Tasks were executed successfully!') } catch (errors) { - for (const error of toArray(errors)) { - reporter.error(error) + console.log(DIVIDER) + + for (const e of to_array(errors)) { + if (e instanceof NanoStagedError) { + log(e.event) + } else { + console.error(`Unexpected error: ${e.toString()}`) + console.error(e.stack) + } } + + console.log(DIVIDER) throw errors } } diff --git a/lib/log.js b/lib/log.js new file mode 100644 index 0000000..be8c2a1 --- /dev/null +++ b/lib/log.js @@ -0,0 +1,81 @@ +import c from './colors.js' + +function fail(line) { + console.error(c.red('× ' + line)) +} + +function warn(line) { + console.warn(c.cyan('- ' + line)) +} + +export function log(entry) { + switch (entry.type) { + default: { + throw new Error(`Unknown event type: ${type}`) + } + case 'failure': { + const reason = entry.reason + switch (reason) { + default: { + throw new Error(`Unknown failure type: ${reason}`) + } + case 'no-config': { + fail('Create Nano Staged config.') + break + } + case 'no-path-config': { + fail(`Nano Staged config file ${c.yellow(entry.path)} is not found.`) + break + } + case 'invalid-config': { + fail(`Nano Staged ${c.yellow(entry.path)} config invalid.`) + break + } + case 'no-git-repo': { + fail('Nano Staged didn’t find git directory.') + break + } + case 'merge-conflict': { + fail('Merge conflict! 
Unstaged changes have not been restored.') + break + } + case 'empty-git-commit': { + fail('Prevented an empty git commit!') + break + } + } + break + } + + case 'output': { + const stream = entry.stream + switch (stream) { + default: { + throw new Error(`Unknown output stream: ${stream}`) + } + case 'stderr': { + process.stderr.write(entry.data) + break + } + } + break + } + + case 'info': { + const detail = entry.detail + switch (detail) { + default: { + throw new Error(`Unknown info event detail: ${detail}`) + } + case 'no-files': { + warn(`No ${entry.runner_type} files found.`) + break + } + case 'no-matching-files': { + warn('No files match any configured task.') + break + } + } + } + } +} diff --git a/lib/renderer.js b/lib/renderer.js deleted file mode 100644 index 23915c4..0000000 --- a/lib/renderer.js +++ /dev/null @@ -1,163 +0,0 @@ -import readline from 'readline' -import c from 'picocolors' - -const spinnerMap = new WeakMap() -const spinnerFrames = ['-', '\\', '|', '/'] - -function getSpinner() { - let index = 0 - - return () => { - index = ++index % spinnerFrames.length - return spinnerFrames[index] - } -} - -function getLines(str = '', width = 80) { - return str - .replace(/\u001b[^m]*?m/g, '') - .split('\n') - .reduce((col, l) => (col += Math.max(1, Math.ceil(l.length / width))), 0) -} - -function getStateSymbol(task) { - if (task.state === 'done') { - return c.green('√') - } else if (task.state === 'fail') { - return c.red('×') - } else if (task.state === 'warn') { - return c.yellow('↓') - } else if (task.state === 'run') { - let spinner = spinnerMap.get(task) - - if (!spinner) { - spinner = getSpinner() - spinnerMap.set(task, spinner) - } - - return c.yellow(spinner()) - } else { - return c.gray('*') - } -} - -function getTitles(task) { - const titles = [task.title] - let current = task - - while (current.parent) { - current = current.parent - if (current.title) titles.unshift(current.title) - } - - return titles -} - -function renderTree(tasks, level = 0) { - let output = [] - - for (const task of tasks) { - const title = task.title - const prefix = `${getStateSymbol(task)} ` - - output.push(' '.repeat(level) + prefix + title) - - if (task.tasks && task.tasks.length > 0) { - if (task.state !== 'done') { - output = output.concat(renderTree(task.tasks, level + 1)) - } - } - } - - return output.join('\n') -} - -function renderCI(tasks) { - let output = '' - - for (const task of tasks) { - if (task.state && task.state !== 'end' && task.state !== 'run' && !task.tasks) { - const title = getTitles(task).join(c.yellow(' ≫ ')) - const prefix = `${getStateSymbol(task)} ` - - output += prefix + title + '\n' - task.state = 'end' - } - - if (task.tasks && task.tasks.length > 0) { - output += renderCI(task.tasks) - } - } - - return output -} - -export function createRenderer(stream, { isTTY = true } = {}) { - let tasks = [] - let lines = 0 - let timer - - return { - clear() { - for (let i = 0; i < lines; i++) { - i > 0 && readline.moveCursor(stream, 0, -1) - readline.cursorTo(stream, 0) - readline.clearLine(stream, 0) - } - lines = 0 - }, - - write(str, clear = false) { - if (clear) { - this.clear() - } - - stream.write(str) - }, - - render() { - const output = isTTY ? 
renderTree(tasks) : renderCI(tasks) - - if (isTTY) { - this.write(output, true) - lines = getLines(output, stream.columns) - } else { - this.write(output) - } - - return this - }, - - spin(task) { - task && tasks.push(task) - return this.render() - }, - - loop() { - timer = setTimeout(() => this.loop(), 130) - return this.spin() - }, - - start(task) { - tasks.push(task) - - if (timer) return this - if (isTTY) stream.write(`\x1b[?25l`) - - return this.loop() - }, - - stop() { - if (timer) timer = clearTimeout(timer) - - if (isTTY) { - this.write(`${renderTree(tasks)}\n`, true) - this.write(`\x1b[?25h`) - } else { - this.write(renderCI(tasks)) - } - - return this - }, - } -} diff --git a/lib/reporter.js b/lib/reporter.js deleted file mode 100644 index f73c8a0..0000000 --- a/lib/reporter.js +++ /dev/null @@ -1,29 +0,0 @@ -import c from 'picocolors' - -import { NanoStagedError, TaskRunnerError } from './errors.js' - -export function createReporter(stream = process.stderr) { - function print(lines) { - stream.write(lines) - } - - const reporter = { - error(err) { - if (err instanceof NanoStagedError) { - const msg = err.message.replace(/\*([^*]+)\*/g, c.yellow('$1')) - - if (['noFiles', 'noMatchingFiles'].includes(err.type)) { - print(`${c.cyan(`-`)} ${msg}\n`) - } else { - print(`${c.red('×')} ${c.red(msg)}\n`) - } - } else if (err instanceof TaskRunnerError) { - print(`\n${err.message || err}\n`) - } else { - print(`\n${c.red(err.message || err)}\n`) - } - }, - } - - return reporter -} diff --git a/lib/runner.js b/lib/runner.js index 54caf70..03e38c4 100644 --- a/lib/runner.js +++ b/lib/runner.js @@ -1,154 +1,285 @@ -import { createGitWorkflow } from './git-workflow.js' -import { createCmdRunner } from './cmd-runner.js' -import { createReporter } from './reporter.js' -import { createRenderer } from './renderer.js' -import { NanoStagedError } from './errors.js' +import * as p from 'path' +import * as fs from 'fs' -export function createRunner({ allowEmpty, git_paths, config, stream, files, type, cwd }) { - const reporter = createReporter(stream) - const renderer = createRenderer(stream, { isTTY: !process.env.CI }) +import { to_array, split_in_chunks, str_argv_to_array, normalize } from './utils.js' +import { executor, executor_cmds } from './executor.js' +import { create_spinner } from './spinner.js' +import { NanoStagedError } from './error.js' +import { create_debug } from './debug.js' +import { create_git } from './git.js' +import c from './colors.js' - const runner = { - async run() { - const changes = [...files.changed, ...files.deleted] +const debug = create_debug('nano-staged:runner') - const gitWorkflow = createGitWorkflow({ - allowEmpty, - rootPath: git_paths.root, - dotPath: git_paths.dot, - }) +export function create_runner({ + max_arg_length, + matched_files, + config_tasks, + allow_empty, + git_paths, + stream, + shell, + type, + cwd, +}) { + const git = create_git(git_paths.root) - const cmdRunner = createCmdRunner({ - rootPath: git_paths.root, - files: files.working, - config, - type, - cwd, - }) + const patch_unstaged = p.resolve(git_paths.dot, './nano-staged_partial.patch') + const patch_original = p.resolve(git_paths.dot, './nano-staged.patch') + + const should_backup = type !== 'diff' && type !== 'unstaged' + const root = { skip_all: false, revert: false, clear: true, errors: [], children: [] } + + const is_tty = !('NS_DEBUG' in process.env) && !process.env.CI && process.env.TERM !== 'dumb' + const spinner = create_spinner(root, { is_tty, stream }) + + const has_patch = 
(path) => { + debug('Reading patch `%s`', path) + + try { + const buffer = path && fs.readFileSync(path) + return buffer && buffer.toString() ? true : false + } catch { + return false + } + } + + const run_task = async ({ pattern, files, commands }, spinner) => { + const task_group = [] + + await spinner.group(async (spinner) => { + for (const cmd of to_array(commands)) { + const is_fn = typeof cmd === 'function' + const commands = is_fn ? await cmd({ filenames: files, type }) : cmd + + for (const command of to_array(commands)) { + const [cmd, ...args] = str_argv_to_array(command) + + debug('cmd:', cmd) + debug('args:', args) + + task_group.push( + spinner(command, async ({ update }) => { + if (spinner.skip) { + update({ state: 'warning' }) + return + } - const cmdTasks = await cmdRunner.generateCmdTasks() + try { + const options = { + cwd: /^git(\.exe)?/i.test(cmd) ? git_paths.root : cwd, + shell, + } - if (!cmdTasks.some((task) => task.file_count > 0)) { - reporter.error(new NanoStagedError('noMatchingFiles')) - return + if (shell) { + await executor_cmds(is_fn ? command : `${command} ${files.join(' ')}`, options) + } else { + await executor(cmd, is_fn ? args : args.concat(files), options) + } + } catch (e) { + spinner.skip = true + + let error = (e.message || e).trim() + let msg = error ? '\n' + error : '' + let fail = c.inverse(c.bold(c.red(` FAIL `))) + + update({ title: c.red(command) }) + throw `${fail} ${c.red(pattern)} ${c.dim('>')} ${c.red(command)}:${msg}` + } + }) + ) + } } - let enabled = false - let revert = false - let clear = true - let errors = [] - let tasks = [] + return task_group + }) - tasks.push({ - title: `Preparing nano-staged`, - run: async () => { - try { - await gitWorkflow.backupOriginalState() - } catch (e) { - enabled = true - throw e - } - }, - }) + return task_group.map(({ task }) => task.error).filter(Boolean) + } - tasks.push({ - title: `Backing up unstaged changes for staged files`, - run: async () => { - try { - await gitWorkflow.backupUnstagedFiles(changes) - } catch (e) { - revert = true - throw e + const run_tasks = async (tasks, spinner) => { + const result = await Promise.all( + tasks.map((task) => { + const count = task.files.length + const count_title = `${count} ${count > 1 ? 'files' : 'file'}` + const suffix = count > 0 ? 
count_title : `no files` + const title = task.pattern + c.dim(` - ${suffix}`) + + return spinner(title, async ({ spinner, update }) => { + if (count === 0) { + update({ state: 'warning' }) + return } - }, - skipped: () => enabled || type === 'unstaged' || type === 'diff' || changes.length === 0, + + await run_task(task, spinner).then((errors) => { + if (errors.length > 0) { + throw errors + } + }) + }) }) + ) - tasks.push({ - title: `Running tasks for ${type} files`, - run: async (task) => { - task.tasks = cmdTasks + return result + .map(({ error }) => error) + .filter(Boolean) + .join('\n\n') + } + const handle_error = (e) => { + root.errors.push(e) + throw e + } + + const runner = { + async run() { + spinner.start() + + await spinner('Preparing nano-staged...', async () => { + try { + debug('Backing up original state...') + await git.diff_patch(patch_original) + root.partially_staged = await git.status_name( + ({ x, y }) => 'AMRC'.includes(x) && 'ACMRD'.includes(y) + ) + debug('Done backing up original state!') + } catch (e) { + root.skip_all = true + handle_error(e) + } + }) + + if (!root.skip_all && should_backup && root.partially_staged.length > 0) { + await spinner('Backing up unstaged changes for staged files...', async () => { try { - await cmdRunner.run(task) + debug('Backing up usntaged files...') + await git.diff_patch(patch_unstaged, root.partially_staged) + await git.checkout(root.partially_staged) + debug('Done backing up usntaged files!') } catch (e) { - revert = true - throw e + root.revert = true + handle_error(e) } - }, - skipped: () => enabled || revert, - }) + }) + } - tasks.push({ - title: `Applying modifications from tasks`, - run: async () => { + if (!root.skip_all && !root.revert) { + await spinner(`Running tasks for ${type} files...`, async ({ spinner, update }) => { try { - await gitWorkflow.applyModifications(files.working) + const result = await Promise.all( + config_tasks.map(({ index, path, tasks, files, chunks_len }) => { + const config_name = path ? normalize(p.relative(cwd, path)) : 'Config object' + const skipped = tasks.every(({ files }) => files.length === 0) + const files_title = `${files.length} ${files.length > 1 ? 'files' : 'file'}` + const suffix = skipped ? 'no tasks to run' : files_title + const chunk_title = chunks_len > 1 ? `(chunk ${index + 1}/${chunks_len})...` : '' + const title = config_name + c.dim(` - ${suffix} ${skipped ? 
'' : chunk_title}`) + + return spinner(title, async ({ spinner, update }) => { + if (skipped) { + update({ state: 'warning' }) + return + } + + await run_tasks(tasks, spinner).then((errors) => { + if (errors.length > 0) { + throw errors + } + }) + }) + }) + ) + + const errors = result.map(({ error }) => error).filter(Boolean) + + if (errors.length > 0) { + throw new NanoStagedError({ + type: 'output', + stream: 'stderr', + data: errors.join('\n\n') + '\n', + }) + } } catch (e) { - revert = true - throw e + root.revert = true + handle_error(e) } - }, - skipped: () => enabled || revert || type === 'unstaged' || type === 'diff', - }) + }) + } - tasks.push({ - title: `Restoring unstaged changes for staged files`, - run: async () => { + if (!root.skip_all && !root.revert && should_backup) { + await spinner(`Applying modifications from tasks...`, async () => { try { - await gitWorkflow.restoreUnstagedFiles(changes) + debug('Adding task modifications to index...') + for (const chunk of split_in_chunks(matched_files, max_arg_length)) { + await git.add(chunk) + } + + if ((await git.diff_name([], { staged: true })).length === 0 && !allow_empty) { + throw new NanoStagedError({ type: 'failure', reason: 'empty-git-commit' }) + } + debug('Done adding task modifications to index!') } catch (e) { - throw e + root.revert = true + handle_error(e) } - }, - skipped: () => - enabled || revert || type === 'unstaged' || type === 'diff' || changes.length === 0, - }) + }) + } - tasks.push({ - title: `Restoring to original state because of errors`, - run: async () => { + if (!root.skip_all && !root.revert && should_backup && root.partially_staged.length > 0) { + await spinner(`Restoring unstaged changes for staged files...`, async () => { try { - await gitWorkflow.restoreOriginalState() - } catch (e) { - clear = false - throw e + debug('Restoring unstaged changes...') + await git.apply(patch_unstaged) + } catch (apply_error) { + debug('Error while restoring changes:') + debug(apply_error) + debug('Retrying with 3-way merge') + + try { + await git.apply(patch_unstaged, true) + } catch (three_way_apply_error) { + debug('Error while restoring unstaged changes using 3-way merge:') + debug(three_way_apply_error) + handle_error(new NanoStagedError({ type: 'failure', reason: 'merge-conflict' })) + } } - }, - skipped: () => enabled || !revert, - }) + }) + } - tasks.push({ - title: `Cleaning up temporary to patch files`, - run: async () => { + if (!root.skip_all && root.revert) { + await spinner(`Restoring to original state because of errors...`, async () => { try { - await gitWorkflow.cleanUp() + debug('Restoring original state...') + await git.checkout('.') + + if (has_patch(patch_original)) { + await git.apply(patch_original) + } + debug('Done restoring original state!') } catch (e) { - throw e + root.clear = false + handle_error(e) } - }, - skipped: () => enabled || !clear, - }) - - for (const task of tasks) { - if (task.skipped ? 
!task.skipped() : true) { - renderer.start(task) + }) + } + if (!root.skip_all && root.clear) { + await spinner('Cleaning up temporary to patch files...', async () => { try { - task.state = 'run' - await task.run(task) - task.state = 'done' + debug('Removing temp files...') + if (has_patch(patch_original)) fs.unlinkSync(patch_original) + if (has_patch(patch_unstaged)) fs.unlinkSync(patch_unstaged) + debug('Done removing temp files!') } catch (e) { - task.state = 'fail' - errors.push(e) + handle_error(e) } - } + }) } - renderer.stop() + spinner.stop() - if (errors.length) { - throw errors + if (root.errors.length > 0) { + throw root.errors } }, } diff --git a/lib/spinner.js b/lib/spinner.js new file mode 100644 index 0000000..16ce5ae --- /dev/null +++ b/lib/spinner.js @@ -0,0 +1,227 @@ +import readline from 'readline' +import c from './colors.js' + +let spinners = new WeakMap() +let renderer + +function get_spinner(task) { + let spinner = spinners.get(task) + + if (!spinner) { + let i = 0 + spinner = () => '\\|/-'[i++ % 4] + spinners.set(task, spinner) + } + + return spinner() +} + +function get_lines(str = '', width = 80) { + return str + .replace(/\u001b[^m]*?m/g, '') + .split('\n') + .reduce((col, l) => (col += Math.max(1, Math.ceil(l.length / width))), 0) +} + +function get_symbol(task) { + const nesting = task.children.length > 0 + const symbols = { + success: c.green('√'), + error: c.red(nesting ? '❯' : '×'), + warning: c.yellow('↓'), + loading: c.yellow(nesting ? '❯' : get_spinner(task)), + } + + return symbols[task.state] || c.gray('*') +} + +function get_titles(task) { + let titles = [task.title] + let current = task + + while (current.parent) { + current = current.parent + if (current.title) titles.unshift(current.title) + } + + return titles +} + +function multiline_render(root, level = 0) { + let output = [] + + for (const task of root.children) { + const title = task.title + const prefix = `${get_symbol(task)} ` + + output.push(' '.repeat(level) + prefix + title) + + if (task.children.length > 0 && task.state !== 'success') { + output = output.concat(multiline_render(task, level + 1)) + } + } + + return output.join('\n') +} + +function line_render(root) { + let output = '' + + for (const task of root.children) { + if ( + task.state !== 'done' && + task.state !== 'pending' && + task.state !== 'loading' && + task.children.length === 0 + ) { + const title = get_titles(task).join(c.yellow(' ≫ ')) + const prefix = `${get_symbol(task)} ` + + output += prefix + title + '\n' + task.state = 'done' + } + + if (task.children.length > 0) { + output += line_render(task) + } + } + + return output +} + +function create_renderer(root, { stream = process.stderr, is_tty = true } = {}) { + let lines = 0 + let timer + + const renderer = { + is_tty, + + clear() { + for (let i = 0; i < lines; i++) { + i > 0 && readline.moveCursor(stream, 0, -1) + readline.cursorTo(stream, 0) + readline.clearLine(stream, 0) + } + }, + + write(str, is_clear = false) { + if (is_clear) { + this.clear() + lines = 0 + } + + stream.write(str) + }, + + render() { + const output = this.is_tty ? multiline_render(root) : line_render(root) + + this.write(output, this.is_tty) + lines = get_lines(output, stream.columns) + + return this + }, + + loop() { + timer = this.is_tty ? setTimeout(() => this.loop(), 130) : true + return this.render() + }, + + start() { + if (timer) return this + if (this.is_tty) stream.write(`\x1b[?25l`) + + return this.loop() + }, + + stop() { + if (!timer) return this + + timer = this.is_tty ? 
clearTimeout(timer) : false + + if (this.is_tty) { + this.write(`${multiline_render(root)}\n`, true) + this.write(`\x1b[?25h`) + } else { + this.write(line_render(root)) + } + }, + } + + return renderer +} + +function register(root, title, fn, opts) { + const index = root.children.push({ + error: undefined, + state: 'pending', + parent: root, + children: [], + skip: false, + title, + }) + + const task = root.children[index - 1] + + return { + task, + async run() { + try { + task.state = 'loading' + + await fn({ + spinner: create_spinner(task, opts), + update(next) { + for (let key in next) { + task[key] = next[key] + } + }, + }) + } catch (error) { + task.state = 'error' + task.error = error + } finally { + if (task.state === 'loading') { + task.state = 'success' + } + + if (!renderer.is_tty) { + renderer.render() + } + } + }, + } +} + +export const create_spinner = (root, opts) => { + if (!renderer) { + renderer = create_renderer(root, opts) + } + + const spinner = async (title, fn) => { + const task = register(root, title, fn, opts) + await task.run() + + return { + get state() { + return task.task.state + }, + get error() { + return task.task.error + }, + } + } + + spinner.skip = false + spinner.start = renderer.start.bind(renderer) + spinner.stop = renderer.stop.bind(renderer) + spinner.group = async (create_tasks) => { + const tasks = await create_tasks((title, fn) => register(root, title, fn, opts)) + + for (const task of tasks) { + await task.run() + } + } + + return spinner +} diff --git a/lib/tasks.js b/lib/tasks.js new file mode 100644 index 0000000..b043b32 --- /dev/null +++ b/lib/tasks.js @@ -0,0 +1,57 @@ +import * as p from 'path' + +import { split_in_chunks, normalize } from './utils.js' +import { create_debug } from './debug.js' +import { globrex } from './glob.js' + +const debug = create_debug('nano-staged:create-tasks') + +export function create_tasks({ files_by_config, cwd_is_explicit, max_arg_length, cwd }) { + const is_multiple = Object.keys(files_by_config).length > 1 + const matched_files = new Set() + const config_tasks = [] + + for (const [path, { config, files }] of Object.entries(files_by_config)) { + const group_cwd = is_multiple && !cwd_is_explicit ? 
p.dirname(path) : cwd + const chunks = [...split_in_chunks(files, max_arg_length)] + const chunks_len = chunks.length + + for (const [index, files] of chunks.entries()) { + const tasks = [] + + for (const [pattern, commands] of Object.entries(config)) { + const matches = globrex(pattern, { extended: true, globstar: pattern.includes('/') }) + const task_files = [] + + for (let file of files) { + file = normalize(p.relative(group_cwd, file)) + + if (!pattern.startsWith('../') && (file.startsWith('..') || p.isAbsolute(file))) { + continue + } + + if (matches.test(file)) { + file = normalize(p.resolve(group_cwd, file)) + matched_files.add(file) + task_files.push(file) + } + } + + const task = { files: task_files, commands, pattern } + debug('Generated task: \n%O', task) + + tasks.push(task) + } + + config_tasks.push({ + chunks_len, + tasks, + index, + files, + path, + }) + } + } + + return { config_tasks, matched_files } +} diff --git a/lib/utils.js b/lib/utils.js index d683b8f..e7f6607 100644 --- a/lib/utils.js +++ b/lib/utils.js @@ -1,74 +1,64 @@ -import { fileURLToPath } from 'url' -import { readFileSync } from 'fs' -import process from 'process' -import { join } from 'path' -import c from 'picocolors' -import tty from 'tty' -import os from 'os' - const REG_STR = /([^\s'"]([^\s'"]*(['"])([^\3]*?)\3)+[^\s'"]*)|[^\s'"]+|(['"])([^\5]*?)\5/gi -export function toArray(val) { - return Array.isArray(val) ? val : [val] +export function normalize(path) { + let len = path.length, + prefix = '' + + if (path === '\\' || path === '/') return '/' + if (len <= 1) return path + if (len > 4 && path[3] === '\\') { + const ch = path[2] + + if ((ch === '?' || ch === '.') && path.slice(0, 2) === '\\\\') { + path = path.slice(2) + prefix = '//' + } + } + + return prefix + path.split(/[/\\]+/).join('/') } -export function showVersion(print) { - let pkg = readFileSync(join(fileURLToPath(import.meta.url), '../..', 'package.json')) - let pkgJson = JSON.parse(pkg.toString()) - print.write(`Nano Staged ${c.bold(`v${pkgJson.version}`)}\n`) +export function to_array(val) { + return Array.isArray(val) ? val : [val] } -export function stringArgvToArray(str = '') { - let args = [] - let match +export function* split_in_chunks(array, max_chunk_len) { + let cur = [] + let len = 0 - while (true) { - match = REG_STR.exec(str) + for (const value of array) { + let new_len = len + value.length - if (!match) { - return args + if (new_len > max_chunk_len && cur.length > 0) { + yield cur + cur = [] + new_len = value.length } - for (let arg of [match[1], match[6], match[0]]) { - if (typeof arg === 'string') { - args.push(arg) - } - } + cur.push(value) + len = new_len } -} -function hasFlags(...flags) { - return flags - .reduce((acc, flag) => [...acc, '-' + flag, '--' + flag], []) - .some((flag) => process.argv.includes(flag)) + if (cur.length > 0) { + yield cur + } } -export function getForceColorLevel() { - if (hasFlags('no-color', 'no-colors', 'color=false', 'color=never')) { - return 0 - } else if (process.env.FORCE_COLOR) { - return Math.min(Number.parseInt(process.env.FORCE_COLOR, 10), 3) - } else if (process.env.FORCE_NO_COLOR) { - return 0 - } else if (!tty.isatty(1)) { - return 0 - } else if (process.env.TERM === 'dumb') { - return 0 - } else if (process.platform === 'win32') { - const osRelease = os.release().split('.') - if (Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10_586) { - return Number(osRelease[2]) >= 14_931 ? 
3 : 2 +export function str_argv_to_array(str = '') { + let args = [] + let match = null + + do { + match = REG_STR.exec(str) + + if (match !== null) { + for (let arg of [match[1], match[6], match[0]]) { + if (typeof arg === 'string') { + args.push(arg) + } + } } - return 1 - } else if (process.env.COLORTERM === 'truecolor') { - return 3 - } else if (/-256(color)?$/i.test(process.env.TERM)) { - return 2 - } else if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(process.env.TERM)) { - return 1 - } else if (process.env.COLORTERM) { - return 1 - } else { - return 0 - } + } while (match !== null) + + return args } diff --git a/package.json b/package.json index 0002f66..b5da05f 100644 --- a/package.json +++ b/package.json @@ -6,28 +6,30 @@ "license": "MIT", "repository": "usmanyunusov/nano-staged", "type": "module", - "bin": "./lib/bin.js", - "exports": "./lib/index.js", + "bin": { + "nano-staged": "./lib/bin.js" + }, + "exports": { + ".": "./lib/index.js", + "./package.json": "./package.json" + }, "engines": { "node": "^12.20.0 || ^14.13.1 || >=16.0.0" }, "scripts": { "lint": "prettier --write lib/**/*.js", - "unit": "cross-env CI=true node --loader=esmock --no-warnings ./node_modules/uvu/bin.js test \"\\.test\\.js$\"", + "unit": "cross-env LC_ALL=en_US.utf8 CI=true node --loader=esmock --no-warnings ./node_modules/uvu/bin.js test/integration \"\\.test\\.js$\"", "test": "c8 pnpm unit", - "bench": "node bench/running-time/index.js && node bench/size/index.js" - }, - "dependencies": { - "picocolors": "^1.0.0" + "bench": "node bench/running-time/index.js" }, "devDependencies": { - "c8": "^7.11.2", + "c8": "^7.11.3", "clean-publish": "^3.4.5", "cross-env": "^7.0.3", "esmock": "^1.7.5", "fs-extra": "^10.1.0", "nanodelay": "^2.0.2", - "nanoid": "^3.3.3", + "nanoid": "^3.3.4", "prettier": "^2.6.2", "uvu": "^0.5.3" }, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 9052017..1c5b998 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1,28 +1,24 @@ -lockfileVersion: 5.3 +lockfileVersion: 5.4 specifiers: - c8: ^7.11.2 + c8: ^7.11.3 clean-publish: ^3.4.5 cross-env: ^7.0.3 esmock: ^1.7.5 fs-extra: ^10.1.0 nanodelay: ^2.0.2 - nanoid: ^3.3.3 - picocolors: ^1.0.0 + nanoid: ^3.3.4 prettier: ^2.6.2 uvu: ^0.5.3 -dependencies: - picocolors: 1.0.0 - devDependencies: - c8: 7.11.2 + c8: 7.11.3 clean-publish: 3.4.5 cross-env: 7.0.3 esmock: 1.7.5 fs-extra: 10.1.0 nanodelay: 2.0.2 - nanoid: 3.3.3 + nanoid: 3.3.4 prettier: 2.6.2 uvu: 0.5.3 @@ -37,20 +33,20 @@ packages: engines: {node: '>=8'} dev: true - /@jridgewell/resolve-uri/3.0.6: - resolution: {integrity: sha512-R7xHtBSNm+9SyvpJkdQl+qrM3Hm2fea3Ef197M3mUug+v+yR+Rhfbs7PBtcBUVnIWJ4JcAdjvij+c8hXS9p5aw==} + /@jridgewell/resolve-uri/3.0.7: + resolution: {integrity: sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA==} engines: {node: '>=6.0.0'} dev: true - /@jridgewell/sourcemap-codec/1.4.11: - resolution: {integrity: sha512-Fg32GrJo61m+VqYSdRSjRXMjQ06j8YIYfcTqndLYVAaHmroZHLJZCydsWBOTDqXS2v+mjxohBWEMfg97GXmYQg==} + /@jridgewell/sourcemap-codec/1.4.13: + resolution: {integrity: sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w==} dev: true - /@jridgewell/trace-mapping/0.3.9: - resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + /@jridgewell/trace-mapping/0.3.13: + resolution: {integrity: sha512-o1xbKhp9qnIAoHJSWd6KlCZfqslL4valSF81H8ImioOAxluWYWOpWkpyktY2vnt4tbrX9XYaxovq6cgowaJp2w==} 
dependencies: - '@jridgewell/resolve-uri': 3.0.6 - '@jridgewell/sourcemap-codec': 1.4.11 + '@jridgewell/resolve-uri': 3.0.7 + '@jridgewell/sourcemap-codec': 1.4.13 dev: true /@nodelib/fs.scandir/2.1.5: @@ -108,8 +104,8 @@ packages: fill-range: 7.0.1 dev: true - /c8/7.11.2: - resolution: {integrity: sha512-6ahJSrhS6TqSghHm+HnWt/8Y2+z0hM/FQyB1ybKhAR30+NYL9CTQ1uwHxuWw6U7BHlHv6wvhgOrH81I+lfCkxg==} + /c8/7.11.3: + resolution: {integrity: sha512-6YBmsaNmqRm9OS3ZbIiL2EZgi1+Xc4O24jL3vMYGE6idixYuGdy76rIfIdltSKDj9DpLNrcXSonUTR1miBD0wA==} engines: {node: '>=10.12.0'} hasBin: true dependencies: @@ -275,8 +271,8 @@ packages: is-glob: 4.0.3 dev: true - /glob/7.2.0: - resolution: {integrity: sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==} + /glob/7.2.3: + resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} dependencies: fs.realpath: 1.0.0 inflight: 1.0.6 @@ -433,8 +429,8 @@ packages: engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} dev: true - /nanoid/3.3.3: - resolution: {integrity: sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==} + /nanoid/3.3.4: + resolution: {integrity: sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true dev: true @@ -474,10 +470,6 @@ packages: engines: {node: '>=8'} dev: true - /picocolors/1.0.0: - resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} - dev: false - /picomatch/2.3.1: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: {node: '>=8.6'} @@ -511,7 +503,7 @@ packages: resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} hasBin: true dependencies: - glob: 7.2.0 + glob: 7.2.3 dev: true /run-parallel/1.2.0: @@ -580,7 +572,7 @@ packages: engines: {node: '>=8'} dependencies: '@istanbuljs/schema': 0.1.3 - glob: 7.2.0 + glob: 7.2.3 minimatch: 3.1.2 dev: true @@ -611,7 +603,7 @@ packages: resolution: {integrity: sha512-HcvgY/xaRm7isYmyx+lFKA4uQmfUbN0J4M0nNItvzTvH/iQ9kW5j/t4YSR+Ge323/lrgDAWJoF46tzGQHwBHFw==} engines: {node: '>=10.12.0'} dependencies: - '@jridgewell/trace-mapping': 0.3.9 + '@jridgewell/trace-mapping': 0.3.13 '@types/istanbul-lib-coverage': 2.0.4 convert-source-map: 1.8.0 dev: true diff --git a/test/git.test.js b/test/git.test.js deleted file mode 100644 index 2762398..0000000 --- a/test/git.test.js +++ /dev/null @@ -1,247 +0,0 @@ -import { equal, is } from 'uvu/assert' -import { join, resolve } from 'path' -import { test } from 'uvu' -import fs from 'fs-extra' - -import { writeFile, makeDir, appendFile, fixture, removeFile } from './utils/index.js' -import { createGit } from '../lib/git.js' - -let cwd = fixture('simple/git-test') -let patchPath = join(cwd, 'nano-staged.patch') - -async function execGit(args) { - let git = createGit(cwd) - await git.exec(args, { cwd }) -} - -test.before.each(async () => { - await makeDir(cwd) - await execGit(['init']) - await execGit(['config', 'user.name', '"test"']) - await execGit(['config', 'user.email', '"test@test.com"']) - await appendFile('README.md', '# Test\n', cwd) - await execGit(['add', 'README.md']) - await execGit(['commit', '-m initial commit']) -}) - -test.after.each(async () => { - await removeFile(cwd) -}) - -test('should return 
"null" when git dir is not found', async () => { - let git = createGit(cwd) - git.exec = async () => null - - let git_paths = await git.getGitPaths() - - is(git_paths.root, null) - is(git_paths.dot, null) -}) - -test('should return "null" when run error', async () => { - let git = createGit(cwd) - git.exec = async () => Promise.reject() - - let git_paths = await git.getGitPaths() - - is(git_paths.root, null) - is(git_paths.dot, null) -}) - -test('should return path when git dir is found', async () => { - let git = createGit(cwd) - let git_paths = await git.getGitPaths() - - is(git_paths.root, fixture('simple/git-test')) - is( - git_paths.dot, - process.platform === 'win32' - ? fixture('simple/git-test') + '\\.git' - : fixture('simple/git-test') + '/.git' - ) -}) - -test('should create patch to file', async () => { - let git = createGit(cwd) - - await writeFile('README.md', '# Test\n## Test', cwd) - await git.diff(patchPath) - - let patch = await fs.readFile(patchPath) - is( - patch.toString(), - 'diff --git a/README.md b/README.md\n' + - 'index 8ae0569..a07c500 100644\n' + - '--- a/README.md\n' + - '+++ b/README.md\n' + - '@@ -1,0 +2 @@\n' + - '+## Test\n' + - '\\ No newline at end of file\n' - ) -}) - -test('should create patch to files', async () => { - let git = createGit(cwd) - - await appendFile('a.js', 'let a = {};', cwd) - await git.add(join(cwd, 'a.js')) - await removeFile(join(cwd, 'a.js')) - await git.diff(patchPath, [join(cwd, 'a.js')]) - - let patch = await fs.readFile(patchPath) - is( - patch.toString(), - 'diff --git a/a.js b/a.js\n' + - 'deleted file mode 100644\n' + - 'index 36b56ef..0000000\n' + - '--- a/a.js\n' + - '+++ /dev/null\n' + - '@@ -1 +0,0 @@\n' + - '-let a = {};\n' + - '\\ No newline at end of file\n' - ) -}) - -test('should checkout to files', async () => { - let git = createGit(cwd) - - await appendFile('a.js', 'let a = {};', cwd) - await git.add('.') - await writeFile('a.js', 'let b = {};', cwd) - await git.checkout(join(cwd, 'a.js')) - - equal(await git.status(), [{ x: 65, y: 32, path: 'a.js', rename: undefined }]) -}) - -test('should apply to patch file', async () => { - let git = createGit(cwd) - - await writeFile('README.md', '# Test\n## Test', cwd) - await git.diff(patchPath) - await git.apply(patchPath) - - is((await fs.stat(patchPath)).isFile(), true) -}) - -test('should error when not apply patch file', async () => { - let git = createGit(cwd) - - try { - await git.apply('test.patch', true) - } catch (error) { - is(error, "error: can't open patch 'test.patch': No such file or directory\n") - } -}) - -test('should add to files', async () => { - let git = createGit(cwd) - - await appendFile('a.js', 'let a = {};', cwd) - await git.add(['.']) - - equal(await git.status(), [{ x: 65, y: 32, path: 'a.js', rename: undefined }]) -}) - -test('should parse status correctly', async () => { - let git = createGit(cwd) - - await appendFile('a.js', 'let a = {};', cwd) - await appendFile('b.js', 'let a = {};', cwd) - await git.add(['b.js']) - - equal(await git.status(), [ - { x: 65, y: 32, path: 'b.js', rename: undefined }, - { x: 63, y: 63, path: 'a.js', rename: undefined }, - ]) - - git.exec = async () => '' - equal(await git.status(), []) - - git.exec = async () => ' ' - equal(await git.status(), []) - - git.exec = async () => 'M rename.js' - equal(await git.status(), []) - - git.exec = async () => 'RM rename.js' - equal(await git.status(), []) - - git.exec = async () => ' ' - equal(await git.status(), []) - - git.exec = async () => { - throw new Error('fatal: not 
a git repository (or any of the parent directories): .git') - } - equal(await git.status(), []) -}) - -test('should diff to file correctly', async () => { - let git = createGit(cwd) - - is(await git.diffFileName(), '') - - await writeFile('README.md', '# Test\n## Test', cwd) - await execGit(['add', 'README.md']) - await execGit(['commit', '-m change README.md']) - - is(await git.diffFileName('HEAD', 'HEAD^1'), 'README.md\x00') - - git.exec = async () => { - throw new Error('Error') - } - - is(await git.diffFileName(), '') -}) - -test('should get diff file correctly', async () => { - let git = createGit(cwd) - - git.diffFileName = async () => 'add.js\x00' - equal(await git.changedFiles(), { working: ['add.js'], deleted: [], changed: ['add.js'] }) - - git.diffFileName = async () => '' - equal(await git.changedFiles(), { working: [], deleted: [], changed: [] }) -}) - -test('should get staged files correctly', async () => { - let git = createGit(cwd) - - git.exec = async () => - '?? new.js\x00A stage.js\x00MM mod.js\x00AM test/add.js\x00RM rename.js\x00origin.js\x00CM' + - ' test/copy.js\x00test/base.js\x00MD remove.js\x00D delete.js\x00' - - equal(await git.stagedFiles(), { - working: ['stage.js', 'mod.js', 'test/add.js', 'rename.js', 'test/copy.js', 'remove.js'], - deleted: ['remove.js'], - changed: ['mod.js', 'test/add.js', 'rename.js', 'test/copy.js'], - }) -}) - -test('should get unstaged files correctly', async () => { - let git = createGit(cwd) - - git.exec = async () => - 'A add.js\x00AD add_remove.js\x00MM mod.js\x00?? test/add.js\x00RM rename.js\x00origin.js\x00CM' + - ' test/copy.js\x00test/base.js\x00MD remove.js\x00D delete.js\x00' - - equal(await git.unstagedFiles(), { - working: ['mod.js', 'test/add.js', 'rename.js', 'test/copy.js'], - deleted: [], - changed: ['mod.js', 'test/add.js', 'rename.js', 'test/copy.js'], - }) -}) - -test('should handle git worktrees', async () => { - let git = createGit(cwd) - let work_tree_dir = resolve(cwd, 'worktree') - - await execGit(['branch', 'test']) - await execGit(['worktree', 'add', work_tree_dir, 'test']) - - equal(await git.getGitPaths({ cwd: work_tree_dir }), { - root: fixture('simple/git-test/worktree'), - dot: fixture('simple/git-test/.git/worktrees/worktree'), - }) -}) - -test.run() diff --git a/test/integration/allow-empty.test.js b/test/integration/allow-empty.test.js new file mode 100644 index 0000000..561f05f --- /dev/null +++ b/test/integration/allow-empty.test.js @@ -0,0 +1,73 @@ +import * as assert from 'uvu/assert' +import { suite } from 'uvu' + +import { pretty_js, ugly_js } from './fixtures/files.js' +import { NanoStagedTestRig } from './utils/test-rig.js' +import { prettier_write } from './fixtures/configs.js' + +const test = suite('integration') + +test.before.each(async (ctx) => { + try { + ctx.rig = new NanoStagedTestRig() + await ctx.rig.git_init() + } catch (e) { + console.error('uvu before error', e) + process.exit(1) + } +}) + +test.after.each(async (ctx) => { + try { + await ctx.rig.cleanup() + } catch (e) { + console.error('uvu after error', e) + process.exit(1) + } +}) + +test('fails when without `--allow-empty`, to prevent an empty git commit', async ({ rig }) => { + try { + await rig.write('.nano-staged.json', JSON.stringify(prettier_write)) + await rig.write('test.js', pretty_js) + + await rig.git.exec(['add', '.']) + await rig.git.exec(['commit', '-m', 'committed pretty file']) + + await rig.write('test.js', ugly_js) + await rig.git.exec(['add', 'test.js']) + + await rig.commit() + } catch (error) { + 
assert.match(error, 'Prevented an empty git commit!') + assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '2') + assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'committed pretty file') + assert.is(await rig.read('test.js'), pretty_js) + } +}) + +test('with `--allow-empty` creates empty commit', async ({ rig }) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_write)) + await rig.write('test.js', pretty_js) + + await rig.git.exec(['add', '.']) + await rig.git.exec(['commit', '-m', 'committed pretty file']) + + await rig.write('test.js', ugly_js) + await rig.git.exec(['add', 'test.js']) + + await rig.commit({ + nano_staged: ['--allow-empty'], + git_commit: ['-m', 'test', '--allow-empty'], + }) + + const commit_count = await rig.git.exec(['rev-list', '--count', 'HEAD']) + const last_commit = await rig.git.exec(['log', '-1', '--pretty=%B']) + const file = await rig.read('test.js') + + assert.is(commit_count.trim(), '3') + assert.is(last_commit.trim(), 'test') + assert.is(file, pretty_js) +}) + +test.run() diff --git a/test/integration/base.test.js b/test/integration/base.test.js new file mode 100644 index 0000000..0cfd630 --- /dev/null +++ b/test/integration/base.test.js @@ -0,0 +1,148 @@ +import * as assert from 'uvu/assert' +import { suite } from 'uvu' +import path from 'path' + +import { prettier_write, prettier_list_diff } from './fixtures/configs.js' +import { pretty_js, ugly_js, invalid_js } from './fixtures/files.js' +import { NanoStagedTestRig } from './utils/test-rig.js' + +const test = suite('integration') + +test.before.each(async (ctx) => { + try { + ctx.rig = new NanoStagedTestRig() + await ctx.rig.git_init() + } catch (e) { + console.error('uvu before error', e) + process.exit(1) + } +}) + +test.after.each(async (ctx) => { + try { + await ctx.rig.cleanup() + } catch (e) { + console.error('uvu after error', e) + process.exit(1) + } +}) + +test('commits entire staged file when no errors from linter', async ({ rig }) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_list_diff)) + + await rig.write('test file.js', pretty_js) + await rig.git.exec(['add', 'test file.js']) + + await rig.commit() + + assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '2') + assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'test') + assert.is(await rig.read('test file.js'), pretty_js) +}) + +test('commits entire staged file when no errors and linter modifies file', async ({ rig }) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_write)) + + await rig.write('test.js', ugly_js) + await rig.git.exec(['add', 'test.js']) + + await rig.write('test2.js', ugly_js) + await rig.git.exec(['add', 'test2.js']) + + await rig.commit() + + assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '2') + assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'test') + assert.is(await rig.read('test.js'), pretty_js) + assert.is(await rig.read('test2.js'), pretty_js) +}) + +test('fails to commit entire staged file when errors from linter', async ({ rig }) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_list_diff)) + + await rig.write('test.js', ugly_js) + await rig.git.exec(['add', 'test.js']) + + const status = await rig.git.exec(['status']) + + await rig.commit().catch(async (error) => { + assert.match(error, 'Restoring to original state because of errors') + assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '1') + 
assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'initial commit') + assert.is(await rig.git.exec(['status']), status) + assert.is(await rig.read('test.js'), ugly_js) + }) +}) + +test('fails to commit entire staged file when errors from linter and linter modifies files', async ({ + rig, +}) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_write)) + + await rig.write('test.js', invalid_js) + await rig.git.exec(['add', 'test.js']) + + const status = await rig.git.exec(['status']) + + await rig.commit().catch(async (error) => { + assert.match(error, 'Restoring to original state because of errors') + assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '1') + assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'initial commit') + assert.is(await rig.git.exec(['status']), status) + assert.is(await rig.read('test.js'), invalid_js) + }) +}) + +test('clears unstaged changes when linter applies same changes', async ({ rig }) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_write)) + + await rig.append('test.js', ugly_js) + await rig.git.exec(['add', 'test.js']) + + await rig.remove(path.join(rig.temp, 'test.js')) + await rig.append('test.js', pretty_js) + + await rig.commit() + + assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '2') + assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'test') + assert.is((await rig.git.exec(['show', 'HEAD:test.js'])).trim(), pretty_js.trim()) + assert.match((await rig.git.exec(['status'])).trim(), 'nothing added to commit') + assert.is(await rig.read('test.js'), pretty_js) +}) + +test('clears unstaged changes when linter applies same changes', async ({ rig }) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_write)) + + await rig.append('test.js', ugly_js) + await rig.git.exec(['add', 'test.js']) + + await rig.remove(path.join(rig.temp, 'test.js')) + await rig.append('test.js', pretty_js) + + await rig.commit() + + assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '2') + assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'test') + assert.is((await rig.git.exec(['show', 'HEAD:test.js'])).trim(), pretty_js.trim()) + assert.match((await rig.git.exec(['status'])).trim(), 'nothing added to commit') + assert.is(await rig.read('test.js'), pretty_js) +}) + +test('runs chunked tasks when necessary', async ({ rig }) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_list_diff)) + + await rig.write('test.js', pretty_js) + await rig.git.exec(['add', 'test.js']) + await rig.write('--test2.js', pretty_js) + await rig.git.exec(['add', '--', '--test2.js']) + + await rig.commit({ nano_staged: ['--max-arg-length', 10] }) + + assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '2') + assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'test') + assert.is(await rig.read('test.js'), pretty_js) + assert.is(await rig.read('--test2.js'), pretty_js) +}) + +test.run() diff --git a/test/integration/binary-files.test.js b/test/integration/binary-files.test.js new file mode 100644 index 0000000..1dae10e --- /dev/null +++ b/test/integration/binary-files.test.js @@ -0,0 +1,41 @@ +import * as assert from 'uvu/assert' +import { suite } from 'uvu' + +import { prettier_list_diff } from './fixtures/configs.js' +import { NanoStagedTestRig } from './utils/test-rig.js' + +const test = suite('integration') + +test.before.each(async (ctx) => { + try { + ctx.rig = 
new NanoStagedTestRig() + await ctx.rig.git_init() + } catch (e) { + console.error('uvu before error', e) + process.exit(1) + } +}) + +test.after.each(async (ctx) => { + try { + await ctx.rig.cleanup() + } catch (e) { + console.error('uvu after error', e) + process.exit(1) + } +}) + +test('handles binary files', async ({ rig }) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_list_diff)) + await rig.write('.gitattributes', 'binary\n') + await rig.write('binary', Buffer.from('Hello, World!', 'binary')) + + await rig.git.exec(['add', 'binary']) + await rig.commit({ nano_staged: ['-d'] }) + + assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '2') + assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'test') + assert.is(Buffer.from(await rig.read('binary'), 'binary').toString(), 'Hello, World!') +}) + +test.run() diff --git a/test/integration/diff-options.test.js b/test/integration/diff-options.test.js new file mode 100644 index 0000000..32e7ea1 --- /dev/null +++ b/test/integration/diff-options.test.js @@ -0,0 +1,67 @@ +import * as assert from 'uvu/assert' +import { suite } from 'uvu' + +import { prettier_list_diff } from './fixtures/configs.js' +import { NanoStagedTestRig } from './utils/test-rig.js' +import { ugly_js } from './fixtures/files.js' + +const test = suite('integration') + +test.before.each(async (ctx) => { + try { + ctx.rig = new NanoStagedTestRig() + await ctx.rig.git_init() + } catch (e) { + console.error('uvu before error', e) + process.exit(1) + } +}) + +test.after.each(async (ctx) => { + try { + await ctx.rig.cleanup() + } catch (e) { + console.error('uvu after error', e) + process.exit(1) + } +}) + +test('supports overriding file list using --diff', async ({ rig }) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_list_diff)) + await rig.append('test.js', ugly_js) + await rig.git.exec(['add', 'test.js']) + await rig.git.exec(['commit', '-m', 'ugly'], { cwd: rig.temp }) + + const hashes = (await rig.git.exec(['log', '--format=format:%H'])).trim().split('\n') + + rig.no_commit = true + + await rig + .commit({ + nano_staged: ['--diff', `${hashes[1]}...${hashes[0]}`], + }) + .catch((error) => { + assert.match(error, 'prettier --list-different') + assert.match(error, 'test.js') + }) + + assert.is(hashes.length, 2) +}) + +test('supports overriding default --diff-filter', async ({ rig }) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_list_diff)) + await rig.append('test.js', ugly_js) + await rig.git.exec(['add', 'test.js']) + + rig.no_commit = true + + await rig + .commit({ + nano_staged: ['--diff-filter', 'D'], + }) + .then((result) => { + assert.match(result, 'No staged files found.') + }) +}) + +test.run() diff --git a/test/integration/file-resurrection.test.js b/test/integration/file-resurrection.test.js new file mode 100644 index 0000000..5fdafbb --- /dev/null +++ b/test/integration/file-resurrection.test.js @@ -0,0 +1,95 @@ +import * as assert from 'uvu/assert' +import { suite } from 'uvu' +import fs from 'fs-extra' +import path from 'path' + +import { prettier_list_diff } from './fixtures/configs.js' +import { NanoStagedTestRig } from './utils/test-rig.js' +import { pretty_js, ugly_js } from './fixtures/files.js' + +const test = suite('integration') + +test.before.each(async (ctx) => { + try { + ctx.rig = new NanoStagedTestRig() + await ctx.rig.git_init() + } catch (e) { + console.error('uvu before error', e) + process.exit(1) + } +}) + +test.after.each(async (ctx) => { + try { 
+ await ctx.rig.cleanup() + } catch (e) { + console.error('uvu after error', e) + process.exit(1) + } +}) + +test('does not resurrect removed files due to git bug when tasks pass', async ({ rig }) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_list_diff)) + + await rig.remove('README.md') + await rig.write('test.js', pretty_js) + await rig.git.exec(['add', 'test.js']) + + await rig.commit() + + assert.is(fs.existsSync(path.join(rig.temp, 'README.md')), false) +}) + +test('does not resurrect removed files in complex case', async ({ rig }) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_list_diff)) + await rig.write('test.js', pretty_js) + await rig.git.exec(['add', 'test.js']) + await rig.remove('test.js') + + const readme = await rig.read('README.md') + + await rig.remove('README.md') + await rig.git.exec(['add', 'README.md']) + await rig.write('README_NEW.md', readme) + await rig.git.exec(['add', 'README_NEW.md']) + await rig.remove('README_NEW.md') + + assert.match( + await rig.git.exec(['status', '--porcelain']), + 'RD README.md -> README_NEW.md\nAD test.js\n?? .nano-staged.json' + ) + + await rig.commit() + + assert.match( + await rig.git.exec(['status', '--porcelain']), + ' D README_NEW.md\n D test.js\n?? .nano-staged.json' + ) + + assert.is(fs.existsSync(path.join(rig.temp, 'test.js')), false) + assert.is(fs.existsSync(path.join(rig.temp, 'README_NEW.md')), false) +}) + +test('does not resurrect removed files due to git bug when tasks fail', async ({ rig }) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_list_diff)) + await rig.remove('README.md') + await rig.write('test.js', ugly_js) + await rig.git.exec(['add', 'test.js']) + + assert.match( + await rig.git.exec(['status', '--porcelain']), + ' D README.md\nA test.js\n?? .nano-staged.json' + ) + + await rig.commit({ nano_staged: ['--allow-empty'] }).catch((error) => { + assert.match(error, 'Restoring to original state because of errors...') + }) + + assert.match( + await rig.git.exec(['status', '--porcelain']), + ' D README.md\nA test.js\n?? 
.nano-staged.json' + ) + assert.is(fs.existsSync(path.join(rig.temp, 'README_NEW.md')), false) +}) + +test.run() diff --git a/test/integration/files-outside-cwd.test.js b/test/integration/files-outside-cwd.test.js new file mode 100644 index 0000000..dc909cf --- /dev/null +++ b/test/integration/files-outside-cwd.test.js @@ -0,0 +1,58 @@ +import * as assert from 'uvu/assert' +import * as path from 'path' +import { suite } from 'uvu' + +import { pretty_js, ugly_js } from './fixtures/files.js' +import { NanoStagedTestRig } from './utils/test-rig.js' +import { prettier_write } from './fixtures/configs.js' + +const test = suite('integration') + +test.before.each(async (ctx) => { + try { + ctx.rig = new NanoStagedTestRig() + await ctx.rig.git_init() + } catch (e) { + console.error('uvu before error', e) + process.exit(1) + } +}) + +test.after.each(async (ctx) => { + try { + await ctx.rig.cleanup() + } catch (e) { + console.error('uvu after error', e) + process.exit(1) + } +}) + +test('does not care about staged file outside current cwd with another staged file', async ({ + rig, +}) => { + await rig.write('file.js', ugly_js) + await rig.write('deeper/file.js', ugly_js) + await rig.write('deeper/.nano-staged.json', JSON.stringify(prettier_write)) + await rig.git.exec(['add', '.']) + + await rig.commit(undefined, path.join(rig.temp, 'deeper')) + + assert.is(await rig.read('deeper/file.js'), pretty_js) + assert.is(await rig.read('file.js'), ugly_js) +}) + +test('not care about staged file outside current cwd without any other staged files', async ({ + rig, +}) => { + await rig.write('file.js', ugly_js) + await rig.write('deeper/.nano-staged.json', JSON.stringify(prettier_write)) + await rig.git.exec(['add', '.']) + + await rig.commit(undefined, path.join(rig.temp, 'deeper')).then((result) => { + assert.match(result, 'No files match any configured task.') + }) + + assert.is(await rig.read('file.js'), ugly_js) +}) + +test.run() diff --git a/test/integration/fixtures/configs.js b/test/integration/fixtures/configs.js new file mode 100644 index 0000000..18f9584 --- /dev/null +++ b/test/integration/fixtures/configs.js @@ -0,0 +1,2 @@ +export const prettier_write = { '*.js': 'prettier --write' } +export const prettier_list_diff = { '*.js': 'prettier --list-different' } diff --git a/test/integration/fixtures/files.js b/test/integration/fixtures/files.js new file mode 100644 index 0000000..e2189df --- /dev/null +++ b/test/integration/fixtures/files.js @@ -0,0 +1,13 @@ +export const pretty_js = `module.exports = { + foo: "bar", +}; +` + +export const ugly_js = `module.exports = { + 'foo': 'bar' +} +` + +export const invalid_js = `module.exports = { + 'foo': 'bar' +` diff --git a/test/integration/git-amend.test.js b/test/integration/git-amend.test.js new file mode 100644 index 0000000..0709cc9 --- /dev/null +++ b/test/integration/git-amend.test.js @@ -0,0 +1,49 @@ +import * as assert from 'uvu/assert' +import { suite } from 'uvu' + +import { prettier_list_diff } from './fixtures/configs.js' +import { NanoStagedTestRig } from './utils/test-rig.js' +import { pretty_js } from './fixtures/files.js' + +const test = suite('integration') + +test.before.each(async (ctx) => { + try { + ctx.rig = new NanoStagedTestRig() + await ctx.rig.git_init() + } catch (e) { + console.error('uvu before error', e) + process.exit(1) + } +}) + +test.after.each(async (ctx) => { + try { + await ctx.rig.cleanup() + } catch (e) { + console.error('uvu after error', e) + process.exit(1) + } +}) + +test('works when amending previous commit 
with unstaged changes', async ({ rig }) => { + await rig.write('.nano-staged.json', JSON.stringify(prettier_list_diff)) + await rig.append('README.md', '\n## Amended\n') + await rig.git.exec(['add', 'README.md']) + await rig.append('README.md', '\n## Edited\n') + await rig.append('test-untracked.js', pretty_js) + await rig.commit({ git_commit: ['--amend', '--no-edit'] }) + + assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '1') + assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'initial commit') + + assert.is(await rig.read('README.md'), '# Test\n\n## Amended\n\n## Edited\n') + assert.is(await rig.read('test-untracked.js'), pretty_js) + + const status = await rig.git.exec(['status']) + assert.match(status, 'modified: README.md') + assert.match(status, 'test-untracked.js') + assert.match(status, 'no changes added to commit') +}) + +test.run() diff --git a/test/integration/git-submodules.test.js b/test/integration/git-submodules.test.js new file mode 100644 index 0000000..70eb989 --- /dev/null +++ b/test/integration/git-submodules.test.js @@ -0,0 +1,62 @@ +import * as assert from 'uvu/assert' +import { suite } from 'uvu' +import path from 'path' + +import { prettier_list_diff } from './fixtures/configs.js' +import { NanoStagedTestRig } from './utils/test-rig.js' +import { pretty_js } from './fixtures/files.js' + +const test = suite('integration') + +test.before.each(async (ctx) => { + try { + ctx.rig = new NanoStagedTestRig() + await ctx.rig.git_init() + } catch (e) { + console.error('uvu before error', e) + process.exit(1) + } +}) + +test.after.each(async (ctx) => { + try { + await ctx.rig.cleanup() + } catch (e) { + console.error('uvu after error', e) + process.exit(1) + } +}) + +test('handles git submodules', async ({ rig }) => { + let submodule_dir = path.resolve(rig.temp, 'submodule-temp') + + await rig.append('.nano-staged.json', JSON.stringify(prettier_list_diff)) + await rig.ensure(submodule_dir) + await rig.git.exec(['init'], { cwd: submodule_dir }) + await rig.git.exec(['config', 'user.name', '"test"'], { cwd: submodule_dir }) + await rig.git.exec(['config', 'user.email', '"test@test.com"'], { cwd: submodule_dir }) + await rig.append('README.md', '# Test\n', submodule_dir) + await rig.git.exec(['add', 'README.md'], { cwd: submodule_dir }) + await rig.git.exec(['commit', '-m initial commit'], { cwd: submodule_dir }) + + await rig.git.exec(['submodule', 'add', '--force', './submodule-temp', './submodule']) + submodule_dir = path.resolve(rig.temp, 'submodule') + + await rig.git.exec(['config', 'user.name', '"test"'], { cwd: submodule_dir }) + await rig.git.exec(['config', 'user.email', '"test@test.com"'], { cwd: submodule_dir }) + + await rig.append('test.js', pretty_js, submodule_dir) + await rig.git.exec(['add', 'test.js'], { cwd: submodule_dir }) + + await rig.commit(undefined, submodule_dir) + + const commit_count = await rig.git.exec(['rev-list', '--count', 'HEAD'], { cwd: submodule_dir }) + const last_commit = await rig.git.exec(['log', '-1', '--pretty=%B'], { cwd: submodule_dir }) + const file = await rig.read('test.js', submodule_dir) + + assert.is(commit_count.trim(), '2') + assert.is(last_commit.trim(), 'test') + assert.is(file, pretty_js) +}) + +test.run() diff --git a/test/integration/git-worktree.test.js b/test/integration/git-worktree.test.js new file mode 100644 index 0000000..4d41edd --- /dev/null +++ b/test/integration/git-worktree.test.js @@ -0,0 +1,53 @@ +import * as assert from 'uvu/assert' +import { suite } from 'uvu' 
+import path from 'path' + +import { prettier_list_diff } from './fixtures/configs.js' +import { NanoStagedTestRig } from './utils/test-rig.js' +import { pretty_js } from './fixtures/files.js' + +const test = suite('integration') + +test.before.each(async (ctx) => { + try { + ctx.rig = new NanoStagedTestRig() + await ctx.rig.git_init() + } catch (e) { + console.error('uvu before error', e) + process.exit(1) + } +}) + +test.after.each(async (ctx) => { + try { + await ctx.rig.cleanup() + } catch (e) { + console.error('uvu after error', e) + process.exit(1) + } +}) + +test('handles git submodules', async ({ rig }) => { + let work_tree_dir = path.resolve(rig.temp, 'worktree-temp') + + await rig.append('.nano-staged.json', JSON.stringify(prettier_list_diff)) + await rig.ensure(work_tree_dir) + + await rig.git.exec(['branch', 'test']) + await rig.git.exec(['worktree', 'add', work_tree_dir, 'test']) + + await rig.append('test.js', pretty_js, work_tree_dir) + await rig.git.exec(['add', 'test.js'], { cwd: work_tree_dir }) + + await rig.commit(undefined, work_tree_dir) + + const commit_count = await rig.git.exec(['rev-list', '--count', 'HEAD'], { cwd: work_tree_dir }) + const last_commit = await rig.git.exec(['log', '-1', '--pretty=%B'], { cwd: work_tree_dir }) + const file = await rig.read('test.js', work_tree_dir) + + assert.is(commit_count.trim(), '2') + assert.is(last_commit.trim(), 'test') + assert.is(file, pretty_js) +}) + +test.run() diff --git a/test/integration/merge-conflict.test.js b/test/integration/merge-conflict.test.js new file mode 100644 index 0000000..9acb71e --- /dev/null +++ b/test/integration/merge-conflict.test.js @@ -0,0 +1,149 @@ +import * as assert from 'uvu/assert' +import { suite } from 'uvu' + +import { prettier_write, prettier_list_diff } from './fixtures/configs.js' +import { NanoStagedTestRig } from './utils/test-rig.js' + +const test = suite('integration') + +test.before.each(async (ctx) => { + try { + ctx.rig = new NanoStagedTestRig() + await ctx.rig.git_init() + } catch (e) { + console.error('uvu before error', e) + process.exit(1) + } +}) + +test.after.each(async (ctx) => { + try { + await ctx.rig.cleanup() + } catch (e) { + console.error('uvu after error', e) + process.exit(1) + } +}) + +test('handles merge conflicts', async ({ rig }) => { + const file_in_branchA = `module.exports = "foo";\n` + const file_in_branchB = `module.exports = 'bar'\n` + const file_in_branchB_fixed = `module.exports = "bar";\n` + const merge_conflict = + '<<<<<<< HEAD\n' + + 'module.exports = "foo";\n' + + '=======\n' + + 'module.exports = "bar";\n' + + '>>>>>>> branch-b\n' + + { + await rig.git.exec(['checkout', '-b', 'branch-a']) + await rig.append('test.js', file_in_branchA) + await rig.append('.nano-staged.json', JSON.stringify(prettier_write)) + await rig.git.exec(['add', '.']) + await rig.commit({ git_commit: ['-m commit a'] }) + + assert.is(await rig.read('test.js'), file_in_branchA) + } + + await rig.git.exec(['checkout', 'master']) + + { + await rig.git.exec(['checkout', '-b', 'branch-b']) + await rig.append('test.js', file_in_branchB) + await rig.append('.nano-staged.json', JSON.stringify(prettier_write)) + await rig.git.exec(['add', '.']) + await rig.commit({ git_commit: ['-m commit b'] }) + + assert.is(await rig.read('test.js'), file_in_branchB_fixed) + } + + await rig.git.exec(['checkout', 'master']) + await rig.git.exec(['merge', 'branch-a']) + + assert.is(await rig.read('test.js'), file_in_branchA) + assert.match(await rig.git.exec(['log', '-1', '--pretty=%B']), 'commit a') 
+ + await rig.git.exec(['merge', 'branch-b']).catch((error) => { + assert.match(error, 'Merge conflict in test.js') + }) + + assert.match(await rig.read('test.js'), merge_conflict) + + await rig.write('test.js', file_in_branchB) + + assert.is(await rig.read('test.js'), file_in_branchB) + + await rig.git.exec(['add', '.']) + await rig.commit({ git_commit: ['--no-edit'] }) + + assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '4') + + const log = await rig.git.exec(['log', '-1', '--pretty=%B']) + + assert.match(log, `Merge branch 'branch-b`) + assert.match(log, `Conflicts:`) + assert.match(log, `test.js`) + assert.is(await rig.read('test.js'), file_in_branchB_fixed) +}) + +test('handles merge conflict when task errors', async ({ rig }) => { + const file_in_branchA = `module.exports = "foo";\n` + const file_in_branchB = `module.exports = 'bar'\n` + const file_in_branchB_fixed = `module.exports = "bar";\n` + const merge_conflict = + '<<<<<<< HEAD\n' + + 'module.exports = "foo";\n' + + '=======\n' + + 'module.exports = "bar";\n' + + '>>>>>>> branch-b\n' + + { + await rig.git.exec(['checkout', '-b', 'branch-a']) + await rig.append('test.js', file_in_branchA) + await rig.append('.nano-staged.json', JSON.stringify(prettier_write)) + await rig.git.exec(['add', '.']) + await rig.commit({ git_commit: ['-m commit a'] }) + + assert.is(await rig.read('test.js'), file_in_branchA) + } + + await rig.git.exec(['checkout', 'master']) + + { + await rig.git.exec(['checkout', '-b', 'branch-b']) + await rig.append('test.js', file_in_branchB) + await rig.append('.nano-staged.json', JSON.stringify(prettier_write)) + await rig.git.exec(['add', '.']) + await rig.commit({ git_commit: ['-m commit b'] }) + + assert.is(await rig.read('test.js'), file_in_branchB_fixed) + } + + await rig.git.exec(['checkout', 'master']) + await rig.git.exec(['merge', 'branch-a']) + + assert.is(await rig.read('test.js'), file_in_branchA) + assert.match(await rig.git.exec(['log', '-1', '--pretty=%B']), 'commit a') + + await rig.git.exec(['merge', 'branch-b']).catch((error) => { + assert.match(error, 'Merge conflict in test.js') + }) + + assert.match(await rig.read('test.js'), merge_conflict) + + await rig.write('test.js', file_in_branchB) + + assert.is(await rig.read('test.js'), file_in_branchB) + + await rig.git.exec(['add', '.']) + await rig.write('.nano-staged.json', JSON.stringify(prettier_list_diff)) + await rig.commit().catch(async (error) => { + assert.match(error, 'Restoring to original state because of errors') + assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '2') + assert.match(await rig.git.exec(['status']), 'All conflicts fixed but you are still merging') + assert.is(await rig.read('test.js'), file_in_branchB) + }) +}) + +test.run() diff --git a/test/integration/multiple-config-files.test.js b/test/integration/multiple-config-files.test.js new file mode 100644 index 0000000..bf17257 --- /dev/null +++ b/test/integration/multiple-config-files.test.js @@ -0,0 +1,77 @@ +import * as assert from 'uvu/assert' +import * as path from 'path' +import { suite } from 'uvu' + +import { NanoStagedTestRig } from './utils/test-rig.js' + +const test = suite('integration') + +test.before.each(async (ctx) => { + try { + ctx.rig = new NanoStagedTestRig() + await ctx.rig.git_init() + } catch (e) { + console.error('uvu before error', e) + process.exit(1) + } +}) + +test.after.each(async (ctx) => { + try { + await ctx.rig.cleanup() + } catch (e) { + console.error('uvu after error', e) + process.exit(1) + } 
+})
+
+test('supports multiple configuration files', async ({ rig }) => {
+  await rig.write('file.js', '')
+  await rig.write('deeper/file.js', '')
+  await rig.write('deeper/even/file.js', '')
+  await rig.write('deeper/even/deeper/file.js', '')
+  await rig.write('a/very/deep/file/path/file.js', '')
+
+  await rig.git.exec(['add', '.'])
+
+  const echo_js_config = (echo) =>
+    `module.exports = { '*.js': ({ filenames }) => filenames.map((f) => \`echo "${echo}" > \${f}\`) }`
+
+  await rig.write('.nano-staged.js', echo_js_config('level-0'))
+  await rig.write('deeper/.nano-staged.js', echo_js_config('level-1'))
+  await rig.write('deeper/even/.nano-staged.js', echo_js_config('level-2'))
+
+  await rig.commit({ nano_staged: ['--shell', '-d'] })
+
+  assert.match(await rig.read('file.js'), 'level-0')
+  assert.match(await rig.read('deeper/file.js'), 'level-1')
+  assert.match(await rig.read('deeper/even/file.js'), 'level-2')
+  assert.match(await rig.read('deeper/even/deeper/file.js'), 'level-2')
+  assert.match(await rig.read('a/very/deep/file/path/file.js'), 'level-0')
+})
+
+test('ignores multiple config files outside cwd', async ({ rig }) => {
+  await rig.write('file.js', '')
+  await rig.write('deeper/file.js', '')
+  await rig.write('deeper/even/file.js', '')
+  await rig.write('deeper/even/deeper/file.js', '')
+  await rig.write('a/very/deep/file/path/file.js', '')
+
+  const echo_js_config = (echo) =>
+    `module.exports = { '*.js': ({ filenames }) => filenames.map((f) => \`echo "${echo}" > \${f}\`) }`
+
+  await rig.write('.nano-staged.js', echo_js_config('level-0'))
+  await rig.write('deeper/.nano-staged.js', echo_js_config('level-1'))
+  await rig.write('deeper/even/.nano-staged.js', echo_js_config('level-2'))
+
+  await rig.git.exec(['add', '.'])
+  await rig.commit({ nano_staged: ['--shell'] }, path.join(rig.temp, 'deeper'))
+
+  assert.match(await rig.read('file.js'), '')
+  assert.match(await rig.read('deeper/file.js'), 'level-1')
+  assert.match(await rig.read('deeper/even/file.js'), 'level-2')
+  assert.match(await rig.read('deeper/even/deeper/file.js'), 'level-2')
+  assert.match(await rig.read('a/very/deep/file/path/file.js'), '')
+})
+
+test.run()
diff --git a/test/integration/not-git-dir.test.js b/test/integration/not-git-dir.test.js
new file mode 100644
index 0000000..2e2df36
--- /dev/null
+++ b/test/integration/not-git-dir.test.js
@@ -0,0 +1,36 @@
+import * as assert from 'uvu/assert'
+import { suite } from 'uvu'
+
+import { NanoStagedTestRig } from './utils/test-rig.js'
+import { prettier_write } from './fixtures/configs.js'
+
+const test = suite('integration')
+
+test.before.each(async (ctx) => {
+  try {
+    ctx.rig = new NanoStagedTestRig()
+  } catch (e) {
+    console.error('uvu before error', e)
+    process.exit(1)
+  }
+})
+
+test.after.each(async (ctx) => {
+  try {
+    await ctx.rig.cleanup()
+  } catch (e) {
+    console.error('uvu after error', e)
+    process.exit(1)
+  }
+})
+
+test('fails when not in a git directory', async ({ rig }) => {
+  try {
+    await rig.write('.nano-staged.json', JSON.stringify(prettier_write))
+    await rig.commit()
+  } catch (error) {
+    assert.match(error, 'Nano Staged didn’t find git directory.')
+  }
+})
+
+test.run()
diff --git a/test/integration/parent-globs.test.js b/test/integration/parent-globs.test.js
new file mode 100644
index 0000000..9b34f32
--- /dev/null
+++ b/test/integration/parent-globs.test.js
@@ -0,0 +1,48 @@
+import * as assert from 'uvu/assert'
+import * as path from 'path'
+import { suite } from 'uvu'
+
+import { NanoStagedTestRig } from './utils/test-rig.js'
+
+const test = suite('integration') + +test.before.each(async (ctx) => { + try { + ctx.rig = new NanoStagedTestRig() + await ctx.rig.git_init() + } catch (e) { + console.error('uvu before error', e) + process.exit(1) + } +}) + +test.after.each(async (ctx) => { + try { + await ctx.rig.cleanup() + } catch (e) { + console.error('uvu after error', e) + process.exit(1) + } +}) + +test('works with parent glob "../*.js"', async ({ rig }) => { + await rig.write('file.js', '') + await rig.write('deeper/file.js', '') + await rig.write('deeper/even/file.js', '') + await rig.write('deeper/even/deeper/file.js', '') + await rig.write('a/very/deep/file/path/file.js', '') + await rig.git.exec(['add', '.']) + await rig.write( + 'deeper/even/.nano-staged.js', + `module.exports = { '../*.js': ({ filenames }) => filenames.map((f) => \`echo level-2 > \${f}\`) }` + ) + await rig.commit({ nano_staged: ['--shell', '-d'] }, path.join(rig.temp, 'deeper/even')) + + assert.match(await rig.read('file.js'), '') + assert.match(await rig.read('deeper/file.js'), 'level-2') + assert.match(await rig.read('deeper/even/file.js'), '') + assert.match(await rig.read('deeper/even/deeper/file.js'), '') + assert.match(await rig.read('a/very/deep/file/path/file.js'), '') +}) + +test.run() diff --git a/test/integration/partially-staged-changes.test.js b/test/integration/partially-staged-changes.test.js new file mode 100644 index 0000000..992fb1c --- /dev/null +++ b/test/integration/partially-staged-changes.test.js @@ -0,0 +1,123 @@ +import * as assert from 'uvu/assert' +import { suite } from 'uvu' + +import { prettier_write, prettier_list_diff } from './fixtures/configs.js' +import { invalid_js, pretty_js, ugly_js } from './fixtures/files.js' +import { NanoStagedTestRig } from './utils/test-rig.js' + +const test = suite('integration') + +test.before.each(async (ctx) => { + try { + ctx.rig = new NanoStagedTestRig() + await ctx.rig.git_init() + } catch (e) { + console.error('uvu before error', e) + process.exit(1) + } +}) + +test.after.each(async (ctx) => { + try { + await ctx.rig.cleanup() + } catch (e) { + console.error('uvu after error', e) + process.exit(1) + } +}) + +test('commits partial change from partially staged file when no errors from linter', async ({ + rig, +}) => { + const appended = `\nconsole.log("test");\n` + + await rig.append('.nano-staged.json', JSON.stringify(prettier_list_diff)) + await rig.append('test.js', pretty_js) + await rig.git.exec(['add', 'test.js']) + await rig.append('test.js', appended) + + const result = await rig.commit() + + assert.match(result, 'Backing up unstaged changes for staged files') + assert.match(result, 'Applying modifications from tasks') + assert.match(result, 'Restoring unstaged changes for staged files') + + assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '2') + assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'test') + assert.is((await rig.git.exec(['show', 'HEAD:test.js'])).trim(), pretty_js.trim()) + + const status = await rig.git.exec(['status']) + + assert.match(status, 'modified: test.js') + assert.match(status, 'no changes added to commit') + assert.is(await rig.read('test.js'), pretty_js + appended) +}) + +test('commits partial change from partially staged file when no errors from linter and linter modifies file', async ({ + rig, +}) => { + const appended = '\n\nconsole.log("test");\n' + + await rig.append('.nano-staged.json', JSON.stringify(prettier_write)) + await rig.append('test.js', ugly_js) + await rig.git.exec(['add', 
'test.js'])
+  await rig.append('test.js', appended)
+
+  await rig.commit()
+
+  assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '2')
+  assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'test')
+  assert.is((await rig.git.exec(['show', 'HEAD:test.js'])).trim(), pretty_js.trim())
+
+  const status = await rig.git.exec(['status'])
+
+  assert.match(status, 'modified: test.js')
+  assert.match(status, 'no changes added to commit')
+  assert.is(await rig.read('test.js'), pretty_js + appended)
+})
+
+test('fails to commit partial change from partially staged file when errors from linter', async ({
+  rig,
+}) => {
+  const appended = '\nconsole.log("test");\n'
+
+  await rig.append('.nano-staged.json', JSON.stringify(prettier_list_diff))
+  await rig.append('test.js', ugly_js)
+  await rig.git.exec(['add', 'test.js'])
+  await rig.append('test.js', appended)
+
+  const status = await rig.git.exec(['status'])
+
+  await rig.commit().catch((error) => {
+    assert.match(error, 'Restoring to original state because of errors')
+  })
+
+  assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '1')
+  assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'initial commit')
+  assert.is(await rig.git.exec(['status']), status)
+  assert.is(await rig.read('test.js'), ugly_js + appended)
+})
+
+test('fails to commit partial change from partially staged file when errors from linter and linter modifies files', async ({
+  rig,
+}) => {
+  const appended = '\nconsole.log("test");\n'
+
+  await rig.append('.nano-staged.json', JSON.stringify(prettier_write))
+  await rig.append('test.js', invalid_js)
+  await rig.git.exec(['add', 'test.js'])
+  await rig.append('test.js', appended)
+
+  const status = await rig.git.exec(['status'])
+
+  await rig.commit().catch((error) => {
+    assert.match(error, 'Restoring to original state because of errors')
+  })
+
+  assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '1')
+  assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'initial commit')
+  assert.is(await rig.git.exec(['status']), status)
+  assert.is(await rig.read('test.js'), invalid_js + appended)
+})
+
+test.run()
diff --git a/test/integration/unstaged-options.test.js b/test/integration/unstaged-options.test.js
new file mode 100644
index 0000000..943c5a3
--- /dev/null
+++ b/test/integration/unstaged-options.test.js
@@ -0,0 +1,50 @@
+import * as assert from 'uvu/assert'
+import { suite } from 'uvu'
+
+import { pretty_js, ugly_js } from './fixtures/files.js'
+import { NanoStagedTestRig } from './utils/test-rig.js'
+import { prettier_write } from './fixtures/configs.js'
+
+const test = suite('integration')
+
+test.before.each(async (ctx) => {
+  try {
+    ctx.rig = new NanoStagedTestRig()
+    await ctx.rig.git_init()
+  } catch (e) {
+    console.error('uvu before error', e)
+    process.exit(1)
+  }
+})
+
+test.after.each(async (ctx) => {
+  try {
+    await ctx.rig.cleanup()
+  } catch (e) {
+    console.error('uvu after error', e)
+    process.exit(1)
+  }
+})
+
+test('lints unstaged files with `--unstaged`', async ({ rig }) => {
+  await rig.write('.nano-staged.json', JSON.stringify(prettier_write))
+
+  await rig.write('test.js', pretty_js)
+  await rig.git.exec(['add', 'test.js'])
+
+  await rig.write('test.js', ugly_js)
+  await rig.write('test2.js', ugly_js)
+
+  rig.no_commit = true
+
+  await rig.commit({
+    nano_staged: ['-u'],
+  })
+
+  assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '1')
+  assert.is((await rig.git.exec(['log', 
'-1', '--pretty=%B'])).trim(), 'initial commit')
+  assert.is(await rig.read('test.js'), pretty_js)
+  assert.is(await rig.read('test2.js'), pretty_js)
+})
+
+test.run()
diff --git a/test/integration/untracked-files.test.js b/test/integration/untracked-files.test.js
new file mode 100644
index 0000000..a250f39
--- /dev/null
+++ b/test/integration/untracked-files.test.js
@@ -0,0 +1,65 @@
+import * as assert from 'uvu/assert'
+import { suite } from 'uvu'
+
+import { invalid_js, pretty_js } from './fixtures/files.js'
+import { prettier_list_diff } from './fixtures/configs.js'
+import { NanoStagedTestRig } from './utils/test-rig.js'
+
+const test = suite('integration')
+
+test.before.each(async (ctx) => {
+  try {
+    ctx.rig = new NanoStagedTestRig()
+    await ctx.rig.git_init()
+  } catch (e) {
+    console.error('uvu before error', e)
+    process.exit(1)
+  }
+})
+
+test.after.each(async (ctx) => {
+  try {
+    await ctx.rig.cleanup()
+  } catch (e) {
+    console.error('uvu after error', e)
+    process.exit(1)
+  }
+})
+
+test('ignores untracked files', async ({ rig }) => {
+  await rig.append('.nano-staged.json', JSON.stringify(prettier_list_diff))
+  await rig.append('test.js', pretty_js)
+  await rig.git.exec(['add', 'test.js'])
+
+  await rig.append('test-untracked.js', pretty_js)
+  await rig.append('.gitattributes', 'binary\n')
+  await rig.write('binary', Buffer.from('Hello, World!', 'binary'))
+
+  await rig.commit()
+
+  assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '2')
+  assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'test')
+  assert.is(await rig.read('test.js'), pretty_js)
+  assert.is(await rig.read('test-untracked.js'), pretty_js)
+  assert.is(Buffer.from(await rig.read('binary'), 'binary').toString(), 'Hello, World!')
+})
+
+test('ignores untracked files when task fails', async ({ rig }) => {
+  await rig.append('.nano-staged.json', JSON.stringify(prettier_list_diff))
+  await rig.append('test.js', invalid_js)
+  await rig.git.exec(['add', 'test.js'])
+
+  await rig.append('test-untracked.js', pretty_js)
+  await rig.append('.gitattributes', 'binary\n')
+  await rig.write('binary', Buffer.from('Hello, World!', 'binary'))
+
+  await rig.commit().catch(async () => {
+    assert.is((await rig.git.exec(['rev-list', '--count', 'HEAD'])).trim(), '1')
+    assert.is((await rig.git.exec(['log', '-1', '--pretty=%B'])).trim(), 'initial commit')
+    assert.is(await rig.read('test.js'), invalid_js)
+    assert.is(await rig.read('test-untracked.js'), pretty_js)
+    assert.is(Buffer.from(await rig.read('binary'), 'binary').toString(), 'Hello, World!')
+  })
+})
+
+test.run()
diff --git a/test/integration/utils/file-system-test-rig.js b/test/integration/utils/file-system-test-rig.js
new file mode 100644
index 0000000..fb774f8
--- /dev/null
+++ b/test/integration/utils/file-system-test-rig.js
@@ -0,0 +1,51 @@
+import { nanoid } from 'nanoid'
+import { tmpdir } from 'os'
+import fs from 'fs-extra'
+import path from 'path'
+
+function create_temp() {
+  const temp_dir = fs.realpathSync(tmpdir())
+  const work_dir = path.join(temp_dir, `nano-staged-${nanoid()}`)
+
+  fs.ensureDirSync(work_dir)
+
+  return path.normalize(work_dir)
+}
+
+export class FileSystemTestRig {
+  temp = create_temp()
+
+  async ensure(dir) {
+    await fs.ensureDir(dir)
+  }
+
+  async append(file, content, dir = this.temp) {
+    const file_path = path.isAbsolute(file) ? 
file : path.join(dir, file) + const file_dir = path.parse(file_path).dir + + await fs.ensureDir(file_dir) + await fs.appendFile(file_path, content) + } + + async write(file, content, dir = this.temp) { + const file_path = path.isAbsolute(file) ? file : path.join(dir, file) + const file_dir = path.parse(file_path).dir + + await fs.ensureDir(file_dir) + await fs.writeFile(file_path, content) + } + + async read(file, dir = this.temp) { + const file_path = path.isAbsolute(file) ? file : path.join(dir, file) + return await fs.readFile(file_path, { encoding: 'utf-8' }) + } + + async remove(file, dir = this.temp) { + const file_path = path.isAbsolute(file) ? file : path.join(dir, file) + await fs.remove(file_path) + } + + async copy(file, new_file) { + await fs.copy(file, new_file) + } +} diff --git a/test/integration/utils/test-rig.js b/test/integration/utils/test-rig.js new file mode 100644 index 0000000..f8f7ef1 --- /dev/null +++ b/test/integration/utils/test-rig.js @@ -0,0 +1,64 @@ +import { fileURLToPath } from 'url' +import { platform } from 'os' +import path from 'path' + +import { FileSystemTestRig } from './file-system-test-rig.js' +import { executor } from '../../../lib/executor.js' +import { create_git } from '../../../lib/git.js' + +const is_windows = platform() === 'win32' +const __filename = fileURLToPath(import.meta.url) +const __dirname = path.dirname(__filename) +const nano_staged_bin = path.resolve(__dirname, '../../../lib/bin.js') + +export class NanoStagedTestRig extends FileSystemTestRig { + initial_commit = true + no_commit = false + git = create_git(this.temp) + + constructor(props) { + super(props) + this.with_git = false + } + + async git_init() { + await this.git.exec(['init']) + + if (is_windows) { + await this.git.exec(['config', 'core.autocrlf', 'input']) + } + + await this.git.exec(['config', 'user.name', '"nano-staged"']) + await this.git.exec(['config', 'user.email', '"test@nanostaged.com"']) + await this.git.exec(['config', 'merge.conflictstyle', 'merge']) + + if (this.initial_commit) { + await this.append('README.md', '# Test\n') + await this.git.exec(['add', 'README.md']) + await this.git.exec(['commit', '-m initial commit']) + } + + this.with_git = true + } + + async cleanup() { + await this.remove(this.temp) + } + + async commit(options, cwd = this.temp) { + const nano_staged_args = Array.isArray(options?.nano_staged) ? options.nano_staged : [] + const git_commit_args = Array.isArray(options?.git_commit) ? 
options.git_commit : ['-m test']
+
+    try {
+      const result = await executor(nano_staged_bin, nano_staged_args, { cwd, env: { CI: 1 } })
+
+      if (this.with_git && !this.no_commit) {
+        await this.git.exec(['commit', ...git_commit_args], { cwd })
+      }
+
+      return result
+    } catch (error) {
+      throw error
+    }
+  }
+}
diff --git a/test/cmd-runner.test.js b/test/old-unit/cmd-runner.test.js
similarity index 100%
rename from test/cmd-runner.test.js
rename to test/old-unit/cmd-runner.test.js
diff --git a/test/errors.test.js b/test/old-unit/errors.test.js
similarity index 100%
rename from test/errors.test.js
rename to test/old-unit/errors.test.js
diff --git a/test/git-workflow.test.js b/test/old-unit/git-workflow.test.js
similarity index 100%
rename from test/git-workflow.test.js
rename to test/old-unit/git-workflow.test.js
diff --git a/test/index.test.js b/test/old-unit/index.test.js
similarity index 100%
rename from test/index.test.js
rename to test/old-unit/index.test.js
diff --git a/test/renderer.test.js b/test/old-unit/renderer.test.js
similarity index 97%
rename from test/renderer.test.js
rename to test/old-unit/renderer.test.js
index b52988a..70d723d 100644
--- a/test/renderer.test.js
+++ b/test/old-unit/renderer.test.js
@@ -1,6 +1,5 @@
-import { is, equal } from 'uvu/assert'
+import { is } from 'uvu/assert'
 import { test } from 'uvu'
-import { delay } from 'nanodelay'
 import { createRenderer } from '../lib/renderer.js'
 import { createStdout } from './utils/index.js'
diff --git a/test/reporter.test.js b/test/old-unit/reporter.test.js
similarity index 100%
rename from test/reporter.test.js
rename to test/old-unit/reporter.test.js
diff --git a/test/runner.test.js b/test/old-unit/runner.test.js
similarity index 100%
rename from test/runner.test.js
rename to test/old-unit/runner.test.js
diff --git a/test/config.test.js b/test/unit/config.test.js
similarity index 56%
rename from test/config.test.js
rename to test/unit/config.test.js
index b0e254e..76b725d 100644
--- a/test/config.test.js
+++ b/test/unit/config.test.js
@@ -4,65 +4,65 @@ import esmock from 'esmock'
 import { join } from 'path'
 import { test } from 'uvu'
-import { getConfig, validConfig } from '../lib/config.js'
+import { get_config, valid_config } from '../../lib/config.js'
 import { fixture } from './utils/index.js'
 test('should return "undefined" when config file is not found', async () => {
-  is(await getConfig(join(homedir(), 'test')), undefined)
+  is(await get_config(join(homedir(), 'test')), undefined)
 })
 test('should load config from "package.json"', async () => {
-  equal(await getConfig(fixture('config/test-project/dir')), {
+  equal(await get_config(fixture('config/test-project/dir')), {
     '*': 'my-tasks',
   })
 })
 test('should return "object" config', async () => {
-  equal(await getConfig(process.cwd(), { '*': 'my-tasks' }), {
+  equal(await get_config(process.cwd(), { '*': 'my-tasks' }), {
     '*': 'my-tasks',
   })
 })
 test('should load JSON config file', async () => {
-  let config = await getConfig(fixture('config/json'))
+  let config = await get_config(fixture('config/json'))
   equal(config, { '*': 'my-tasks' })
 })
 test('should load ESM config file from .js file', async () => {
-  let config = await getConfig(fixture('config/esm-in-js'))
+  let config = await get_config(fixture('config/esm-in-js'))
   equal(config['*'](), 'my-tasks')
 })
 test('should load ESM config file from .mjs file', async () => {
-  let config = await getConfig(fixture('config/mjs'))
+  let config = await get_config(fixture('config/mjs'))
   equal(config['*'](), 'my-tasks')
 })
test('should load CJS config file from .cjs file', async () => { - let config = await getConfig(fixture('config/cjs')) + let config = await get_config(fixture('config/cjs')) equal(config, { '*': 'my-tasks' }) }) test('should load CJS config file from absolute path', async () => { - let config = await getConfig(process.cwd(), fixture('config/cjs/nano-staged.cjs')) + let config = await get_config(process.cwd(), fixture('config/cjs/nano-staged.cjs')) equal(config, { '*': 'my-tasks' }) }) test('should load CJS config file from relative path', async () => { - let config = await getConfig( + let config = await get_config( process.cwd(), - join('test', 'fixtures', 'config', 'cjs', 'nano-staged.cjs') + join('test', 'unit', 'fixtures', 'config', 'cjs', 'nano-staged.cjs') ) equal(config, { '*': 'my-tasks' }) }) test('should load no extension config file', async () => { - let config = await getConfig(fixture('config/no-ext')) + let config = await get_config(fixture('config/no-ext')) equal(config, { '*': 'my-tasks' }) }) test('should return "undefined" when error', async () => { - const { getConfig } = await esmock('../lib/config.js', { + const { get_config } = await esmock('../../lib/config.js', { fs: { promises: { readFile: async () => Promise.reject(), @@ -70,88 +70,47 @@ test('should return "undefined" when error', async () => { }, }) - is(await getConfig(), undefined) + is(await get_config(), undefined) }) test('config undefined', async () => { - is(validConfig(), false) + is(valid_config(), false) }) test('config empty', async () => { - is(validConfig({}), false) + is(valid_config({}), false) }) test('config single cmd', async () => { - is( - validConfig({ - '*': 'my-tasks', - }), - true - ) + is(valid_config({ '*': 'my-tasks' }), true) }) test('config array cmds', async () => { - is( - validConfig({ - '*': ['my-tasks'], - }), - true - ) + is(valid_config({ '*': ['my-tasks'] }), true) }) test('config glob empty', async () => { - is( - validConfig({ - '': ['my-tasks'], - }), - false - ) + is(valid_config({ '': ['my-tasks'] }), false) }) test('config single cmd empty', async () => { - is( - validConfig({ - '*': '', - }), - false - ) + is(valid_config({ '*': '' }), false) }) test('config array cmds empty', async () => { - is( - validConfig({ - '*': ['', ''], - }), - false - ) + is(valid_config({ '*': ['', ''] }), false) }) test('config cmd not string', async () => { - is( - validConfig({ - '': 1, - }), - false - ) + is(valid_config({ '': 1 }), false) }) test('config glob and cmd empty', async () => { - is( - validConfig({ - '': '', - }), - false - ) + is(valid_config({ '': '' }), false) }) test('config one task invalid', async () => { - is( - validConfig({ - '*': '', - '*.js': 'my-task', - }), - false - ) + is(valid_config({ '*': '', '*.js': 'my-task' }), false) }) test.run() diff --git a/test/fixtures/config/cjs-in-js/nano-staged.js b/test/unit/fixtures/config/cjs-in-js/nano-staged.js similarity index 100% rename from test/fixtures/config/cjs-in-js/nano-staged.js rename to test/unit/fixtures/config/cjs-in-js/nano-staged.js diff --git a/test/fixtures/config/cjs/nano-staged.cjs b/test/unit/fixtures/config/cjs/nano-staged.cjs similarity index 100% rename from test/fixtures/config/cjs/nano-staged.cjs rename to test/unit/fixtures/config/cjs/nano-staged.cjs diff --git a/test/fixtures/config/esm-in-js/nano-staged.js b/test/unit/fixtures/config/esm-in-js/nano-staged.js similarity index 100% rename from test/fixtures/config/esm-in-js/nano-staged.js rename to 
test/unit/fixtures/config/esm-in-js/nano-staged.js
diff --git a/test/fixtures/config/json/nano-staged.json b/test/unit/fixtures/config/json/nano-staged.json
similarity index 100%
rename from test/fixtures/config/json/nano-staged.json
rename to test/unit/fixtures/config/json/nano-staged.json
diff --git a/test/fixtures/config/mjs/nano-staged.mjs b/test/unit/fixtures/config/mjs/nano-staged.mjs
similarity index 100%
rename from test/fixtures/config/mjs/nano-staged.mjs
rename to test/unit/fixtures/config/mjs/nano-staged.mjs
diff --git a/test/fixtures/config/no-ext/.nanostagedrc b/test/unit/fixtures/config/no-ext/.nanostagedrc
similarity index 100%
rename from test/fixtures/config/no-ext/.nanostagedrc
rename to test/unit/fixtures/config/no-ext/.nanostagedrc
diff --git a/test/fixtures/config/test-project/dir/index.js b/test/unit/fixtures/config/test-project/dir/index.js
similarity index 100%
rename from test/fixtures/config/test-project/dir/index.js
rename to test/unit/fixtures/config/test-project/dir/index.js
diff --git a/test/fixtures/config/test-project/index.js b/test/unit/fixtures/config/test-project/index.js
similarity index 100%
rename from test/fixtures/config/test-project/index.js
rename to test/unit/fixtures/config/test-project/index.js
diff --git a/test/fixtures/config/test-project/package.json b/test/unit/fixtures/config/test-project/package.json
similarity index 100%
rename from test/fixtures/config/test-project/package.json
rename to test/unit/fixtures/config/test-project/package.json
diff --git a/test/fixtures/simple/.gitignore b/test/unit/fixtures/simple/.gitignore
similarity index 100%
rename from test/fixtures/simple/.gitignore
rename to test/unit/fixtures/simple/.gitignore
diff --git a/test/unit/git.test.js b/test/unit/git.test.js
new file mode 100644
index 0000000..c4cea4e
--- /dev/null
+++ b/test/unit/git.test.js
@@ -0,0 +1,210 @@
+import { equal, is } from 'uvu/assert'
+import { join, resolve } from 'path'
+import { test } from 'uvu'
+import fs from 'fs-extra'
+
+import { writeFile, makeDir, appendFile, fixture, removeFile } from './utils/index.js'
+import { create_git } from '../../lib/git.js'
+
+const cwd = fixture('simple/git-test')
+const patchPath = join(cwd, 'nano-staged.patch')
+
+async function execGit(args) {
+  const git = create_git(cwd)
+  await git.exec(args, { cwd })
+}
+
+test.before.each(async (ctx) => {
+  await makeDir(cwd)
+  await execGit(['init'])
+  await execGit(['config', 'user.name', '"test"'])
+  await execGit(['config', 'user.email', '"test@test.com"'])
+  await appendFile('README.md', '# Test\n', cwd)
+  await execGit(['add', 'README.md'])
+  await execGit(['commit', '-m initial commit'])
+
+  ctx.git = create_git(cwd)
+})
+
+test.after.each(async () => {
+  await removeFile(cwd)
+})
+
+test('should return "null" when git dir is not found', async ({ git }) => {
+  git.exec = async () => {
+    throw Error()
+  }
+
+  const paths = await git.paths()
+
+  is(paths.root, null)
+  is(paths.dot, null)
+})
+
+test('should return "null" when run fails', async ({ git }) => {
+  git.exec = async () => Promise.reject()
+
+  const paths = await git.paths({ cwd })
+
+  is(paths.root, null)
+  is(paths.dot, null)
+})
+
+test('should return path when git dir is found', async ({ git }) => {
+  const paths = await git.paths()
+
+  is(paths.root, fixture('simple/git-test'))
+  is(
+    paths.dot,
+    process.platform === 'win32'
+      ? 
fixture('simple/git-test') + '\\.git'
+      : fixture('simple/git-test') + '/.git'
+  )
+})
+
+test('should create a patch file', async ({ git }) => {
+  await writeFile('README.md', '# Test\n## Test', cwd)
+  await git.diff_patch(patchPath)
+
+  const patch = await fs.readFile(patchPath)
+  is(
+    patch.toString(),
+    'diff --git a/README.md b/README.md\n' +
+      'index 8ae0569..a07c500 100644\n' +
+      '--- a/README.md\n' +
+      '+++ b/README.md\n' +
+      '@@ -1,0 +2 @@\n' +
+      '+## Test\n' +
+      '\\ No newline at end of file\n'
+  )
+})
+
+test('should create a patch for given files', async ({ git }) => {
+  await appendFile('a.js', 'let a = {};', cwd)
+  await git.add(join(cwd, 'a.js'))
+  await removeFile(join(cwd, 'a.js'))
+  await git.diff_patch(patchPath, [join(cwd, 'a.js')])
+
+  let patch = await fs.readFile(patchPath)
+  is(
+    patch.toString(),
+    'diff --git a/a.js b/a.js\n' +
+      'deleted file mode 100644\n' +
+      'index 36b56ef..0000000\n' +
+      '--- a/a.js\n' +
+      '+++ /dev/null\n' +
+      '@@ -1 +0,0 @@\n' +
+      '-let a = {};\n' +
+      '\\ No newline at end of file\n'
+  )
+})
+
+test('should checkout files', async ({ git }) => {
+  await appendFile('a.js', 'let a = {};', cwd)
+  await git.add('.')
+  await writeFile('a.js', 'let b = {};', cwd)
+  await git.checkout(join(cwd, 'a.js'))
+
+  is(await git.status(), 'A a.js\x00')
+})
+
+test('should apply a patch file', async ({ git }) => {
+  await writeFile('README.md', '# Test\n## Test', cwd)
+  await git.diff_patch(patchPath)
+  await git.apply(patchPath)
+
+  is((await fs.stat(patchPath)).isFile(), true)
+})
+
+test('should error when a patch file cannot be applied', async ({ git }) => {
+  try {
+    await git.apply('test.patch', true)
+  } catch (error) {
+    is(error, "error: can't open patch 'test.patch': No such file or directory")
+  }
+})
+
+test('should add files', async ({ git }) => {
+  await appendFile('a.js', 'let a = {};', cwd)
+  await git.add(['.'])
+
+  is(await git.status(), 'A a.js\x00')
+})
+
+test('should parse status correctly', async ({ git }) => {
+  await appendFile('a.js', 'let a = {};', cwd)
+  await appendFile('b.js', 'let a = {};', cwd)
+  await git.add(['b.js'])
+
+  is(await git.status(), 'A b.js\x00?? 
a.js\x00') + + git.exec = async () => { + throw new Error('fatal: not a git repository (or any of the parent directories): .git') + } + is(await git.status(), '') +}) + +test('should get diff file correctly', async ({ git }) => { + git.exec = async () => 'a.js\x00b.js\x00' + + equal(await git.diff_name(['main', 'origin/main'], { staged: false, filter: 'M' }), [ + resolve(cwd, 'a.js'), + resolve(cwd, 'b.js'), + ]) +}) + +test('should get staged files correctly', async ({ git }) => { + git.exec = async () => 'a.js\x00b.js\x00' + equal(await git.diff_name([], { staged: true, filter: 'ACMR' }), [ + resolve(cwd, 'a.js'), + resolve(cwd, 'b.js'), + ]) + + git.exec = async () => '' + equal(await git.diff_name([], { staged: true, filter: 'ACMR' }), []) +}) + +test('should get unstaged files correctly', async ({ git }) => { + git.exec = async () => 'a.js\x00b.js\x00' + + equal(await git.diff_name([], { staged: false, filter: 'M' }), [ + resolve(cwd, 'a.js'), + resolve(cwd, 'b.js'), + ]) +}) + +test('should get empty array', async ({ git }) => { + git.exec = async () => { + throw Error('fails') + } + + equal(await git.diff_name([], { staged: true, filter: 'M' }), []) +}) + +test('should handle git worktrees', async ({ git }) => { + const work_tree_dir = resolve(cwd, 'worktree') + + await git.exec(['branch', 'test']) + await git.exec(['worktree', 'add', work_tree_dir, 'test']) + + equal(await git.paths({ cwd: work_tree_dir }), { + root: fixture('simple/git-test/worktree'), + dot: fixture('simple/git-test/.git/worktrees/worktree'), + }) +}) + +test('should get uncommitted files', async ({ git }) => { + git.status = async () => + '?? new.js\x00A stage.js\x00MM mod.js\x00AM test/add.js\x00RM rename.js\x00origin.js\x00CM' + + ' test/copy.js\x00test/base.js\x00MD remove.js\x00D delete.js\x00' + + equal(await git.uncommitted_files(), [ + resolve(cwd, 'mod.js'), + resolve(cwd, 'test/add.js'), + resolve(cwd, 'rename.js'), + resolve(cwd, 'test/copy.js'), + resolve(cwd, 'remove.js'), + ]) +}) + +test.run() diff --git a/test/glob-to-regex.test.js b/test/unit/glob-to-regex.test.js similarity index 98% rename from test/glob-to-regex.test.js rename to test/unit/glob-to-regex.test.js index dfabb0a..63024a8 100644 --- a/test/glob-to-regex.test.js +++ b/test/unit/glob-to-regex.test.js @@ -1,11 +1,11 @@ import { is } from 'uvu/assert' import { test } from 'uvu' -import { globToRegex } from '../lib/glob-to-regex.js' +import { globrex } from '../../lib/glob.js' -function match(glob, path, opts = {}) { - let regex = globToRegex(glob, opts) - return regex.regex.test(path) +function match(g, path, opts = {}) { + let regex = globrex(g, opts) + return regex.test(path) } test('Standard * matching', () => { diff --git a/test/unit/utils.test.js b/test/unit/utils.test.js new file mode 100644 index 0000000..dac6062 --- /dev/null +++ b/test/unit/utils.test.js @@ -0,0 +1,28 @@ +import { equal } from 'uvu/assert' +import process from 'process' +import { test } from 'uvu' +import tty from 'tty' + +import { to_array, str_argv_to_array } from '../../lib/utils.js' + +test.before.each(() => { + Object.defineProperty(process, 'platform', { + value: 'linux', + }) + process.env = {} + process.argv = [] + tty.isatty = () => true +}) + +test('single to array', () => { + equal(to_array('path'), ['path']) + equal(to_array(['path']), ['path']) +}) + +test('string to args', () => { + equal(str_argv_to_array('cmd --test config --test'), ['cmd', '--test', 'config', '--test']) + equal(str_argv_to_array(''), []) + equal(str_argv_to_array(), []) +}) + 
+test.run() diff --git a/test/utils/index.js b/test/unit/utils/index.js similarity index 100% rename from test/utils/index.js rename to test/unit/utils/index.js diff --git a/test/utils.test.js b/test/utils.test.js deleted file mode 100644 index 6343569..0000000 --- a/test/utils.test.js +++ /dev/null @@ -1,164 +0,0 @@ -import { equal, is } from 'uvu/assert' -import process from 'process' -import { test } from 'uvu' -import tty from 'tty' -import os from 'os' - -import { toArray, showVersion, stringArgvToArray, getForceColorLevel } from '../lib/utils.js' -import { createStdout } from './utils/index.js' - -test.before.each(() => { - Object.defineProperty(process, 'platform', { - value: 'linux', - }) - process.env = {} - process.argv = [] - tty.isatty = () => true -}) - -test('single to array', () => { - equal(toArray('path'), ['path']) - equal(toArray(['path']), ['path']) -}) - -test('print version', () => { - let stdout = createStdout() - showVersion(stdout) - is(stdout.out.replace(/\d+\.\d+\.\d+/, '0.1.0'), 'Nano Staged \x1B[1mv0.1.0\x1B[22m\n') -}) - -test('string to args', () => { - equal(stringArgvToArray('cmd --test config --test'), ['cmd', '--test', 'config', '--test']) - equal(stringArgvToArray(''), []) - equal(stringArgvToArray(), []) -}) - -test('FORCE_COLOR: 1', () => { - process.env = { FORCE_COLOR: '1' } - is(getForceColorLevel(), 1) - - process.env = { FORCE_COLOR: '' } - is(getForceColorLevel(), 0) - - process.env = { FORCE_COLOR: '256' } - is(getForceColorLevel(), 3) - - process.env = { FORCE_NO_COLOR: true } - is(getForceColorLevel(), 0) -}) - -test('tty.isatty: false', () => { - tty.isatty = () => false - is(getForceColorLevel(), 0) -}) - -test('Windows 10 build 10586', () => { - Object.defineProperty(process, 'platform', { - value: 'win32', - }) - Object.defineProperty(process.versions, 'node', { - value: '8.0.0', - }) - os.release = () => '10.0.10586' - - is(getForceColorLevel(), 2) -}) - -test('Windows 10 build 14931', () => { - Object.defineProperty(process, 'platform', { - value: 'win32', - }) - Object.defineProperty(process.versions, 'node', { - value: '8.0.0', - }) - os.release = () => '10.0.14931' - - is(getForceColorLevel(), 3) -}) - -test('Windows 10 build 10586', () => { - Object.defineProperty(process, 'platform', { - value: 'win32', - }) - Object.defineProperty(process.versions, 'node', { - value: '8.0.0', - }) - os.release = () => '10.0.10240' - - is(getForceColorLevel(), 1) -}) - -test('COLORTERM', () => { - process.env = { COLORTERM: true } - is(getForceColorLevel(), 1) -}) - -test('COLORTERM:truecolor', () => { - process.env = { COLORTERM: 'truecolor' } - is(getForceColorLevel(), 3) -}) - -test('TERM:dumb', () => { - process.env = { TERM: 'dumb' } - is(getForceColorLevel(), 0) -}) - -test('TERM:xterm-256color', () => { - process.env = { TERM: 'xterm-256color' } - is(getForceColorLevel(), 2) -}) - -test('TERM:screen-256color', () => { - process.env = { TERM: 'screen-256color' } - is(getForceColorLevel(), 2) -}) - -test('support putty-256color', () => { - process.env = { TERM: 'putty-256color' } - is(getForceColorLevel(), 2) -}) - -test('TERM:rxvt', () => { - process.env.TERM = 'rxvt' - is(getForceColorLevel(), 1) -}) - -test('default', () => { - is(getForceColorLevel(), 0) -}) - -test('prefer level 2/xterm over COLORTERM', () => { - process.env = { COLORTERM: '1', TERM: 'xterm-256color' } - is(getForceColorLevel(), 2) -}) - -test('return level 1 when `TERM` is set to dumb when `FORCE_COLOR` is set', () => { - process.env = { FORCE_COLOR: '1', TERM: 'dumb' } - 
is(getForceColorLevel(), 1) -}) - -test('--no-color', () => { - process.env = { TERM: 'xterm-256color' } - process.argv = ['--no-colors'] - is(getForceColorLevel(), 0) -}) - -test('--no-colors', () => { - process.env = { TERM: 'xterm-256color' } - process.argv = ['--no-colors'] - is(getForceColorLevel(), 0) -}) - -test('-color=false', () => { - process.env = { TERM: 'xterm-256color' } - process.argv = ['--color=false'] - is(getForceColorLevel(), 0) -}) - -test('--color=never', () => { - process.env = { TERM: 'xterm-256color' } - process.argv = ['--color=never'] - is(getForceColorLevel(), 0) -}) - -test.run()