diff --git a/DEVELOPING.md b/DEVELOPING.md
index 12e2d28c46..e39c0ab881 100644
--- a/DEVELOPING.md
+++ b/DEVELOPING.md
@@ -1,6 +1,6 @@
 ## Pre-requisites
 
-1. We are using Node 8. If you need to work with multiple versions of Node, you
+1. We are using Node 10 LTS. If you need to work with multiple versions of Node, you
    might consider using [nvm](https://github.com/creationix/nvm).
 1. This repository uses [yarn](https://yarnpkg.com/) to manage node dependencies. Please install yarn
    globally using `npm install --global yarn`.
@@ -17,7 +17,7 @@ You would only do this once after you cloned the repository.
 
 When you are ready to commit
 
-1. We enforces commit message format. We recommend using [commitizen](https://github.com/commitizen/cz-cli) by installing it with `yarn add --global commitizen` then commit using `git cz` which will prompt you questions to format the commit message.
+1. We enforce commit message format. We recommend using [commitizen](https://github.com/commitizen/cz-cli) by installing it with `yarn global add commitizen` then commit using `git cz` which will prompt you questions to format the commit message.
 1. Before commit and push, husky will run several hooks to ensure the message and that everything lints and compiles properly.
 
 ## List of Useful commands
@@ -28,7 +28,7 @@ This compiles the typescript to javascript.
 
 ### `yarn clean`
 
-This cleans all generated files and directories. Run `yarn cleal-all` will also clean up the node_module directories.
+This cleans all generated files and directories. Run `yarn clean-all` to also clean up the node_module directories.
 
 ### `yarn test`
 
@@ -36,10 +36,10 @@ This tests the typescript using ts-node.
 
 ### `yarn lint`
 
-This lists all the typescript. If there are no errors/warnings
-from tslint, then you get a clean output. But, if they are errors from tslint,
+This lints all the typescript. If there are no errors/warnings
+from tslint, then you get clean output. But, if there are errors from tslint,
 you will see a long error that can be confusing – just focus on the tslint
-errors. The results of this is deeper than what the tslint extension in VS Code
+errors. The results of this are deeper than what the tslint extension in VS Code
 does because of [semantic lint rules](https://palantir.github.io/tslint/usage/type-checking/)
 which requires a tsconfig.json to be passed to tslint.
 
diff --git a/messages/config.json b/messages/config.json
index 0c6e35318b..91d37ff559 100644
--- a/messages/config.json
+++ b/messages/config.json
@@ -1,8 +1,10 @@
 {
-  "UnknownConfigKey": "Unknown config name \"%s\"",
-  "InvalidConfigValue": "Invalid config value. %s",
-  "InvalidInstanceUrl": "Specify a valid Salesforce instance URL",
-  "InvalidApiVersion": "Specify a valid Salesforce API version, for example, 42.0",
-  "InvalidBooleanConfigValue": "The config value can only be set to true or false.",
-  "InvalidProjectWorkspace": "This directory does not contain a valid Salesforce DX project"
-}
\ No newline at end of file
+  "UnknownConfigKey": "Unknown config name \"%s\"",
+  "InvalidConfigValue": "Invalid config value. %s",
+  "InvalidInstanceUrl": "Specify a valid Salesforce instance URL",
+  "InvalidApiVersion": "Specify a valid Salesforce API version, for example, 42.0",
+  "InvalidBooleanConfigValue": "The config value can only be set to true or false.",
+  "InvalidProjectWorkspace": "This directory does not contain a valid Salesforce DX project",
+  "SchemaValidationWarning": "The config file: %s is not schema valid\nDue to: %s",
+  "SchemaValidationErrorAction": "Check the file: %s for invalid entries"
+}
diff --git a/package.json b/package.json
index ddd3412149..d71b5eb3ef 100644
--- a/package.json
+++ b/package.json
@@ -40,22 +40,23 @@
   "dependencies": {
     "@salesforce/bunyan": "^2.0.0",
     "@salesforce/kit": "^1.0.0",
+    "@salesforce/schemas": "^1.0.1",
     "@salesforce/ts-types": "^1.0.0",
     "@types/jsforce": "1.9.2",
    "debug": "^3.1.0",
     "jsen": "0.6.6",
     "jsforce": "1.9.3",
     "jsonwebtoken": "8.5.0",
-    "mkdirp": "0.5.1",
+    "mkdirp": "1.0.4",
     "sfdx-faye": "^1.0.9"
   },
   "devDependencies": {
-    "@salesforce/ts-sinon": "^1.0.0",
     "@salesforce/dev-scripts": "0.3.14",
+    "@salesforce/ts-sinon": "^1.0.0",
     "@types/debug": "0.0.30",
     "@types/jsen": "0.0.19",
     "@types/jsonwebtoken": "8.3.2",
-    "@types/mkdirp": "0.5.2",
+    "@types/mkdirp": "1.0.0",
     "@types/shelljs": "0.7.8",
     "commitizen": "^3.0.5",
     "husky": "^1",
diff --git a/src/config/configFile.ts b/src/config/configFile.ts
index 38f2b85a93..13b58d76a1 100644
--- a/src/config/configFile.ts
+++ b/src/config/configFile.ts
@@ -10,11 +10,15 @@ import { constants as fsConstants, Stats as fsStats } from 'fs';
 import { homedir as osHomedir } from 'os';
 import { dirname as pathDirname, join as pathJoin } from 'path';
 import { Global } from '../global';
+import { Logger } from '../logger';
+import { Messages } from '../messages';
 import { SfdxError } from '../sfdxError';
 import { fs } from '../util/fs';
 import { resolveProjectPath } from '../util/internal';
 import { BaseConfigStore, ConfigContents } from './configStore';
 
+Messages.importMessagesDirectory(pathJoin(__dirname));
+
 /**
  * Represents a json config file used to manage settings and state. Global config
  * files are stored in the home directory hidden state folder (.sfdx) and local config
@@ -68,6 +72,13 @@ export class ConfigFile extends BaseConfigStore
     return isGlobal ? osHomedir() : await resolveProjectPath();
   }
 
+  // whether file contents have been read
+  protected hasRead = false;
+
+  // Initialized in init
+  protected logger!: Logger;
+  protected messages!: Messages;
+
   // Initialized in create
   private path!: string;
 
@@ -98,14 +109,22 @@ export class ConfigFile extends BaseConfigStore
   }
 
   /**
-   * Read the config file and set the config contents. Returns the config contents of the config file.
+   * Read the config file and set the config contents. Returns the config contents of the config file. As an
+   * optimization, files are only read once per process and updated in memory and via `write()`. To force
+   * a read from the filesystem pass `force=true`.
    * **Throws** *{@link SfdxError}{ name: 'UnexpectedJsonFileFormat' }* There was a problem reading or parsing the file.
    * @param [throwOnNotFound = false] Optionally indicate if a throw should occur on file read.
+   * @param [force = false] Optionally force the file to be read from disk even when already read within the process.
    */
-  public async read(throwOnNotFound = false): Promise<ConfigContents> {
+  public async read(throwOnNotFound = false, force = false): Promise<ConfigContents> {
     try {
-      const obj = await fs.readJsonMap(this.getPath());
-      this.setContentsFromObject(obj);
+      // Only need to read config files once. They are kept up to date
+      // internally and updated persistently via write().
+      if (!this.hasRead || force) {
+        this.logger.info(`Reading config file: ${this.getPath()}`);
+        const obj = await fs.readJsonMap(this.getPath());
+        this.setContentsFromObject(obj);
+      }
       return this.getContents();
     } catch (err) {
       if (err.code === 'ENOENT') {
@@ -115,6 +134,10 @@ export class ConfigFile extends BaseConfigStore
         }
       }
       throw err;
+    } finally {
+      // Necessarily set this even when an error happens to avoid infinite re-reading.
+      // To attempt another read, pass `force=true`.
+      this.hasRead = true;
     }
   }
 
@@ -131,6 +154,7 @@ export class ConfigFile extends BaseConfigStore
 
     await fs.mkdirp(pathDirname(this.getPath()));
 
+    this.logger.info(`Writing to config file: ${this.getPath()}`);
     await fs.writeJson(this.getPath(), this.toObject());
 
     return this.getContents();
@@ -186,6 +210,7 @@ export class ConfigFile extends BaseConfigStore
    * options.throwOnNotFound is true.
    */
   protected async init(): Promise<void> {
+    this.logger = await Logger.child(this.constructor.name);
     const statics = this.constructor as typeof ConfigFile;
     let defaultOptions = {};
     try {
@@ -214,6 +239,8 @@ export class ConfigFile extends BaseConfigStore
       configRootFolder = pathJoin(configRootFolder, Global.STATE_FOLDER);
     }
 
+    this.messages = Messages.loadMessages('@salesforce/core', 'config');
+
     this.path = pathJoin(configRootFolder, this.options.filePath ? this.options.filePath : '', this.options.filename);
     await this.read(this.options.throwOnNotFound);
   }
diff --git a/src/config/configStore.ts b/src/config/configStore.ts
index 5cbae28681..f90fdf8d51 100644
--- a/src/config/configStore.ts
+++ b/src/config/configStore.ts
@@ -69,7 +69,7 @@ export abstract class BaseConfigStore extends
   public constructor(options: T) {
     super(options);
     this.options = options;
-    this.setContents(this.options.contents || {});
+    this.setContents(this.options.contents);
   }
 
   /**
@@ -216,7 +216,7 @@ export abstract class BaseConfigStore extends
   }
 
   // Allows extended classes the ability to override the set method. i.e. maybe they don't want
-  // nexted object set from kit.
+  // nested object set from kit.
   protected setMethod(contents: ConfigContents, key: string, value?: ConfigValue) {
     set(contents, key, value);
   }
diff --git a/src/logger.ts b/src/logger.ts
index 67499e4c96..f075f0519e 100644
--- a/src/logger.ts
+++ b/src/logger.ts
@@ -23,7 +23,7 @@
 import { Optional } from '@salesforce/ts-types';
 import * as Debug from 'debug';
-import * as EventEmitter from 'events';
+import { EventEmitter } from 'events';
 import * as os from 'os';
 import * as path from 'path';
 import { Writable } from 'stream';
 
diff --git a/src/schema/validator.ts b/src/schema/validator.ts
index bd50dc9ec4..89b0ef5799 100644
--- a/src/schema/validator.ts
+++ b/src/schema/validator.ts
@@ -38,7 +38,7 @@ export class SchemaValidator {
    * Creates a new `SchemaValidator` instance given a logger and path to a schema file.
    *
    * @param logger An {@link Logger} instance on which to base this class's logger.
-   * @param schemaPath The path from which the schema with which to validate should be loaded.
+   * @param schemaPath The path to the schema file to load and use for validation.
    */
   public constructor(logger: Logger, private schemaPath: string) {
     this.logger = logger.child('SchemaValidator');
   }
diff --git a/src/sfdxProject.ts b/src/sfdxProject.ts
index b2beaf8c73..bd32f9e613 100644
--- a/src/sfdxProject.ts
+++ b/src/sfdxProject.ts
@@ -4,18 +4,55 @@
  * SPDX-License-Identifier: BSD-3-Clause
  * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
  */
-
-import { defaults } from '@salesforce/kit';
+import { sep as pathSep } from 'path';
 import { ConfigAggregator } from './config/configAggregator';
 import { ConfigFile } from './config/configFile';
 import { ConfigContents } from './config/configStore';
+import { defaults, env } from '@salesforce/kit';
 import { JsonMap } from '@salesforce/ts-types';
-import { SfdxError } from './sfdxError';
+import { SchemaValidator } from './schema/validator';
 import { resolveProjectPath, SFDX_PROJECT_JSON } from './util/internal';
+
+import { SfdxError } from './sfdxError';
 import { sfdc } from './util/sfdc';
 
+export type PackageDirDependency = {
+  package: string;
+  versionNumber?: string;
+  [k: string]: unknown;
+};
+
+export type PackageDir = {
+  ancestorId?: string;
+  ancestorVersion?: string;
+  default?: boolean;
+  definitionFile?: string;
+  dependencies?: PackageDirDependency[];
+  includeProfileUserLicenses?: boolean;
+  package?: string;
+  path: string;
+  postInstallScript?: string;
+  postInstallUrl?: string;
+  releaseNotesUrl?: string;
+  uninstallScript?: string;
+  versionDescription?: string;
+  versionName?: string;
+  versionNumber?: string;
+};
+
+export type ProjectJson = ConfigContents & {
+  packageDirectories: PackageDir[];
+  namespace?: string;
+  sourceApiVersion?: string;
+  sfdcLoginUrl?: string;
+  signupTargetLoginUrl?: string;
+  oauthLocalPort?: number;
+  plugins?: { [k: string]: unknown };
+  packageAliases?: { [k: string]: string };
+};
+
 /**
  * The sfdx-project.json config object. This file determines if a folder is a valid sfdx project.
  *
@@ -57,6 +94,9 @@ export class SfdxProjectJson extends ConfigFile {
     if (upperCaseKey) {
       throw SfdxError.create('@salesforce/core', 'core', 'InvalidJsonCasing', [upperCaseKey, this.getPath()]);
     }
+
+    await this.schemaValidate();
+
     return contents;
   }
 
@@ -67,9 +107,15 @@
       throw SfdxError.create('@salesforce/core', 'core', 'InvalidJsonCasing', [upperCaseKey, this.getPath()]);
     }
 
+    await this.schemaValidate();
+
     return super.write(newContents);
   }
 
+  public getContents(): ProjectJson {
+    return super.getContents() as ProjectJson;
+  }
+
   public getDefaultOptions(options?: ConfigFile.Options): ConfigFile.Options {
     const defaultOptions: ConfigFile.Options = {
       isState: false
@@ -78,6 +124,57 @@
     Object.assign(defaultOptions, options || {});
     return defaultOptions;
   }
+
+  /**
+   * Validates sfdx-project.json against the schema.
+   *
+   * Set the `SFDX_PROJECT_JSON_VALIDATION` environment variable to `true` to throw an error when schema validation fails.
+   * A warning is logged by default when the file is invalid.
+   *
+   * ***See*** [sfdx-project.schema.json] (https://raw.githubusercontent.com/forcedotcom/schemas/master/schemas/sfdx-project.schema.json)
+   */
+  public async schemaValidate(): Promise<void> {
+    if (!this.hasRead) {
+      // read calls back into this method after necessarily setting this.hasRead=true
+      await this.read();
+    } else {
+      try {
+        const projectJsonSchemaPath = require.resolve('@salesforce/schemas/sfdx-project.schema.json');
+        const validator = new SchemaValidator(this.logger, projectJsonSchemaPath);
+        await validator.load();
+        await validator.validate(this.getContents());
+      } catch (err) {
+        if (env.getBoolean('SFDX_PROJECT_JSON_VALIDATION', false)) {
+          err.name = 'SfdxSchemaValidationError';
+          const sfdxError = SfdxError.wrap(err);
+          sfdxError.actions = [this.messages.getMessage('SchemaValidationErrorAction', [this.getPath()])];
+          throw sfdxError;
+        } else {
+          this.logger.warn(this.messages.getMessage('SchemaValidationWarning', [this.getPath(), err.message]));
+        }
+      }
+    }
+  }
+
+  /**
+   * Returns the `packageDirectories` within sfdx-project.json, first reading
+   * and validating the file if necessary.
+   */
+  public async getPackageDirectories(): Promise<PackageDir[]> {
+    // Ensure sfdx-project.json has first been read and validated.
+    if (!this.hasRead) {
+      await this.read();
+    }
+
+    const contents = this.getContents();
+    const packageDirs: PackageDir[] = contents.packageDirectories.map(packageDir => {
+      // Change packageDir paths to have path separators that match the OS
+      const regex = pathSep === '/' ? /\\/g : /\//g;
+      packageDir.path = packageDir.path.replace(regex, pathSep);
+      return packageDir;
+    });
+    return packageDirs;
+  }
 }
 
 /**
@@ -98,7 +195,13 @@ export class SfdxProject {
    * **Throws** *{@link SfdxError}{ name: 'InvalidProjectWorkspace' }* If the current folder is not located in a workspace.
    */
   public static async resolve(path?: string): Promise<SfdxProject> {
-    return new SfdxProject(await this.resolveProjectPath(path));
+    const _path = path || process.cwd();
+    if (!SfdxProject.instances.has(_path)) {
+      const project = new SfdxProject(await this.resolveProjectPath(_path));
+      SfdxProject.instances.set(_path, project);
+    }
+    // @ts-ignore Because of the pattern above this is guaranteed to return an instance
+    return SfdxProject.instances.get(_path);
   }
 
   /**
@@ -116,6 +219,9 @@ export class SfdxProject {
     return resolveProjectPath(dir);
   }
 
+  // Cache of SfdxProject instances per path.
+  private static instances = new Map<string, SfdxProject>();
+
   private projectConfig: any; // tslint:disable-line:no-any
 
   // Dynamically referenced in retrieveSfdxProjectJson
diff --git a/src/testSetup.ts b/src/testSetup.ts
index 66e08ca479..5851149027 100644
--- a/src/testSetup.ts
+++ b/src/testSetup.ts
@@ -287,6 +287,9 @@ export const stubContext = (testContext: TestContext) => {
   ) {
     const stub: ConfigStub = testContext.configStubs[this.constructor.name] || {};
 
+    // @ts-ignore set this to true to avoid an infinite loop in tests when reading config files.
+    this.hasRead = true;
+
     if (stub.readFn) {
       return await stub.readFn.call(this);
     }
diff --git a/src/util/fs.ts b/src/util/fs.ts
index c25bb7228a..6ab1e2e1ad 100644
--- a/src/util/fs.ts
+++ b/src/util/fs.ts
@@ -74,7 +74,7 @@ export const fs = {
    * Promisified version of {@link https://npmjs.com/package/mkdirp|mkdirp}.
    */
   // @ts-ignore TODO: figure out how to bind to correct promisify overload
-  mkdirp: (folderPath: string, mode?: string | object): Promise<void> => promisify(mkdirpLib)(folderPath, mode),
+  mkdirp: (folderPath: string, mode?: string | object): Promise<void> => mkdirpLib(folderPath, mode),
 
   /**
    * Deletes a folder recursively, removing all descending files and folders.
@@ -158,5 +158,19 @@ export const fs = {
       encoding: 'utf8',
       mode: fs.DEFAULT_USER_FILE_MODE
     });
+  },
+
+  /**
+   * Checks if a file path exists
+   *
+   * @param filePath the file path to check the existence of
+   */
+  fileExists: async (filePath: string): Promise<boolean> => {
+    try {
+      await fs.access(filePath);
+      return true;
+    } catch (err) {
+      return false;
+    }
   }
 };
diff --git a/src/util/sfdc.ts b/src/util/sfdc.ts
index be9cc8ce52..178318e9b2 100644
--- a/src/util/sfdc.ts
+++ b/src/util/sfdc.ts
@@ -26,6 +26,7 @@ export const sfdc = {
     // Source https://help.salesforce.com/articleView?id=000003652&type=1
     const whitelistOfSalesforceDomainPatterns: string[] = [
+      '.cloudforce.com',
       '.content.force.com',
       '.force.com',
       '.salesforce.com',
diff --git a/test/unit/config/configFileTest.ts b/test/unit/config/configFileTest.ts
index f4cbae636a..c5b26fffd5 100644
--- a/test/unit/config/configFileTest.ts
+++ b/test/unit/config/configFileTest.ts
@@ -8,7 +8,8 @@
 import { expect } from 'chai';
 import * as Path from 'path';
 import { ConfigFile } from '../../../src/config/configFile';
-import { testSetup } from '../../../src/testSetup';
+import { shouldThrow, testSetup } from '../../../src/testSetup';
+import { fs } from '../../../src/util/fs';
 
 const $$ = testSetup();
 
@@ -75,11 +76,96 @@ describe('Config', () => {
   });
 
   describe('default options', () => {
-    it(' get applied with passed in options', async () => {
+    it('get applied with passed in options', async () => {
       // Pass in custom options
       const config = await TestConfig.create({ isState: true });
 
       // Creation doesn't fail with missing file name
       expect(config.getPath()).contains('testFileName');
     });
   });
+
+  describe('read()', () => {
+    let readJsonMapStub;
+    let config: TestConfig;
+
+    const testFileContents = {
+      foo: 'bar'
+    };
+
+    beforeEach(async () => {
+      $$.SANDBOXES.CONFIG.restore();
+      readJsonMapStub = $$.SANDBOX.stub(fs, 'readJsonMap');
+    });
+
+    it('caches file contents', async () => {
+      readJsonMapStub.callsFake(async () => testFileContents);
+      // TestConfig.create() calls read()
+      config = await TestConfig.create(await TestConfig.getOptions('test', false, true));
+      expect(readJsonMapStub.calledOnce).to.be.true;
+
+      // @ts-ignore -> hasRead is protected. Ignore for testing.
+      expect(config.hasRead).to.be.true;
+      expect(config.getContents()).to.deep.equal(testFileContents);
+
+      // Read again. Stub should still only be called once.
+      const contents2 = await config.read();
+      expect(readJsonMapStub.calledOnce).to.be.true;
+      expect(contents2).to.deep.equal(testFileContents);
+    });
+
+    it('sets contents as empty object when file does not exist', async () => {
+      const err = new Error();
+      err['code'] = 'ENOENT';
+      readJsonMapStub.throws(err);
+
+      config = await TestConfig.create(await TestConfig.getOptions('test', false, true));
+      expect(readJsonMapStub.calledOnce).to.be.true;
+
+      // @ts-ignore -> hasRead is protected. Ignore for testing.
+      expect(config.hasRead).to.be.true;
+      expect(config.getContents()).to.deep.equal({});
+    });
+
+    it('throws when file does not exist and throwOnNotFound=true', async () => {
+      const err = new Error('not here');
+      err.name = 'FileNotFound';
+      err['code'] = 'ENOENT';
+      readJsonMapStub.throws(err);
+
+      const configOptions = {
+        filename: 'test',
+        isGlobal: true,
+        throwOnNotFound: true
+      };
+
+      try {
+        await shouldThrow(TestConfig.create(configOptions));
+      } catch (e) {
+        expect(e).to.have.property('name', 'FileNotFound');
+      }
+    });
+
+    it('sets hasRead=false by default', async () => {
+      const configOptions = await TestConfig.getOptions('test', false, true);
+      const testConfig = new TestConfig(configOptions);
+      // @ts-ignore -> hasRead is protected. Ignore for testing.
+      expect(testConfig.hasRead).to.be.false;
+    });
+
+    it('forces another read of the config file with force=true', async () => {
+      readJsonMapStub.callsFake(async () => testFileContents);
+      // TestConfig.create() calls read()
+      config = await TestConfig.create(await TestConfig.getOptions('test', false, true));
+      expect(readJsonMapStub.calledOnce).to.be.true;
+
+      // @ts-ignore -> hasRead is protected. Ignore for testing.
+      expect(config.hasRead).to.be.true;
+      expect(config.getContents()).to.deep.equal(testFileContents);
+
+      // Read again. Stub should now be called twice.
+      const contents2 = await config.read(false, true);
+      expect(readJsonMapStub.calledTwice).to.be.true;
+      expect(contents2).to.deep.equal(testFileContents);
+    });
+  });
 });
diff --git a/test/unit/config/configTest.ts b/test/unit/config/configTest.ts
index 5b2424278a..39444a3bef 100644
--- a/test/unit/config/configTest.ts
+++ b/test/unit/config/configTest.ts
@@ -59,6 +59,10 @@ describe('Config', () => {
       .withArgs(config.getPath())
       .returns(Promise.resolve(clone(configFileContents)));
 
+    // Manipulate config.hasRead to force a read
+    // @ts-ignore -> hasRead is protected. Ignore for testing.
+    config.hasRead = false;
+
     const content: ConfigContents = await config.read();
 
     expect(content.defaultusername).to.equal(configFileContents.defaultusername);
diff --git a/test/unit/projectTest.ts b/test/unit/projectTest.ts
index bbafe77967..dd94e00675 100644
--- a/test/unit/projectTest.ts
+++ b/test/unit/projectTest.ts
@@ -5,9 +5,11 @@
  * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
  */
 
 import { assert, expect } from 'chai';
+import { sep as pathSep } from 'path';
+import { env } from '@salesforce/kit';
 import { SfdxProject, SfdxProjectJson } from '../../src/sfdxProject';
-import { testSetup } from '../../src/testSetup';
+import { shouldThrow, testSetup } from '../../src/testSetup';
 import * as internal from '../../src/util/internal';
 
 // Setup the test environment.
@@ -18,6 +20,8 @@ describe('SfdxProject', async () => {
 
   beforeEach(async () => {
     projectPath = await $$.localPathRetriever($$.id);
+    // @ts-ignore SfdxProject.instances is private so override for testing.
+    SfdxProject.instances.clear();
   });
 
   describe('json', async () => {
@@ -31,9 +35,111 @@ describe('SfdxProject', async () => {
       const json = await SfdxProjectJson.create({});
       expect(json.get('packageAliases')['MyName']).to.equal('somePackage');
     });
+    it('read calls schemaValidate', async () => {
+      const defaultOptions = SfdxProjectJson.getDefaultOptions();
+      const sfdxProjectJson = new SfdxProjectJson(defaultOptions);
+      const schemaValidateStub = $$.SANDBOX.stub(sfdxProjectJson, 'schemaValidate');
+      schemaValidateStub.returns(Promise.resolve());
+      await sfdxProjectJson.read();
+      expect(schemaValidateStub.calledOnce).to.be.true;
+    });
+    it('write calls schemaValidate', async () => {
+      const defaultOptions = SfdxProjectJson.getDefaultOptions();
+      const sfdxProjectJson = new SfdxProjectJson(defaultOptions);
+      const schemaValidateStub = $$.SANDBOX.stub(sfdxProjectJson, 'schemaValidate');
+      schemaValidateStub.returns(Promise.resolve());
+      await sfdxProjectJson.write();
+      expect(schemaValidateStub.calledOnce).to.be.true;
+    });
+    it('getPackageDirectories should transform packageDir paths to have path separators that match the OS', async () => {
+      let defaultPP: string;
+      let transformedDefaultPP: string;
+      let otherPP: string;
+      let transformedOtherPP: string;
+
+      if (pathSep === '/') {
+        // posix test
+        defaultPP = 'default\\foo';
+        transformedDefaultPP = 'default/foo';
+        otherPP = 'other\\bar';
+        transformedOtherPP = 'other/bar';
+      } else {
+        // windows test
+        defaultPP = 'default/foo';
+        transformedDefaultPP = 'default\\foo';
+        otherPP = 'other/bar';
+        transformedOtherPP = 'other\\bar';
+      }
+
+      $$.setConfigStubContents('SfdxProjectJson', {
+        contents: {
+          packageDirectories: [{ path: defaultPP, default: true }, { path: otherPP, default: false }]
+        }
+      });
+      const sfdxProjectJson = await SfdxProjectJson.create({});
+      const packageDirs = await sfdxProjectJson.getPackageDirectories();
+
+      expect(packageDirs).to.deep.equal([
+        { path: transformedDefaultPP, default: true },
+        { path: transformedOtherPP, default: false }
+      ]);
+    });
+    it('schemaValidate validates sfdx-project.json', async () => {
+      $$.setConfigStubContents('SfdxProjectJson', {
+        contents: {
+          packageDirectories: [{ path: 'force-app', default: true }, { path: 'common', default: false }],
+          namespace: 'test_ns',
+          sourceApiVersion: '48.0'
+        }
+      });
+      const loggerSpy = $$.SANDBOX.spy($$.TEST_LOGGER, 'warn');
+      // create() calls read() which calls schemaValidate()
+      await SfdxProjectJson.create({});
+      expect(loggerSpy.called).to.be.false;
+    });
+    it('schemaValidate throws when SFDX_PROJECT_JSON_VALIDATION=true and invalid file', async () => {
+      $$.setConfigStubContents('SfdxProjectJson', {
+        contents: {
+          packageDirectories: [{ path: 'force-app', default: true }],
+          foo: 'bar'
+        }
+      });
+      $$.SANDBOX.stub(env, 'getBoolean').callsFake(envVarName => envVarName === 'SFDX_PROJECT_JSON_VALIDATION');
+      const expectedError = "Validation errors:\n should NOT have additional properties 'foo'";
+      try {
+        // create() calls read() which calls schemaValidate()
+        await shouldThrow(SfdxProjectJson.create({}));
+      } catch (e) {
+        expect(e.name).to.equal('SfdxSchemaValidationError');
+        expect(e.message).to.equal(expectedError);
+      }
+    });
+    it('schemaValidate warns when SFDX_PROJECT_JSON_VALIDATION=false and invalid file', async () => {
+      $$.setConfigStubContents('SfdxProjectJson', {
+        contents: {
+          packageDirectories: [{ path: 'force-app', default: true }],
+          foo: 'bar'
+        }
+      });
+      const loggerSpy = $$.SANDBOX.spy($$.TEST_LOGGER, 'warn');
+      // create() calls read() which calls schemaValidate()
+      await SfdxProjectJson.create({});
+      expect(loggerSpy.calledOnce).to.be.true;
+      expect(loggerSpy.calledWithMatch('sfdx-project.json is not schema valid')).to.be.true;
+    });
   });
 
   describe('resolve', () => {
+    it('caches the sfdx-project.json per path', async () => {
+      // @ts-ignore SfdxProject.instances is private so override for testing.
+      const instanceSetSpy = $$.SANDBOX.spy(SfdxProject.instances, 'set');
+      $$.SANDBOX.stub(internal, 'resolveProjectPath').callsFake(() => projectPath);
+      const project1 = await SfdxProject.resolve('foo');
+      expect(instanceSetSpy.calledOnce).to.be.true;
+      const project2 = await SfdxProject.resolve('foo');
+      expect(instanceSetSpy.calledOnce).to.be.true;
+      expect(project1).to.equal(project2);
+    });
     it('with working directory', async () => {
       $$.SANDBOX.stub(internal, 'resolveProjectPath').callsFake(() => projectPath);
       const project = await SfdxProject.resolve();
diff --git a/test/unit/util/fsTest.ts b/test/unit/util/fsTest.ts
index 15a36ef7b4..357fedd90a 100644
--- a/test/unit/util/fsTest.ts
+++ b/test/unit/util/fsTest.ts
@@ -213,4 +213,17 @@ describe('util/fs', () => {
       });
     });
   });
+
+  describe('fileExists', () => {
+    it('should return true if the file exists', async () => {
+      $$.SANDBOX.stub(fs, 'access').returns(Promise.resolve(true));
+      const exists = await fs.fileExists('foo/bar.json');
+      expect(exists).to.be.true;
+    });
+
+    it('should return false if the file does not exist', async () => {
+      const exists = await fs.fileExists('foo/bar.json');
+      expect(exists).to.be.false;
+    });
+  });
 });
diff --git a/yarn.lock b/yarn.lock
index 61a766ce39..a51899a77c 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -370,6 +370,11 @@
     "@salesforce/ts-types" "^1.2.1"
     tslib "^1.10.0"
 
+"@salesforce/schemas@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.npmjs.org/@salesforce/schemas/-/schemas-1.0.1.tgz#d1db56759d2b22a7688e1821aec564e979237ad2"
+  integrity sha512-78pP1GB/DbIS8nSWGL0GpQ27g02drrEo0vzYdRipGYAIXHMzlh1gqEsq0pOiIQlPm1MxWyEqbmf4GG5qSVsd0Q==
+
 "@salesforce/ts-sinon@^1.0.0":
   version "1.1.1"
   resolved "https://registry.npmjs.org/@salesforce/ts-sinon/-/ts-sinon-1.1.1.tgz#0d8bb57bd9d714ff1b01de22a5262488053515fb"
@@ -570,10 +575,10 @@
   resolved "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d"
   integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA==
 
-"@types/mkdirp@0.5.2":
-  version "0.5.2"
-  resolved "https://registry.npmjs.org/@types/mkdirp/-/mkdirp-0.5.2.tgz#503aacfe5cc2703d5484326b1b27efa67a339c1f"
-  integrity sha512-U5icWpv7YnZYGsN4/cmh3WD2onMY0aJIiTE6+51TwJCttdHvtCYmkBNOobHlXwrJRL0nkH9jH4kD+1FAdMN4Tg==
+"@types/mkdirp@1.0.0":
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/@types/mkdirp/-/mkdirp-1.0.0.tgz#16ce0eabe4a9a3afe64557ad0ee6886ec3d32927"
+  integrity sha512-ONFY9//bCEr3DWKON3iDv/Q8LXnhaYYaNDeFSN0AtO5o4sLf9F0pstJKKKjQhXE0kJEeHs8eR6SAsROhhc2Csw==
   dependencies:
     "@types/node" "*"
 
@@ -4340,6 +4345,11 @@ mkdirp@0.5.1, mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.0, mkdirp@~0.5.1:
   dependencies:
     minimist "0.0.8"
 
+mkdirp@1.0.4:
+  version "1.0.4"
+  resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e"
+  integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==
+
 mocha@^5:
   version "5.2.0"
   resolved "https://registry.npmjs.org/mocha/-/mocha-5.2.0.tgz#6d8ae508f59167f940f2b5b3c4a612ae50c90ae6"