diff --git a/schemas/force-source-status.json b/schemas/force-source-status.json
index 9bbf5689e..fcfa2f835 100644
--- a/schemas/force-source-status.json
+++ b/schemas/force-source-status.json
@@ -12,25 +12,7 @@ "type": "object",
       "properties": {
         "state": {
-          "type": "string",
-          "enum": [
-            "Local Deleted",
-            "Local Add",
-            "Local Changed",
-            "Local Unchanged",
-            "Remote Deleted",
-            "Remote Add",
-            "Remote Changed",
-            "Remote Unchanged",
-            "Local Deleted (Conflict)",
-            "Local Add (Conflict)",
-            "Local Changed (Conflict)",
-            "Local Unchanged (Conflict)",
-            "Remote Deleted (Conflict)",
-            "Remote Add (Conflict)",
-            "Remote Changed (Conflict)",
-            "Remote Unchanged (Conflict)"
-          ]
+          "$ref": "#/definitions/StatusStateString"
         },
         "fullName": {
           "type": "string"
         }
@@ -52,12 +34,36 @@
           "enum": ["Deleted", "Add", "Changed", "Unchanged"]
         },
         "origin": {
-          "type": "string",
-          "enum": ["Local", "Remote"]
+          "$ref": "#/definitions/StatusOrigin"
         }
       },
       "required": ["state", "fullName", "type", "origin"],
       "additionalProperties": false
+    },
+    "StatusStateString": {
+      "type": "string",
+      "enum": [
+        "Local Deleted",
+        "Local Add",
+        "Local Changed",
+        "Local Unchanged",
+        "Remote Deleted",
+        "Remote Add",
+        "Remote Changed",
+        "Remote Unchanged",
+        "Local Deleted (Conflict)",
+        "Local Add (Conflict)",
+        "Local Changed (Conflict)",
+        "Local Unchanged (Conflict)",
+        "Remote Deleted (Conflict)",
+        "Remote Add (Conflict)",
+        "Remote Changed (Conflict)",
+        "Remote Unchanged (Conflict)"
+      ]
+    },
+    "StatusOrigin": {
+      "type": "string",
+      "enum": ["Local", "Remote"]
     }
   }
 }
diff --git a/src/hooks/diagnostics.ts b/src/hooks/diagnostics.ts
index 731c4c588..a0b0cd659 100644
--- a/src/hooks/diagnostics.ts
+++ b/src/hooks/diagnostics.ts
@@ -110,16 +109,15 @@ const apiVersionTest = async (doctor: SfDoctor): Promise<void> => {
 };
 
 // check sfdx-project.json for sourceApiVersion
-const getSourceApiVersion = async (): Promise<string> => {
+const getSourceApiVersion = async (): Promise<string | undefined> => {
   try {
     const project = SfProject.getInstance();
     const projectJson = await project.resolveProjectConfig();
-    return projectJson.sourceApiVersion as string;
+    return projectJson.sourceApiVersion as string | undefined;
   } catch (error) {
     const errMsg = (error as Error).message;
     getLogger().debug(`Cannot determine sourceApiVersion due to: ${errMsg}`);
   }
-  return '';
 };
 
 // check max API version for default orgs
@@ -139,7 +138,7 @@ const getMaxApiVersion = async (aggregator: ConfigAggregator, aliasOrUsername: s
 // Comparing undefined with undefined would return false.
 // Comparing 55.0 with 55.0 would return false.
 // Comparing 55.0 with 56.0 would return true.
-const diff = (version1: string, version2: string): boolean => {
+const diff = (version1: string | undefined, version2: string | undefined): boolean => {
   getLogger().debug(`Comparing API versions: [${version1},${version2}]`);
   return (version1?.length && version2?.length && version1 !== version2) as boolean;
 };
diff --git a/test/commands/source/deploy.test.ts b/test/commands/source/deploy.test.ts
index 09e6c7f06..8b72163ea 100644
--- a/test/commands/source/deploy.test.ts
+++ b/test/commands/source/deploy.test.ts
@@ -87,9 +87,8 @@ describe('force:source:deploy', () => {
   const runDeployCmd = async (params: string[], options?: { sourceApiVersion?: string }) => {
     const cmd = new TestDeploy(params, oclifConfigStub);
     cmd.project = SfProject.getInstance();
-    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
-    // @ts-ignore
-    cmd.configAggregator = await (await ConfigAggregator.create({ customConfigMeta: ConfigMeta })).reload();
+
+    cmd.configAggregator = await (await ConfigAggregator.create({ customConfigMeta: ConfigMeta.default })).reload();
     sandbox.stub(cmd.project, 'getDefaultPackage').returns({ name: '', path: '', fullPath: defaultDir });
     sandbox.stub(cmd.project, 'getUniquePackageDirectories').returns([{ fullPath: defaultDir, path: '', name: '' }]);
     sandbox.stub(cmd.project, 'getPackageDirectories').returns([{ fullPath: defaultDir, path: '', name: '' }]);
diff --git a/test/coverageUtils.test.ts b/test/coverageUtils.test.ts
index bdf0c6ba1..71f7877a7 100644
--- a/test/coverageUtils.test.ts
+++ b/test/coverageUtils.test.ts
@@ -7,7 +7,7 @@
 import { expect } from 'chai';
 import { MockTestOrgData, TestContext } from '@salesforce/core/lib/testSetup.js';
 import { AuthInfo, Connection } from '@salesforce/core';
-import { createSandbox, SinonSandbox } from 'sinon';
+import sinon from 'sinon';
 import chalk from 'chalk';
 import {
   prepCoverageForDisplay,
@@ -298,10 +298,10 @@ describe('transform md RunTestResult', () => {
   const testData = new MockTestOrgData();
   let sampleTestResult = getSampleTestResult();
-  let sandboxStub: SinonSandbox;
+  let sandboxStub: sinon.SinonSandbox;
 
   beforeEach(async () => {
     sampleTestResult = getSampleTestResult();
-    sandboxStub = createSandbox();
+    sandboxStub = sinon.createSandbox();
     $$.setConfigStubContents('StateAggregator', {
       contents: {
diff --git a/test/formatters/mdDeployResultFormatter.test.ts b/test/formatters/mdDeployResultFormatter.test.ts
index 2b7d318b3..73791885a 100644
--- a/test/formatters/mdDeployResultFormatter.test.ts
+++ b/test/formatters/mdDeployResultFormatter.test.ts
@@ -22,7 +22,7 @@ describe('mdDeployResultFormatter', () => {
   const deployResultTestSuccess = getDeployResult('passedTest');
   const deployResultTestSuccessAndFailure = getDeployResult('passedAndFailedTest');
 
-  let ux;
+  let ux: Ux;
   let logStub: sinon.SinonStub;
   let styledHeaderStub: sinon.SinonStub;
@@ -32,6 +32,9 @@
     logStub = sandbox.stub();
     styledHeaderStub = sandbox.stub();
     tableStub = sandbox.stub();
+    // ux is a stubbed Ux
+    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+    // @ts-ignore
     ux = stubInterface(sandbox, {
       log: logStub,
       styledHeader: styledHeaderStub,
@@ -49,7 +52,7 @@
     it('should return expected json for a success', () => {
       process.exitCode = 0;
       const expectedSuccessResults = deployResultSuccess.response;
-      const formatter = new MdDeployResultFormatter(ux as Ux, {}, deployResultSuccess);
+      const formatter = new MdDeployResultFormatter(ux, {}, deployResultSuccess);
const json = formatter.getJson(); expect(json).to.deep.equal(expectedSuccessResults); @@ -59,14 +62,14 @@ describe('mdDeployResultFormatter', () => { process.exitCode = 1; const expectedFailureResults = deployResultFailure.response; - const formatter = new MdDeployResultFormatter(ux as Ux, {}, deployResultFailure); + const formatter = new MdDeployResultFormatter(ux, {}, deployResultFailure); expect(formatter.getJson()).to.deep.equal(expectedFailureResults); }); it('should return expected json for a partial success', () => { process.exitCode = 69; const expectedPartialSuccessResponse = deployResultPartialSuccess.response; - const formatter = new MdDeployResultFormatter(ux as Ux, {}, deployResultPartialSuccess); + const formatter = new MdDeployResultFormatter(ux, {}, deployResultPartialSuccess); expect(formatter.getJson()).to.deep.equal(expectedPartialSuccessResponse); }); @@ -74,7 +77,7 @@ describe('mdDeployResultFormatter', () => { process.exitCode = 0; const expectedSuccessResults = deployResultSuccess.response; - const formatter = new MdDeployResultFormatter(ux as Ux, { concise: true }, deployResultSuccess); + const formatter = new MdDeployResultFormatter(ux, { concise: true }, deployResultSuccess); const json = formatter.getJson(); // a few checks that it's the rest of the json @@ -88,7 +91,7 @@ describe('mdDeployResultFormatter', () => { describe('display', () => { it('should output as expected for a success (no table)', () => { process.exitCode = 0; - const formatter = new MdDeployResultFormatter(ux as Ux, {}, deployResultSuccess); + const formatter = new MdDeployResultFormatter(ux, {}, deployResultSuccess); formatter.display(); expect(logStub.callCount).to.equal(0); expect(tableStub.callCount).to.equal(0); @@ -97,7 +100,7 @@ describe('mdDeployResultFormatter', () => { it('should output as expected for a verbose success (has table)', () => { process.exitCode = 0; - const formatter = new MdDeployResultFormatter(ux as Ux, { verbose: true }, deployResultSuccess); + const formatter = new MdDeployResultFormatter(ux, { verbose: true }, deployResultSuccess); formatter.display(); expect(styledHeaderStub.callCount).to.equal(1); expect(logStub.callCount).to.equal(1); @@ -108,7 +111,7 @@ describe('mdDeployResultFormatter', () => { it('should output as expected for a failure and exclude duplicate information', () => { process.exitCode = 1; - const formatter = new MdDeployResultFormatter(ux as Ux, {}, deployResultFailure); + const formatter = new MdDeployResultFormatter(ux, {}, deployResultFailure); try { formatter.display(); @@ -128,7 +131,7 @@ describe('mdDeployResultFormatter', () => { deployFailure.response.details.componentFailures = []; deployFailure.response.details.componentSuccesses = []; delete deployFailure.response.details.runTestResult; - const formatter = new MdDeployResultFormatter(ux as Ux, {}, deployFailure); + const formatter = new MdDeployResultFormatter(ux, {}, deployFailure); sandbox.stub(formatter, 'isSuccess').returns(false); try { @@ -144,7 +147,7 @@ describe('mdDeployResultFormatter', () => { it('should output as expected for a test failure with verbose', () => { process.exitCode = 1; - const formatter = new MdDeployResultFormatter(ux as Ux, { verbose: true }, deployResultTestFailure); + const formatter = new MdDeployResultFormatter(ux, { verbose: true }, deployResultTestFailure); try { formatter.display(); throw new Error('should have thrown'); @@ -161,7 +164,7 @@ describe('mdDeployResultFormatter', () => { it('should output as expected for passing tests with verbose', () 
=> { process.exitCode = 0; - const formatter = new MdDeployResultFormatter(ux as Ux, { verbose: true }, deployResultTestSuccess); + const formatter = new MdDeployResultFormatter(ux, { verbose: true }, deployResultTestSuccess); formatter.display(); expect(styledHeaderStub.args[0][0]).to.include('Deployed Source'); expect(styledHeaderStub.args[1][0]).to.include('Component Failures [1]'); @@ -171,7 +174,7 @@ describe('mdDeployResultFormatter', () => { it('should output as expected for passing and failing tests with verbose', () => { process.exitCode = 1; - const formatter = new MdDeployResultFormatter(ux as Ux, { verbose: true }, deployResultTestSuccessAndFailure); + const formatter = new MdDeployResultFormatter(ux, { verbose: true }, deployResultTestSuccessAndFailure); try { formatter.display(); throw new Error('should have thrown'); @@ -188,7 +191,7 @@ describe('mdDeployResultFormatter', () => { it('shows success AND failures for partialSucceeded', () => { process.exitCode = 69; - const formatter = new MdDeployResultFormatter(ux as Ux, { verbose: true }, deployResultPartialSuccess); + const formatter = new MdDeployResultFormatter(ux, { verbose: true }, deployResultPartialSuccess); formatter.display(); expect(styledHeaderStub.callCount, 'styledHeaderStub.callCount').to.equal(2); expect(logStub.callCount, 'logStub.callCount').to.equal(3); diff --git a/test/formatters/pullFormatter.test.ts b/test/formatters/pullFormatter.test.ts index e7520f1bb..92ac545d1 100644 --- a/test/formatters/pullFormatter.test.ts +++ b/test/formatters/pullFormatter.test.ts @@ -25,7 +25,7 @@ describe('PullFormatter', () => { const retrieveResultEmpty = getRetrieveResult('empty'); const retrieveResultWarnings = getRetrieveResult('warnings'); - let ux; + let ux: Ux; let logStub: sinon.SinonStub; let styledHeaderStub: sinon.SinonStub; let tableStub: sinon.SinonStub; @@ -42,6 +42,8 @@ describe('PullFormatter', () => { logStub = sandbox.stub(); styledHeaderStub = sandbox.stub(); tableStub = sandbox.stub(); + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore ux = stubInterface(sandbox, { log: logStub, styledHeader: styledHeaderStub, @@ -58,14 +60,14 @@ describe('PullFormatter', () => { it('should return expected json for a success', () => { process.exitCode = 0; const expectedSuccessResults: PullResponse['pulledSource'] = retrieveResultSuccess.getFileResponses(); - const formatter = new PullResultFormatter(ux as Ux, {}, retrieveResultSuccess); + const formatter = new PullResultFormatter(ux, {}, retrieveResultSuccess); expect(formatter.getJson().pulledSource).to.deep.equal(expectedSuccessResults); }); it('should return expected json for a failure', () => { process.exitCode = 1; const expectedFailureResults: PullResponse['pulledSource'] = retrieveResultFailure.getFileResponses(); - const formatter = new PullResultFormatter(ux as Ux, {}, retrieveResultFailure); + const formatter = new PullResultFormatter(ux, {}, retrieveResultFailure); try { formatter.getJson().pulledSource; throw new Error('should have thrown'); @@ -79,14 +81,14 @@ describe('PullFormatter', () => { it('should return expected json for an InProgress', () => { const expectedInProgressResults: PullResponse['pulledSource'] = retrieveResultInProgress.getFileResponses(); - const formatter = new PullResultFormatter(ux as Ux, {}, retrieveResultInProgress); + const formatter = new PullResultFormatter(ux, {}, retrieveResultInProgress); expect(formatter.getJson().pulledSource).to.deep.equal(expectedInProgressResults); }); describe('display', () 
=> { it('should output as expected for a success', () => { process.exitCode = 0; - const formatter = new PullResultFormatter(ux as Ux, {}, retrieveResultSuccess); + const formatter = new PullResultFormatter(ux, {}, retrieveResultSuccess); formatter.display(); expect(styledHeaderStub.called).to.equal(true); expect(logStub.called).to.equal(false); @@ -100,7 +102,7 @@ describe('PullFormatter', () => { it('should output as expected for an InProgress', () => { process.exitCode = 68; const options = { waitTime: 33 }; - const formatter = new PullResultFormatter(ux as Ux, options, retrieveResultInProgress); + const formatter = new PullResultFormatter(ux, options, retrieveResultInProgress); formatter.display(); expect(styledHeaderStub.called).to.equal(false); expect(logStub.called).to.equal(true); @@ -112,7 +114,7 @@ describe('PullFormatter', () => { it('should output as expected for a Failure', () => { process.exitCode = 1; - const formatter = new PullResultFormatter(ux as Ux, {}, retrieveResultFailure); + const formatter = new PullResultFormatter(ux, {}, retrieveResultFailure); sandbox.stub(formatter, 'isSuccess').returns(false); formatter.display(); @@ -123,7 +125,7 @@ describe('PullFormatter', () => { it('should output as expected for warnings', () => { process.exitCode = 0; - const formatter = new PullResultFormatter(ux as Ux, {}, retrieveResultWarnings); + const formatter = new PullResultFormatter(ux, {}, retrieveResultWarnings); formatter.display(); // Should call styledHeader for warnings and the standard "Retrieved Source" header expect(styledHeaderStub.calledTwice).to.equal(true); @@ -137,7 +139,7 @@ describe('PullFormatter', () => { it('should output a message when no results were returned', () => { process.exitCode = 0; - const formatter = new PullResultFormatter(ux as Ux, {}, retrieveResultEmpty); + const formatter = new PullResultFormatter(ux, {}, retrieveResultEmpty); formatter.display(); expect(styledHeaderStub.called).to.equal(true); expect(logStub.called).to.equal(true); diff --git a/test/formatters/pushResultFormatter.test.ts b/test/formatters/pushResultFormatter.test.ts index c56f2307a..70efe679e 100644 --- a/test/formatters/pushResultFormatter.test.ts +++ b/test/formatters/pushResultFormatter.test.ts @@ -22,12 +22,15 @@ describe('PushResultFormatter', () => { const sandbox = new TestContext().SANDBOX; - let uxMock; + let uxMock: Ux; let tableStub: sinon.SinonStub; let headerStub: sinon.SinonStub; beforeEach(() => { tableStub = sandbox.stub(); headerStub = sandbox.stub(); + // ux is a stubbed Ux + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore uxMock = stubInterface(sandbox, { table: tableStub, styledHeader: headerStub, @@ -52,7 +55,7 @@ describe('PushResultFormatter', () => { }; it('returns expected json for success', () => { process.exitCode = 0; - const formatter = new PushResultFormatter(uxMock as Ux, {}, deployResultSuccess); + const formatter = new PushResultFormatter(uxMock, {}, deployResultSuccess); expect(formatter.getJson().pushedSource).to.deep.equal([ { filePath: 'classes/ProductController.cls', @@ -64,7 +67,7 @@ describe('PushResultFormatter', () => { }); it('returns expected json for success with replaements', () => { process.exitCode = 0; - const formatter = new PushResultFormatter(uxMock as Ux, {}, deployResultSuccessWithReplacements); + const formatter = new PushResultFormatter(uxMock, {}, deployResultSuccessWithReplacements); const result = formatter.getJson(); expect(result.pushedSource).to.deep.equal([ { @@ -79,7 +82,7 @@ 
describe('PushResultFormatter', () => { }); }); it('returns expected json for failure', () => { - const formatter = new PushResultFormatter(uxMock as Ux, {}, deployResultFailure); + const formatter = new PushResultFormatter(uxMock, {}, deployResultFailure); process.exitCode = 1; try { @@ -100,18 +103,18 @@ describe('PushResultFormatter', () => { describe('json with quiet', () => { it('honors quiet flag for json successes', () => { process.exitCode = 0; - const formatter = new PushResultFormatter(uxMock as Ux, { quiet: true }, deployResultSuccess); + const formatter = new PushResultFormatter(uxMock, { quiet: true }, deployResultSuccess); expect(formatter.getJson().pushedSource).to.deep.equal([]); expect(formatter.getJson().replacements).to.be.undefined; }); it('omits replacements', () => { process.exitCode = 0; - const formatter = new PushResultFormatter(uxMock as Ux, { quiet: true }, deployResultSuccessWithReplacements); + const formatter = new PushResultFormatter(uxMock, { quiet: true }, deployResultSuccessWithReplacements); expect(formatter.getJson().pushedSource).to.deep.equal([]); expect(formatter.getJson().replacements).to.be.undefined; }); it('honors quiet flag for json failure', () => { - const formatter = new PushResultFormatter(uxMock as Ux, { quiet: true }, deployResultFailure); + const formatter = new PushResultFormatter(uxMock, { quiet: true }, deployResultFailure); try { formatter.getJson(); throw new Error('should have thrown'); @@ -126,14 +129,14 @@ describe('PushResultFormatter', () => { describe('human output', () => { it('returns expected output for success', () => { process.exitCode = 0; - const formatter = new PushResultFormatter(uxMock as Ux, {}, deployResultSuccess); + const formatter = new PushResultFormatter(uxMock, {}, deployResultSuccess); formatter.display(); expect(headerStub.callCount, JSON.stringify(headerStub.args)).to.equal(1); expect(tableStub.callCount, JSON.stringify(tableStub.args)).to.equal(1); }); it('returns expected output for success with replacements', () => { process.exitCode = 0; - const formatter = new PushResultFormatter(uxMock as Ux, {}, deployResultSuccessWithReplacements); + const formatter = new PushResultFormatter(uxMock, {}, deployResultSuccessWithReplacements); formatter.display(); expect(headerStub.callCount, JSON.stringify(headerStub.args)).to.equal(2); expect(headerStub.args[0][0]).to.include('Pushed Source'); @@ -147,7 +150,7 @@ describe('PushResultFormatter', () => { deployFailure.response.details.componentFailures = []; deployFailure.response.details.componentSuccesses = []; delete deployFailure.response.details.runTestResult; - const formatter = new PushResultFormatter(uxMock as Ux, {}, [deployFailure]); + const formatter = new PushResultFormatter(uxMock, {}, [deployFailure]); sandbox.stub(formatter, 'isSuccess').returns(false); try { @@ -163,7 +166,7 @@ describe('PushResultFormatter', () => { describe('quiet', () => { it('does not display successes for quiet', () => { process.exitCode = 0; - const formatter = new PushResultFormatter(uxMock as Ux, { quiet: true }, deployResultSuccess); + const formatter = new PushResultFormatter(uxMock, { quiet: true }, deployResultSuccess); formatter.display(); expect(headerStub.callCount, JSON.stringify(headerStub.args)).to.equal(0); expect(formatter.getJson().pushedSource).to.deep.equal([]); @@ -171,7 +174,7 @@ describe('PushResultFormatter', () => { }); it('displays errors and throws for quiet', () => { process.exitCode = 1; - const formatter = new PushResultFormatter(uxMock as Ux, { quiet: 
true }, deployResultFailure); + const formatter = new PushResultFormatter(uxMock, { quiet: true }, deployResultFailure); try { formatter.display(); throw new Error('should have thrown'); diff --git a/test/formatters/retrieveResultFormatter.test.ts b/test/formatters/retrieveResultFormatter.test.ts index 8a5c35a15..f7f56be75 100644 --- a/test/formatters/retrieveResultFormatter.test.ts +++ b/test/formatters/retrieveResultFormatter.test.ts @@ -25,7 +25,7 @@ describe('RetrieveResultFormatter', () => { const retrieveResultEmpty = getRetrieveResult('empty'); const retrieveResultWarnings = getRetrieveResult('warnings'); - let ux; + let ux: Ux; let logStub: sinon.SinonStub; let styledHeaderStub: sinon.SinonStub; let tableStub: sinon.SinonStub; @@ -42,6 +42,9 @@ describe('RetrieveResultFormatter', () => { logStub = sandbox.stub(); styledHeaderStub = sandbox.stub(); tableStub = sandbox.stub(); + // ux is a stubbed Ux + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore ux = stubInterface(sandbox, { log: logStub, styledHeader: styledHeaderStub, @@ -61,7 +64,7 @@ describe('RetrieveResultFormatter', () => { warnings: [], response: cloneJson(retrieveResultSuccess.response), }; - const formatter = new RetrieveResultFormatter(ux as Ux, {}, retrieveResultSuccess); + const formatter = new RetrieveResultFormatter(ux, {}, retrieveResultSuccess); expect(formatter.getJson()).to.deep.equal(expectedSuccessResults); }); @@ -72,7 +75,7 @@ describe('RetrieveResultFormatter', () => { warnings: [], response: cloneJson(retrieveResultFailure.response), }; - const formatter = new RetrieveResultFormatter(ux as Ux, {}, retrieveResultFailure); + const formatter = new RetrieveResultFormatter(ux, {}, retrieveResultFailure); expect(formatter.getJson()).to.deep.equal(expectedFailureResults); }); @@ -83,7 +86,7 @@ describe('RetrieveResultFormatter', () => { warnings: [], response: cloneJson(retrieveResultInProgress.response), }; - const formatter = new RetrieveResultFormatter(ux as Ux, {}, retrieveResultInProgress); + const formatter = new RetrieveResultFormatter(ux, {}, retrieveResultInProgress); expect(formatter.getJson()).to.deep.equal(expectedInProgressResults); }); @@ -95,7 +98,7 @@ describe('RetrieveResultFormatter', () => { warnings: [], response: cloneJson(retrieveResultSuccess.response), }; - const formatter = new RetrieveResultFormatter(ux as Ux, { packages: [testPkg] }, retrieveResultSuccess); + const formatter = new RetrieveResultFormatter(ux, { packages: [testPkg] }, retrieveResultSuccess); expect(formatter.getJson()).to.deep.equal(expectedSuccessResults); }); @@ -108,14 +111,14 @@ describe('RetrieveResultFormatter', () => { warnings, response: cloneJson(retrieveResultWarnings.response), }; - const formatter = new RetrieveResultFormatter(ux as Ux, {}, retrieveResultWarnings); + const formatter = new RetrieveResultFormatter(ux, {}, retrieveResultWarnings); expect(formatter.getJson()).to.deep.equal(expectedSuccessResults); }); }); describe('display', () => { it('should output as expected for a success', () => { - const formatter = new RetrieveResultFormatter(ux as Ux, {}, retrieveResultSuccess); + const formatter = new RetrieveResultFormatter(ux, {}, retrieveResultSuccess); formatter.display(); expect(styledHeaderStub.called).to.equal(true); expect(logStub.called).to.equal(false); @@ -128,7 +131,7 @@ describe('RetrieveResultFormatter', () => { it('should output as expected for an InProgress', () => { const options = { waitTime: 33 }; - const formatter = new RetrieveResultFormatter(ux as Ux, 
options, retrieveResultInProgress); + const formatter = new RetrieveResultFormatter(ux, options, retrieveResultInProgress); formatter.display(); expect(styledHeaderStub.called).to.equal(false); expect(logStub.called).to.equal(true); @@ -139,7 +142,7 @@ describe('RetrieveResultFormatter', () => { }); it('should output as expected for a Failure', () => { - const formatter = new RetrieveResultFormatter(ux as Ux, {}, retrieveResultFailure); + const formatter = new RetrieveResultFormatter(ux, {}, retrieveResultFailure); sandbox.stub(formatter, 'isSuccess').returns(false); formatter.display(); @@ -149,7 +152,7 @@ describe('RetrieveResultFormatter', () => { }); it('should output as expected for warnings', () => { - const formatter = new RetrieveResultFormatter(ux as Ux, {}, retrieveResultWarnings); + const formatter = new RetrieveResultFormatter(ux, {}, retrieveResultWarnings); formatter.display(); // Should call styledHeader for warnings and the standard "Retrieved Source" header expect(styledHeaderStub.calledTwice).to.equal(true); @@ -162,7 +165,7 @@ describe('RetrieveResultFormatter', () => { }); it('should output a message when no results were returned', () => { - const formatter = new RetrieveResultFormatter(ux as Ux, {}, retrieveResultEmpty); + const formatter = new RetrieveResultFormatter(ux, {}, retrieveResultEmpty); formatter.display(); expect(styledHeaderStub.called).to.equal(true); expect(logStub.called).to.equal(true); diff --git a/test/formatters/statusResultFormatter.test.ts b/test/formatters/statusResultFormatter.test.ts index 58242ddf2..16ee6ed26 100644 --- a/test/formatters/statusResultFormatter.test.ts +++ b/test/formatters/statusResultFormatter.test.ts @@ -40,14 +40,16 @@ const fakeResult: StatusResult[] = [ describe('status results', () => { const sandbox = new TestContext().SANDBOX; - let ux; + let ux: Ux; let logStub: sinon.SinonStub; let tableStub: sinon.SinonStub; beforeEach(() => { logStub = sandbox.stub(); tableStub = sandbox.stub(); - + // ux is a stubbed Ux + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore ux = stubInterface(sandbox, { log: logStub, table: tableStub, @@ -59,32 +61,32 @@ describe('status results', () => { }); it('returns expected json', () => { - const formatter = new StatusFormatter(ux as Ux, {}, fakeResult); + const formatter = new StatusFormatter(ux, {}, fakeResult); expect(formatter.getJson()).deep.equal(fakeResult); }); describe('human display', () => { it('includes ignored files without the concise option', () => { - const formatter = new StatusFormatter(ux as Ux, { concise: false }, fakeResult); + const formatter = new StatusFormatter(ux, { concise: false }, fakeResult); formatter.display(); expect(tableStub.callCount).to.equal(1); expect(tableStub.firstCall.args[0]).to.have.equal(fakeResult); }); it('omits ignored files with the concise option', () => { - const formatter = new StatusFormatter(ux as Ux, { concise: true }, fakeResult); + const formatter = new StatusFormatter(ux, { concise: true }, fakeResult); formatter.display(); expect(tableStub.callCount).to.equal(1); expect(tableStub.firstCall.args[0]).to.deep.equal([fakeResult[2]]); }); it('shows no results when there are none', () => { - const formatter = new StatusFormatter(ux as Ux, { concise: false }, []); + const formatter = new StatusFormatter(ux, { concise: false }, []); formatter.display(); expect(logStub.callCount).to.equal(1); expect(logStub.firstCall.args[0]).to.contain('No local or remote changes found.'); }); it('shows no results when there are none 
because concise omitted them', () => { - const formatter = new StatusFormatter(ux as Ux, { concise: true }, [fakeResult[0]]); + const formatter = new StatusFormatter(ux, { concise: true }, [fakeResult[0]]); formatter.display(); expect(logStub.callCount).to.equal(1); expect(logStub.firstCall.args[0]).to.contain('No local or remote changes found.'); diff --git a/test/nuts/digitalExperienceBundle/deb.tracking.nut.ts b/test/nuts/digitalExperienceBundle/deb.tracking.nut.ts index f0f09144e..d768019ed 100644 --- a/test/nuts/digitalExperienceBundle/deb.tracking.nut.ts +++ b/test/nuts/digitalExperienceBundle/deb.tracking.nut.ts @@ -53,7 +53,7 @@ describe('deb -- tracking/push/pull', () => { const statusResult = execCmd('force:source:status --local --json', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; assertDEBMeta(statusResult, 'B'); }); @@ -77,7 +77,7 @@ describe('deb -- tracking/push/pull', () => { const statusResult = execCmd('force:source:status --local --json', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; assertViewHomeStatus(statusResult, 'B', 'CONTENT'); }); @@ -100,7 +100,7 @@ describe('deb -- tracking/push/pull', () => { const statusResult = execCmd('force:source:status --local --json', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; assertViewHomeStatus(statusResult, 'B', 'META'); }); @@ -123,7 +123,7 @@ describe('deb -- tracking/push/pull', () => { const pulledSource = execCmd('force:source:pull --forceoverwrite --json', { ensureExitCode: 0, - }).jsonOutput?.result.pulledSource; + }).jsonOutput?.result?.pulledSource; assertAllDEBAndTheirDECounts(pulledSource, 0, false); }); @@ -131,9 +131,9 @@ describe('deb -- tracking/push/pull', () => { it('should not see any local/remote changes in deb/de', () => { const statusResult = execCmd('force:source:status --json', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; - expect(statusResult.every((s) => s.type !== TYPES.DE?.name && s.type !== TYPES.DEB.name)).to.be.true; + expect(statusResult?.every((s) => s.type !== TYPES.DE?.name && s.type !== TYPES.DEB.name)).to.be.true; }); }); @@ -143,7 +143,7 @@ describe('deb -- tracking/push/pull', () => { const statusResult = execCmd('force:source:status --local --json', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; assertViewHomeStatus(statusResult, 'B', 'FR_VARIANT'); }); @@ -159,7 +159,7 @@ describe('deb -- tracking/push/pull', () => { const statusResult = execCmd('force:source:status --local --json', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; assertDocumentDetailPageA(statusResult); }); @@ -178,7 +178,7 @@ describe('deb -- tracking/push/pull', () => { const statusResult = execCmd('force:source:status --local --json', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; assertDocumentDetailPageA(statusResult); }); diff --git a/test/nuts/digitalExperienceBundle/helper.ts b/test/nuts/digitalExperienceBundle/helper.ts index 173afe825..5bfd700af 100644 --- a/test/nuts/digitalExperienceBundle/helper.ts +++ b/test/nuts/digitalExperienceBundle/helper.ts @@ -38,10 +38,10 @@ export function assertAllDEBAndTheirDECounts( ).to.deep.equal([51, 51, 1, 1]); } -export function assertSingleDEBAndItsDECounts(resp: CustomFileResponses, debFullName: string) { +export function assertSingleDEBAndItsDECounts(resp: CustomFileResponses | undefined, debFullName: string) { expect(resp).to.have.length(52); expect( - resp.reduce( + resp?.reduce( (acc: [number, number], curr) 
=> { if (curr.type === TYPES.DE?.name && curr.fullName.includes(debFullName)) acc[0]++; if (curr.type === TYPES.DEB.name && curr.fullName === debFullName) acc[1]++; @@ -68,30 +68,33 @@ export function assertDECountsOfAllDEB(resp?: CustomFileResponses) { ).to.deep.equal([51, 51]); } -export function assertDECountOfSingleDEB(resp: CustomFileResponses) { +export function assertDECountOfSingleDEB(resp?: CustomFileResponses) { expect(resp).to.have.length(51); - expect(resp.every((s) => s.type === TYPES.DE?.name)).to.be.true; + expect(resp?.every((s) => s.type === TYPES.DE?.name)).to.be.true; } -export function assertDEBMeta(resp: CustomFileResponses, deb: 'A' | 'B') { +export function assertDEBMeta(resp: CustomFileResponses | undefined, deb: 'A' | 'B') { expect(resp).to.have.length(1); - resp[0].filePath = relative(process.cwd(), resp[0].filePath); + // if only to satisfy compiler - the assertion above ensures this is true + if (resp?.length && resp[0].filePath) { + resp[0].filePath = relative(process.cwd(), resp[0].filePath); - expect(resp[0]).to.include({ - type: TYPES.DEB.name, - fullName: DEBS[deb].FULL_NAME, - filePath: DEBS[deb].FILES.META.RELATIVE_PATH, - }); + expect(resp[0]).to.include({ + type: TYPES.DEB.name, + fullName: DEBS[deb].FULL_NAME, + filePath: DEBS[deb].FILES.META.RELATIVE_PATH, + }); + } } -export function assertViewHome(resp: CustomFileResponses, deb: 'A' | 'B') { +export function assertViewHome(resp: CustomFileResponses | undefined, deb: 'A' | 'B') { expect(resp).to.have.length(3); expect( - resp.map((s) => ({ + resp?.map((s) => ({ type: s.type, fullName: s.fullName, - filePath: relative(process.cwd(), s.filePath), + filePath: relative(process.cwd(), s.filePath as string), })) ).to.have.deep.members([ { @@ -113,28 +116,29 @@ export function assertViewHome(resp: CustomFileResponses, deb: 'A' | 'B') { } export function assertViewHomeStatus( - resp: CustomFileResponses, + resp: CustomFileResponses | undefined, deb: 'A' | 'B', type: 'CONTENT' | 'META' | 'FR_VARIANT' ) { expect(resp).to.have.length(1); - resp[0].filePath = relative(process.cwd(), resp[0].filePath); - - expect(resp[0]).to.include({ - type: TYPES.DE?.name, - fullName: DEBS[deb].DE.VIEW_HOME.FULL_NAME, - filePath: DEBS[deb].DE.VIEW_HOME.FILES[type].RELATIVE_PATH, - }); + if (resp) { + resp[0].filePath = relative(process.cwd(), resp[0].filePath as string); + expect(resp[0]).to.include({ + type: TYPES.DE?.name, + fullName: DEBS[deb].DE.VIEW_HOME.FULL_NAME, + filePath: DEBS[deb].DE.VIEW_HOME.FILES[type].RELATIVE_PATH, + }); + } } -export function assertDocumentDetailPageA(resp: CustomFileResponses) { +export function assertDocumentDetailPageA(resp?: CustomFileResponses) { expect(resp).to.have.length(4); expect( - resp.map((s) => ({ + resp?.map((s) => ({ type: s.type, fullName: s.fullName, - filePath: relative(process.cwd(), s.filePath), + filePath: relative(process.cwd(), s.filePath as string), })) ).to.have.deep.members([ { @@ -176,7 +180,11 @@ export async function assertDocumentDetailPageADelete(session: TestSession, asse } } -export function assertViewHomeFRVariantDelete(resp: CustomFileResponses, deb: 'A' | 'B', projectDir: string) { +export function assertViewHomeFRVariantDelete( + resp: CustomFileResponses | undefined, + deb: 'A' | 'B', + projectDir: string +) { expect(resp).to.have.length(2); const inboundFiles = execCmd( diff --git a/test/nuts/folderTypes.nut.ts b/test/nuts/folderTypes.nut.ts index 67757172d..12e432740 100644 --- a/test/nuts/folderTypes.nut.ts +++ b/test/nuts/folderTypes.nut.ts @@ -72,8 
+72,8 @@ describe('metadata types that go in folders', () => { }, ]; - const getRelativeFileResponses = (resp: FileResponse[]) => - resp.map((s) => { + const getRelativeFileResponses = (resp?: FileResponse[]) => + resp?.map((s) => { // grab the last 2 directories with the file only s.filePath = s.filePath?.split(path.sep).slice(-3).join(path.sep); return s; @@ -89,15 +89,15 @@ describe('metadata types that go in folders', () => { it('can deploy email templates via the manifest', () => { const deployResults = execCmd('force:source:deploy -x package.xml --json').jsonOutput; - expect(deployResults.status, JSON.stringify(deployResults)).to.equal(0); - const deployedSource = getRelativeFileResponses(deployResults.result.deployedSource); + expect(deployResults?.status, JSON.stringify(deployResults)).to.equal(0); + const deployedSource = getRelativeFileResponses(deployResults?.result.deployedSource); expect(deployedSource).to.have.deep.members(getExpectedSource('Created')); }); it('can retrieve email templates via the manifest', () => { const retrieveResults = execCmd('force:source:retrieve -x package.xml --json').jsonOutput; - expect(retrieveResults.status, JSON.stringify(retrieveResults)).to.equal(0); - const retrievedSource = getRelativeFileResponses(retrieveResults.result.inboundFiles); + expect(retrieveResults?.status, JSON.stringify(retrieveResults)).to.equal(0); + const retrievedSource = getRelativeFileResponses(retrieveResults?.result.inboundFiles); expect(retrievedSource).to.have.deep.members(getExpectedSource('Changed')); }); }); diff --git a/test/nuts/force.nut.ts b/test/nuts/force.nut.ts index bdadf96ca..038244085 100644 --- a/test/nuts/force.nut.ts +++ b/test/nuts/force.nut.ts @@ -20,10 +20,10 @@ describe('force command', () => { }); }); it('returns an apiVersion in JSON', () => { - const result = execCmd<{ apiVersion: string }>('force --json', { ensureExitCode: 0 }).jsonOutput.result; + const result = execCmd<{ apiVersion: string }>('force --json', { ensureExitCode: 0 }).jsonOutput?.result; expect(result).to.be.an('object').that.has.all.keys('apiVersion'); - expect(result.apiVersion).to.match(/^\d{2,}\.0$/); - expect(parseInt(result.apiVersion, 10)).to.be.greaterThan(53); + expect(result?.apiVersion).to.match(/^\d{2,}\.0$/); + expect(parseInt(result?.apiVersion ?? 
'', 10)).to.be.greaterThan(53); }); it('executes the cloud/links without JSON', () => { const result = execCmd('force', { ensureExitCode: 0 }).shellOutput as string; diff --git a/test/nuts/mdapi.nut.ts b/test/nuts/mdapi.nut.ts index 7e357972d..f012323e7 100644 --- a/test/nuts/mdapi.nut.ts +++ b/test/nuts/mdapi.nut.ts @@ -67,11 +67,11 @@ describe('1k files in mdapi:deploy', () => { const res = execCmd<{ checkOnly: boolean; done: boolean }>('force:mdapi:deploy -d mdapiFormat -w 100 --json', { ensureExitCode: 0, }).jsonOutput; - expect(res.status).to.equal(0); + expect(res?.status).to.equal(0); // check that the deploy actually happened, not just based on the exit code, otherwise something like // https://github.com/forcedotcom/cli/issues/1531 could happen - expect(res.result.checkOnly).to.be.false; - expect(res.result.done).to.be.true; + expect(res?.result?.checkOnly).to.be.false; + expect(res?.result?.done).to.be.true; }); }); describe('mdapi NUTs', () => { @@ -105,22 +105,22 @@ describe('mdapi NUTs', () => { }); describe('mdapi:deploy:cancel', () => { - const cancelAssertions = (deployId: string, result: ExecCmdResult): void => { - if (result.jsonOutput.status === 0) { + const cancelAssertions = (deployId: string | undefined, result: ExecCmdResult): void => { + if (result?.jsonOutput?.status === 0) { // a successful cancel - const json = result.jsonOutput.result; + const json = result?.jsonOutput?.result; expect(json).to.have.property('canceledBy'); expect(json).to.have.property('status'); expect(json.status).to.equal(RequestStatus.Canceled); expect(json.id).to.equal(deployId); - } else if (result.jsonOutput.status === 1 && result.jsonOutput.result) { + } else if (result?.jsonOutput?.status === 1 && result?.jsonOutput.result) { // status = 1 because the deploy is in Succeeded status - const json = result.jsonOutput.result; + const json = result?.jsonOutput?.result; expect(json.status).to.equal(RequestStatus.Succeeded); } else { // the other allowable error is that the server is telling us the deploy succeeded - expect(result.jsonOutput.name, JSON.stringify(result)).to.equal('CancelFailed'); - expect(result.jsonOutput.message, JSON.stringify(result)).to.equal( + expect(result?.jsonOutput?.name, JSON.stringify(result)).to.equal('CancelFailed'); + expect(result?.jsonOutput?.message, JSON.stringify(result)).to.equal( 'The cancel command failed due to: INVALID_ID_FIELD: Deployment already completed' ); } @@ -133,7 +133,7 @@ describe('mdapi NUTs', () => { ensureExitCode: 0, }).jsonOutput; const result = execCmd('force:mdapi:deploy:cancel --json'); - cancelAssertions(deploy.result.id, result); + cancelAssertions(deploy?.result?.id, result); }); it('will cancel an mdapi deploy via the specified deploy id', () => { @@ -143,7 +143,7 @@ describe('mdapi NUTs', () => { ensureExitCode: 0, }).jsonOutput; const result = execCmd('force:mdapi:deploy:cancel --json'); - cancelAssertions(deploy.result.id, result); + cancelAssertions(deploy?.result?.id, result); }); }); @@ -173,12 +173,12 @@ describe('mdapi NUTs', () => { // Verify unpackaged.zip exists in retrieveTargetDir const retrievedZip = fs.existsSync(retrieveTargetDirPath); expect(retrievedZip, 'retrieved zip was not in expected path').to.be.true; - const result = rv.jsonOutput.result; - expect(result.status).to.equal(RequestStatus.Succeeded); - expect(result.success).to.be.true; - expect(result.fileProperties).to.be.an('array').with.length.greaterThan(50); + const result = rv.jsonOutput?.result; + 
expect(result?.status).to.equal(RequestStatus.Succeeded); + expect(result?.success).to.be.true; + expect(result?.fileProperties).to.be.an('array').with.length.greaterThan(50); const zipFileLocation = path.join(retrieveTargetDirPath, 'unpackaged.zip'); - expect(result.zipFilePath).to.equal(zipFileLocation); + expect(result?.zipFilePath).to.equal(zipFileLocation); }); it('retrieves content from manifest using manifest api version', async () => { @@ -206,12 +206,12 @@ describe('mdapi NUTs', () => { // Verify unpackaged.zip exists in retrieveTargetDir const retrievedZip = fs.existsSync(retrieveTargetDirPath); expect(retrievedZip, 'retrieved zip was not in expected path').to.be.true; - const result = rv.jsonOutput.result; - expect(result.status).to.equal(RequestStatus.Succeeded); - expect(result.success).to.be.true; - expect(result.fileProperties).to.be.an('array').with.length.greaterThan(5); + const result = rv.jsonOutput?.result; + expect(result?.status).to.equal(RequestStatus.Succeeded); + expect(result?.success).to.be.true; + expect(result?.fileProperties).to.be.an('array').with.length.greaterThan(5); const zipFileLocation = path.join(retrieveTargetDirPath, 'unpackaged.zip'); - expect(result.zipFilePath).to.equal(zipFileLocation); + expect(result?.zipFilePath).to.equal(zipFileLocation); }); it('retrieves content with named zip and unzips', () => { @@ -230,12 +230,12 @@ describe('mdapi NUTs', () => { expect(unzipDir, 'retrieved zip was not extracted to expected path').to.be.true; expect(fs.readdirSync(extractPath)).to.deep.equal(['unpackaged']); expect(rv.jsonOutput, JSON.stringify(rv)).to.exist; - const result = rv.jsonOutput.result; - expect(result.status).to.equal(RequestStatus.Succeeded); - expect(result.success).to.be.true; - expect(result.fileProperties).to.be.an('array').with.length.greaterThan(5); + const result = rv.jsonOutput?.result; + expect(result?.status).to.equal(RequestStatus.Succeeded); + expect(result?.success).to.be.true; + expect(result?.fileProperties).to.be.an('array').with.length.greaterThan(5); const zipFileLocation = path.join(retrieveTargetDirPath, zipName); - expect(result.zipFilePath).to.equal(zipFileLocation); + expect(result?.zipFilePath).to.equal(zipFileLocation); }); }); @@ -247,7 +247,7 @@ describe('mdapi NUTs', () => { const rv1 = execCmd(retrieveCmd, { ensureExitCode: 0 }); expect(rv1.jsonOutput, JSON.stringify(rv1)).to.exist; - const result1 = rv1.jsonOutput.result; + const result1 = rv1.jsonOutput?.result; expect(result1).to.have.property('done', false); expect(result1).to.have.property('id'); expect(result1).to.have.property('state', 'Queued'); @@ -257,32 +257,32 @@ describe('mdapi NUTs', () => { // Async report, from stash let reportCmd = 'force:mdapi:retrieve:report -w 0 --json'; const rv2 = execCmd(reportCmd, { ensureExitCode: 0 }); - const result2 = rv2.jsonOutput.result; + const result2 = rv2.jsonOutput?.result; - let syncResult: RetrieveCommandResult; + let syncResult: RetrieveCommandResult | undefined; // It's possible that the async retrieve request is already done, so account for that - // and treat it like a sync result. - if (result2.done) { + // and treat it like a sync result?. + if (result2?.done) { syncResult = result2 as unknown as RetrieveCommandResult; } else { - expect(result2).to.have.property('id', result1.id); + expect(result2).to.have.property('id', result1?.id); // To prevent flapping we expect 1 of 3 likely states. All depends // on how responsive the message queue is. 
- expect(result2.state).to.be.oneOf(['Queued', 'Pending', 'InProgress']); - expect(result2.status).to.be.oneOf(['Queued', 'Pending', 'InProgress']); + expect(result2?.state).to.be.oneOf(['Queued', 'Pending', 'InProgress']); + expect(result2?.status).to.be.oneOf(['Queued', 'Pending', 'InProgress']); expect(result2).to.have.property('timedOut', true); // Now sync report, from stash reportCmd = 'force:mdapi:retrieve:report -w 10 --json'; const rv3 = execCmd(reportCmd, { ensureExitCode: 0 }); - syncResult = rv3.jsonOutput.result; + syncResult = rv3.jsonOutput?.result; } - expect(syncResult.status).to.equal(RequestStatus.Succeeded); - expect(syncResult.success).to.be.true; - expect(syncResult.fileProperties).to.be.an('array').with.length.greaterThan(50); + expect(syncResult?.status).to.equal(RequestStatus.Succeeded); + expect(syncResult?.success).to.be.true; + expect(syncResult?.fileProperties).to.be.an('array').with.length.greaterThan(50); const zipFileLocation = path.join(retrieveTargetDirPath, 'unpackaged.zip'); - expect(syncResult.zipFilePath).to.equal(zipFileLocation); + expect(syncResult?.zipFilePath).to.equal(zipFileLocation); }); it('retrieves report (sync) with overrides of stash', () => { @@ -290,7 +290,7 @@ describe('mdapi NUTs', () => { const rv1 = execCmd(retrieveCmd, { ensureExitCode: 0 }); expect(rv1.jsonOutput, JSON.stringify(rv1)).to.exist; - const result1 = rv1.jsonOutput.result; + const result1 = rv1.jsonOutput?.result; const name = 'dreamhouse'; const zipName = `${name}.zip`; @@ -298,17 +298,17 @@ describe('mdapi NUTs', () => { const retrieveTargetDirPath = path.join(session.project.dir, retrieveTargetDir); const extractPath = path.join(retrieveTargetDirPath, name); - const reportCmd = `force:mdapi:retrieve:report -i ${result1.id} -z -n ${zipName} -r ${retrieveTargetDir} --json`; + const reportCmd = `force:mdapi:retrieve:report -i ${result1?.id} -z -n ${zipName} -r ${retrieveTargetDir} --json`; const rv2 = execCmd(reportCmd, { ensureExitCode: 0 }); expect(rv2.jsonOutput, JSON.stringify(rv2)).to.exist; - const result2 = rv2.jsonOutput.result; - expect(result2.status).to.equal(RequestStatus.Succeeded); - expect(result2.success).to.be.true; - expect(result2.id).to.equal(result1.id); - expect(result2.fileProperties).to.be.an('array').with.length.greaterThan(5); + const result2 = rv2.jsonOutput?.result; + expect(result2?.status).to.equal(RequestStatus.Succeeded); + expect(result2?.success).to.be.true; + expect(result2?.id).to.equal(result1?.id); + expect(result2?.fileProperties).to.be.an('array').with.length.greaterThan(5); const zipFileLocation = path.join(retrieveTargetDirPath, zipName); - expect(result2.zipFilePath).to.equal(zipFileLocation); + expect(result2?.zipFilePath).to.equal(zipFileLocation); const retrievedZip = fs.existsSync(path.join(retrieveTargetDirPath, zipName)); expect(retrievedZip, 'retrieved zip was not in expected path').to.be.true; const unzipDir = fs.existsSync(extractPath); @@ -326,19 +326,19 @@ describe('mdapi NUTs', () => { const rv1 = execCmd(retrieveCmd, { ensureExitCode: 0 }); expect(rv1.jsonOutput, JSON.stringify(rv1)).to.exist; - const result1 = rv1.jsonOutput.result; + const result1 = rv1.jsonOutput?.result; const reportCmd = 'force:mdapi:retrieve:report --json'; const rv2 = execCmd(reportCmd, { ensureExitCode: 0 }); expect(rv2.jsonOutput, JSON.stringify(rv2)).to.exist; - const result2 = rv2.jsonOutput.result; - expect(result2.status).to.equal(RequestStatus.Succeeded); - expect(result2.success).to.be.true; - expect(result2.id).to.equal(result1.id); 
- expect(result2.fileProperties).to.be.an('array').with.length.greaterThan(5); + const result2 = rv2.jsonOutput?.result; + expect(result2?.status).to.equal(RequestStatus.Succeeded); + expect(result2?.success).to.be.true; + expect(result2?.id).to.equal(result1?.id); + expect(result2?.fileProperties).to.be.an('array').with.length.greaterThan(5); const zipFileLocation = path.join(retrieveTargetDirPath, zipName); - expect(result2.zipFilePath).to.equal(zipFileLocation); + expect(result2?.zipFilePath).to.equal(zipFileLocation); const retrievedZip = fs.existsSync(path.join(retrieveTargetDirPath, zipName)); expect(retrievedZip, 'retrieved zip was not in expected path').to.be.true; const unzipDir = fs.existsSync(extractPath); @@ -380,10 +380,10 @@ describe('mdapi NUTs', () => { ).jsonOutput; // this output is a change from mdapi:deploy:report which returned NOTHING after the progress bar - expect(reportCommandResponse.result, JSON.stringify(reportCommandResponse)).to.have.property('status'); + expect(reportCommandResponse?.result, JSON.stringify(reportCommandResponse)).to.have.property('status'); expect( [RequestStatus.Pending, RequestStatus.Succeeded, RequestStatus.Failed, RequestStatus.InProgress].includes( - reportCommandResponse.result.status + reportCommandResponse?.result?.status as RequestStatus ) ); }); @@ -425,16 +425,16 @@ describe('mdapi NUTs', () => { }); describe('Deploy directory using default org and request report using jobid parameter from a different org', () => { - let deployCommandResponse: MdDeployResult; + let deployCommandResponse: MdDeployResult | undefined; it('should deploy a directory', () => { deployCommandResponse = execCmd( 'force:mdapi:deploy --deploydir mdapiOut --json --soapdeploy', { ensureExitCode: 0 } - ).jsonOutput.result; + ).jsonOutput?.result; }); it('should fail report', () => { const errorReport = execCmd( - `force:mdapi:deploy:report --wait 200 --jobid ${deployCommandResponse.id} --targetusername nonDefaultOrg`, + `force:mdapi:deploy:report --wait 200 --jobid ${deployCommandResponse?.id} --targetusername nonDefaultOrg`, { ensureExitCode: 1 } ).shellOutput.stderr; expect(errorReport).to.include('INVALID_CROSS_REFERENCE_KEY: invalid cross reference id'); @@ -442,16 +442,16 @@ describe('mdapi NUTs', () => { }); describe('validate a deployment and deploy that', () => { - let deployCommandResponse: MdDeployResult; + let deployCommandResponse: MdDeployResult | undefined; it('should check-only deploy a directory with tests', () => { deployCommandResponse = execCmd( 'force:mdapi:deploy --deploydir mdapiOut --json --soapdeploy --checkonly --testlevel RunAllTestsInOrg --wait 100', { ensureExitCode: 0 } - ).jsonOutput.result; + ).jsonOutput?.result; }); it('should deploy validated Id', () => { execCmd( - `force:mdapi:deploy --wait -1 --validateddeployrequestid ${deployCommandResponse.id} --ignorewarnings -o`, + `force:mdapi:deploy --wait -1 --validateddeployrequestid ${deployCommandResponse?.id} --ignorewarnings -o`, { ensureExitCode: 0, } diff --git a/test/nuts/nestedLwc.nut.ts b/test/nuts/nestedLwc.nut.ts index 3418766cb..27dc025b7 100644 --- a/test/nuts/nestedLwc.nut.ts +++ b/test/nuts/nestedLwc.nut.ts @@ -30,16 +30,16 @@ describe('Nested LWCs', () => { }); it('pushes nested LWC', () => { - const pushResults = execCmd('force:source:push --json', { ensureExitCode: 0 }).jsonOutput.result; - expect(pushResults.pushedSource.some((r) => r.fullName === 'cmpA')).to.be.true; - expect(pushResults.pushedSource.some((r) => r.fullName === 'cmpB')).to.be.true; + const 
pushResults = execCmd('force:source:push --json', { ensureExitCode: 0 }).jsonOutput?.result; + expect(pushResults?.pushedSource.some((r) => r.fullName === 'cmpA')).to.be.true; + expect(pushResults?.pushedSource.some((r) => r.fullName === 'cmpB')).to.be.true; }); it('deploys nested LWC', () => { const deployResults = execCmd('force:source:deploy --json -p force-app', { ensureExitCode: 0 }) - .jsonOutput.result; - expect(deployResults.deployedSource.some((r) => r.fullName === 'cmpA')).to.be.true; - expect(deployResults.deployedSource.some((r) => r.fullName === 'cmpB')).to.be.true; + .jsonOutput?.result; + expect(deployResults?.deployedSource.some((r) => r.fullName === 'cmpA')).to.be.true; + expect(deployResults?.deployedSource.some((r) => r.fullName === 'cmpB')).to.be.true; }); after(async () => { diff --git a/test/nuts/partialBundleDelete.nut.ts b/test/nuts/partialBundleDelete.nut.ts index 2676a6850..bb491dcbe 100644 --- a/test/nuts/partialBundleDelete.nut.ts +++ b/test/nuts/partialBundleDelete.nut.ts @@ -40,7 +40,7 @@ describe('Partial Bundle Delete Retrieves', () => { ], }); projectPath = path.join(session.project.dir, 'force-app', 'main', 'default'); - scratchOrgUsername = session.orgs.get('default').username; + scratchOrgUsername = session.orgs.get('default')?.username; }); after(async () => { @@ -73,7 +73,7 @@ describe('Partial Bundle Delete Retrieves', () => { // Create an actual connection to the org we created for the TestSession, then stub // retrieve() and checkRetrieveStatus() and others to simulate retrieving a partial bundle delete. const connection = await Connection.create({ - authInfo: await AuthInfo.create(session.orgs.get(scratchOrgUsername)), + authInfo: await AuthInfo.create(session.orgs.get(scratchOrgUsername ?? '')), }); sandbox .stub(SfProject.prototype, 'getDefaultPackage') @@ -96,7 +96,7 @@ describe('Partial Bundle Delete Retrieves', () => { ); return compSet; }); - const result = await Retrieve.run(['-p', forgotPasswordDE, '--json', '-o', scratchOrgUsername]); + const result = await Retrieve.run(['-p', forgotPasswordDE, '--json', '-o', scratchOrgUsername ?? 
'']); // SDR retrieval code should remove this file expect(fs.existsSync(forgotPasswordTranslationFile)).to.be.false; @@ -149,7 +149,7 @@ describe('Partial Bundle Delete Retrieves', () => { expect(inboundFiles).to.be.an('array').and.not.empty; // find the deleted entry for testFile.css - const deletedFileResponse = inboundFiles.find((fr) => fr.state === 'Deleted'); + const deletedFileResponse = inboundFiles?.find((fr) => fr.state === 'Deleted'); expect(deletedFileResponse).to.deep.equal({ fullName: 'pageTemplate_2_7_3', type: 'AuraDefinitionBundle', @@ -182,7 +182,7 @@ describe('Partial Bundle Delete Retrieves', () => { expect(inboundFiles).to.be.an('array').and.not.empty; // find the deleted entry for testFile.css - const deletedFileResponse = inboundFiles.find((fr) => fr.state === 'Deleted'); + const deletedFileResponse = inboundFiles?.find((fr) => fr.state === 'Deleted'); expect(deletedFileResponse).to.deep.equal({ fullName: 'propertyTile', type: 'LightningComponentBundle', diff --git a/test/nuts/seeds/deploy.async.seed.ts b/test/nuts/seeds/deploy.async.seed.ts index 71785827b..02d76d8ed 100644 --- a/test/nuts/seeds/deploy.async.seed.ts +++ b/test/nuts/seeds/deploy.async.seed.ts @@ -11,11 +11,11 @@ import { Result } from '@salesforce/source-testkit/lib/types.js'; import { execCmd } from '@salesforce/cli-plugins-testkit'; import { RequestStatus } from '@salesforce/source-deploy-retrieve'; import { expect } from 'chai'; -import { TEST_REPOS_MAP } from '../testMatrix.js'; +import { RepoConfig, TEST_REPOS_MAP } from '../testMatrix.js'; import { DeployCancelCommandResult } from '../../../src/formatters/deployCancelResultFormatter.js'; // DO NOT TOUCH. generateNuts.ts will insert these values -const REPO = TEST_REPOS_MAP.get('%REPO_URL%'); +const REPO = TEST_REPOS_MAP.get('%REPO_URL%') as RepoConfig; context('Async Deploy NUTs [name: %REPO_NAME%]', () => { let testkit: SourceTestkit; @@ -52,19 +52,19 @@ context('Async Deploy NUTs [name: %REPO_NAME%]', () => { args: '--coverageformatters clover --junit', })) as Result<{ id: string; result: { id: string } }>; - testkit.expect.toHaveProperty(deploy.result, 'id'); - testkit.expect.toHavePropertyAndNotValue(deploy.result, 'status', 'Succeeded'); + testkit.expect.toHaveProperty(deploy?.result ?? {}, 'id'); + testkit.expect.toHavePropertyAndNotValue(deploy?.result ?? {}, 'status', 'Succeeded'); - const status = getBoolean(report.result, 'done'); + const status = getBoolean(report?.result, 'done'); if (status) { // if the deploy finished, expect changes and a 'succeeded' status - testkit.expect.toHavePropertyAndValue(report.result, 'status', 'Succeeded'); - testkit.expect.toHaveProperty(report.result, 'numberComponentsDeployed'); - testkit.expect.toHaveProperty(report.result, 'deployedSource'); - testkit.expect.toHaveProperty(report.result, 'deploys'); + testkit.expect.toHavePropertyAndValue(report?.result ?? {}, 'status', 'Succeeded'); + testkit.expect.toHaveProperty(report?.result ?? {}, 'numberComponentsDeployed'); + testkit.expect.toHaveProperty(report?.result ?? {}, 'deployedSource'); + testkit.expect.toHaveProperty(report?.result ?? 
{}, 'deploys'); } else { // the deploy could be InProgress, Pending, or Queued, at this point - expect(['Pending', 'InProgress', 'Queued']).to.include(getString(report.result, 'status')); + expect(['Pending', 'InProgress', 'Queued']).to.include(getString(report?.result, 'status')); await testkit.expect.filesToNotBeDeployed(testkit.packageGlobs); } }); @@ -77,25 +77,25 @@ context('Async Deploy NUTs [name: %REPO_NAME%]', () => { const deploy = await testkit.deploy({ args: `--sourcepath ${testkit.packageNames.join(',')} --wait 0`, }); - testkit.expect.toHaveProperty(deploy.result, 'id'); + testkit.expect.toHaveProperty(deploy?.result ?? {}, 'id'); - const result = execCmd(`force:source:deploy:cancel -i ${deploy.result.id} --json`); + const result = execCmd(`force:source:deploy:cancel -i ${deploy?.result.id} --json`); - if (result.jsonOutput.status === 0) { + if (result.jsonOutput?.status === 0) { // a successful cancel - const json = result.jsonOutput.result; + const json = result.jsonOutput?.result; expect(json).to.have.property('canceledBy'); expect(json).to.have.property('status'); expect(json.status).to.equal(RequestStatus.Canceled); - expect(json.id).to.equal(deploy.result.id); - } else if (result.jsonOutput.status === 1 && result.jsonOutput.result) { + expect(json.id).to.equal(deploy?.result?.id); + } else if (result.jsonOutput?.status === 1 && result.jsonOutput?.result) { // status = 1 because the deploy is in Succeeded status - const json = result.jsonOutput.result; - expect(json.status).to.equal(RequestStatus.Succeeded); + const json = result.jsonOutput?.result; + expect(json?.status).to.equal(RequestStatus.Succeeded); } else { // the other allowable error is that the server is telling us the deploy succeeded - expect(result.jsonOutput.name, JSON.stringify(result)).to.equal('CancelFailed'); - expect(result.jsonOutput.message, JSON.stringify(result)).to.equal( + expect(result.jsonOutput?.name, JSON.stringify(result)).to.equal('CancelFailed'); + expect(result.jsonOutput?.message, JSON.stringify(result)).to.equal( 'The cancel command failed due to: INVALID_ID_FIELD: Deployment already completed' ); } diff --git a/test/nuts/seeds/deploy.manifest.seed.ts b/test/nuts/seeds/deploy.manifest.seed.ts index 36971c785..0e9a6b6b1 100644 --- a/test/nuts/seeds/deploy.manifest.seed.ts +++ b/test/nuts/seeds/deploy.manifest.seed.ts @@ -9,10 +9,10 @@ import path from 'node:path'; import { SourceTestkit } from '@salesforce/source-testkit'; import { get } from '@salesforce/ts-types'; import { FileResponse } from '@salesforce/source-deploy-retrieve'; -import { TEST_REPOS_MAP } from '../testMatrix.js'; +import { RepoConfig, TEST_REPOS_MAP } from '../testMatrix.js'; // DO NOT TOUCH. generateNuts.ts will insert these values -const REPO = TEST_REPOS_MAP.get('%REPO_URL%'); +const REPO = TEST_REPOS_MAP.get('%REPO_URL%') as RepoConfig; context('Deploy manifest NUTs [name: %REPO_NAME%]', () => { let testkit: SourceTestkit; @@ -53,7 +53,7 @@ context('Deploy manifest NUTs [name: %REPO_NAME%]', () => { } it('should throw an error if the package.xml is not valid', async () => { - const deploy = await testkit.deploy({ args: '--manifest DOES_NOT_EXIST.xml', exitCode: 1 }); + const deploy = (await testkit.deploy({ args: '--manifest DOES_NOT_EXIST.xml', exitCode: 1 })) ?? 
{}; testkit.expect.errorToHaveName(deploy, 'SfError'); }); }); diff --git a/test/nuts/seeds/deploy.metadata.seed.ts b/test/nuts/seeds/deploy.metadata.seed.ts index 690de590b..94e32a612 100644 --- a/test/nuts/seeds/deploy.metadata.seed.ts +++ b/test/nuts/seeds/deploy.metadata.seed.ts @@ -8,10 +8,10 @@ import { SourceTestkit } from '@salesforce/source-testkit'; import { get } from '@salesforce/ts-types'; import { FileResponse } from '@salesforce/source-deploy-retrieve'; -import { TEST_REPOS_MAP } from '../testMatrix.js'; +import { RepoConfig, TEST_REPOS_MAP } from '../testMatrix.js'; // DO NOT TOUCH. generateNuts.ts will insert these values -const REPO = TEST_REPOS_MAP.get('%REPO_URL%'); +const REPO = TEST_REPOS_MAP.get('%REPO_URL%') as RepoConfig; context('Deploy metadata NUTs [name: %REPO_NAME%]', () => { let testkit: SourceTestkit; @@ -51,7 +51,7 @@ context('Deploy metadata NUTs [name: %REPO_NAME%]', () => { it('should throw an error if the metadata is not valid', async () => { const deploy = await testkit.deploy({ args: '--metadata DOES_NOT_EXIST', exitCode: 1 }); - testkit.expect.errorToHaveName(deploy, 'SfError'); + testkit.expect.errorToHaveName(deploy ?? {}, 'SfError'); }); it('should not deploy metadata outside of a package directory', async () => { diff --git a/test/nuts/seeds/deploy.quick.seed.ts b/test/nuts/seeds/deploy.quick.seed.ts index 3ab37af91..fb9294519 100644 --- a/test/nuts/seeds/deploy.quick.seed.ts +++ b/test/nuts/seeds/deploy.quick.seed.ts @@ -8,10 +8,10 @@ import { SourceTestkit } from '@salesforce/source-testkit'; import { get } from '@salesforce/ts-types'; import { FileResponse } from '@salesforce/source-deploy-retrieve'; -import { TEST_REPOS_MAP } from '../testMatrix.js'; +import { RepoConfig, TEST_REPOS_MAP } from '../testMatrix.js'; // DO NOT TOUCH. generateNuts.ts will insert these values -const REPO = TEST_REPOS_MAP.get('%REPO_URL%'); +const REPO = TEST_REPOS_MAP.get('%REPO_URL%') as RepoConfig; context('Quick Deploy NUTs [name: %REPO_NAME%] [exec: %EXECUTABLE%]', () => { let testkit: SourceTestkit; @@ -39,15 +39,17 @@ context('Quick Deploy NUTs [name: %REPO_NAME%] [exec: %EXECUTABLE%]', () => { const checkOnly = await testkit.deploy({ args: `--sourcepath ${testkit.packageNames.join(',')} --testlevel RunLocalTests --checkonly --ignoreerrors`, }); - testkit.expect.toHaveProperty(checkOnly.result, 'id'); - await testkit.expect.filesToNotBeDeployed(testkit.packageGlobs); + if (checkOnly?.result) { + testkit.expect.toHaveProperty(checkOnly.result, 'id'); + await testkit.expect.filesToNotBeDeployed(testkit.packageGlobs); - const quickDeploy = await testkit.deploy({ - args: `--validateddeployrequestid ${checkOnly.result.id}`, - }); - testkit.expect.toHavePropertyAndValue(quickDeploy.result, 'status', 'Succeeded'); + const quickDeploy = await testkit.deploy({ + args: `--validateddeployrequestid ${checkOnly.result.id}`, + }); + testkit.expect.toHavePropertyAndValue(quickDeploy?.result ?? 
{}, 'status', 'Succeeded'); - const fileResponse = get(quickDeploy, 'result.deployedSource') as FileResponse[]; - await testkit.expect.filesToBeDeployedViaResult(testkit.packageGlobs, [], fileResponse); + const fileResponse = get(quickDeploy, 'result.deployedSource') as FileResponse[]; + await testkit.expect.filesToBeDeployedViaResult(testkit.packageGlobs, [], fileResponse); + } }); }); diff --git a/test/nuts/seeds/deploy.sourcepath.seed.ts b/test/nuts/seeds/deploy.sourcepath.seed.ts index 26ede8e33..86936ee7e 100644 --- a/test/nuts/seeds/deploy.sourcepath.seed.ts +++ b/test/nuts/seeds/deploy.sourcepath.seed.ts @@ -9,10 +9,10 @@ import path from 'node:path'; import { SourceTestkit } from '@salesforce/source-testkit'; import { get } from '@salesforce/ts-types'; import { FileResponse } from '@salesforce/source-deploy-retrieve'; -import { TEST_REPOS_MAP } from '../testMatrix.js'; +import { RepoConfig, TEST_REPOS_MAP } from '../testMatrix.js'; // DO NOT TOUCH. generateNuts.ts will insert these values -const REPO = TEST_REPOS_MAP.get('%REPO_URL%'); +const REPO = TEST_REPOS_MAP.get('%REPO_URL%') as RepoConfig; context('Deploy sourcepath NUTs [name: %REPO_NAME%]', () => { let testkit: SourceTestkit; @@ -47,13 +47,13 @@ context('Deploy sourcepath NUTs [name: %REPO_NAME%]', () => { it('should throw an error if the sourcepath is not valid', async () => { const deploy = await testkit.deploy({ args: '--sourcepath DOES_NOT_EXIST', exitCode: 1 }); - testkit.expect.errorToHaveName(deploy, 'SfError'); + testkit.expect.errorToHaveName(deploy ?? {}, 'SfError'); try { // old message, can be removed after SDR strict mode PR is merged - testkit.expect.errorToHaveMessage(deploy, 'not a valid source file path'); + testkit.expect.errorToHaveMessage(deploy ?? {}, 'not a valid source file path'); } catch (e) { // new message - testkit.expect.errorToHaveMessage(deploy, 'File or folder not found'); + testkit.expect.errorToHaveMessage(deploy ?? {}, 'File or folder not found'); } }); }); diff --git a/test/nuts/seeds/deploy.testlevel.seed.ts b/test/nuts/seeds/deploy.testlevel.seed.ts index fbab8347e..64efd2542 100644 --- a/test/nuts/seeds/deploy.testlevel.seed.ts +++ b/test/nuts/seeds/deploy.testlevel.seed.ts @@ -6,10 +6,10 @@ */ import { SourceTestkit } from '@salesforce/source-testkit'; -import { TEST_REPOS_MAP } from '../testMatrix.js'; +import { RepoConfig, TEST_REPOS_MAP } from '../testMatrix.js'; // DO NOT TOUCH. 
generateNuts.ts will insert these values -const REPO = TEST_REPOS_MAP.get('%REPO_URL%'); +const REPO = TEST_REPOS_MAP.get('%REPO_URL%') as RepoConfig; context('Deploy testlevel NUTs [name: %REPO_NAME%]', () => { let testkit: SourceTestkit; diff --git a/test/nuts/seeds/mpd.retrieve.seed.ts b/test/nuts/seeds/mpd.retrieve.seed.ts index 18997ecc6..38fb65efa 100644 --- a/test/nuts/seeds/mpd.retrieve.seed.ts +++ b/test/nuts/seeds/mpd.retrieve.seed.ts @@ -74,11 +74,8 @@ context('MPD Retrieve NUTs', () => { beforeEach(async () => { await Promise.all( - Object.entries(originalState).map(([filename, contents]) => testkit.writeFile(filename, contents)) + Object.entries(originalState).map(([filename, contents]) => testkit.writeFile(filename, contents as string)) ); - // for (const [filename, contents] of Object.entries(originalState)) { - // await testkit.writeFile(filename, contents); - // } }); describe('--metadata CustomLabels', () => { diff --git a/test/nuts/seeds/retrieve.manifest.seed.ts b/test/nuts/seeds/retrieve.manifest.seed.ts index fd218af0c..ea0c95957 100644 --- a/test/nuts/seeds/retrieve.manifest.seed.ts +++ b/test/nuts/seeds/retrieve.manifest.seed.ts @@ -8,10 +8,10 @@ import path from 'node:path'; import { SourceTestkit } from '@salesforce/source-testkit'; import { JsonMap } from '@salesforce/ts-types'; -import { TEST_REPOS_MAP } from '../testMatrix.js'; // DO NOT TOUCH. generateNuts.ts will insert these values +import { RepoConfig, TEST_REPOS_MAP } from '../testMatrix.js'; // DO NOT TOUCH. generateNuts.ts will insert these values // DO NOT TOUCH. generateNuts.ts will insert these values -const REPO = TEST_REPOS_MAP.get('%REPO_URL%'); +const REPO = TEST_REPOS_MAP.get('%REPO_URL%') as RepoConfig; context('Retrieve manifest NUTs [name: %REPO_NAME%]', () => { let testkit: SourceTestkit; diff --git a/test/nuts/seeds/retrieve.metadata.seed.ts b/test/nuts/seeds/retrieve.metadata.seed.ts index b8c55f578..18760288e 100644 --- a/test/nuts/seeds/retrieve.metadata.seed.ts +++ b/test/nuts/seeds/retrieve.metadata.seed.ts @@ -8,10 +8,10 @@ import path from 'node:path'; import { SourceTestkit } from '@salesforce/source-testkit'; import { JsonMap } from '@salesforce/ts-types'; -import { TEST_REPOS_MAP } from '../testMatrix.js'; +import { RepoConfig, TEST_REPOS_MAP } from '../testMatrix.js'; // DO NOT TOUCH. generateNuts.ts will insert these values -const REPO = TEST_REPOS_MAP.get('%REPO_URL%'); +const REPO = TEST_REPOS_MAP.get('%REPO_URL%') as RepoConfig; context('Retrieve metadata NUTs [name: %REPO_NAME%]', () => { let testkit: SourceTestkit; diff --git a/test/nuts/seeds/retrieve.retrievetargetdir.seed.ts b/test/nuts/seeds/retrieve.retrievetargetdir.seed.ts index 7c58a451d..ca97b2a51 100644 --- a/test/nuts/seeds/retrieve.retrievetargetdir.seed.ts +++ b/test/nuts/seeds/retrieve.retrievetargetdir.seed.ts @@ -7,10 +7,10 @@ import { SourceTestkit } from '@salesforce/source-testkit'; import { JsonMap } from '@salesforce/ts-types'; -import { TEST_REPOS_MAP } from '../testMatrix.js'; +import { RepoConfig, TEST_REPOS_MAP } from '../testMatrix.js'; // DO NOT TOUCH. 
generateNuts.ts will insert these values -const REPO = TEST_REPOS_MAP.get('%REPO_URL%'); +const REPO = TEST_REPOS_MAP.get('%REPO_URL%') as RepoConfig; context('Retrieve metadata NUTs [name: %REPO_NAME%]', () => { let testkit: SourceTestkit; diff --git a/test/nuts/seeds/retrieve.sourcepath.seed.ts b/test/nuts/seeds/retrieve.sourcepath.seed.ts index afe22a8c1..d5e611575 100644 --- a/test/nuts/seeds/retrieve.sourcepath.seed.ts +++ b/test/nuts/seeds/retrieve.sourcepath.seed.ts @@ -8,10 +8,10 @@ import path from 'node:path'; import { SourceTestkit } from '@salesforce/source-testkit'; import { JsonMap } from '@salesforce/ts-types'; -import { TEST_REPOS_MAP } from '../testMatrix.js'; // DO NOT TOUCH. generateNuts.ts will insert these values +import { RepoConfig, TEST_REPOS_MAP } from '../testMatrix.js'; // DO NOT TOUCH. generateNuts.ts will insert these values // DO NOT TOUCH. generateNuts.ts will insert these values -const REPO = TEST_REPOS_MAP.get('%REPO_URL%'); +const REPO = TEST_REPOS_MAP.get('%REPO_URL%') as RepoConfig; context('Retrieve Sourcepath NUTs [name: %REPO_NAME%]', () => { let testkit: SourceTestkit; diff --git a/test/nuts/territory2.nut.ts b/test/nuts/territory2.nut.ts index 03076e3b2..262e3616b 100644 --- a/test/nuts/territory2.nut.ts +++ b/test/nuts/territory2.nut.ts @@ -53,8 +53,8 @@ describe('territories', () => { it('deploy', () => { const deployResults = execCmd('force:source:deploy -x package.xml --json', { ensureExitCode: 0, - }).jsonOutput.result; - expect(deployResults.deployedSource.length).to.equal(8); + }).jsonOutput?.result; + expect(deployResults?.deployedSource.length).to.equal(8); }); it('retrieve without local metadata', async () => { @@ -63,8 +63,8 @@ describe('territories', () => { await fs.promises.mkdir(path.join(session.project.dir, 'force-app')); const retrieveResults = execCmd('force:source:retrieve -x package.xml --json', { ensureExitCode: 0, - }).jsonOutput.result; - expect(retrieveResults.inboundFiles).to.have.length(8); + }).jsonOutput?.result; + expect(retrieveResults?.inboundFiles).to.have.length(8); }); }); diff --git a/test/nuts/testMatrix.ts b/test/nuts/testMatrix.ts index 57b0e4b5f..94cecd307 100644 --- a/test/nuts/testMatrix.ts +++ b/test/nuts/testMatrix.ts @@ -250,6 +250,8 @@ const testRepos: RepoConfig[] = [ * 2. 
have normalized file paths */ export const TEST_REPOS_MAP = new Map( + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore Object.entries(keyBy(normalizeFilePaths(testRepos), 'gitUrl')) ); diff --git a/test/nuts/trackingCommands/basics.nut.ts b/test/nuts/trackingCommands/basics.nut.ts index c53c5e7b1..4c958e580 100644 --- a/test/nuts/trackingCommands/basics.nut.ts +++ b/test/nuts/trackingCommands/basics.nut.ts @@ -49,10 +49,10 @@ describe('end-to-end-test for tracking with an org (single packageDir)', () => { it('detects the initial metadata status', () => { const result = execCmd('force:source:status --json', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; expect(result).to.be.an.instanceof(Array); // the fields should be populated - expect(result.every((row) => row.type && row.fullName)).to.equal(true); + expect(result?.every((row) => row.type && row.fullName)).to.equal(true); }); it('pushes the initial metadata to the org', () => { const resp = execCmd('force:source:push --json'); @@ -61,28 +61,28 @@ describe('end-to-end-test for tracking with an org (single packageDir)', () => { expect(pushedSource).to.be.an.instanceof(Array); expect(pushedSource, JSON.stringify(pushedSource)).to.have.lengthOf(itemsInEBikesPush); expect( - pushedSource.every((r) => r.state !== ComponentStatus.Failed), - JSON.stringify(pushedSource.filter((r) => r.state === ComponentStatus.Failed)) + pushedSource?.every((r) => r.state !== ComponentStatus.Failed), + JSON.stringify(pushedSource?.filter((r) => r.state === ComponentStatus.Failed)) ).to.equal(true); }); it('sees no local changes (all were committed from push), but profile updated in remote', () => { const localResult = execCmd('force:source:status --json --local', { ensureExitCode: 0, - }).jsonOutput.result; - expect(localResult.filter(filterIgnored)).to.deep.equal([]); + }).jsonOutput?.result; + expect(localResult?.filter(filterIgnored)).to.deep.equal([]); const remoteResult = execCmd('force:source:status --json --remote', { ensureExitCode: 0, - }).jsonOutput.result; - expect(remoteResult.some((item) => item.type === 'Profile')).to.equal(true); + }).jsonOutput?.result; + expect(remoteResult?.some((item) => item.type === 'Profile')).to.equal(true); }); it('can pull the remote profile', () => { const pullResult = execCmd('force:source:pull --json', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; expect( - pullResult.pulledSource.some((item) => item.type === 'Profile'), + pullResult?.pulledSource?.some((item) => item?.type === 'Profile'), JSON.stringify(pullResult) ).to.equal(true); }); @@ -90,8 +90,8 @@ describe('end-to-end-test for tracking with an org (single packageDir)', () => { it('sees no local or remote changes', () => { const result = execCmd('force:source:status --json', { ensureExitCode: 0, - }).jsonOutput.result; - expect(result.filter((r) => r.type === 'Profile').filter(filterIgnored), JSON.stringify(result)).to.have.length( + }).jsonOutput?.result; + expect(result?.filter((r) => r.type === 'Profile').filter(filterIgnored), JSON.stringify(result)).to.have.length( 0 ); }); @@ -104,8 +104,8 @@ describe('end-to-end-test for tracking with an org (single packageDir)', () => { ]); const result = execCmd('force:source:status --json --local', { ensureExitCode: 0, - }).jsonOutput.result; - expect(result.filter(filterIgnored)).to.deep.equal([ + }).jsonOutput?.result; + expect(result?.filter(filterIgnored)).to.deep.equal([ { type: 'ApexClass', state: 'Local Deleted', @@ -129,9 +129,9 @@ 
describe('end-to-end-test for tracking with an org (single packageDir)', () => { it('does not see any change in remote status', () => { const result = execCmd('force:source:status --json --remote', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; expect( - result.filter((r) => r.fullName === 'TestOrderController'), + result?.filter((r) => r.fullName === 'TestOrderController'), JSON.stringify(result) ).to.have.length(0); }); @@ -145,8 +145,8 @@ describe('end-to-end-test for tracking with an org (single packageDir)', () => { it('sees no local changes', () => { const result = execCmd('force:source:status --json --local', { ensureExitCode: 0, - }).jsonOutput.result; - expect(result.filter(filterIgnored), JSON.stringify(result)).to.be.an.instanceof(Array).with.length(0); + }).jsonOutput?.result; + expect(result?.filter(filterIgnored), JSON.stringify(result)).to.be.an.instanceof(Array).with.length(0); }); }); @@ -156,7 +156,7 @@ describe('end-to-end-test for tracking with an org (single packageDir)', () => { JSON.parse(shelljs.exec('sfdx force:config:get defaultdevhubusername --json', { silent: true })) as { result: [{ location: string; value: string }]; } - ).result.find((config) => config.location === 'Local').value; + ).result?.find((config) => config.location === 'Local')?.value; const failure = execCmd(`force:source:status -u ${hubUsername} --remote --json`, { ensureExitCode: 1, }).jsonOutput as unknown as { name: string }; @@ -215,8 +215,8 @@ describe('end-to-end-test for tracking with an org (single packageDir)', () => { it('sees no local changes', () => { const result = execCmd('force:source:status --json --local', { ensureExitCode: 0, - }).jsonOutput.result; - expect(result.filter(filterIgnored), JSON.stringify(result)).to.be.an.instanceof(Array).with.length(2); + }).jsonOutput?.result; + expect(result?.filter(filterIgnored), JSON.stringify(result)).to.be.an.instanceof(Array).with.length(2); }); }); }); diff --git a/test/nuts/trackingCommands/conflicts.nut.ts b/test/nuts/trackingCommands/conflicts.nut.ts index e742d30c5..25dffc532 100644 --- a/test/nuts/trackingCommands/conflicts.nut.ts +++ b/test/nuts/trackingCommands/conflicts.nut.ts @@ -48,15 +48,15 @@ describe('conflict detection and resolution', () => { const pushedSource = pushResult.jsonOutput?.result.pushedSource; expect(pushedSource, JSON.stringify(pushedSource)).to.have.lengthOf(itemsInEBikesPush); expect( - pushedSource.every((r) => r.state !== ComponentStatus.Failed), - JSON.stringify(pushedSource.filter((r) => r.state === ComponentStatus.Failed)) + pushedSource?.every((r) => r.state !== ComponentStatus.Failed), + JSON.stringify(pushedSource?.filter((r) => r.state === ComponentStatus.Failed)) ).to.equal(true); }); it('edits a remote file', async () => { const conn = await Connection.create({ authInfo: await AuthInfo.create({ - username: session.orgs.get('default').username, + username: session.orgs.get('default')?.username, }), }); const app = await conn.singleRecordQuery<{ Id: string; Metadata: any }>( @@ -74,9 +74,9 @@ describe('conflict detection and resolution', () => { }); const result = execCmd('force:source:status --json --remote', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; expect( - result.filter((r) => r.type === 'CustomApplication'), + result?.filter((r) => r.type === 'CustomApplication'), JSON.stringify(result) ).to.have.lengthOf(1); }); @@ -135,7 +135,7 @@ describe('conflict detection and resolution', () => { ); // Ensure JSON structure on push errors const json = 
pushResponse.jsonOutput; - expect(json.data).to.deep.equal([ + expect(json?.data).to.deep.equal([ { state: 'Conflict', fullName: 'EBikes', @@ -143,13 +143,13 @@ describe('conflict detection and resolution', () => { filePath, }, ]); - expect(json.code).to.equal(1); - expect(json.exitCode).to.equal(1); - expect(json.status).to.equal(1); - expect(json.name).to.equal('sourceConflictDetected'); - expect(json.message).to.include("We couldn't complete the operation due to conflicts."); - expect(json.stack).to.include('sourceConflictDetected'); - expect(json.context).to.equal('Push'); + expect(json?.code).to.equal(1); + expect(json?.exitCode).to.equal(1); + expect(json?.status).to.equal(1); + expect(json?.name).to.equal('sourceConflictDetected'); + expect(json?.message).to.include("We couldn't complete the operation due to conflicts."); + expect(json?.stack).to.include('sourceConflictDetected'); + expect(json?.context).to.equal('Push'); // @ts-expect-error it's SfCommand.Error expect(json.commandName).to.include('Push'); }); diff --git a/test/nuts/trackingCommands/customLabels.nut.ts b/test/nuts/trackingCommands/customLabels.nut.ts index cfcc1b0af..86d9a6e2f 100644 --- a/test/nuts/trackingCommands/customLabels.nut.ts +++ b/test/nuts/trackingCommands/customLabels.nut.ts @@ -51,7 +51,7 @@ describe('CustomLabel source tracking', () => { ); const conn = await Connection.create({ authInfo: await AuthInfo.create({ - username: session.orgs.get('default').username, + username: session.orgs.get('default')?.username, }), }); const id = ( @@ -62,10 +62,10 @@ describe('CustomLabel source tracking', () => { await conn.tooling.sobject('CustomLabel').delete(id); expect((await conn.tooling.query('SELECT Id FROM CustomLabel')).totalSize).to.equal(2); - const result = execCmd('force:source:pull -f --json', { ensureExitCode: 0 }).jsonOutput.result; - expect(result.pulledSource.length).to.equal(1); - expect(result.pulledSource[0].state).to.equal('Deleted'); - expect(result.pulledSource[0].fullName).to.equal('DeleteMe'); + const result = execCmd('force:source:pull -f --json', { ensureExitCode: 0 }).jsonOutput?.result; + expect(result?.pulledSource.length).to.equal(1); + expect(result?.pulledSource[0].state).to.equal('Deleted'); + expect(result?.pulledSource[0].fullName).to.equal('DeleteMe'); expect(fs.existsSync(clFile)).to.be.true; expect(fs.readFileSync(clFile, { encoding: 'utf-8' })).to.not.include('DeleteMe'); expect(fs.readFileSync(clFile, { encoding: 'utf-8' })).to.include('KeepMe1'); @@ -82,7 +82,7 @@ describe('CustomLabel source tracking', () => { ); const conn = await Connection.create({ authInfo: await AuthInfo.create({ - username: session.orgs.get('default').username, + username: session.orgs.get('default')?.username, }), }); const ids = (await conn.tooling.query<{ Id: string }>('SELECT Id FROM CustomLabel')).records.map((r) => r.Id); diff --git a/test/nuts/trackingCommands/deployRetrieveDelete.nut.ts b/test/nuts/trackingCommands/deployRetrieveDelete.nut.ts index e502187cd..38a390bc5 100644 --- a/test/nuts/trackingCommands/deployRetrieveDelete.nut.ts +++ b/test/nuts/trackingCommands/deployRetrieveDelete.nut.ts @@ -39,32 +39,32 @@ describe('-t flag for deploy, retrieve, and delete', () => { it('detects the initial metadata status', () => { const result = execCmd('force:source:status --json', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; expect(result).to.be.an.instanceof(Array); // the fields should be populated - expect(result.every((row) => row.type && row.fullName)).to.equal(true); 
+ expect(result?.every((row) => row.type && row.fullName)).to.equal(true); }); it('deploy the initial metadata to the org with tracking', () => { const result = execCmd('force:source:deploy -p force-app,my-app,foo-bar/app -t --json', { ensureExitCode: 0, - }).jsonOutput.result; - expect(result.deployedSource).to.be.an.instanceof(Array); - expect(result.deployedSource, JSON.stringify(result)).to.have.length.greaterThan(10); + }).jsonOutput?.result; + expect(result?.deployedSource).to.be.an.instanceof(Array); + expect(result?.deployedSource, JSON.stringify(result)).to.have.length.greaterThan(10); expect( - result.deployedSource.every((r) => r.state !== ComponentStatus.Failed), + result?.deployedSource.every((r) => r.state !== ComponentStatus.Failed), JSON.stringify(result) ).to.equal(true); }); it('sees no local changes (all were committed from deploy), but profile updated in remote', () => { const localResult = execCmd('force:source:status --json --local', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; expect(localResult).to.deep.equal([]); const remoteResult = execCmd('force:source:status --json --remote', { ensureExitCode: 0, - }).jsonOutput.result; - expect(remoteResult.some((item) => item.type === 'Profile')).to.equal(true); + }).jsonOutput?.result; + expect(remoteResult?.some((item) => item.type === 'Profile')).to.equal(true); }); }); describe('retrieve and status', () => { @@ -73,7 +73,7 @@ describe('-t flag for deploy, retrieve, and delete', () => { ensureExitCode: 0, }).jsonOutput?.result; expect( - retrieveResult.inboundFiles.some((item) => item.type === 'Profile'), + retrieveResult?.inboundFiles.some((item) => item.type === 'Profile'), JSON.stringify(retrieveResult) ).to.equal(true); }); @@ -81,9 +81,9 @@ describe('-t flag for deploy, retrieve, and delete', () => { it('sees no local or remote changes', () => { const result = execCmd('force:source:status --json', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; expect( - result.filter((r) => r.type === 'Profile'), + result?.filter((r) => r.type === 'Profile'), JSON.stringify(result) ).to.have.length(0); }); @@ -96,7 +96,7 @@ describe('-t flag for deploy, retrieve, and delete', () => { }); execCmd( - `force:source:deploy -p force-app,my-app,foo-bar/app -o -u ${session.orgs.get('default').username} --json`, + `force:source:deploy -p force-app,my-app,foo-bar/app -o -u ${session.orgs.get('default')?.username} --json`, { ensureExitCode: 0, } @@ -104,7 +104,7 @@ describe('-t flag for deploy, retrieve, and delete', () => { execCmd( `force:source:deploy -p force-app,my-app,foo-bar/app -o --targetusername ${ - session.orgs.get('default').username + session.orgs.get('default')?.username } --json`, { ensureExitCode: 0, @@ -113,7 +113,7 @@ describe('-t flag for deploy, retrieve, and delete', () => { execCmd( `force:source:deploy -p force-app,my-app,foo-bar/app -o --target-org ${ - session.orgs.get('default').username + session.orgs.get('default')?.username } --json`, { ensureExitCode: 0, diff --git a/test/nuts/trackingCommands/forceIgnore.nut.ts b/test/nuts/trackingCommands/forceIgnore.nut.ts index 5f5e8b91a..35a124057 100644 --- a/test/nuts/trackingCommands/forceIgnore.nut.ts +++ b/test/nuts/trackingCommands/forceIgnore.nut.ts @@ -47,7 +47,7 @@ describe('forceignore changes', () => { originalForceIgnore = await fs.promises.readFile(path.join(session.project.dir, '.forceignore'), 'utf8'); conn = await Connection.create({ authInfo: await AuthInfo.create({ - username: 
session.orgs.get('default').username, + username: session.orgs.get('default')?.username, }), }); }); @@ -72,7 +72,7 @@ describe('forceignore changes', () => { it('shows the file in status as ignored', () => { const output = execCmd('force:source:status --json', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; expect(output, JSON.stringify(output)).to.deep.include({ state: 'Local Add', fullName: 'IgnoreTest', @@ -115,7 +115,7 @@ describe('forceignore changes', () => { // all 4 files should have been pushed expect(unIgnoredOutput).to.have.length(4); - unIgnoredOutput.map((result) => { + unIgnoredOutput?.map((result) => { expect(result.type === 'ApexClass'); expect(result.state === ComponentStatus.Created); }); @@ -144,14 +144,14 @@ describe('forceignore changes', () => { // gets file into source tracking const statusOutput = execCmd('force:source:status --json --remote', { ensureExitCode: 0, - }).jsonOutput.result; - expect(statusOutput.some((result) => result.fullName === 'CreatedClass')).to.equal(true); + }).jsonOutput?.result; + expect(statusOutput?.some((result) => result.fullName === 'CreatedClass')).to.equal(true); // pull doesn't retrieve that change const pullOutput = execCmd('force:source:pull --json', { ensureExitCode: 0, - }).jsonOutput.result; - expect(pullOutput.pulledSource.some((result) => result.fullName === 'CreatedClass')).to.equal(false); + }).jsonOutput?.result; + expect(pullOutput?.pulledSource.some((result) => result.fullName === 'CreatedClass')).to.equal(false); }); }); }); diff --git a/test/nuts/trackingCommands/lwc.nut.ts b/test/nuts/trackingCommands/lwc.nut.ts index 7ec449230..b6ee05e35 100644 --- a/test/nuts/trackingCommands/lwc.nut.ts +++ b/test/nuts/trackingCommands/lwc.nut.ts @@ -54,8 +54,8 @@ describe('lwc', () => { ); const result = execCmd('force:source:status --json', { ensureExitCode: 0, - }).jsonOutput.result; - expect(result.find((r) => r.filePath === cssPathRelative)).to.have.property('actualState', 'Changed'); + }).jsonOutput?.result; + expect(result?.find((r) => r.filePath === cssPathRelative)).to.have.property('actualState', 'Changed'); }); it('pushes lwc css change', () => { @@ -63,7 +63,7 @@ describe('lwc', () => { ensureExitCode: 0, }).jsonOutput?.result.pushedSource; // we get a result for each bundle member, even though only one changed - expect(result.filter((r) => r.fullName === 'heroDetails')).to.have.length(4); + expect(result?.filter((r) => r.fullName === 'heroDetails')).to.have.length(4); }); it('sees no local changes', () => { @@ -98,10 +98,10 @@ describe('lwc', () => { const result = execCmd('force:source:push --json', { ensureExitCode: 0, }).jsonOutput?.result.pushedSource; - const bundleMembers = result.filter((r) => r.fullName === 'heroDetails'); + const bundleMembers = result?.filter((r) => r.fullName === 'heroDetails'); expect(bundleMembers).to.have.length(4); - expect(bundleMembers.filter((r) => r.state === 'Deleted')).to.have.length(1); - expect(bundleMembers.filter((r) => r.state === 'Changed')).to.have.length(3); + expect(bundleMembers?.filter((r) => r.state === 'Deleted')).to.have.length(1); + expect(bundleMembers?.filter((r) => r.state === 'Changed')).to.have.length(3); }); it('sees no local changes', () => { @@ -129,9 +129,9 @@ describe('lwc', () => { const result = execCmd('force:source:status --json', { ensureExitCode: 0, }).jsonOutput?.result.filter((r) => r.origin === 'Local'); - expect(result.filter(filterIgnored)).to.have.length(4); - expect(result.filter(filterIgnored).filter((r) => r.actualState 
=== 'Deleted')).to.have.length(3); - expect(result.filter(filterIgnored).filter((r) => r.actualState === 'Changed')).to.have.length(1); + expect(result?.filter(filterIgnored)).to.have.length(4); + expect(result?.filter(filterIgnored).filter((r) => r.actualState === 'Deleted')).to.have.length(3); + expect(result?.filter(filterIgnored).filter((r) => r.actualState === 'Changed')).to.have.length(1); }); it('push deletes the LWC remotely', () => { @@ -139,10 +139,10 @@ describe('lwc', () => { ensureExitCode: 0, }).jsonOutput?.result.pushedSource; // there'll also be changes for the changed Hero component html, but we've already tested changing a bundle member - const bundleMembers = result.filter((r) => r.fullName === 'heroDetails'); + const bundleMembers = result?.filter((r) => r.fullName === 'heroDetails'); expect(bundleMembers).to.have.length(3); expect( - bundleMembers.every((r) => r.state === 'Deleted'), + bundleMembers?.every((r) => r.state === 'Deleted'), JSON.stringify(bundleMembers, undefined, 2) ).to.be.true; }); diff --git a/test/nuts/trackingCommands/mpd-non-sequential.nut.ts b/test/nuts/trackingCommands/mpd-non-sequential.nut.ts index 6c0ff8c46..4f2ccdc3f 100644 --- a/test/nuts/trackingCommands/mpd-non-sequential.nut.ts +++ b/test/nuts/trackingCommands/mpd-non-sequential.nut.ts @@ -31,7 +31,7 @@ describe('multiple pkgDirectories pushed as one deploy', () => { conn = await Connection.create({ authInfo: await AuthInfo.create({ - username: session.orgs.get('default').username, + username: session.orgs.get('default')?.username, }), }); }); @@ -48,7 +48,7 @@ describe('multiple pkgDirectories pushed as one deploy', () => { }).jsonOutput?.result.pushedSource; expect(result).to.be.an.instanceof(Array); // the fields should be populated - expect(result.every((row) => row.type && row.fullName)).to.equal(true); + expect(result?.every((row) => row.type && row.fullName)).to.equal(true); }); it('should have 2 deployments', async () => { diff --git a/test/nuts/trackingCommands/mpd-sequential.nut.ts b/test/nuts/trackingCommands/mpd-sequential.nut.ts index 41504ff9c..6b6a23d11 100644 --- a/test/nuts/trackingCommands/mpd-sequential.nut.ts +++ b/test/nuts/trackingCommands/mpd-sequential.nut.ts @@ -44,7 +44,7 @@ describe('multiple pkgDirs deployed sequentially', () => { conn = await Connection.create({ authInfo: await AuthInfo.create({ - username: session.orgs.get('default').username, + username: session.orgs.get('default')?.username, }), }); }); @@ -61,7 +61,7 @@ describe('multiple pkgDirs deployed sequentially', () => { }).jsonOutput?.result.pushedSource; expect(result).to.be.an.instanceof(Array); // the fields should be populated - expect(result.every((row) => row.type && row.fullName)).to.equal(true); + expect(result?.every((row) => row.type && row.fullName)).to.equal(true); }); it('should have 4 deployments', async () => { diff --git a/test/nuts/trackingCommands/remoteChanges.nut.ts b/test/nuts/trackingCommands/remoteChanges.nut.ts index 97838e806..ab80a67f7 100644 --- a/test/nuts/trackingCommands/remoteChanges.nut.ts +++ b/test/nuts/trackingCommands/remoteChanges.nut.ts @@ -38,7 +38,7 @@ describe('remote changes', () => { }); conn = await Connection.create({ authInfo: await AuthInfo.create({ - username: session.orgs.get('default').username, + username: session.orgs.get('default')?.username, }), }); }); @@ -55,8 +55,8 @@ describe('remote changes', () => { const pushedSource = pushResult.jsonOutput?.result.pushedSource; expect(pushedSource, 
JSON.stringify(pushedSource)).to.have.lengthOf(itemsInEBikesPush); expect( - pushedSource.every((r) => r.state !== ComponentStatus.Failed), - JSON.stringify(pushedSource.filter((r) => r.state === ComponentStatus.Failed)) + pushedSource?.every((r) => r.state !== ComponentStatus.Failed), + JSON.stringify(pushedSource?.filter((r) => r.state === ComponentStatus.Failed)) ).to.equal(true); }); @@ -87,28 +87,28 @@ describe('remote changes', () => { it('can see the delete in status', () => { const result = execCmd('force:source:status --json --remote', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; // it shows up as one class on the server, but 2 files when pulled expect( - result.filter((r) => r.state.includes('Delete')), + result?.filter((r) => r.state.includes('Delete')), JSON.stringify(result) ).to.have.length(1); }); it('does not see any change in local status', () => { const result = execCmd('force:source:status --json --local', { ensureExitCode: 0, - }).jsonOutput.result; - expect(result.filter(filterIgnored)).to.deep.equal([]); + }).jsonOutput?.result; + expect(result?.filter(filterIgnored)).to.deep.equal([]); }); it('can pull the delete', () => { - const result = execCmd('force:source:pull --json', { ensureExitCode: 0 }).jsonOutput.result; + const result = execCmd('force:source:pull --json', { ensureExitCode: 0 }).jsonOutput?.result; // ebikes ignore file doesn't catch this somehow on windows (probably that slash) // https://github.com/trailheadapps/ebikes-lwc/blob/3e5baf83d97bc71660feaa9922f8fed2e686f5f8/.forceignore#L136-L137 - const filteredSource = result.pulledSource.filter((r) => !r.fullName.includes('prm_channel_reports_folder')); + const filteredSource = result?.pulledSource.filter((r) => !r.fullName.includes('prm_channel_reports_folder')); // the 2 files for the apexClass, and possibly one for the Profile (depending on whether it got created in time) expect(filteredSource).to.have.length.greaterThanOrEqual(2); expect(filteredSource).to.have.length.lessThanOrEqual(4); - result.pulledSource + result?.pulledSource .filter((r) => r.fullName === 'TestOrderController') .map((r) => expect(r.state).to.equal('Deleted')); }); @@ -127,13 +127,13 @@ describe('remote changes', () => { it('sees correct local and remote status', () => { const remoteResult = execCmd('force:source:status --json --remote', { ensureExitCode: 0, - }).jsonOutput.result; - expect(remoteResult.filter((r) => r.state.includes('Remote Deleted'))).to.deep.equal([]); + }).jsonOutput?.result; + expect(remoteResult?.filter((r) => r.state.includes('Remote Deleted'))).to.deep.equal([]); const localStatus = execCmd('force:source:status --json --local', { ensureExitCode: 0, - }).jsonOutput.result; - expect(localStatus.filter(filterIgnored)).to.deep.equal([]); + }).jsonOutput?.result; + expect(localStatus?.filter(filterIgnored)).to.deep.equal([]); }); }); @@ -152,32 +152,32 @@ describe('remote changes', () => { it('can see the add in status', () => { const result = execCmd('force:source:status --json --remote', { ensureExitCode: 0, - }).jsonOutput.result; + }).jsonOutput?.result; expect( - result.some((r) => r.fullName === className), + result?.some((r) => r.fullName === className), JSON.stringify(result) ).to.equal(true); }); it('can pull the add', () => { - const result = execCmd('force:source:pull --json', { ensureExitCode: 0 }).jsonOutput.result; + const result = execCmd('force:source:pull --json', { ensureExitCode: 0 }).jsonOutput?.result; // SDR marks all retrieves as 'Changed' even if it creates new 
local files. This is different from toolbelt, which marked those as 'Created'
-      result.pulledSource
+      result?.pulledSource
         .filter((r) => r.fullName === className)
         .map((r) => expect(r.state, JSON.stringify(r)).to.equal('Created'));
     });
     it('sees correct local and remote status', () => {
       const remoteResult = execCmd('force:source:status --json --remote', {
         ensureExitCode: 0,
-      }).jsonOutput.result;
+      }).jsonOutput?.result;
       expect(
-        remoteResult.filter((r) => r.fullName === className),
+        remoteResult?.filter((r) => r.fullName === className),
         JSON.stringify(remoteResult)
       ).deep.equal([]);
       const localStatus = execCmd('force:source:status --json --local', {
         ensureExitCode: 0,
-      }).jsonOutput.result;
-      expect(localStatus.filter(filterIgnored)).to.deep.equal([]);
+      }).jsonOutput?.result;
+      expect(localStatus?.filter(filterIgnored)).to.deep.equal([]);
     });
   });
diff --git a/test/nuts/translation.nut.ts b/test/nuts/translation.nut.ts
index 41e3d55c8..581bff291 100644
--- a/test/nuts/translation.nut.ts
+++ b/test/nuts/translation.nut.ts
@@ -51,22 +51,22 @@ describe('translations', () => {
     await fs.promises.writeFile(fieldFile, original.replace('spanish', 'español'));
     const statusResult = execCmd('force:source:status --json', {
       ensureExitCode: 0,
-    }).jsonOutput.result;
+    }).jsonOutput?.result;
-    expect(statusResult[0].type).to.equal('CustomObjectTranslation');
+    expect(statusResult?.at(0)?.type).to.equal('CustomObjectTranslation');
   });
   it('push local change', () => {
     const pushResult = execCmd('force:source:push --json', {
       ensureExitCode: 0,
-    }).jsonOutput.result;
-    expect(pushResult.pushedSource.every((s) => s.type === 'CustomObjectTranslation')).to.be.true;
+    }).jsonOutput?.result;
+    expect(pushResult?.pushedSource.every((s) => s.type === 'CustomObjectTranslation')).to.be.true;
   });
   it('sees no local changes', () => {
     const statusResult = execCmd('force:source:status --json', {
       ensureExitCode: 0,
-    }).jsonOutput.result;
+    }).jsonOutput?.result;
     expect(statusResult).to.deep.equal([]);
   });
 });
@@ -86,8 +86,8 @@ describe('translations', () => {
   it('deploy', () => {
     const deployResults = execCmd('force:source:deploy -x package.xml --json', {
       ensureExitCode: 0,
-    }).jsonOutput.result;
-    expect(deployResults.deployedSource.length).to.equal(7);
+    }).jsonOutput?.result;
+    expect(deployResults?.deployedSource.length).to.equal(7);
   });
   it('retrieve without local metadata', async () => {
@@ -96,8 +96,8 @@
     await fs.promises.mkdir(path.join(session.project.dir, 'force-app'));
     const retrieveResults = execCmd('force:source:retrieve -x package.xml --json', {
       ensureExitCode: 0,
-    }).jsonOutput.result;
-    expect(retrieveResults.inboundFiles).to.have.length(7);
+    }).jsonOutput?.result;
+    expect(retrieveResults?.inboundFiles).to.have.length(7);
   });
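The change repeated throughout these NUTs is one strict-null-safety pattern: execCmd(...).jsonOutput is typed as optional, so the tests now read results through optional chaining (or hand helpers a ?? {} fallback) instead of dereferencing directly. A minimal sketch of the pattern, assuming the execCmd helper from @salesforce/cli-plugins-testkit used above and a hypothetical StatusResult row shape:

// Illustrative sketch only, not part of the diff above. StatusResult is a hypothetical
// row shape standing in for the plugin's real formatter types.
import { execCmd } from '@salesforce/cli-plugins-testkit';
import { expect } from 'chai';

type StatusResult = { state: string; fullName: string; type: string; origin: string };

const result = execCmd<StatusResult[]>('force:source:status --json', {
  ensureExitCode: 0,
}).jsonOutput?.result; // jsonOutput may be undefined, so result is StatusResult[] | undefined

// Optional chaining keeps the assertion compiling under strict null checks; it still
// fails when result is undefined, because undefined never equals true.
expect(result?.every((row) => row.type && row.fullName)).to.equal(true);

The as RepoConfig casts and ?? {} fallbacks in the seed tests follow the same idea wherever a possibly undefined value feeds a helper, such as testkit.expect.toHaveProperty, that expects a defined object.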