feat: Skipping failed tests from JSON-Schema-Test-Suite

We'll re-enable them once the feature is implemented.

fix: typo

fix: typo
antoniocapelo committed Feb 6, 2025
1 parent 00e81ec commit 0728e56
Showing 6 changed files with 680 additions and 2 deletions.
1 change: 1 addition & 0 deletions next/jest.config.mjs
@@ -47,6 +47,7 @@ const config = {
roots,
moduleNameMapper,
testPathIgnorePatterns,
reporters: ['default', '<rootDir>/test/validation/json_schema_test_suite_tracker.js'],
}

export default config
1 change: 1 addition & 0 deletions next/test/validation/constants.js
@@ -0,0 +1 @@
export const JSON_SCHEMA_SUITE_FAILED_TESTS_FILE = 'failed-json-schema-test-suite.json'
582 changes: 582 additions & 0 deletions next/test/validation/failed-json-schema-test-suite.json

Large diffs are not rendered by default.
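The contents of failed-json-schema-test-suite.json aren't rendered in this diff, but based on how the file is written and read elsewhere in this commit (the reporter serializes an object with a failedTests array of full test names, and the helper reads that same array), its shape is presumably along these lines; the entry below is a placeholder, not an actual failing test:

{
  "failedTests": [
    "JSON Schema Test Suite <schema description> <test description>"
  ]
}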

14 changes: 14 additions & 0 deletions next/test/validation/helpers.ts
@@ -0,0 +1,14 @@
import fs from 'node:fs'
import path from 'node:path'
import { JSON_SCHEMA_SUITE_FAILED_TESTS_FILE } from './constants'

export function loadJsonSchemaSuiteFailedTests(): string[] {
try {
const content = fs.readFileSync(path.join(__dirname, JSON_SCHEMA_SUITE_FAILED_TESTS_FILE), 'utf8')
return JSON.parse(content).failedTests
}
catch (error) {
console.error('An error occurred when loading the file with failed tests:', error)
return []
}
}
30 changes: 28 additions & 2 deletions next/test/validation/json_schema_test_suite.test.ts
@@ -4,6 +4,7 @@ import path from 'node:path'
import util from 'node:util'
import { describe, expect, it } from '@jest/globals'
import { createHeadlessForm } from '../../src'
import { loadJsonSchemaSuiteFailedTests } from './helpers'

interface Test {
description: string
@@ -30,17 +31,42 @@ expect.extend({
},
})

describe.skip('JSON Schema Test Suite', () => {
const testsToSkip = loadJsonSchemaSuiteFailedTests()

/**
* This test suite is running the JSON-Schema-Test-Suite (https://github.com/json-schema-org/JSON-Schema-Test-Suite/tree/main/tests/draft2020-12).
* We can't run the whole suite successfully yet, as the 'next' json-schema-form version
* is still under development. Until all features are implemented, some of the
* tests in this suite will fail. Because of that, we're skipping the failing tests,
* and we'll re-enable them once the corresponding features are implemented.
*
* Note: For this version to be considered "Done", no tests from this suite
* should be skipped.
*/
describe('JSON Schema Test Suite', () => {
const testsDir = path.join(__dirname, '..', '..', 'json-schema-test-suite', 'tests', 'draft2020-12')
const testFiles = fs.readdirSync(testsDir).filter(file => file.endsWith('.json'))

for (const file of testFiles) {
const testFile: TestSchema[] = JSON.parse(fs.readFileSync(path.join(testsDir, file), 'utf8'))

const runTestIfFeatureImplemented = (testName: string, testFn: () => void) => {
const shouldRun = !testsToSkip.includes(testName)

if (shouldRun) {
it(testName, testFn)
}
else {
// Skipping test as the feature being tested is not implemented yet
it.skip(testName, testFn)
}
}

for (const testSchema of testFile) {
describe(testSchema.description, () => {
for (const test of testSchema.tests) {
it(test.description, () => {
// Run the test only if it isn't in the list of previously failed tests
runTestIfFeatureImplemented(`JSON Schema Test Suite ${testSchema.description} ${test.description}`, () => {
// TODO: properly extend the expect interface
expect(testSchema.schema).toBeValid(test.data, test.valid)
})
54 changes: 54 additions & 0 deletions next/test/validation/json_schema_test_suite_tracker.js
@@ -0,0 +1,54 @@
import fs from 'node:fs'
import path from 'node:path'
import { fileURLToPath } from 'node:url'

import { JSON_SCHEMA_SUITE_FAILED_TESTS_FILE } from './constants.js'

const __filename = fileURLToPath(import.meta.url)
const __dirname = path.dirname(__filename)

// Change this value to true to write the failed tests to a file, for
// later consumption by json_schema_test_suite.test.ts.
const SHOULD_WRITE_FAILED_TESTS_TO_FILE = false

// Save newly failed tests
function saveFailedTests(failedTests) {
fs.writeFileSync(path.join(__dirname, JSON_SCHEMA_SUITE_FAILED_TESTS_FILE), JSON.stringify({ failedTests }, null, 2))
}

/**
* On top of our test cases, we're running the JSON-Schema-Test-Suite
* (https://github.com/json-schema-org/JSON-Schema-Test-Suite/tree/main/tests/draft2020-12).
* However, until all features are implemented, some of the
* tests in this suite will fail. We're using this reporter to keep track of the
* failing tests (in JSON_SCHEMA_SUITE_FAILED_TESTS_FILE), so we
* can re-enable them once the corresponding features are implemented.
*
* Note: The failed tests are not saved to the file on every run of the suite,
* as doing so could silently hide new failures introduced by changes to the codebase.
* The SHOULD_WRITE_FAILED_TESTS_TO_FILE constant can be set to true to write the
* failed tests to the file on demand, for later consumption by
* json_schema_test_suite.test.ts.
*
*/
class FailureTrackingReporter {
constructor() {
this.failedTests = new Set()
}

onTestResult(test, testResult) {
testResult.testResults.forEach((result) => {
if (result.status === 'failed') {
this.failedTests.add(result.fullName || result.title)
}
})
}

onRunComplete() {
if (SHOULD_WRITE_FAILED_TESTS_TO_FILE) {
saveFailedTests(Array.from(this.failedTests))
}
}
}

export default FailureTrackingReporter
