Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix(sanity): optimise getLeafWeights to not stack overflow #7999

Merged
merged 1 commit
Dec 10, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -433,4 +433,50 @@ describe('deriveSearchWeightsFromType', () => {
],
})
})

it('works for schemas that branch out a lot', () => {
  // Schema of 60 "component" object types with 10 string fields each; every
  // component also carries an array field that may contain any other
  // component, so the type graph branches heavily at every level.
  const indices = Array.from({length: 60}, (_, i) => i)

  // Array-member references pointing at every component type.
  const componentRefs = indices.map((i) => ({type: `component_${i}`}))

  const components = indices.map((i) => {
    const stringFields = Array.from({length: 10}, (_, f) =>
      defineField({name: `component_${i}_field_${f}`, type: 'string'}),
    )
    return defineType({
      name: `component_${i}`,
      type: 'object',
      fields: [
        ...stringFields,
        defineField({name: `children_${i}`, type: 'array', of: [...componentRefs]}),
      ],
    })
  })

  const schema = createSchema({
    name: 'default',
    types: [
      ...components,
      defineType({
        name: 'testType',
        type: 'document',
        fields: [
          defineField({
            name: 'components',
            type: 'array',
            of: [...componentRefs],
          }),
        ],
      }),
    ],
  })

  // Deriving weights over this heavily branching schema must complete
  // (i.e. not overflow the stack) and still identify the document type.
  expect(
    deriveSearchWeightsFromType({
      schemaType: schema.get('testType')!,
      maxDepth: 5,
    }),
  ).toMatchObject({
    typeName: 'testType',
  })
})
})
Original file line number Diff line number Diff line change
Expand Up @@ -67,58 +67,57 @@ function getLeafWeights(
type: SchemaType | undefined,
path: string,
depth: number,
accumulator: SearchWeightEntry[] = [], // use accumulator to avoid stack overflow
): SearchWeightEntry[] {
if (!type) return []
if (depth > maxDepth) return []
if (!type) return accumulator
if (depth > maxDepth) return accumulator

const typeChain = getTypeChain(type)

if (isStringField(type) || isPtField(type)) {
const weight = getWeight(type, path)

if (typeof weight !== 'number') return []
return [{path, weight, type: isPtField(type) ? 'pt' : 'string'}]
if (typeof weight === 'number') {
accumulator.push({path, weight, type: isPtField(type) ? 'pt' : 'string'})
}
return accumulator
}

if (isSlugField(type)) {
const weight = getWeight(type, path)
if (typeof weight !== 'number') return []
return [
{
if (typeof weight === 'number') {
accumulator.push({
path: getFullyQualifiedPath(type, path),
weight,
type: isPtField(type) ? 'pt' : 'string',
},
]
})
}
return accumulator
}

const results: SearchWeightEntry[] = []

const objectTypes = typeChain.filter(
(t): t is Extract<SchemaType, {jsonType: 'object'}> =>
let recursiveResult = accumulator
for (const t of typeChain) {
if (
t.jsonType === 'object' &&
!!t.fields?.length &&
!ignoredBuiltInObjectTypes.includes(t.name),
)
for (const objectType of objectTypes) {
for (const field of objectType.fields) {
const nextPath = pathToString([path, field.name].filter(Boolean))
results.push(...traverse(field.type, nextPath, depth + 1))
}
}

const arrayTypes = typeChain.filter(
(t): t is Extract<SchemaType, {jsonType: 'array'}> =>
t.jsonType === 'array' && !!t.of?.length,
)
for (const arrayType of arrayTypes) {
for (const arrayItemType of arrayType.of) {
const nextPath = `${path}[]`
results.push(...traverse(arrayItemType, nextPath, depth + 1))
!ignoredBuiltInObjectTypes.includes(t.name)
) {
for (const field of t.fields) {
recursiveResult = traverse(
field.type,
pathToString([path, field.name].filter(Boolean)),
depth + 1,
recursiveResult,
)
}
} else if (t.jsonType === 'array' && !!t.of?.length) {
for (const arrayItemType of t.of) {
// eslint-disable-next-line no-param-reassign
recursiveResult = traverse(arrayItemType, `${path}[]`, depth + 1, recursiveResult)
}
}
}

return results
return recursiveResult
}

// Cross Dataset Reference are not part of the schema, so we should not attempt to reconcile them.
Expand Down
Loading