diff --git a/README.md b/README.md index 5b79af4..6952645 100644 --- a/README.md +++ b/README.md @@ -173,6 +173,9 @@ main() - Arguments - `config` (object, optional) - A configuration object with the following schema. + - `defaultArgsAdapter` (function, optional) - The default `argsAdapter` function for the entities. + - `addEntitiesResolvers` (boolean, optional) - automatically add entity types and resolvers according to the entities configuration, see [composer entities section](#composer-entities). + - `logger` (object, optional) - A pino-compatible logger instance, used to log the queries sent to subgraphs and the merging of results; if missing, a silent logger is used. - `subgraphs` (array, optional) - Array of subgraph configuration objects with the following schema. - `name` (string, optional) - A unique name to identify the subgraph; if missing the default one is `#${index}`, where index is the subgraph index in the array. - `server` (object, required) - Configuration object for communicating with the subgraph server with the following schema: @@ -195,11 +198,12 @@ main() - `resolver` (object, optional) - The resolver definition to query the foreign entity, same structure as `entity.resolver`. - `many` (array of objects, optional) - Describes a 1-to-many relation - the reverse of the foreign key. - `type` (string, required) - The entity type where the entity is a foreign key. - - `fkey` (string, required) - The foreign key field in the referred entity. + - `fkey` (string, optional) - The foreign key field in the referred entity. Required unless `link` is defined. - `as` (string, required) - When using `addEntitiesResolvers`, it defines the name of the relation as a field of the current one, as a list. - `pkey` (string, optional) - The primary key of the referred entity. - `subgraph` (string, optional) - The subgraph name of the referred entity, where the resolver is located; if missing, the current subgraph is assumed. - `resolver` (object, required) - The resolver definition to query the referred entity, same structure as `entity.resolver`. + - `link` (object, optional) - Describes the linking entity used to resolve a many-to-many relation: it defines the linking entity `type`, the `pkey` and `fkey` fields that connect the current and the referred entity, an optional `subgraph` and the `resolver` to query it, see [composer entities section](#composer-entities). - `onSubgraphError` (function, optional) - Hook called when an error occurs getting schema from a subgraph. The default function will throw the error. The arguments are: - `error` (error) - The error. - `subgraph` (string) - The erroring subgraph name. @@ -221,8 +225,6 @@ main() - `queryTypeName` (string, optional) - The name of the `Query` type in the composed schema. **Default:** `'Query'`. - `mutationTypeName` (string, optional) - The name of the `Mutation` type in the composed schema. **Default:** `'Mutation'`. - `subscriptionTypeName` (string, optional) - The name of the `Subscription` type in the composed schema. **Default:** `'Subscription'`. - - `defaultArgsAdapter` (function, optional) - The default `argsAdapter` function for the entities. - - `addEntitiesResolvers` (boolean, optional) - automatically add entities types and resolvers accordingly with configuration, see [composer entities section](#composer-entities). - Returns - A `Promise` that resolves with a `Composer` instance.
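For reference, the `many`/`link` options documented above are exercised by the new `test/many-to-many.test.js` added further down in this patch. A condensed sketch of that configuration follows; the subgraph, type and resolver names are taken from the restaurants/foods fixtures in this diff and are illustrative only:

```js
// Condensed from test/many-to-many.test.js in this patch: expose `Restaurant.foods`
// across subgraphs through the linking entity `RestaurantsFoods`.
const entities = {
  'restaurants-subgraph': {
    Restaurant: {
      resolver: { name: 'restaurants' },
      pkey: 'id',
      many: [
        {
          type: 'Food',               // the related entity
          as: 'foods',                // exposed as Restaurant.foods by addEntitiesResolvers
          pkey: 'id',
          link: {                     // the linking entity holding restaurantId/foodId pairs
            type: 'RestaurantsFoods',
            pkey: 'restaurantId',     // field pointing back to Restaurant
            fkey: 'foodId',           // field pointing to Food
            resolver: {
              name: 'restaurantsFoods',
              argsAdapter: (restaurantIds) => ({ where: { restaurantId: { in: restaurantIds } } }),
              partialResults: (restaurants) => restaurants.map(r => r.id)
            }
          },
          subgraph: 'foods-subgraph', // where the related entity is resolved
          resolver: {
            name: 'foods',
            argsAdapter: (foodIds) => ({ where: { id: { in: foodIds } } }),
            partialResults: (restaurantFoods) => restaurantFoods.map(r => r.foodId)
          }
        }
      ]
    }
  }
}
```

With this configuration the composer, as implemented in `lib/query-builder.js` and `lib/results.js` below, first queries the linking entity (`restaurantsFoods`) and then the related entity (`foods`), merging the rows back onto each `Restaurant` by the configured `pkey`/`fkey` pairs.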
diff --git a/fixtures/artists-subgraph-with-entities.js b/fixtures/artists-subgraph-with-entities.js index fff7742..3a93a26 100644 --- a/fixtures/artists-subgraph-with-entities.js +++ b/fixtures/artists-subgraph-with-entities.js @@ -91,4 +91,4 @@ const entities = { } } -module.exports = { name: 'artists', schema, reset, resolvers, entities, data } +module.exports = { name: 'artists-subgraph', schema, reset, resolvers, entities, data } diff --git a/fixtures/foods-subgraph.js b/fixtures/foods-subgraph.js new file mode 100644 index 0000000..b857674 --- /dev/null +++ b/fixtures/foods-subgraph.js @@ -0,0 +1,70 @@ +'use strict' + +const schema = ` + input WhereConditionIn { + in: [ID!]! + } + + input FoodsWhereCondition { + id: WhereConditionIn + } + + type Food { + id: ID! + name: String + } + + type Query { + foods(where: FoodsWhereCondition): [Food] + } +` + +const data = { + foods: null +} + +function reset () { + data.foods = { + 50: { + id: 50, + name: 'Pizza margherita' + }, + 51: { + id: 51, + name: 'Pizza boscaiola' + }, + 52: { + id: 52, + name: 'Pizza capricciosa' + }, + 60: { + id: 60, + name: 'Spaghetti carbonara' + }, + 61: { + id: 61, + name: 'Tagliolini scoglio' + }, + 62: { + id: 62, + name: 'Pici cacio e pepe' + }, + 63: { + id: 63, + name: 'Grigliata mista' + } + } +} + +reset() + +const resolvers = { + Query: { + foods (_, { where }) { + return Object.values(data.foods) + .filter(a => where.id.in.includes(String(a.id))) + } + } +} + +module.exports = { name: 'foods-subgraph', schema, reset, resolvers, data } diff --git a/fixtures/movies-subgraph-with-entities.js b/fixtures/movies-subgraph-with-entities.js index 1348660..3e5926f 100644 --- a/fixtures/movies-subgraph-with-entities.js +++ b/fixtures/movies-subgraph-with-entities.js @@ -100,4 +100,4 @@ const entities = { } } -module.exports = { name: 'movies', schema, reset, resolvers, entities, data } +module.exports = { name: 'movies-subgraph', schema, reset, resolvers, entities, data } diff --git a/fixtures/restaurants-subgraph.js b/fixtures/restaurants-subgraph.js new file mode 100644 index 0000000..1e07f8c --- /dev/null +++ b/fixtures/restaurants-subgraph.js @@ -0,0 +1,112 @@ +'use strict' + +const schema = ` + input WhereConditionIn { + in: [ID!]! + } + + input RestaurantsWhereCondition { + id: WhereConditionIn + } + + input RestaurantsFoodsWhereCondition { + restaurantId: WhereConditionIn + foodId: WhereConditionIn + } + + type Restaurant { + id: ID! + businessName: String + } + + type RestaurantsFoods { + restaurantId: ID! + foodId: ID! 
+ } + + type Query { + restaurants(where: RestaurantsWhereCondition): [Restaurant] + restaurantsFoods(where: RestaurantsFoodsWhereCondition): [RestaurantsFoods] + } +` + +const data = { + restaurants: null +} + +function reset () { + data.restaurantsFoods = [ + { + restaurantId: 90, + foodId: 50 + }, + { + restaurantId: 90, + foodId: 51 + }, + { + restaurantId: 90, + foodId: 52 + }, + + { + restaurantId: 91, + foodId: 60 + }, + { + restaurantId: 91, + foodId: 61 + }, + { + restaurantId: 91, + foodId: 62 + }, + + { + restaurantId: 92, + foodId: 60 + }, + { + restaurantId: 92, + foodId: 63 + } + ] + + data.restaurants = { + 90: { + id: 90, + businessName: 'Pizzeria Napoletana' + }, + 91: { + id: 91, + businessName: 'Ristorante Stellato' + }, + 92: { + id: 92, + businessName: 'Trattoria da Gigi' + } + } +} + +reset() + +const resolvers = { + Query: { + restaurants (_, { where }) { + return Object.values(data.restaurants) + .filter(r => where.id.in.includes(String(r.id))) + }, + restaurantsFoods (_, { where }) { + if (where.restaurantId?.in) { + return data.restaurantsFoods + .filter(rf => where.restaurantId.in.includes(String(rf.restaurantId))) + } + if (where.foodId?.in) { + return data.restaurantsFoods + .filter(rf => where.foodId.in.includes(String(rf.foodId))) + } + } + } +} + +module.exports = { name: 'restaurants-subgraph', schema, reset, resolvers, data } diff --git a/fixtures/songs-subgraph-with-entities.js b/fixtures/songs-subgraph-with-entities.js index 6b35b5f..261e5f8 100644 --- a/fixtures/songs-subgraph-with-entities.js +++ b/fixtures/songs-subgraph-with-entities.js @@ -100,4 +100,4 @@ const entities = { } } -module.exports = { name: 'songs', schema, reset, resolvers, entities, data } +module.exports = { name: 'songs-subgraph', schema, reset, resolvers, entities, data } diff --git a/lib/composer.js b/lib/composer.js index 72c767a..a0744c7 100644 --- a/lib/composer.js +++ b/lib/composer.js @@ -10,7 +10,7 @@ const metaline = require('metaline') const { createEmptyObject, unwrapSchemaType } = require('./graphql-utils') const { fetchSubgraphSchema, makeGraphqlRequest } = require('./network') const { QueryBuilder } = require('./query-builder') -const { isObject, traverseResult, schemaTypeName, createDefaultArgsAdapter } = require('./utils') +const { schemaTypeName, createDefaultArgsAdapter, dummyLogger } = require('./utils') const { validateArray, validateFunction, @@ -18,6 +18,7 @@ const { validateString, validateResolver } = require('./validation') +const { mergeResults } = require('./results') class Composer { #queryTypeName @@ -29,6 +30,7 @@ class Composer { #types #directives #entities + #logger constructor (options = {}) { validateObject(options, 'options') @@ -40,12 +42,14 @@ class Composer { subgraphs = [], onSubgraphError = onError, subscriptions, - addEntitiesResolvers + addEntitiesResolvers, + logger = dummyLogger() } = options let defaultArgsAdapter = options.defaultArgsAdapter this.addEntitiesResolvers = !!addEntitiesResolvers + this.#logger = logger validateString(queryTypeName, 'queryTypeName') validateString(mutationTypeName, 'mutationTypeName') @@ -148,12 +152,28 @@ class Composer { for (let k = 0; k < many.length; ++k) { const m = many[k] - for (const p of ['type', 'fkey', 'as', 'pkey']) { + for (const p of ['type', 'as', 'pkey']) { validateString(m[p], `subgraphs[${subgraphName}].entities.${name}.many[${k}].${p}`) } - for (const p of ['subgraph']) { - if (!m[p]) { continue } - validateString(m[p], `subgraphs[${subgraphName}].entities.${name}.many[${k}].${p}`) + // 
TODO validate: fkey is not required for link + if (m.fkey) { + validateString(m.subgraph, `subgraphs[${subgraphName}].entities.${name}.many[${k}].fkey`) + } + + if (!m.subgraph) { + m.subgraph = subgraphName + } else { + validateString(m.subgraph, `subgraphs[${subgraphName}].entities.${name}.many[${k}].subgraph`) + } + + // TODO validate many.link + // type, pkey, fkey, resolver + if (m.link) { + if (!m.link.subgraph) { + m.link.subgraph = subgraphName + } else { + validateString(m.subgraph, `subgraphs[${subgraphName}].entities.${name}.many[${k}].subgraph`) + } } if (typeof m.resolver.argsAdapter === 'string') { @@ -465,21 +485,23 @@ class Composer { const schemaNode = type.fields.find((f) => { return f.name === node.name.value }) - const bareType = unwrapSchemaType(schemaNode.type) - const objType = this.#types.get(bareType.name) + const schemaNodeType = unwrapSchemaType(schemaNode.type) + const schemaNodeTypeName = this.#types.get(schemaNodeType.name) const query = new QueryBuilder({ node, schemaNode, path: [fieldName], - type: objType, + type: schemaNodeTypeName, fields: [], + subgraphs: this.#subgraphs, subgraph, resolverName: info.fieldName, root: true, info, argsAdapter: null, types: this.#types, - entities: this.#entities + entities: this.#entities, + logger: this.#logger }) await this.#runQuery(ctx, query) return ctx.result[fieldName] @@ -527,13 +549,15 @@ class Composer { path: [fieldName], type: objType, fields: [], + subgraphs: this.#subgraphs, subgraph, resolverName: info.fieldName, root: true, info, argsAdapter: null, types: this.#types, - entities: this.#entities + entities: this.#entities, + logger: this.#logger }) const text = query.buildQuery(ctx) topic = await client.createSubscription(text, {}, async (data) => { @@ -559,10 +583,13 @@ class Composer { async #runQuery (ctx, query) { const text = query.buildQuery(ctx) - // TODO debug(' run subgraph query', query.subgraph.name, text) + + this.#logger.debug({ subgraph: query.subgraph.name, query: text }, 'run subgraph query') const data = await makeGraphqlRequest(text, query.subgraph.server) - // TODO debug(' result', data) + + this.#logger.debug({ query: text, data }, 'query result') mergeResults(query, ctx.result, data) + this.#logger.debug({ merged: ctx.result }, 'merged result') await this.#runFollowupQueries(ctx, query) } @@ -692,88 +719,6 @@ class Composer { } } -function mergeResults (query, partialResult, response) { - let { result, mergedPartial, mergedPartialParentNode } = selectResult(query, partialResult, response) - - if (mergedPartial === null) { - mergedPartialParentNode[query.path.at(-1)] = result - mergedPartial = result - } else if (Array.isArray(result) && result.length > 0) { - // TODO refactor this case, too many loops, split functions, memoize if possible - - const key = query.key - const parentKey = query.parentKey.field - const as = query.parentKey.as - const many = query.parentKey.many - - const resultIndex = new Map() - - // TODO get list from node result type? 
- const list = Array.isArray(result[0][key]) - - // TODO refactor as a matrix for every case - if (list) { - for (let i = 0; i < result.length; i++) { - for (let j = 0; j < result[i][key].length; j++) { - const s = resultIndex.get(result[i][key][j]) - if (s) { - resultIndex.set(result[i][key][j], s.concat(i)) - continue - } - resultIndex.set(result[i][key][j], [i]) - } - } - } else if (many) { - for (let i = 0; i < result.length; i++) { - const s = resultIndex.get(result[i][key]) - if (s) { - resultIndex.set(result[i][key], s.concat(i)) - continue - } - resultIndex.set(result[i][key], [i]) - } - } else { - for (let i = 0; i < result.length; i++) { - resultIndex.set(result[i][key], i) - } - } - - for (let i = 0; i < mergedPartial.length; i++) { - const merging = mergedPartial[i] - if (!merging) { continue } - - // no need to be recursive - if (Array.isArray(merging)) { - if (list || many) { - for (let j = 0; j < merging.length; j++) { - copyResults(result, resultIndex, merging[j], parentKey, as) - } - } else { - for (let j = 0; j < merging.length; j++) { - copyResult(result, resultIndex, merging[j], parentKey, as) - } - } - continue - } - - if (list || many) { - copyResults(result, resultIndex, merging, parentKey, as) - } else { - copyResult(result, resultIndex, merging, parentKey, as) - } - } - } else if (isObject(result)) { - // TODO copy object fn? - const fields = Object.keys(result) - for (let i = 0; i < fields.length; i++) { - mergedPartial[fields[i]] = result[fields[i]] - } - } else { - // no result - mergedPartialParentNode[query.path.at(-1)] = result - } -} - function createContext ({ fieldName, followups, result }) { return { path: [], @@ -782,106 +727,6 @@ function createContext ({ fieldName, followups, result }) { } } -function copyResult (result, resultIndex, to, key, as) { - if (Array.isArray(to)) { - for (const line of to) { - copyResult(result, resultIndex, line, key, as) - } - return - } - - const index = resultIndex.get(to[key]) - if (index === undefined) { - // TODO if not nullable set it to an empty object - return - } - if (as) { - if (!to[as]) { - to[as] = {} - } - to = to[as] - } - - // TODO copy object fn? - const fields = Object.keys(result[index]) - for (let i = 0; i < fields.length; i++) { - to[fields[i]] = result[index][fields[i]] - } -} - -function copyResults (result, resultIndex, to, key, as) { - let indexes - - if (Array.isArray(to)) { - for (const line of to) { - copyResults(result, resultIndex, line, key, as) - } - return - } - - // TODO refactor? - if (Array.isArray(to[key])) { - indexes = to[key].map(k => resultIndex.get(k)).flat() - } else { - indexes = resultIndex.get(to[key]) - } - - if (indexes === undefined) { - // TODO get if nullable from node result type - if (as && !to[as]) { - to[as] = [] - } - - return - } - if (as) { - if (!to[as]) { - to[as] = [] - } - to = to[as] - } - - for (let i = 0; i < indexes.length; i++) { - // TODO copy object fn? 
- const fields = Object.keys(result[indexes[i]]) - const r = {} - for (let j = 0; j < fields.length; j++) { - r[fields[j]] = result[indexes[i]][fields[j]] - } - to.push(r) - } -} - -function selectResult (query, partialResult, response) { - let result = response[query.resolverName] - let mergedPartial = partialResult - let mergedPartialParentNode = null - - for (let i = 0; i < query.path.length; ++i) { - const path = query.path[i] - mergedPartialParentNode = mergedPartial - mergedPartialParentNode[path] ??= null - - if (!mergedPartial && !mergedPartial[path]) { break } - - mergedPartial = traverseResult(mergedPartial, path) - } - - if (!query.root) { - if (Array.isArray(mergedPartial) && !Array.isArray(result)) { - result = [result] - } else if (!Array.isArray(mergedPartial) && Array.isArray(result) && result.length === 1) { - result = result[0] - } - } - - if (Array.isArray(result)) { - result = result.filter(r => !!r) - } - - return { result, mergedPartial, mergedPartialParentNode } -} - function addFieldToType (type, field, subgraph) { const { schemaNode: schemaType, fieldMap } = type const existingField = fieldMap.get(field.name) diff --git a/lib/query-builder.js b/lib/query-builder.js index bffd330..4b82403 100644 --- a/lib/query-builder.js +++ b/lib/query-builder.js @@ -1,6 +1,6 @@ 'use strict' const { unwrapSchemaType, valueToArgumentString } = require('./graphql-utils') -const { toQueryArgs, keySelection, traverseResult, nodeTypeName, transitivePKey } = require('./utils') +const { toQueryArgs, keySelection, traverseResult, nodeTypeName, transitivePKey, linkingEntities, dummyLogger } = require('./utils') class QueryBuilder { constructor (options) { @@ -10,7 +10,9 @@ class QueryBuilder { this.path = options.path.slice() this.type = options.type this.fields = options.fields + this.logger = options.logger ?? dummyLogger() + this.subgraphs = options.subgraphs this.subgraph = options.subgraph this.resolverName = options.resolverName this.resolverArgsAdapter = options.argsAdapter @@ -46,13 +48,14 @@ class QueryBuilder { // avoid to pass back and forth path between context and folloups // keep the "result path" synced with "result" since it's needed to merge results and get partial results // then work with a subpart/branch of ctx.result and then for followups + // possible solution: inject a node to the schemaNode if (this.swap) { ctx.path = this.path.slice() } else { ctx.path = this.path.slice(0, -1) } - const selectionSet = this.buildSelectionSet(ctx, this.node, this.schemaNode) + const selectionSet = this.buildSelectionSet(ctx, this.node, this.schemaNode, true) const computedArgs = this.buildArguments(this.node) const query = `${this.operation} { ${this.resolverName}${computedArgs} ${selectionSet} }` @@ -168,7 +171,7 @@ class QueryBuilder { return this.buildNodeArgs(node) } - buildSelectionSet (ctx, node, schemaNode) { + buildSelectionSet (ctx, node, schemaNode, main) { const selections = node.selectionSet?.selections const length = selections?.length ?? 
0 @@ -178,10 +181,10 @@ class QueryBuilder { ctx.path.push(node.name.value) - const bareType = unwrapSchemaType(schemaNode.type) - const type = this.types.get(bareType.name) + const schemaNodeType = unwrapSchemaType(schemaNode.type) + const type = this.types.get(schemaNodeType.name) const { fieldMap } = type - const set = new Set() + const selectionFields = new Set() let keyFields for (let i = 0; i < length; ++i) { @@ -204,16 +207,20 @@ class QueryBuilder { value += ` ${this.buildSelectionSet(ctx, fragNode, schemaNode)}` } - set.add(value) + selectionFields.add(value) } } else { - let value = selection.name.value - const field = fieldMap.get(value) + let fieldName = selection.name.value + const field = fieldMap.get(fieldName) const selectionSchemaNode = field?.schemaNode // Some nodes won't have a schema representation. For example, __typename. if (selectionSchemaNode) { - if (!field.subgraphs.has(this.subgraph)) { + console.log({ type: schemaNodeType.name, fieldName, subgraph: field.subgraphs.has(this.subgraph) }, 'buildSelectionSet > selectionSchemaNode') + + // followup if field is not on this subgraph or is resolving a swap entity (fkey or many relation) + if (!field.subgraphs.has(this.subgraph) || + (main && this.swap && schemaNodeType.name !== this.type.schemaNode.name)) { const { index, subgraph, swap } = this.followupIndex(field, ctx.path) let followup = ctx.followups.get(index) @@ -221,6 +228,8 @@ class QueryBuilder { const parentKeys = this.getKeyFields(type) let followupSchema, followupNode + // on swap: resolve current entity with its source resolver + // on parentKeys.many: m2m relation, querying linking entity if (swap) { followupSchema = selectionSchemaNode followupNode = selection @@ -229,8 +238,7 @@ class QueryBuilder { followupNode = node } - followup = this.createFollowup(ctx, followupSchema, followupNode, this.subgraph, type, field, subgraph, swap) - + followup = this.createFollowup(ctx, followupSchema, followupNode, this.subgraph, type, field, subgraph, swap, parentKeys.many) keyFields = collectKeys(keyFields, parentKeys) if (followup) { ctx.followups.set(index, followup) @@ -246,55 +254,77 @@ class QueryBuilder { } if (selection.arguments.length > 0) { - value += this.buildArguments(selection) + fieldName += this.buildArguments(selection) } if (selection.selectionSet) { - value += ` ${this.buildSelectionSet(ctx, selection, selectionSchemaNode)}` + fieldName += ` ${this.buildSelectionSet(ctx, selection, selectionSchemaNode)}` } } - set.add(value) + selectionFields.add(fieldName) } } ctx.path.pop() - // add entity pkey to selection, needed to match rows merging results + const selectedFields = this.selectFields(schemaNodeType, keyFields, selectionFields) + + if (selectedFields.length < 1) { + // TODO this is an error on configuration + return '' + } + + this.selectedFields = selectedFields.map(f => f.split(' ')[0]) + return `{ ${selectedFields.join(', ')} }` + } + + selectFields (type, keyFields, selectionFields) { + const selectionKeyFields = new Set() + // add entity keys to selection, needed to match rows merging results // no followups here if (!keyFields) { - const entity = this.subgraph.entities[bareType.name] + const entity = this.subgraph.entities[type.name] if (!entity) { + console.log('aaaaaaaaaaaa') // TODO onError: missing entity definition', typeName, 'in subgraph', this.subgraph.name) } else { - // TODO keys generator fn - // TODO collect entity keys on composer mergeSchema to avoid collecting them every time - // TODO lookup for entities to resolve, no 
need to add all the fkeys always - keyFields = { - pkey: entity.pkey, - fkeys: [ - ...(entity.fkeys || []), - ...(entity.many || []) - .map(m => ({ field: m.pkey, as: m.as })) - ] - } + keyFields = collectEntityKeys(entity) } } if (keyFields) { - set.add(keySelection(keyFields.pkey)) + if (!this.swap) { + keySelection(keyFields.pkey, selectionKeyFields) + } + for (let i = 0; i < keyFields.fkeys.length; ++i) { if (!keyFields.fkeys[i].field) { continue } - const keyFieldName = keySelection(keyFields.fkeys[i].field) - set.add(keyFieldName) + keySelection(keyFields.fkeys[i].field, selectionKeyFields) + } + + // resolving a linking entity + if (this.parentKey?.linking) { + // get keys for linking entity + for (let i = 0; i < keyFields.many.length; ++i) { + if (!keyFields.many[i].link) { continue } + keySelection(keyFields.many[i].link.pkey, selectionKeyFields) + keySelection(keyFields.many[i].link.fkey, selectionKeyFields) + } + } else { + // skip link entity keys, they have their own query + for (let i = 0; i < keyFields.many.length; ++i) { + if (keyFields.many[i].link) { continue } + keySelection(keyFields.many[i].pkey, selectionKeyFields) + } } } // TODO improve structure - const selectedFields = Array.from(set) - this.selectedFields = selectedFields.map(f => f.split(' ')[0]) - - return `{ ${selectedFields.join(', ')} }` + for (const fieldName of selectionFields.values()) { + selectionKeyFields.add(fieldName) + } + return Array.from(selectionKeyFields) } // TODO memoize by subgraph#entity @@ -303,26 +333,22 @@ class QueryBuilder { const entity = this.subgraph.entities[typeName] - if (!entity) { - const pkey = transitivePKey(typeName, this.subgraph.entities) - if (!pkey) { - // TODO onError - throw new Error(`Unable to resolve entity ${typeName} in subgraph ${this.subgraph.name}`) - } - return { - pkey, - fkeys: [] - } - } else { - return { - pkey: entity.pkey, - fkeys: [ - ...(entity.fkeys || []), - ...(entity.many || []) - .map(m => ({ field: m.pkey, as: m.as })) - ] - } + if (entity) { + return collectEntityKeys(entity) + } + + // look for transitive key + const pkey = transitivePKey(typeName, this.subgraph.entities) + + // look for 1-to-many or many-to-many relations and so keys + const many = linkingEntities(typeName, this.subgraph.entities) + + // at least should be a key to resolve the entity type + if (!pkey && !many) { + // TODO onError + throw new Error(`Unable to resolve entity ${typeName} in subgraph ${this.subgraph.name}`) } + return { pkey, fkeys: [], many: many ?? 
[] } } followupSubgraph (field) { @@ -340,9 +366,7 @@ class QueryBuilder { // in this way we save a roundtrip to a resolver on the current subgraph to resolve the entity // TODO use a Map on subgraph to avoid this loop - const entitySubgraph = entity.subgraphs.find(s => { - return s.name === subgraph.name - }) + const entitySubgraph = entity.subgraphs.find(s => s.name === subgraph.name) if (entitySubgraph.entities[fieldTypeName]?.resolver) { return { subgraph, swap: true } @@ -356,7 +380,9 @@ class QueryBuilder { return { subgraph: s, swap: true } } - createFollowup (ctx, schemaNode, node, parentSubgraph, parentType, field, subgraph, swap) { + // TODO refactor this function, too long + // TODO memoize partially + createFollowup (ctx, schemaNode, node, parentSubgraph, parentType, field, subgraph, swap, linkingEntitiesMany) { const parentTypeName = parentType.schemaNode.name let fieldTypeName = parentTypeName @@ -378,16 +404,84 @@ class QueryBuilder { let resolverName = entity.resolver.name let key = entity.pkey + // TODO can a node have more linking entities? + const many = linkingEntitiesMany?.[0] + // linking entities + if (many?.link) { + if (this.parentKey?.linking) { + // resolve linked entity - followup of linking + parentKey = { field: many.link.fkey, as: many.as, many: false, linked: true } + // TODO use a Map on subgraph to avoid this loop + subgraph = this.subgraphs.find(s => s.name === many.subgraph) + const type = this.types.get(many.type) + + return { + swap, + path, + node, + schemaNode, + type, + resolverName: many.resolver.name, + argsAdapter: many.resolver.argsAdapter, + partialResults: many.resolver.partialResults, + key, + parentKey, + subgraph, + subgraphs: this.subgraphs, + logger: this.logger, + info: this.info, + types: this.types, + entities: this.entities, + root: false, + fields: [field], + solved: false + } + } else { + // resolve linking entity - first followup of many.link option + key = many.pkey + parentKey = { field: many.link.pkey, as: many.as, many: true, linking: true } + // TODO use a Map on subgraph to avoid this loop + subgraph = this.subgraphs.find(s => s.name === many.link.subgraph) + const type = this.types.get(many.link.type) + + return { + swap, + path, + node, + schemaNode, + type, + resolverName: many.link.resolver.name, + argsAdapter: many.link.resolver.argsAdapter, + partialResults: many.link.resolver.partialResults, + key, + parentKey, + subgraph, + subgraphs: this.subgraphs, + logger: this.logger, + info: this.info, + types: this.types, + entities: this.entities, + root: false, + fields: [field], + solved: false + } + } + } + if (parentEntity?.many) { + // TODO support many folloups manyEntity = parentEntity.many.find(m => m.type === fieldTypeName) if (manyEntity) { // TODO createKey fn - // reverse the keys as the many relation is inverse + // pkey and fkey are switched as the many relation is inverse key = manyEntity.fkey parentKey = { field: manyEntity.pkey, as: manyEntity.as, many: true } argsAdapter = manyEntity.resolver.argsAdapter resolverName = manyEntity.resolver.name partialResults = manyEntity.resolver.partialResults + + // TODO use a Map on subgraph to avoid this loop + subgraph = this.subgraphs.find(s => s.name === manyEntity.subgraph) } } @@ -414,12 +508,14 @@ class QueryBuilder { resolverName, argsAdapter, partialResults, + key, + parentKey, subgraph, + subgraphs: this.subgraphs, + logger: this.logger, info: this.info, types: this.types, entities: this.entities, - key, - parentKey, root: false, fields: [field], solved: false @@ 
-438,6 +534,17 @@ class QueryBuilder { } } +// TODO collect entity keys on composer mergeSchema to avoid collecting them every time +// TODO lookup for entities to resolve, no need to add all the fkeys always +// TODO memoize by subgraph +function collectEntityKeys (entity) { + return { + pkey: entity.pkey, + fkeys: [...(entity.fkeys || [])], + many: [...(entity.many || [])] + } +} + function collectKeys (keyFields, parentKeys) { if (!keyFields) { return parentKeys @@ -446,6 +553,7 @@ function collectKeys (keyFields, parentKeys) { // TODO if parentKeys.pkey != keyFields.pkey throw Error keyFields.fkeys = keyFields.fkeys.concat(parentKeys.fkeys) + keyFields.many = keyFields.many.concat(parentKeys.many) return keyFields } diff --git a/lib/results.js b/lib/results.js new file mode 100644 index 0000000..2069c1e --- /dev/null +++ b/lib/results.js @@ -0,0 +1,257 @@ +'use strict' + +const { copyObjectByKeys, traverseResult, isObject } = require('./utils') + +function selectResult (query, partialResult, response) { + let result = response[query.resolverName] + let mergedPartial = partialResult + let mergedPartialParentNode = null + + const path = query.parentKey?.linked + ? query.path.slice(0, -1) + : query.path + + for (let i = 0; i < path.length; ++i) { + const p = path[i] + mergedPartialParentNode = mergedPartial + mergedPartialParentNode[p] ??= null + + if (!mergedPartial && !mergedPartial[p]) { break } + + mergedPartial = traverseResult(mergedPartial, p) + } + + if (!query.root) { + if (Array.isArray(mergedPartial) && !Array.isArray(result)) { + result = [result] + } else if (!Array.isArray(mergedPartial) && Array.isArray(result) && result.length === 1) { + result = result[0] + } + } + + if (Array.isArray(result)) { + result = result.filter(r => !!r) + } + + return { result, mergedPartial, mergedPartialParentNode } +} + +function mergeResults (query, partialResult, response) { + const path = query.path + let { result, mergedPartial, mergedPartialParentNode } = selectResult(query, partialResult, response) + + if (mergedPartial === null) { + mergedPartialParentNode[path.at(-1)] = result + mergedPartial = result + } else if (Array.isArray(result) && result.length > 0) { + if (query.parentKey?.linking) { + mergeManyLinkingResults(query, result, mergedPartial) + return + } else if (query.parentKey?.linked) { + mergeManyLinkedResults(query, result, mergedPartial) + return + } + // TODO cover other cases to avoid the following loop with many switches + + const key = query.key + const parentKey = query.parentKey.field + const as = query.parentKey.as + const many = query.parentKey.many + + const { list, resultIndex } = index(result, key, many) + + for (let i = 0; i < mergedPartial.length; i++) { + const merging = mergedPartial[i] + if (!merging) { continue } + + // no need to be recursive + if (Array.isArray(merging)) { + if (list || many) { + for (let j = 0; j < merging.length; j++) { + copyResults(result, resultIndex, merging[j], parentKey, as) + } + } else { + for (let j = 0; j < merging.length; j++) { + copyResult(result, resultIndex, merging[j], parentKey, as) + } + } + continue + } + + if (list || many) { + copyResults(result, resultIndex, merging, parentKey, as) + } else { + copyResult(result, resultIndex, merging, parentKey, as) + } + } + } else if (isObject(result)) { + copyObjectByKeys(mergedPartial, result) + } else { + // no result + mergedPartialParentNode[query.path.at(-1)] = result + } +} + +/** + * index result by key + */ +function index (result, key, many) { + // TODO get list from 
node result type? + const list = Array.isArray(result[0][key]) + const resultIndex = new Map() + // TODO refactor as a matrix for every case + if (list) { + for (let i = 0; i < result.length; i++) { + for (let j = 0; j < result[i][key].length; j++) { + const s = resultIndex.get(result[i][key][j]) + if (s) { + resultIndex.set(result[i][key][j], s.concat(i)) + continue + } + resultIndex.set(result[i][key][j], [i]) + } + } + } else if (many) { + for (let i = 0; i < result.length; i++) { + const s = resultIndex.get(result[i][key]) + if (s) { + resultIndex.set(result[i][key], s.concat(i)) + continue + } + resultIndex.set(result[i][key], [i]) + } + } else { + for (let i = 0; i < result.length; i++) { + resultIndex.set(result[i][key], i) + } + } + return { list, resultIndex } +} + +/** + * merge many to many relation + */ +function mergeManyLinkingResults (query, result, mergedPartial) { + const key = query.key + const parentKey = query.parentKey.field + const as = query.parentKey.as + const many = query.parentKey.many // true + const { resultIndex } = index(result, parentKey, many) + + for (let i = 0; i < mergedPartial.length; i++) { + const merging = mergedPartial[i] + if (!merging) { continue } + + // no need to be recursive + if (Array.isArray(merging)) { + for (let j = 0; j < merging.length; j++) { + copyResults(result, resultIndex, merging[j], key, as) + } + continue + } + copyResults(result, resultIndex, merging, key, as) + } +} + +function mergeManyLinkedResults (query, result, mergedPartial) { + const key = query.key + const parentKey = query.parentKey.field + const as = query.parentKey.as + const many = query.parentKey.many // false + const { list, resultIndex } = index(result, key, many) + + for (let i = 0; i < mergedPartial.length; i++) { + const merging = mergedPartial[i][as] + if (!merging) { continue } + + // no need to be recursive + if (Array.isArray(merging)) { + if (list) { + for (let j = 0; j < merging.length; j++) { + copyResults(result, resultIndex, merging[j], parentKey, as) + } + } else { + for (let j = 0; j < merging.length; j++) { + copyResult(result, resultIndex, merging[j], parentKey) + } + } + continue + } + + if (list || many) { + copyResults(result, resultIndex, merging, parentKey, as) + } else { + copyResult(result, resultIndex, merging, parentKey, as) + } + } +} + +function copyResult (result, resultIndex, to, key, as) { + if (Array.isArray(to)) { + for (const line of to) { + copyResult(result, resultIndex, line, key, as) + } + return + } + + const index = resultIndex.get(to[key]) + if (index === undefined) { + // TODO if not nullable set it to an empty object + return + } + if (as) { + if (!to[as]) { + to[as] = {} + } + to = to[as] + } + + // TODO copy object fn? + const fields = Object.keys(result[index]) + for (let i = 0; i < fields.length; i++) { + to[fields[i]] = result[index][fields[i]] + } +} + +function copyResults (result, resultIndex, to, key, as) { + let indexes + + if (Array.isArray(to)) { + for (const line of to) { + copyResults(result, resultIndex, line, key, as) + } + return + } + + // TODO refactor? 
+ if (Array.isArray(to[key])) { + indexes = to[key].map(k => resultIndex.get(k)).flat() + } else { + indexes = resultIndex.get(to[key]) + } + + if (indexes === undefined) { + // TODO get if nullable from node result type + if (as && !to[as]) { + to[as] = [] + } + + return + } + if (as) { + if (!to[as]) { + to[as] = [] + } + to = to[as] + } + + for (let i = 0; i < indexes.length; i++) { + const r = {} + copyObjectByKeys(r, result[indexes[i]]) + to.push(r) + } +} + +module.exports = { + mergeResults +} diff --git a/lib/utils.js b/lib/utils.js index b247c50..9ae2e04 100644 --- a/lib/utils.js +++ b/lib/utils.js @@ -1,5 +1,11 @@ 'use strict' +const pino = require('pino') + +function dummyLogger () { + return pino({ level: 'silent' }) +} + function createDefaultArgsAdapter (entityName, pkey) { return function argsAdapter (partialResults) { return { [pkey + 's']: partialResults.map(r => r[pkey]) } @@ -10,6 +16,13 @@ function isObject (obj) { return obj !== null && typeof obj === 'object' } +function copyObjectByKeys (to, src) { + const keys = Object.keys(src) + for (let i = 0; i < keys.length; i++) { + to[keys[i]] = src[keys[i]] + } +} + // TODO filter same values function toQueryArgs (v, root = true) { if (v === undefined || v === null) { return '' } @@ -44,26 +57,79 @@ function toQueryArgs (v, root = true) { return typeof v === 'string' ? `"${v}"` : v.toString() } -function keySelection (path) { - if (path.indexOf('.') === -1) { return path } +/** + * add to "set" the key portion of path, if any + * @param {string?} path + * @param {Set} set + */ +function keySelection (path, set) { + if (!path) { return } + if (path.indexOf('.') === -1) { set.add(path) } - return path.split('.').pop() + const key = path.split('.').pop() + if (!key) { return } + set.add(key) } /** - * get pkey from fkeys of entities in the subgraph - * TODO update to support composite pkey + * Get pkey from fkeys of entities in the subgraph + * it happens when the entity doesn't have a field to identify the fk, + * but the key is nested in a type. + * Example: + * + * type Book { id: ID, title: String, author: Writer } + * type Writer { id: ID, name: String } + * + * fkeys: [{ pkey: 'author.id', type: 'Writer' }] */ -function transitivePKey (type, subgraphEntities) { +function transitivePKey (typeName, subgraphEntities) { for (const entity of Object.values(subgraphEntities)) { for (const fkey of entity?.fkeys) { - if (fkey.type === type) { + if (fkey.type === typeName) { return fkey.pkey } } } } +/** + * Get fkeys to resolve mamy-2-many relations with a linking entity, + * Example: + * + * type Food { id: ID, name: String } + * type Restaurants { id: ID, name: String } + * type RestaurantsFoods { foodId: ID, restaurantId: ID } + * + * to have + * extend type Restaurants { foods: [Food] } + * + * entities: { Restaurant: { + * ... + * many: [{ + * type: 'Food', as: 'foods', pkey: 'id', + * link: { + * entity: 'RestaurantsFoods', + * pkey: 'foodId', + * fkey: 'restaurantId', + * resolver: { name: 'restaurantsFoods' } + * } + * }] + * } } + * and viceversa for Food.restaurants + * + * TODO memoize + */ +function linkingEntities (typeName, subgraphEntities) { + let many = [] + + // TODO optimize this loop + for (const entity of Object.values(subgraphEntities)) { + many = many.concat(entity?.many.filter(many => many.type === typeName && many.link)) + } + + return many.length > 0 ? 
many : undefined +} + function traverseResult (result, path) { if (Array.isArray(result)) { result = result.map(r => { @@ -85,4 +151,16 @@ function nodeTypeName (node) { return node.schemaNode.type.name || node.schemaNode.type.ofType.name || node.schemaNode.type.ofType.ofType.name } -module.exports = { createDefaultArgsAdapter, isObject, keySelection, transitivePKey, traverseResult, toQueryArgs, schemaTypeName, nodeTypeName } +module.exports = { + dummyLogger, + createDefaultArgsAdapter, + isObject, + copyObjectByKeys, + keySelection, + transitivePKey, + linkingEntities, + traverseResult, + toQueryArgs, + schemaTypeName, + nodeTypeName +} diff --git a/package.json b/package.json index 090bec6..84f088d 100644 --- a/package.json +++ b/package.json @@ -16,6 +16,7 @@ "graphql": "^16.8.1", "mercurius": "^13.2.2", "metaline": "^1.1.0", + "pino": "^8.16.2", "undici": "^6.0.0" }, "devDependencies": { diff --git a/test/many-to-many.test.js b/test/many-to-many.test.js new file mode 100644 index 0000000..771f643 --- /dev/null +++ b/test/many-to-many.test.js @@ -0,0 +1,125 @@ +'use strict' + +const assert = require('node:assert/strict') +const { test } = require('node:test') +const pino = require('pino') +const { graphqlRequest, buildComposer } = require('./helper') + +test('should handle many-to-many relation linked by an intermediary entity', async (t) => { + const composerOptions = { + defaultArgsAdapter: (partialResults) => { + return { where: { id: { in: partialResults.map(r => r.id) } } } + }, + addEntitiesResolvers: true, + logger: pino({ level: 'debug' }), + entities: { + 'restaurants-subgraph': { + Restaurant: { + resolver: { name: 'restaurants' }, + pkey: 'id', + many: [ + { + type: 'Food', + as: 'foods', + pkey: 'id', + link: { + type: 'RestaurantsFoods', + pkey: 'restaurantId', + fkey: 'foodId', + resolver: { + name: 'restaurantsFoods', + argsAdapter: (restaurantIds) => { + return { where: { restaurantId: { in: restaurantIds } } } + }, + partialResults: (restaurants) => { + return restaurants.map(r => r.id) + } + } + }, + subgraph: 'foods-subgraph', + resolver: { + name: 'foods', + argsAdapter: (foodIds) => { + return { where: { id: { in: foodIds } } } + }, + partialResults: (restaurantFoods) => { + return restaurantFoods.map(r => r.foodId) + } + } + } + ] + } + }, + 'foods-subgraph': { + Food: { + resolver: { name: 'foods' }, + pkey: 'id', + many: [ + { + type: 'Restaurant', + as: 'restaurants', + pkey: 'id', + link: { + type: 'RestaurantsFoods', + pkey: 'foodId', + fkey: 'restaurantId', + subgraph: 'restaurants-subgraph', + resolver: { + name: 'restaurantsFoods', + argsAdapter: (foodIds) => { + return { where: { foodId: { in: foodIds } } } + }, + partialResults: (foods) => { + return foods.map(r => r.id) + } + } + }, + subgraph: 'restaurants-subgraph', + resolver: { + name: 'restaurants', + argsAdapter: (restaurantIds) => { + return { where: { id: { in: restaurantIds } } } + }, + partialResults: (restaurantFoods) => { + return restaurantFoods.map(r => r.restaurantId) + } + } + } + ] + } + } + } + } + + const requests = [ + // { + // query: '{ restaurants (where: { id: { in: [90, 91] } }) { businessName, foods { name } } }', + // expected: { + // restaurants: [ + // { businessName: 'Pizzeria Napoletana', foods: [{ name: 'Pizza margherita' }, { name: 'Pizza boscaiola' }, { name: 'Pizza capricciosa' }] }, + // { businessName: 'Ristorante Stellato', foods: [{ name: 'Spaghetti carbonara' }, { name: 'Tagliolini scoglio' }, { name: 'Pici cacio e pepe' }] } + // ] + // } + // }, + { + query: '{ 
foods (where: { id: { in: [50, 60] } }) { name, restaurants { businessName } } }', + expected: { } + } + // TODO nested, nested x n + ] + + const { service } = await buildComposer(t, ['restaurants-subgraph', 'foods-subgraph'], composerOptions) + + await service.listen() + + for (const request of requests) { + const response = await graphqlRequest(service, request.query, request.variables) + + console.log(JSON.stringify(response)) + + assert.deepStrictEqual(response, request.expected, 'should get expected result from composer service,' + + '\nquery: ' + request.query + + '\nexpected' + JSON.stringify(request.expected, null, 2) + + '\nresponse' + JSON.stringify(response, null, 2)) + } +})
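The second request in the new test currently compares the response against an empty `expected` object and logs the raw response, since the merge logic is still being finalized in this patch. Going only by the fixture data added here (food `50` is served by restaurant `90`, food `60` by restaurants `91` and `92`), the composed result for that query would presumably look like the sketch below; it is a derived expectation, not a value taken from the codebase:

```js
// Hypothetical expected value for
//   '{ foods (where: { id: { in: [50, 60] } }) { name, restaurants { businessName } } }'
// derived from the foods/restaurants fixture data in this patch.
const expected = {
  foods: [
    {
      name: 'Pizza margherita',
      restaurants: [{ businessName: 'Pizzeria Napoletana' }]
    },
    {
      name: 'Spaghetti carbonara',
      restaurants: [
        { businessName: 'Ristorante Stellato' },
        { businessName: 'Trattoria da Gigi' }
      ]
    }
  ]
}
```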