diff --git a/libV2/schemaUtils.js b/libV2/schemaUtils.js index ddc63591..ea828681 100644 --- a/libV2/schemaUtils.js +++ b/libV2/schemaUtils.js @@ -253,6 +253,36 @@ let QUERYPARAM = 'query', return REF_STACK_LIMIT; }, + /** + * Resets cache storing readOnly and writeOnly property map. + * + * @param {Object} context - Global context object + * @returns {void} + */ + resetReadWritePropCache = (context) => { + context.readOnlyPropCache = {}; + context.writeOnlyPropCache = {}; + }, + + /** + * Merges provided readOnly writeOnly properties cache with existing cache present in context + * + * @param {Object} context - Global context object + * @param {Object} readOnlyPropCache - readOnly properties cache to be merged + * @param {Object} writeOnlyPropCache - writeOnly properties cache to be merged + * @param {Object} currentPath - Current path (json-pointer) being resolved relative to original schema + * @returns {void} + */ + mergeReadWritePropCache = (context, readOnlyPropCache, writeOnlyPropCache, currentPath = '') => { + _.forOwn(readOnlyPropCache, (value, key) => { + context.readOnlyPropCache[utils.mergeJsonPath(currentPath, key)] = true; + }); + + _.forOwn(writeOnlyPropCache, (value, key) => { + context.writeOnlyPropCache[utils.mergeJsonPath(currentPath, key)] = true; + }); + }, + /** * Resolve a given ref from the schema * @param {Object} context - Global context object @@ -260,7 +290,7 @@ let QUERYPARAM = 'query', * @param {Number} stackDepth - Depth of the current stack for Ref resolution * @param {Object} seenRef - Seen Reference map * - * @returns {Object} Returns the object that staisfies the schema + * @returns {Object} Returns the object that satisfies the schema */ resolveRefFromSchema = (context, $ref, stackDepth = 0, seenRef = {}) => { const { specComponents } = context, @@ -274,7 +304,11 @@ let QUERYPARAM = 'query', seenRef[$ref] = true; if (context.schemaCache[$ref]) { - return context.schemaCache[$ref]; + // Also merge readOnly and writeOnly prop cache from schemaCache to global context cache + mergeReadWritePropCache(context, context.schemaCache[$ref].readOnlyPropCache, + context.schemaCache[$ref].writeOnlyPropCache); + + return context.schemaCache[$ref].schema; } if (!_.isFunction($ref.split)) { @@ -330,7 +364,7 @@ let QUERYPARAM = 'query', * @param {Number} stackDepth - Depth of the current stack for Ref resolution * @param {Object} seenRef - Seen Reference map * - * @returns {Object} Returns the object that staisfies the schema + * @returns {Object} Returns the object that satisfies the schema */ resolveRefForExamples = (context, $ref, stackDepth = 0, seenRef = {}) => { const { specComponents } = context, @@ -344,7 +378,11 @@ let QUERYPARAM = 'query', seenRef[$ref] = true; if (context.schemaCache[$ref]) { - return context.schemaCache[$ref]; + // Also merge readOnly and writeOnly prop cache from schemaCache to global context cache + mergeReadWritePropCache(context, context.schemaCache[$ref].readOnlyPropCache, + context.schemaCache[$ref].writeOnlyPropCache); + + return context.schemaCache[$ref].schema; } if (!_.isFunction($ref.split)) { @@ -391,7 +429,11 @@ let QUERYPARAM = 'query', } // Add the resolved schema to the global schema cache - context.schemaCache[$ref] = resolvedExample; + context.schemaCache[$ref] = { + schema: resolvedExample, + readOnlyPropCache: {}, + writeOnlyPropCache: {} + }; return resolvedExample; }, @@ -439,22 +481,23 @@ let QUERYPARAM = 'query', }, /** - * Handle resoltion of allOf property of schema + * Handle resolution of allOf property of schema * 
* @param {Object} context - Global context object * @param {Object} schema - Schema to be resolved * @param {Number} [stack] - Current recursion depth * @param {*} resolveFor - resolve refs for flow validation/conversion (value to be one of VALIDATION/CONVERSION) * @param {Object} seenRef - Map of all the references that have been resolved + * @param {String} currentPath - Current path (json-pointer) being resolved relative to original schema * * @returns {Object} Resolved schema */ - resolveAllOfSchema = (context, schema, stack, resolveFor = CONVERSION, seenRef = {}) => { + resolveAllOfSchema = (context, schema, stack = 0, resolveFor = CONVERSION, seenRef = {}, currentPath = '') => { try { return mergeAllOf(_.assign(schema, { allOf: _.map(schema.allOf, (schema) => { // eslint-disable-next-line no-use-before-define - return resolveSchema(context, schema, stack, resolveFor, _.cloneDeep(seenRef)); + return _resolveSchema(context, schema, stack, resolveFor, _.cloneDeep(seenRef), currentPath); }) }), { // below option is required to make sure schemas with additionalProperties set to false are resolved correctly @@ -482,11 +525,12 @@ let QUERYPARAM = 'query', * @param {Number} [stack] - Current recursion depth * @param {String} resolveFor - For which action this resolution is to be done * @param {Object} seenRef - Map of all the references that have been resolved + * @param {String} currentPath - Current path (json-pointer) being resolved relative to original schema * @todo: Explore using a directed graph/tree for maintaining seen ref * * @returns {Object} Returns the object that satisfies the schema */ - resolveSchema = (context, schema, stack = 0, resolveFor = CONVERSION, seenRef = {}) => { + _resolveSchema = (context, schema, stack = 0, resolveFor = CONVERSION, seenRef = {}, currentPath = '') => { if (!schema) { return new Error('Schema is empty'); } @@ -523,16 +567,17 @@ let QUERYPARAM = 'query', }); if (resolveFor === CONVERSION) { - return resolveSchema(context, compositeSchema[0], stack, resolveFor, _.cloneDeep(seenRef)); + return _resolveSchema(context, compositeSchema[0], stack, resolveFor, _.cloneDeep(seenRef), currentPath); } - return { [compositeKeyword]: _.map(compositeSchema, (schemaElement) => { - return resolveSchema(context, schemaElement, stack, resolveFor, _.cloneDeep(seenRef)); + return { [compositeKeyword]: _.map(compositeSchema, (schemaElement, index) => { + return _resolveSchema(context, schemaElement, stack, resolveFor, _.cloneDeep(seenRef), + utils.addToJsonPath(currentPath, [compositeKeyword, index])); }) }; } if (schema.allOf) { - return resolveAllOfSchema(context, schema, stack, resolveFor, _.cloneDeep(seenRef)); + return resolveAllOfSchema(context, schema, stack, resolveFor, _.cloneDeep(seenRef), currentPath); } if (schema.$ref) { @@ -547,14 +592,42 @@ let QUERYPARAM = 'query', seenRef[schemaRef] = true; if (context.schemaCache[schemaRef]) { - schema = context.schemaCache[schemaRef]; + // Also merge readOnly and writeOnly prop cache from schemaCache to global context cache + mergeReadWritePropCache(context, context.schemaCache[schemaRef].readOnlyPropCache, + context.schemaCache[schemaRef].writeOnlyPropCache, currentPath); + + schema = context.schemaCache[schemaRef].schema; } else { + const existingReadPropCache = context.readOnlyPropCache, + existingWritePropCache = context.writeOnlyPropCache; + schema = resolveRefFromSchema(context, schemaRef, stack, _.cloneDeep(seenRef)); - schema = resolveSchema(context, schema, stack, resolveFor, _.cloneDeep(seenRef)); + + /** + 
* Reset readOnly and writeOnly prop cache before resolving schema to make sure + * we have fresh cache for $ref resolution which will be stored as part of schemaCache + */ + resetReadWritePropCache(context); + schema = _resolveSchema(context, schema, stack, resolveFor, _.cloneDeep(seenRef), ''); // Add the resolved schema to the global schema cache - context.schemaCache[schemaRef] = schema; + context.schemaCache[schemaRef] = { + schema, + readOnlyPropCache: context.readOnlyPropCache, + writeOnlyPropCache: context.writeOnlyPropCache + }; + + // eslint-disable-next-line one-var + const newReadPropCache = context.readOnlyPropCache, + newWritePropCache = context.writeOnlyPropCache; + + // Assign existing readOnly and writeOnly prop cache back to global context cache + context.readOnlyPropCache = existingReadPropCache; + context.writeOnlyPropCache = existingWritePropCache; + + // Merge existing and current cache to make sure we have all the properties in cache + mergeReadWritePropCache(context, newReadPropCache, newWritePropCache, currentPath); } return schema; } @@ -591,7 +664,10 @@ let QUERYPARAM = 'query', return; } - resolvedSchemaProps[propertyName] = resolveSchema(context, property, stack, resolveFor, _.cloneDeep(seenRef)); + const currentPropPath = utils.addToJsonPath(currentPath, ['properties', propertyName]); + + resolvedSchemaProps[propertyName] = _resolveSchema(context, property, stack, resolveFor, + _.cloneDeep(seenRef), currentPropPath); }); schema.properties = resolvedSchemaProps; @@ -599,7 +675,8 @@ let QUERYPARAM = 'query', } // If schema is of type array else if (concreteUtils.compareTypes(schema.type, SCHEMA_TYPES.array) && schema.items) { - schema.items = resolveSchema(context, schema.items, stack, resolveFor, _.cloneDeep(seenRef)); + schema.items = _resolveSchema(context, schema.items, stack, resolveFor, _.cloneDeep(seenRef), + utils.addToJsonPath(currentPath, ['items'])); } // Any properties to ignored should not be available in schema else if (_.every(SCHEMA_PROPERTIES_TO_EXCLUDE, (schemaKey) => { return !schema.hasOwnProperty(schemaKey); })) { @@ -631,7 +708,8 @@ let QUERYPARAM = 'query', if (schema.hasOwnProperty('additionalProperties')) { schema.additionalProperties = _.isBoolean(schema.additionalProperties) ? schema.additionalProperties : - resolveSchema(context, schema.additionalProperties, stack, resolveFor, _.cloneDeep(seenRef)); + _resolveSchema(context, schema.additionalProperties, stack, resolveFor, _.cloneDeep(seenRef), + utils.addToJsonPath(currentPath, ['additionalProperties'])); schema.type = schema.type || SCHEMA_TYPES.object; } @@ -646,14 +724,72 @@ let QUERYPARAM = 'query', }); } + // Keep track of readOnly and writeOnly properties to resolve request and responses accordingly later. 
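+    // e.g. while resolving { properties: { id: { readOnly: true } } } from the root schema,
+    // currentPath here is '/properties/id', so readOnlyPropCache becomes { '/properties/id': true }.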
+ if (schema.readOnly) { + context.readOnlyPropCache[currentPath] = true; + } + + if (schema.writeOnly) { + context.writeOnlyPropCache[currentPath] = true; + } + return schema; }, + /** + * Wrapper around _resolveSchema which resolves a given schema + * + * @param {Object} context - Global context + * @param {Object} schema - Schema that is to be resolved + * @param {Object} resolutionMeta - Metadata of resolution taking place + * @param {Number} resolutionMeta.stack - Current recursion depth + * @param {String} resolutionMeta.resolveFor - For which action this resolution is to be done + * @param {Object} resolutionMeta.seenRef - Map of all the references that have been resolved + * @param {Boolean} resolutionMeta.isResponseSchema - Whether schema is from response or not + * + * @returns {Object} Returns the object that satisfies the schema + */ + resolveSchema = (context, schema, + { stack = 0, resolveFor = CONVERSION, seenRef = {}, isResponseSchema = false } = {} + ) => { + // reset readOnly and writeOnly prop cache before resolving schema to make sure we have fresh cache + resetReadWritePropCache(context); + + let resolvedSchema = _resolveSchema(context, schema, stack, resolveFor, seenRef); + + /** + * If readOnly or writeOnly properties are present in the schema, we need to clone original schema first. + * Because we modify original resolved schema and delete readOnly or writeOnly properties from it + * depending upon if schema belongs to Request or Response. + * This is done to avoid modifying original schema object and to keep it intact for future use. + */ + if (!_.isEmpty(context.readOnlyPropCache) || !_.isEmpty(context.writeOnlyPropCache)) { + resolvedSchema = _.cloneDeep(resolvedSchema); + } + + if (isResponseSchema) { + _.forOwn(context.writeOnlyPropCache, (value, key) => { + // We need to make sure to remove empty strings via _.compact that are added while forming json-pointer + _.unset(resolvedSchema, utils.getJsonPathArray(key)); + }); + } + else { + _.forOwn(context.readOnlyPropCache, (value, key) => { + // We need to make sure to remove empty strings via _.compact that are added while forming json-pointer + _.unset(resolvedSchema, utils.getJsonPathArray(key)); + }); + } + + return resolvedSchema; + }, + /** * Provides information regarding serialisation of param * * @param {Object} context - Required context from related SchemaPack function * @param {Object} param - OpenAPI Parameter object + * @param {Object} options - Options object + * @param {Boolean} options.isResponseSchema - Whether schema is from response or not * @returns {Object} - Information regarding parameter serialisation. Contains following properties. 
* { * style - style property defined/inferred from schema @@ -664,7 +800,7 @@ let QUERYPARAM = 'query', * isExplodable - whether params can be exploded (serialised value can contain key and value) * } */ - getParamSerialisationInfo = (context, param) => { + getParamSerialisationInfo = (context, param, { isResponseSchema = false } = {}) => { let paramName = _.get(param, 'name'), paramSchema, style, // style property defined/inferred from schema @@ -682,7 +818,7 @@ let QUERYPARAM = 'query', } // Resolve the ref and composite schemas - paramSchema = resolveSchema(context, param.schema); + paramSchema = resolveSchema(context, param.schema, { isResponseSchema }); isExplodable = paramSchema.type === 'object'; @@ -796,16 +932,20 @@ let QUERYPARAM = 'query', * * @param {Object} context - Required context from related SchemaPack function * @param {Object} param - Parameter that is to be resolved from schema - * @param {String} schemaFormat - Corresponding schema format (can be one of xml/default) + * @param {Object} options - Addition options + * @param {String} options.schemaFormat - Corresponding schema format (can be one of xml/default) + * @param {Boolean} options.isResponseSchema - Whether schema is from response or not * @returns {*} Value of the parameter */ - resolveValueOfParameter = (context, param, schemaFormat = SCHEMA_FORMATS.DEFAULT) => { + resolveValueOfParameter = (context, param, + { schemaFormat = SCHEMA_FORMATS.DEFAULT, isResponseSchema = false } = {} + ) => { if (!param || !param.hasOwnProperty('schema')) { return ''; } const { indentCharacter } = context.computedOptions, - resolvedSchema = resolveSchema(context, param.schema), + resolvedSchema = resolveSchema(context, param.schema, { isResponseSchema }), { parametersResolution } = context.computedOptions, shouldGenerateFromExample = parametersResolution === 'example', hasExample = param.example !== undefined || @@ -917,9 +1057,19 @@ let QUERYPARAM = 'query', (parameter.enum ? 
' (This can only be one of ' + parameter.enum + ')' : ''); }, - serialiseParamsBasedOnStyle = (context, param, paramValue) => { + /** + * Serialise Param based on mentioned style field in schema object + * + * @param {Object} context - Global context object + * @param {Object} param - OpenAPI Parameter object + * @param {*} paramValue - Value of the parameter + * @param {Object} options - Additional options for serialisation + * @param {Boolean} options.isResponseSchema - Whether schema is from response or not + * @returns {Array} - Array of key-value pairs for the parameter + */ + serialiseParamsBasedOnStyle = (context, param, paramValue, { isResponseSchema = false } = {}) => { const { style, explode, startValue, propSeparator, keyValueSeparator, isExplodable } = - getParamSerialisationInfo(context, param), + getParamSerialisationInfo(context, param, { isResponseSchema }), { enableOptionalParameters } = context.computedOptions; let serialisedValue = '', @@ -1264,7 +1414,7 @@ let QUERYPARAM = 'query', } if (requestBodySchema.$ref) { - requestBodySchema = resolveSchema(context, requestBodySchema); + requestBodySchema = resolveSchema(context, requestBodySchema, { isResponseSchema: isExampleBody }); } /** @@ -1313,7 +1463,7 @@ let QUERYPARAM = 'query', examples = requestBodySchema.examples || _.get(requestBodySchema, 'schema.examples'); requestBodySchema = requestBodySchema.schema || requestBodySchema; - requestBodySchema = resolveSchema(context, requestBodySchema); + requestBodySchema = resolveSchema(context, requestBodySchema, { isResponseSchema: isExampleBody }); // If schema object has example defined, try to use that if no example is defiend at request body level if (example === undefined && _.get(requestBodySchema, 'example') !== undefined) { @@ -1338,7 +1488,7 @@ let QUERYPARAM = 'query', requestBodySchema = requestBodySchema.schema || requestBodySchema; if (requestBodySchema.$ref) { - requestBodySchema = resolveSchema(context, requestBodySchema); + requestBodySchema = resolveSchema(context, requestBodySchema, { isResponseSchema: isExampleBody }); } if (isBodyTypeXML) { @@ -1910,7 +2060,7 @@ let QUERYPARAM = 'query', } if (responseBody.$ref) { - responseBody = resolveSchema(context, responseBody); + responseBody = resolveSchema(context, responseBody, { isResponseSchema: true }); } responseContent = responseBody.content; @@ -1971,7 +2121,7 @@ let QUERYPARAM = 'query', { includeDeprecated } = context.computedOptions; if (_.has(responseHeaders, '$ref')) { - responseHeaders = resolveSchema(context, responseHeaders); + responseHeaders = resolveSchema(context, responseHeaders, { isResponseSchema: true }); } _.forOwn(responseHeaders, (value, headerName) => { @@ -1983,7 +2133,7 @@ let QUERYPARAM = 'query', return; } - let headerValue = resolveValueOfParameter(context, value); + let headerValue = resolveValueOfParameter(context, value, { isResponseSchema: true }); if (typeof headerValue === 'number' || typeof headerValue === 'boolean') { // the SDK will keep the number-ness, @@ -1994,7 +2144,7 @@ let QUERYPARAM = 'query', } const headerData = Object.assign({}, value, { name: headerName }), - serialisedHeader = serialiseParamsBasedOnStyle(context, headerData, headerValue); + serialisedHeader = serialiseParamsBasedOnStyle(context, headerData, headerValue, { isResponseSchema: true }); headers.push(...serialisedHeader); }); @@ -2095,7 +2245,7 @@ let QUERYPARAM = 'query', // store all request examples which will be used for creation of examples with correct request and response matching if (typeof 
requestBody === 'object') { if (requestBody.$ref) { - requestBody = resolveSchema(context, requestBody); + requestBody = resolveSchema(context, requestBody, { isResponseSchema: true }); } requestContent = requestBody.content; @@ -2114,7 +2264,8 @@ let QUERYPARAM = 'query', const exampleData = getExampleData(context, { [name]: exampleObj }); if (isBodyTypeXML) { - let bodyData = getXMLExampleData(context, exampleData, resolveSchema(context, content.schema)); + let bodyData = getXMLExampleData(context, exampleData, resolveSchema(context, content.schema, + { isResponseSchema: true })); exampleObj.value = getXmlVersionContent(bodyData); } @@ -2132,7 +2283,8 @@ let QUERYPARAM = 'query', } _.forOwn(operationItem.responses, (responseObj, code) => { - let responseSchema = _.has(responseObj, '$ref') ? resolveSchema(context, responseObj) : responseObj, + let responseSchema = _.has(responseObj, '$ref') ? + resolveSchema(context, responseObj, { isResponseSchema: true }) : responseObj, { includeAuthInfoInExample } = context.computedOptions, auth = request.auth, resolvedExamples = resolveResponseBody(context, responseSchema, requestBodyExamples, code) || {}, diff --git a/libV2/utils.js b/libV2/utils.js index 66419302..9d4df9a8 100644 --- a/libV2/utils.js +++ b/libV2/utils.js @@ -1,4 +1,5 @@ const _ = require('lodash'), + jsonPointer = require('json-pointer'), { Item } = require('postman-collection/lib/collection/item'), { Response } = require('postman-collection/lib/collection/response'), @@ -200,6 +201,48 @@ module.exports = { return title; }, + /** + * Adds provided property array to the given JSON path + * + * @param {string} jsonPath - JSON path to which properties should be added + * @param {array} propArray - Array of properties to be added to JSON path + * @returns {string} - Combined JSON path + */ + addToJsonPath: function (jsonPath, propArray) { + const jsonPathArray = jsonPointer.parse(jsonPath), + escapedPropArray = _.map(propArray, (prop) => { + return jsonPointer.escape(prop); + }); + + return jsonPointer.compile(jsonPathArray.concat(escapedPropArray)); + }, + + /** + * Merges two JSON paths. i.e. Parent JSON path and Child JSON path + * + * @param {string} parentJsonPath - Parent JSON path + * @param {string} childJsonPath - Child JSON path + * @returns {string} - Merged JSON path + */ + mergeJsonPath: function (parentJsonPath, childJsonPath) { + let jsonPathArray = jsonPointer.parse(parentJsonPath); + + // Merges childJsonPath with parentJsonPath + jsonPathArray = jsonPathArray.concat(jsonPointer.parse(childJsonPath)); + + return jsonPointer.compile(jsonPathArray); + }, + + /** + * Gets JSON path in array from string JSON path + * + * @param {string} jsonPath - input JSON path + * @returns {array} - Parsed JSON path (each part is distributed in an array) + */ + getJsonPathArray: function (jsonPath) { + return jsonPointer.parse(jsonPointer.unescape(jsonPath)); + }, + generatePmResponseObject, generateRequestItemObject }; diff --git a/libV2/validationUtils.js b/libV2/validationUtils.js index 65248fe6..ceabf1d5 100644 --- a/libV2/validationUtils.js +++ b/libV2/validationUtils.js @@ -159,7 +159,10 @@ function safeSchemaFaker (context, oldSchema, resolveFor, parameterSourceOption, * i.e. For array it'll add maxItems = 2. This should be avoided as we'll again be needing non-mutated schema * in further VALIDATION use cases as needed. 
*/ - resolvedSchema = resolveSchema(context, _.cloneDeep(oldSchema), 0, _.toLower(PROCESSING_TYPE.CONVERSION)); + resolvedSchema = resolveSchema(context, _.cloneDeep(oldSchema), { + resolveFor: _.toLower(PROCESSING_TYPE.CONVERSION), + isResponseSchema: parameterSourceOption === PARAMETER_SOURCE.RESPONSE + }); resolvedSchema = concreteUtils.fixExamplesByVersion(resolvedSchema); key = JSON.stringify(resolvedSchema); @@ -404,8 +407,10 @@ function getParameterDescription (parameter) { */ function getParamSerialisationInfo (param, parameterSource, components, options) { var paramName = _.get(param, 'name'), - paramSchema = resolveSchema(getDefaultContext(options, components), _.cloneDeep(param.schema), - 0, PROCESSING_TYPE.VALIDATION), + paramSchema = resolveSchema(getDefaultContext(options, components), _.cloneDeep(param.schema), { + resolveFor: PROCESSING_TYPE.VALIDATION, + isResponseSchema: parameterSource === PARAMETER_SOURCE.RESPONSE + }), style, // style property defined/inferred from schema explode, // explode property defined/inferred from schema propSeparator, // separates two properties or values @@ -494,8 +499,10 @@ function getParamSerialisationInfo (param, parameterSource, components, options) */ function deserialiseParamValue (param, paramValue, parameterSource, components, options) { var constructedValue, - paramSchema = resolveSchema(getDefaultContext(options, components), _.cloneDeep(param.schema), - 0, PROCESSING_TYPE.VALIDATION), + paramSchema = resolveSchema(getDefaultContext(options, components), _.cloneDeep(param.schema), { + resolveFor: PROCESSING_TYPE.VALIDATION, + isResponseSchema: parameterSource === PARAMETER_SOURCE.RESPONSE + }), isEvenNumber = (num) => { return (num % 2 === 0); }, @@ -1335,7 +1342,10 @@ function checkValueAgainstSchema (context, property, jsonPathPrefix, txnParamNam invalidJson = false, valueToUse = value, - schema = resolveSchema(context, openApiSchemaObj, 0, PROCESSING_TYPE.VALIDATION), + schema = resolveSchema(context, openApiSchemaObj, { + resolveFor: PROCESSING_TYPE.VALIDATION, + isResponseSchema: parameterSourceOption === PARAMETER_SOURCE.RESPONSE + }), compositeSchema = schema.oneOf || schema.anyOf, compareTypes = _.get(context, 'concreteUtils.compareTypes') || concreteUtils.compareTypes; @@ -1711,7 +1721,9 @@ function checkPathVariables (context, matchedPathData, transactionPathPrefix, sc }; if (options.suggestAvailableFixes) { - const resolvedSchema = resolveSchema(context, pathVar.schema, 0, PROCESSING_TYPE.VALIDATION); + const resolvedSchema = resolveSchema(context, pathVar.schema, { + resolveFor: PROCESSING_TYPE.VALIDATION + }); mismatchObj.suggestedFix = { key: pathVar.name, @@ -1758,8 +1770,9 @@ function checkQueryParams (context, queryParams, transactionPathPrefix, schemaPa // below will make sure for exploded params actual schema of property present in collection is present _.forEach(schemaParams, (param) => { let pathPrefix = param.pathPrefix, - paramSchema = resolveSchema(context, _.cloneDeep(param.schema), - 0, PROCESSING_TYPE.VALIDATION), + paramSchema = resolveSchema(context, _.cloneDeep(param.schema), { + resolveFor: PROCESSING_TYPE.VALIDATION + }), { style, explode } = getParamSerialisationInfo(param, PARAMETER_SOURCE.REQUEST, components, options), encodingObj = { [param.name]: { style, explode } }, metaInfo = { @@ -1862,7 +1875,9 @@ function checkQueryParams (context, queryParams, transactionPathPrefix, schemaPa }; if (options.suggestAvailableFixes) { - const resolvedSchema = resolveSchema(context, qp.schema, 0, 
PROCESSING_TYPE.VALIDATION); + const resolvedSchema = resolveSchema(context, qp.schema, { + resolveFor: PROCESSING_TYPE.VALIDATION + }); mismatchObj.suggestedFix = { key: qp.name, @@ -2000,7 +2015,9 @@ function checkRequestHeaders (context, headers, transactionPathPrefix, schemaPat }; if (options.suggestAvailableFixes) { - const resolvedSchema = resolveSchema(context, header.schema, 0, PROCESSING_TYPE.VALIDATION); + const resolvedSchema = resolveSchema(context, header.schema, { + resolveFor: PROCESSING_TYPE.VALIDATION + }); mismatchObj.suggestedFix = { key: header.name, @@ -2131,7 +2148,10 @@ function checkResponseHeaders (context, schemaResponse, headers, transactionPath }; if (options.suggestAvailableFixes) { - const resolvedSchema = resolveSchema(context, header.schema, 0, PROCESSING_TYPE.VALIDATION); + const resolvedSchema = resolveSchema(context, header.schema, { + resolveFor: PROCESSING_TYPE.VALIDATION, + isResponseSchema: true + }); mismatchObj.suggestedFix = { key: header.name, @@ -2139,7 +2159,7 @@ function checkResponseHeaders (context, schemaResponse, headers, transactionPath suggestedValue: { key: header.name, value: safeSchemaFaker(context, resolvedSchema || {}, PROCESSING_TYPE.VALIDATION, - PARAMETER_SOURCE.REQUEST, components, SCHEMA_FORMATS.DEFAULT, schemaCache), + PARAMETER_SOURCE.RESPONSE, components, SCHEMA_FORMATS.DEFAULT, schemaCache), description: getParameterDescription(header) } }; @@ -2204,8 +2224,9 @@ function checkRequestBody (context, requestBody, transactionPathPrefix, schemaPa return param.value !== OAS_NOT_SUPPORTED; }); - urlencodedBodySchema = resolveSchema(context, urlencodedBodySchema, - 0, PROCESSING_TYPE.VALIDATION); + urlencodedBodySchema = resolveSchema(context, urlencodedBodySchema, { + resolveFor: PROCESSING_TYPE.VALIDATION + }); resolvedSchemaParams = resolveFormParamSchema(urlencodedBodySchema, '', encodingObj, filteredUrlEncodedBody, {}, components, options); @@ -2313,7 +2334,9 @@ function checkRequestBody (context, requestBody, transactionPathPrefix, schemaPa }; if (options.suggestAvailableFixes) { - const resolvedSchema = resolveSchema(context, uParam.schema, 0, PROCESSING_TYPE.VALIDATION); + const resolvedSchema = resolveSchema(context, uParam.schema, { + resolveFor: PROCESSING_TYPE.VALIDATION + }); mismatchObj.suggestedFix = { key: uParam.name, diff --git a/package-lock.json b/package-lock.json index 28d6fb12..42d5658f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -16,6 +16,7 @@ "commander": "2.20.3", "graphlib": "2.1.8", "js-yaml": "4.1.0", + "json-pointer": "0.6.2", "json-schema-merge-allof": "0.8.1", "lodash": "4.17.21", "neotraverse": "0.6.15", @@ -1809,6 +1810,11 @@ "is-callable": "^1.1.3" } }, + "node_modules/foreach": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.6.tgz", + "integrity": "sha512-k6GAGDyqLe9JaebCsFCoudPPWfihKu8pylYXRlqP1J7ms39iPoTtk2fviNglIeQEwdh0bQeKJ01ZPyuyQvKzwg==" + }, "node_modules/foreground-child": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", @@ -2905,6 +2911,14 @@ "node": ">=4" } }, + "node_modules/json-pointer": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/json-pointer/-/json-pointer-0.6.2.tgz", + "integrity": "sha512-vLWcKbOaXlO+jvRy4qNd+TI1QUPZzfJj1tpJ3vAXDych5XJf93ftpUKe5pKCrzyIIwgBJcOcCVRUfqQP25afBw==", + "dependencies": { + "foreach": "^2.0.4" + } + }, "node_modules/json-schema-compare": { "version": "0.2.2", "resolved": 
"https://registry.npmjs.org/json-schema-compare/-/json-schema-compare-0.2.2.tgz", @@ -7012,6 +7026,11 @@ "is-callable": "^1.1.3" } }, + "foreach": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.6.tgz", + "integrity": "sha512-k6GAGDyqLe9JaebCsFCoudPPWfihKu8pylYXRlqP1J7ms39iPoTtk2fviNglIeQEwdh0bQeKJ01ZPyuyQvKzwg==" + }, "foreground-child": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", @@ -7784,6 +7803,14 @@ "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", "dev": true }, + "json-pointer": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/json-pointer/-/json-pointer-0.6.2.tgz", + "integrity": "sha512-vLWcKbOaXlO+jvRy4qNd+TI1QUPZzfJj1tpJ3vAXDych5XJf93ftpUKe5pKCrzyIIwgBJcOcCVRUfqQP25afBw==", + "requires": { + "foreach": "^2.0.4" + } + }, "json-schema-compare": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/json-schema-compare/-/json-schema-compare-0.2.2.tgz", diff --git a/package.json b/package.json index 4f2b9f04..450a3363 100644 --- a/package.json +++ b/package.json @@ -122,6 +122,7 @@ "async": "3.2.4", "commander": "2.20.3", "js-yaml": "4.1.0", + "json-pointer": "0.6.2", "json-schema-merge-allof": "0.8.1", "lodash": "4.17.21", "neotraverse": "0.6.15", diff --git a/test/data/valid_openapi/readOnly.json b/test/data/valid_openapi/readOnly.json index b7115914..e44b1fab 100644 --- a/test/data/valid_openapi/readOnly.json +++ b/test/data/valid_openapi/readOnly.json @@ -21,7 +21,20 @@ "type": "array", "items": { "type": "object", - "$ref": "#/components/schemas/Pet" + "properties": { + "id": { + "type": "integer", + "format": "int64", + "readOnly": true + }, + "name": { + "type": "string" + }, + "tag": { + "type": "string", + "writeOnly": true + } + } } } } @@ -59,25 +72,5 @@ } } } - }, - "components": { - "schemas": { - "Pet": { - "properties": { - "id": { - "type": "integer", - "format": "int64", - "readOnly": true - }, - "name": { - "type": "string" - }, - "tag": { - "type": "string", - "writeOnly": true - } - } - } - } } } diff --git a/test/data/valid_openapi/readOnlyAllOf.json b/test/data/valid_openapi/readOnlyAllOf.json new file mode 100644 index 00000000..271b7bd1 --- /dev/null +++ b/test/data/valid_openapi/readOnlyAllOf.json @@ -0,0 +1,95 @@ +{ + "openapi": "3.0.0", + "info": { + "version": "1.0.0", + "title": "Swagger Petstore" + }, + "servers": [ + { + "url": "http://petstore.swagger.io/v1" + } + ], + "paths": { + "/pets": { + "get": { + "responses": { + "200": { + "description": "Successfull", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/UserPet" + } + } + } + } + } + } + }, + "post": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserPet" + } + } + } + }, + "responses": { + "200": { + "description": "Successfully created a pet" + } + } + } + } + }, + "components": { + "schemas": { + "Pet": { + "properties": { + "id": { + "type": "integer", + "format": "int64", + "readOnly": true + }, + "name": { + "type": "string" + }, + "tag": { + "type": "string", + "writeOnly": true + } + } + }, + "User": { + "properties": { + "user.id": { + "type": "integer", + "format": "int64", + "readOnly": true + }, + "user.name": { + "type": "string" + }, + "user.tag": { + "type": "string", + "writeOnly": true + } + } + }, + "UserPet": { + "allOf": [ + { + "$ref": 
"#/components/schemas/Pet" + }, + { + "$ref": "#/components/schemas/User" + } + ] + } + } + } +} diff --git a/test/data/valid_openapi/readOnlyNested.json b/test/data/valid_openapi/readOnlyNested.json new file mode 100644 index 00000000..54d707b2 --- /dev/null +++ b/test/data/valid_openapi/readOnlyNested.json @@ -0,0 +1,101 @@ +{ + "openapi": "3.0.0", + "info": { + "version": "1.0.0", + "title": "Swagger Petstore" + }, + "servers": [ + { + "url": "http://petstore.swagger.io/v1" + } + ], + "paths": { + "/pets": { + "get": { + "responses": { + "200": { + "description": "Successfull", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/User" + } + } + } + } + } + }, + "post": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Pet" + } + } + } + }, + "responses": { + "200": { + "description": "Successfully created a pet" + } + } + } + } + }, + "components": { + "schemas": { + "Pet": { + "properties": { + "id": { + "type": "integer", + "format": "int64", + "readOnly": true + }, + "name": { + "type": "string" + }, + "tag": { + "type": "string", + "writeOnly": true + }, + "address": { + "type": "object", + "properties": { + "addressCode": { + "type": "object", + "properties": { + "code": { + "type": "string" + } + }, + "readOnly": true + }, + "city": { + "type": "string" + }, + "state": { + "type": "string", + "writeOnly": true + } + } + } + }, + "additionalProperties": { + "type": "string", + "writeOnly": true + } + }, + "User": { + "properties": { + "name": { + "type": "string" + }, + "pet": { + "$ref": "#/components/schemas/Pet" + } + } + } + } + } +} diff --git a/test/data/valid_openapi/readOnlyOneOf.json b/test/data/valid_openapi/readOnlyOneOf.json new file mode 100644 index 00000000..1561c6d6 --- /dev/null +++ b/test/data/valid_openapi/readOnlyOneOf.json @@ -0,0 +1,95 @@ +{ + "openapi": "3.0.0", + "info": { + "version": "1.0.0", + "title": "Swagger Petstore" + }, + "servers": [ + { + "url": "http://petstore.swagger.io/v1" + } + ], + "paths": { + "/pets": { + "get": { + "responses": { + "200": { + "description": "Successfull", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/UserPet" + } + } + } + } + } + } + }, + "post": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UserPet" + } + } + } + }, + "responses": { + "200": { + "description": "Successfully created a pet" + } + } + } + } + }, + "components": { + "schemas": { + "Pet": { + "properties": { + "id": { + "type": "integer", + "format": "int64", + "readOnly": true + }, + "name": { + "type": "string" + }, + "tag": { + "type": "string", + "writeOnly": true + } + } + }, + "User": { + "properties": { + "user/id": { + "type": "integer", + "format": "int64", + "readOnly": true + }, + "user/name": { + "type": "string" + }, + "user/tag": { + "type": "string", + "writeOnly": true + } + } + }, + "UserPet": { + "oneOf": [ + { + "$ref": "#/components/schemas/User" + }, + { + "$ref": "#/components/schemas/Pet" + } + ] + } + } + } +} diff --git a/test/data/valid_openapi/readOnlyRef.json b/test/data/valid_openapi/readOnlyRef.json new file mode 100644 index 00000000..faf1dd1b --- /dev/null +++ b/test/data/valid_openapi/readOnlyRef.json @@ -0,0 +1,69 @@ +{ + "openapi": "3.0.0", + "info": { + "version": "1.0.0", + "title": "Swagger Petstore" + }, + "servers": [ + { + "url": "http://petstore.swagger.io/v1" + } + ], + "paths": { + "/pets": { + 
"get": { + "responses": { + "200": { + "description": "Successfull", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/Pet" + } + } + } + } + } + } + }, + "post": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Pet" + } + } + } + }, + "responses": { + "200": { + "description": "Successfully created a pet" + } + } + } + } + }, + "components": { + "schemas": { + "Pet": { + "properties": { + "id": { + "type": "integer", + "format": "int64", + "readOnly": true + }, + "name": { + "type": "string" + }, + "tag": { + "type": "string", + "writeOnly": true + } + } + } + } + } +} diff --git a/test/unit/convertV2.test.js b/test/unit/convertV2.test.js index 1fe5e006..985e3499 100644 --- a/test/unit/convertV2.test.js +++ b/test/unit/convertV2.test.js @@ -111,6 +111,16 @@ const expect = require('chai').expect, path.join(__dirname, VALID_OPENAPI_PATH, '/multiExampleResponseCodeMatching.json'), duplicateCollectionVars = path.join(__dirname, VALID_OPENAPI_PATH, '/duplicateCollectionVars.json'), + readOnlySpec = + path.join(__dirname, VALID_OPENAPI_PATH, '/readOnly.json'), + readOnlyRefSpec = + path.join(__dirname, VALID_OPENAPI_PATH, '/readOnlyRef.json'), + readOnlyAllOfSpec = + path.join(__dirname, VALID_OPENAPI_PATH, '/readOnlyAllOf.json'), + readOnlyOneOfSpec = + path.join(__dirname, VALID_OPENAPI_PATH, '/readOnlyOneOf.json'), + readOnlyNestedSpec = + path.join(__dirname, VALID_OPENAPI_PATH, '/readOnlyNested.json'), issue795 = path.join(__dirname, VALID_OPENAPI_PATH, '/form-binary-file.json'); @@ -2821,6 +2831,105 @@ describe('The convert v2 Function', function() { }); }); + describe('[Github #12255] Should handle readOnly and writeOnly correctly', function() { + it('when definition contains inline schemas', function(done) { + var openapi = fs.readFileSync(readOnlySpec, 'utf8'), + options = { schemaFaker: true, exampleParametersResolution: 'schema' }; + Converter.convert({ type: 'string', data: openapi }, options, (err, conversionResult) => { + let requestBody = JSON.parse(conversionResult.output[0].data.item[0].item[1].request.body.raw), + responseBody = JSON.parse(conversionResult.output[0].data.item[0].item[0].response[0].body); + expect(err).to.be.null; + expect(requestBody).to.eql({ name: '', tag: '' }); + expect(responseBody).to.eql([ + { id: '', name: '' }, + { id: '', name: '' } + ]); + done(); + }); + }); + + it('when definition contains $ref in schemas', function(done) { + var openapi = fs.readFileSync(readOnlyRefSpec, 'utf8'), + options = { schemaFaker: true, exampleParametersResolution: 'schema' }; + Converter.convert({ type: 'string', data: openapi }, options, (err, conversionResult) => { + let requestBody = JSON.parse(conversionResult.output[0].data.item[0].item[1].request.body.raw), + responseBody = JSON.parse(conversionResult.output[0].data.item[0].item[0].response[0].body); + expect(err).to.be.null; + expect(requestBody).to.eql({ name: '', tag: '' }); + expect(responseBody).to.eql([ + { id: '', name: '' }, + { id: '', name: '' } + ]); + done(); + }); + }); + + it('when definition contains composite keyword "allOf" in schema', function(done) { + var openapi = fs.readFileSync(readOnlyAllOfSpec, 'utf8'), + options = { schemaFaker: true, exampleParametersResolution: 'schema' }; + Converter.convert({ type: 'string', data: openapi }, options, (err, conversionResult) => { + let requestBody = JSON.parse(conversionResult.output[0].data.item[0].item[1].request.body.raw), + 
          responseBody = JSON.parse(conversionResult.output[0].data.item[0].item[0].response[0].body);
+        expect(err).to.be.null;
+        expect(requestBody).to.eql({
+          name: '', tag: '',
+          'user.name': '', 'user.tag': ''
+        });
+        expect(responseBody).to.eql([
+          { id: '', name: '', 'user.id': '', 'user.name': '' },
+          { id: '', name: '', 'user.id': '', 'user.name': '' }
+        ]);
+        done();
+      });
+    });
+
+    it('when definition contains composite keyword "oneOf" in schema', function(done) {
+      var openapi = fs.readFileSync(readOnlyOneOfSpec, 'utf8'),
+        options = { schemaFaker: true, exampleParametersResolution: 'schema' };
+      Converter.convert({ type: 'string', data: openapi }, options, (err, conversionResult) => {
+        let requestBody = JSON.parse(conversionResult.output[0].data.item[0].item[1].request.body.raw),
+          responseBody = JSON.parse(conversionResult.output[0].data.item[0].item[0].response[0].body);
+        expect(err).to.be.null;
+        expect(requestBody).to.eql({
+          'user/name': '', 'user/tag': ''
+        });
+        expect(responseBody).to.eql([
+          { 'user/id': '', 'user/name': '' },
+          { 'user/id': '', 'user/name': '' }
+        ]);
+        done();
+      });
+    });
+
+    it('when definition contains schemas with nested array and object schema types', function(done) {
+      var openapi = fs.readFileSync(readOnlyNestedSpec, 'utf8'),
+        options = { schemaFaker: true, exampleParametersResolution: 'schema' };
+      Converter.convert({ type: 'string', data: openapi }, options, (err, conversionResult) => {
+        let requestBody = JSON.parse(conversionResult.output[0].data.item[0].item[1].request.body.raw),
+          responseBody = JSON.parse(conversionResult.output[0].data.item[0].item[0].response[0].body);
+        expect(err).to.be.null;
+
+        // Assert readOnly properties are not present in request body while other/writeOnly properties are
+        expect(requestBody).to.not.have.property('id');
+        expect(requestBody).to.have.property('name', '');
+        expect(requestBody).to.have.property('tag', '');
+        expect(requestBody.address).to.not.have.property('addressCode');
+        expect(requestBody.address).to.have.property('city', '');
+        expect(requestBody.address).to.have.property('state', '');
+
+        // Assert writeOnly properties are not present in response body while other/readOnly properties are
+        expect(responseBody).to.have.property('name', '');
+        expect(responseBody.pet).to.have.property('id', '');
+        expect(responseBody.pet).to.have.property('name', '');
+        expect(responseBody.pet).to.not.have.property('tag', '');
+        expect(responseBody.pet.address).to.have.property('addressCode');
+        expect(responseBody.pet.address).to.have.property('city', '');
+        expect(responseBody.pet.address).to.not.have.property('state', '');
+        done();
+      });
+    });
+  });
+
   it('[Github #795] Should properly convert format binary to form data', function (done) {
     var openapi = fs.readFileSync(issue795, 'utf8'),
       reqBody, formData;
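
Illustrative note (not part of the diff): the json-pointer helpers added to libV2/utils.js (addToJsonPath, mergeJsonPath, getJsonPathArray) let the resolver record where readOnly/writeOnly properties live in a resolved schema, and the new resolveSchema wrapper then strips the readOnly pointers from request-side schemas and the writeOnly pointers from response schemas (isResponseSchema). A minimal sketch of how those pieces compose, assuming only the lodash and json-pointer behaviour used above — the sample schema and pointer are hypothetical, not taken from the fixtures:

    const _ = require('lodash'),
      jsonPointer = require('json-pointer');

    // _resolveSchema records a pointer for every readOnly/writeOnly property it visits,
    // e.g. addToJsonPath('', ['properties', 'id']) compiles to '/properties/id'.
    const readOnlyPointer = jsonPointer.compile(['properties', 'id']);

    // A resolved request-body schema containing a readOnly property.
    const resolvedSchema = {
      type: 'object',
      properties: {
        id: { type: 'integer', readOnly: true },
        name: { type: 'string' }
      }
    };

    // resolveSchema clones the resolved schema and, for request bodies, unsets every cached
    // readOnly pointer (writeOnly pointers for responses) via getJsonPathArray + _.unset.
    const requestSchema = _.cloneDeep(resolvedSchema);
    _.unset(requestSchema, jsonPointer.parse(readOnlyPointer)); // path array: ['properties', 'id']

    console.log(requestSchema.properties); // -> { name: { type: 'string' } }

Keeping the pointers relative to the originally requested schema is also what makes the schemaCache entries reusable: when a cached $ref resolution is hit, mergeReadWritePropCache rebases each cached pointer onto the caller's currentPath via mergeJsonPath before merging it into the global cache.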