diff --git a/.travis.yml b/.travis.yml index 022fee67..22862bed 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,6 +12,7 @@ install: sudo apt-get update sudo apt-get install dotnet-dev-1.0.0-rc4-004769 -y script: + - npm install - dotnet clean openapi-diff/OpenApiDiff.sln - dotnet restore openapi-diff/OpenApiDiff.sln - dotnet build -c debug openapi-diff/OpenApiDiff.sln /nologo /clp:NoSummary
diff --git a/cli.js b/cli.js new file mode 100644 index 00000000..5122136a --- /dev/null +++ b/cli.js @@ -0,0 +1,37 @@ +#!/usr/bin/env node + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +'use strict'; + +var yargs = require('yargs'), + os = require('os'), + log = require('./lib/util/logging'); + +var defaultLogDir = log.directory; +var logFilepath = log.filepath; +var packageVersion = require('./package.json').version; + +yargs + .version(packageVersion) + .commandDir('lib/commands') + .option('h', { alias: 'help' }) + .option('l', { + alias: 'logLevel', + describe: 'Set the logging level for console.', + choices: ['off', 'json', 'error', 'warn', 'info', 'verbose', 'debug', 'silly'], + default: 'warn' + }) + .option('f', { + alias: 'logFilepath', + describe: `Set the log file path. It must be an absolute filepath. ` + + `By default the logs will be stored in a timestamp-based log file at "${defaultLogDir}".` + }) + .global(['h', 'l', 'f']) + .help() + .argv; + +if (yargs.argv._.length === 0 && yargs.argv.h === false) { + yargs.coerce('help', function (arg) { return true; }).argv; +}
diff --git a/index.js b/index.js new file mode 100644 index 00000000..12693113 --- /dev/null +++ b/index.js @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +'use strict'; + +var validate = require('./lib/validate'); +var utils = require('./lib/util/utils'); + +// Easy-to-use methods from validate.js +exports.log = require('./lib/util/logging'); +exports.detectChanges = validate.detectChanges; + +// Classes +exports.OpenApiDiff = require('./lib/validators/OpenApiDiff'); + +// Constants +exports.Constants = require('./lib/util/constants');
diff --git a/lib/commands/oad.js b/lib/commands/oad.js new file mode 100644 index 00000000..dfe5a954 --- /dev/null +++ b/lib/commands/oad.js @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +'use strict'; +const log = require('../util/logging'), + validate = require('../validate'); + +exports.command = 'oad <oldSpec> <newSpec>'; + +exports.describe = 'Detects breaking changes between the old and new OpenAPI specifications.'; + +exports.handler = function (argv) { + log.debug(argv); + let oldSpec = argv.oldSpec; + let newSpec = argv.newSpec; + let vOptions = {}; + vOptions.consoleLogLevel = argv.logLevel; + vOptions.logFilepath = argv.f; + + return validate.detectChanges(oldSpec, newSpec, vOptions); +} + +exports = module.exports;
diff --git a/lib/scripts/postInstall.js b/lib/scripts/postInstall.js new file mode 100644 index 00000000..8a9b2b2c --- /dev/null +++ b/lib/scripts/postInstall.js @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information.
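
For orientation, a minimal usage sketch (not part of the diff) of the pieces added above: the programmatic entry point that index.js exposes and the "oad" command wired up through cli.js and lib/commands/oad.js. The spec paths are placeholders, and it assumes the package has been installed so that require('oad') resolves to index.js.

'use strict';
const oad = require('oad');

// Mirrors the -l/--logLevel and -f/--logFilepath options defined in cli.js.
const options = { consoleLogLevel: 'info' };

// As currently written, detectChanges() resolves both specs through AutoRest and prints the
// comparison result to the console rather than returning a promise to the caller.
oad.detectChanges('/abs/path/to/old/swagger.json', '/abs/path/to/new/swagger.json', options);

// Equivalent command-line invocation handled by lib/commands/oad.js:
//   node ./cli.js oad /abs/path/to/old/swagger.json /abs/path/to/new/swagger.json -l info
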
+ +'use strict'; + +const execSync = require('child_process').execSync; + +/** + * @class + * Open API Diff class. + */ +class PostInstall { + + constructor() { + } + + installAutoRest() { + try { + // let cmd = `npm install -g autorest`; + // let result = execSync(cmd, { encoding: 'utf8' }); + let cmd = `autorest --version=latest`; + let result = execSync(cmd, { encoding: 'utf8' }); + console.log(result); + result = execSync(`which autorest`, { encoding: 'utf8' }); + console.log(result); + } catch (err) { + throw new Error(`An error occurred while installing AutoRest: ${util.inspect(err, { depth: null })}.`); + } + } +} + +let postInstall = new PostInstall(); +postInstall.installAutoRest(); + +module.exports = PostInstall; diff --git a/lib/util/constants.js b/lib/util/constants.js new file mode 100644 index 00000000..ef2cc879 --- /dev/null +++ b/lib/util/constants.js @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +'use strict'; + +var Constants = { + constraints: ['minLength', 'maxLength', 'minimum', 'maximum', 'enum', 'maxItems', 'minItems', 'uniqueItems', 'multipleOf', 'pattern'], + xmsExamples: 'x-ms-examples', + exampleInSpec: 'example-in-spec', + BodyParameterValid: 'BODY_PARAMETER_VALID', + xmsSkipUrlEncoding: 'x-ms-skip-url-encoding', + Errors: 'Errors', + Warnings: 'Warnings', + ErrorCodes: { + InternalError: 'INTERNAL_ERROR', + InitializationError: 'INITIALIZATION_ERROR', + ResolveSpecError: 'RESOLVE_SPEC_ERROR', + RefNotFoundError: 'REF_NOTFOUND_ERROR', + JsonParsingError: 'JSON_PARSING_ERROR', + RequiredParameterExampleNotFound: 'REQUIRED_PARAMETER_EXAMPLE_NOT_FOUND', + ErrorInPreparingRequest: 'ERROR_IN_PREPARING_REQUEST', + XmsExampleNotFoundError: 'X-MS-EXAMPLE_NOTFOUND_ERROR', + ResponseValidationError: 'RESPONSE_VALIDATION_ERROR', + RequestValidationError: 'REQUEST_VALIDATION_ERROR', + ResponseBodyValidationError: 'RESPONSE_BODY_VALIDATION_ERROR', + ResponseStatusCodeNotInExample: 'RESPONSE_STATUS_CODE_NOT_IN_EXAMPLE', + ResponseStatusCodeNotInSpec: 'RESPONSE_STATUS_CODE_NOT_IN_SPEC', + ResponseSchemaNotInSpec: 'RESPONSE_SCHEMA_NOT_IN_SPEC', + RequiredParameterNotInExampleError: 'REQUIRED_PARAMETER_NOT_IN_EXAMPLE_ERROR', + BodyParameterValidationError: 'BODY_PARAMETER_VALIDATION_ERROR', + TypeValidationError: 'TYPE_VALIDATION_ERROR', + ConstraintValidationError: 'CONSTRAINT_VALIDATION_ERROR', + StatuscodeNotInExampleError: 'STATUS_CODE_NOT_IN_EXAMPLE_ERROR', + SemanticValidationError: 'SEMANTIC_VALIDATION_ERROR', + MultipleOperationsFound: 'MULTIPLE_OPERATIONS_FOUND', + NoOperationFound: 'NO_OPERATION_FOUND', + IncorrectInput: 'INCORRECT_INPUT', + PotentialOperationSearchError: 'POTENTIAL_OPERATION_SEARCH_ERROR', + PathNotFoundInRequestUrl: "PATH_NOT_FOUND_IN_REQUEST_URL", + OperationNotFoundInCache: "OPERATION_NOT_FOUND_IN_CACHE", + OperationNotFoundInCacheWithVerb: "OPERATION_NOT_FOUND_IN_CACHE_WITH_VERB", // Implies we found correct api-version + provider in cache + OperationNotFoundInCacheWithApi: "OPERATION_NOT_FOUND_IN_CACHE_WITH_API", // Implies we found correct provider in cache + OperationNotFoundInCacheWithProvider: "OPERATION_NOT_FOUND_IN_CACHE_WITH_PROVIDER" // Implies we never found correct provider in cache + }, + EnvironmentVariables: { + ClientId: 'CLIENT_ID', + Domain: 'DOMAIN', + ApplicationSecret: 'APPLICATION_SECRET', + AzureSubscriptionId: 'AZURE_SUBSCRIPTION_ID', + AzureLocation: 'AZURE_LOCATION', + AzureResourcegroup: 
'AZURE_RESOURCE_GROUP' + }, + unknownResourceProvider: 'microsoft.unknown', + unknownApiVersion: 'unknown-api-version', + knownTitleToResourceProviders: { + 'ResourceManagementClient': 'Microsoft.Resources' + } +}; + +exports = module.exports = Constants; \ No newline at end of file diff --git a/lib/util/logging.js b/lib/util/logging.js new file mode 100644 index 00000000..5819328b --- /dev/null +++ b/lib/util/logging.js @@ -0,0 +1,124 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +'use strict'; + +var winston = require('winston'), + path = require('path'), + fs = require('fs'), + os = require('os'), + logDir = path.resolve(os.homedir(), 'oad_output'); + +var currentLogFile; + +/* + * Provides current time in custom format that will be used in naming log files. Example:'20140820_151113' + * @return {string} Current time in a custom string format + */ +function getTimeStamp() { + // We pad each value so that sorted directory listings show the files in chronological order + function pad(number) { + if (number < 10) { + return '0' + number; + } + + return number; + } + + var now = new Date(); + return pad(now.getFullYear()) + + pad(now.getMonth() + 1) + + pad(now.getDate()) + + "_" + + pad(now.getHours()) + + pad(now.getMinutes()) + + pad(now.getSeconds()); +} +var customLogLevels = { + off: 0, + json: 1, + error: 2, + warn: 3, + info: 4, + verbose: 5, + debug: 6, + silly: 7 +}; + +var logger = new (winston.Logger)({ + transports: [ + new (winston.transports.Console)({ + level: 'warn', + colorize: true, + prettyPrint: true, + humanReadableUnhandledException: true + }) + ], + levels: customLogLevels +}); + +Object.defineProperties(logger, { + 'consoleLogLevel': { + enumerable: true, + get: function () { return this.transports.console.level; }, + set: function (level) { + if (!level) { + level = 'warn'; + } + let validLevels = Object.keys(customLogLevels); + if (!validLevels.some(function (item) { return item === level; })) { + throw new Error(`The logging level provided is "${level}". Valid values are: "${validLevels}".`); + } + this.transports.console.level = level; + return; + } + }, + 'directory': { + enumerable: true, + get: function () { + return logDir; + }, + set: function (logDirectory) { + if (!logDirectory || logDirectory && typeof logDirectory.valueOf() !== 'string') { + throw new Error('logDirectory cannot be null or undefined and must be of type "string".'); + } + + if (!fs.existsSync(logDirectory)) { + fs.mkdirSync(logDirectory); + } + logDir = logDirectory; + return; + } + }, + 'filepath': { + enumerable: true, + get: function () { + if (!currentLogFile) { + let filename = `validate_log_${getTimeStamp()}.log`; + currentLogFile = path.join(this.directory, filename); + } + + return currentLogFile; + }, + set: function (logFilePath) { + if (!logFilePath || logFilePath && typeof logFilePath.valueOf() !== 'string') { + throw new Error('filepath cannot be null or undefined and must be of type string. 
It must be an absolute file path.') + } + currentLogFile = logFilePath; + this.directory = path.dirname(logFilePath); + if (!this.transports.file) { + this.add(winston.transports.File, { + level: 'silly', + colorize: false, + silent: false, + prettyPrint: true, + json: false, + filename: logFilePath + }); + } + return; + } + } +}); + +module.exports = logger; \ No newline at end of file diff --git a/lib/util/utils.js b/lib/util/utils.js new file mode 100644 index 00000000..a2e0c6a3 --- /dev/null +++ b/lib/util/utils.js @@ -0,0 +1,517 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +'use strict'; +var fs = require('fs'), + execSync = require('child_process').execSync, + util = require('util'), + path = require('path'), + jsonPointer = require('json-pointer'), + YAML = require('js-yaml'), + log = require('./logging'), + request = require('request'); + +exports = module.exports; + +/* + * Caches the json docs that were successfully parsed by exports.parseJson(). This avoids, fetching them again. + * key: docPath + * value: parsed doc in JSON format + */ +exports.docCache = {}; + + +exports.clearCache = function clearCache() { + exports.docCache = {}; + return; +} +/* + * Removes byte order marker. This catches EF BB BF (the UTF-8 BOM) + * because the buffer-to-string conversion in `fs.readFile()` + * translates it to FEFF, the UTF-16 BOM. + */ +exports.stripBOM = function stripBOM(content) { + if (Buffer.isBuffer(content)) { + content = content.toString(); + } + if (content.charCodeAt(0) === 0xFEFF || content.charCodeAt(0) === 0xFFFE) { + content = content.slice(1); + } + return content; +}; + +/* + * Provides a parsed JSON from the given file path or a url. + * + * @param {string} specPath - A local file path or a (github) url to the swagger spec. + * The method will auto convert a github url to rawgithub url. + * + * @returns {object} jsonDoc - Parsed document in JSON format. + */ +exports.parseJson = function parseJson(specPath) { + let result = null; + if (!specPath || (specPath && typeof specPath.valueOf() !== 'string')) { + let err = new Error('A (github) url or a local file path to the swagger spec is required and must be of type string.'); + return Promise.reject(err); + } + if (exports.docCache[specPath]) { + return Promise.resolve(exports.docCache[specPath]); + } + //url + if (specPath.match(/^http.*/ig) !== null) { + //If the spec path is a url starting with https://github then let us auto convert it to an https://raw.githubusercontent url. + if (specPath.startsWith('https://github')) { + specPath = specPath.replace(/^https:\/\/(github.com)(.*)blob\/(.*)/ig, 'https://raw.githubusercontent.com$2$3'); + } + let res = exports.makeRequest({ url: specPath, errorOnNon200Response: true }); + exports.docCache[specPath] = res; + return res; + } else { + //local filepath + try { + let fileContent = exports.stripBOM(fs.readFileSync(specPath, 'utf8')); + let result = exports.parseContent(specPath, fileContent); + exports.docCache[specPath] = result; + return Promise.resolve(result); + } catch (err) { + log.error(err); + return Promise.reject(err); + } + } +}; + +/* + * Provides a parsed JSON from the given content. + * + * @param {string} filePath - A local file path or a (github) url to the swagger spec. + * + * @param {string} fileContent - The content to be parsed. + * + * @returns {object} jsonDoc - Parsed document in JSON format. 
+ */ +exports.parseContent = function parseContent(filePath, fileContent) { + let result = null; + if (/.*\.json$/ig.test(filePath)) { + result = JSON.parse(fileContent); + } else if (/.*\.ya?ml$/ig.test(filePath)) { + result = YAML.safeLoad(fileContent); + } else { + let msg = `We currently support "*.json" and "*.yaml | *.yml" file formats for validating swaggers.\n` + + `The current file extension in "${filePath}" is not supported.`; + throw new Error(msg); + } + return result; +}; + +/* + * A utility function to help us acheive stuff in the same way as async/await but with yield statement and generator functions. + * It waits till the task is over. + * @param {function} A generator function as an input + */ +exports.run = function run(genfun) { + // instantiate the generator object + var gen = genfun(); + // This is the async loop pattern + function next(err, answer) { + var res; + if (err) { + // if err, throw it into the wormhole + return gen.throw(err); + } else { + // if good value, send it + res = gen.next(answer); + } + if (!res.done) { + // if we are not at the end + // we have an async request to + // fulfill, we do this by calling + // `value` as a function + // and passing it a callback + // that receives err, answer + // for which we'll just use `next()` + res.value(next); + } + } + // Kick off the async loop + next(); +}; + +/* + * Makes a generic request. It is a wrapper on top of request.js library that provides a promise instead of a callback. + * + * @param {object} options - The request options as described over here https://github.com/request/request#requestoptions-callback + * + * @param {boolean} options.errorOnNon200Response If true will reject the promise with an error if the response statuscode is not 200. + * + * @return {Promise} promise - A promise that resolves to the responseBody or rejects to an error. + */ +exports.makeRequest = function makeRequest(options) { + var promise = new Promise(function (resolve, reject) { + request(options, function (err, response, responseBody) { + if (err) { + reject(err); + } + if (options.errorOnNon200Response && response.statusCode !== 200) { + var msg = `StatusCode: "${response.statusCode}", ResponseBody: "${responseBody}."`; + reject(new Error(msg)); + } + let res = responseBody; + try { + if (typeof responseBody.valueOf() === 'string') { + res = exports.parseContent(options.url, responseBody); + } + } catch (error) { + let msg = `An error occurred while parsing the file ${options.url}. The error is:\n ${util.inspect(error, { depth: null })}.` + let e = new Error(msg); + reject(e); + } + + resolve(res); + }); + }); + return promise; +}; + +/* + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @return A chain of resolved or rejected promises + */ +exports.executePromisesSequentially = function executePromisesSequentially(promiseFactories) { + let result = Promise.resolve(); + promiseFactories.forEach(function (promiseFactory) { + result = result.then(promiseFactory); + }); + return result; +}; + +/* + * Generates a randomId + * + * @param {string} [prefix] A prefix to which the random numbers will be appended. + * + * @param {object} [existingIds] An object of existingIds. The function will + * ensure that the randomId is not one of the existing ones. 
+ * + * @return {string} result A random string + */ +exports.generateRandomId = function generateRandomId(prefix, existingIds) { + let randomStr; + while (true) { + randomStr = Math.random().toString(36).substr(2, 12); + if (prefix && typeof prefix.valueOf() === 'string') { + randomStr = prefix + randomStr; + } + if (!exsitingIds || !(randomStr in existingIds)) { + break; + } + } + return randomStr; +}; + +/* + * Parses a [inline|relative] [model|parameter] reference in the swagger spec. + * This method does not handle parsing paths "/subscriptions/{subscriptionId}/etc.". + * + * @param {string} reference Reference to be parsed. + * + * @return {object} result + * {string} [result.filePath] Filepath present in the reference. Examples are: + * - '../newtwork.json#/definitions/Resource' => '../network.json' + * - '../examples/nic_create.json' => '../examples/nic_create.json' + * {object} [result.localReference] Provides information about the local reference in the json document. + * {string} [result.localReference.value] The json reference value. Examples are: + * - '../newtwork.json#/definitions/Resource' => '#/definitions/Resource' + * - '#/parameters/SubscriptionId' => '#/parameters/SubscriptionId' + * {string} [result.localReference.accessorProperty] The json path expression that can be used by + * eval() to access the desired object. Examples are: + * - '../newtwork.json#/definitions/Resource' => 'definitions.Resource' + * - '#/parameters/SubscriptionId' => 'parameters,SubscriptionId' + */ +exports.parseReferenceInSwagger = function parseReferenceInSwagger(reference) { + if (!reference || (reference && reference.trim().length === 0)) { + throw new Error('reference cannot be null or undefined and it must be a non-empty string.'); + } + + let result = {}; + if (reference.includes('#')) { + //local reference in the doc + if (reference.startsWith('#/')) { + result.localReference = {}; + result.localReference.value = reference; + result.localReference.accessorProperty = reference.slice(2).replace('/', '.'); + } else { + //filePath+localReference + let segments = reference.split('#'); + result.filePath = segments[0]; + result.localReference = {}; + result.localReference.value = '#' + segments[1]; + result.localReference.accessorProperty = segments[1].slice(1).replace('/', '.'); + } + } else { + //we are assuming that the string is a relative filePath + result.filePath = reference; + } + + return result; +}; + +/* + * Same as path.join(), however, it converts backward slashes to forward slashes. + * This is required because path.join() joins the paths and converts all the + * forward slashes to backward slashes if executed on a windows system. This can + * be problematic while joining a url. For example: + * path.join('https://github.com/Azure/openapi-validation-tools/blob/master/lib', '../examples/foo.json') returns + * 'https:\\github.com\\Azure\\openapi-validation-tools\\blob\\master\\examples\\foo.json' instead of + * 'https://github.com/Azure/openapi-validation-tools/blob/master/examples/foo.json' + * + * @param variable number of arguments and all the arguments must be of type string. 
Similar to the API + * provided by path.join() https://nodejs.org/dist/latest-v6.x/docs/api/path.html#path_path_join_paths + * @return {string} resolved path + */ +exports.joinPath = function joinPath() { + let finalPath = path.join.apply(path, arguments); + finalPath = finalPath.replace(/\\/gi, '/'); + finalPath = finalPath.replace(/^(http|https):\/(.*)/gi, '$1://$2'); + return finalPath; +}; + +/* + * Provides a parsed JSON from the given file path or a url. Same as exports.parseJson(). However, + * this method accepts variable number of path segments as strings and joins them together. + * After joining the path, it internally calls exports.parseJson(). + * + * @param variable number of arguments and all the arguments must be of type string. + * + * @returns {object} jsonDoc - Parsed document in JSON format. + */ +exports.parseJsonWithPathFragments = function parseJsonWithPathFragments() { + let specPath = exports.joinPath.apply(this, arguments); + return exports.parseJson(specPath); +}; + +/* + * Merges source object into the target object + * @param {object} source The object that needs to be merged + * + * @param {object} target The object to be merged into + * + * @returns {object} target - Returns the merged target object. + */ +exports.mergeObjects = function mergeObjects(source, target) { + Object.keys(source).forEach(function (key) { + target[key] = source[key]; + }); + return target; +} + +/* + * Gets the object from the given doc based on the provided json reference pointer. + * It returns undefined if the location is not found in the doc. + * @param {object} doc The source object. + * + * @param {string} ptr The json reference pointer + * + * @returns {any} result - Returns the value that the ptr points to, in the doc. + */ +exports.getObject = function getObject(doc, ptr) { + let result; + try { + result = jsonPointer.get(doc, ptr); + } catch (err) { + log.error(err); + throw err; + } + return result; +}; + +/* + * Sets the given value at the location provided by the ptr in the given doc. + * @param {object} doc The source object. + * + * @param {string} ptr The json reference pointer. + * + * @param {any} value The value that needs to be set at the + * location provided by the ptr in the doc. + */ +exports.setObject = function setObject(doc, ptr, value) { + let result; + try { + result = jsonPointer.set(doc, ptr, value); + } catch (err) { + log.error(err); + } + return result; +}; + +/* + * Removes the location pointed by the json pointer in the given doc. + * @param {object} doc The source object. + * + * @param {string} ptr The json reference pointer. + */ +exports.removeObject = function removeObject(doc, ptr) { + let result; + try { + result = jsonPointer.remove(doc, ptr); + } catch (err) { + log.error(err); + } + return result; +}; + +/** +/* + * Gets provider namespace from the given path. In case of multiple, last one will be returned. + * @param {string} path The path of the operation. + * Example "/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/ + * {parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/roleAssignments" + * will return "Microsoft.Authorization". + * + * @returns {string} result - provider namespace from the given path. 
+ */ +exports.getProvider = function getProvider(path) { + if (path === null || path === undefined || typeof path.valueOf() !== 'string' || !path.trim().length) { + throw new Error('path is a required parameter of type string and it cannot be an empty string.'); + } + + let providerRegEx = new RegExp('/providers/(\:?[^{/]+)', 'gi'); + let result; + let pathMatch; + + // Loop over the paths to find the last matched provider namespace + while ((pathMatch = providerRegEx.exec(path)) != null) { + result = pathMatch[1]; + } + + return result; +}; + +/* + * Clones a github repository in the given directory. + * @param {string} github url to be cloned. + * Example "https://github.com/Azure/azure-rest-api-specs.git" or + * "git@github.com:Azure/azure-rest-api-specs.git". + * + * @param {string} path where to clone the repository. + */ +exports.gitClone = function gitClone(url, directory) { + if (url === null || url === undefined || typeof url.valueOf() !== 'string' || !url.trim().length) { + throw new Error('url is a required parameter of type string and it cannot be an empty string.'); + } + + if (directory === null || directory === undefined || typeof directory.valueOf() !== 'string' || !directory.trim().length) { + throw new Error('directory is a required parameter of type string and it cannot be an empty string.'); + } + + // If the directory exists then we assume that the repo to be cloned is already present. + if (fs.existsSync(directory)) { + if (!fs.lstatSync(directory).isDirectory()) { + throw new Error(`"${directory}" must be a directory.`); + } + return; + } else { + fs.mkdirSync(directory); + } + + try { + let cmd = `git clone ${url} ${directory}`; + let result = execSync(cmd, { encoding: 'utf8' }); + } catch (err) { + throw new Error(`An error occurred while cloning git repository: ${util.inspect(err, { depth: null })}.`); + } +}; + +/* + * Finds the first content-type that contains "/json". Only supported Content-Types are + * "text/json" & "application/json" so we perform first best match that contains '/json' + * + * @param {array} consumesOrProduces Array of content-types. + * @returns {string} firstMatchedJson content-type that contains "/json". + */ +exports.getJsonContentType = function getJsonContentType(consumesOrProduces) { + let firstMatchedJson = null; + if (consumesOrProduces) { + firstMatchedJson = consumesOrProduces.find((contentType) => { + return (contentType.match(/.*\/json.*/ig) !== null); + }); + } + return firstMatchedJson; +}; + +/** + * Determines whether the given string is url encoded + * @param {string} str - The input string to be verified. + * @returns {boolean} result - true if str is url encoded; false otherwise. + */ +exports.isUrlEncoded = function isUrlEncoded(str) { + str = str || ''; + return str !== decodeURIComponent(str); +}; + +/** + * Determines whether the given model is a pure (free-form) object candidate (i.e. equivalent of the C# Object type). + * @param {object} model - The model to be verified + * @returns {boolean} result - true if model is a pure object; false otherwise. 
+ */ +exports.isPureObject = function isPureObject(model) { + if (!model) { + throw new Error(`model cannot be null or undefined and must be of type "object"`); + } + if (model.type && typeof model.type.valueOf() === 'string' && model.type === 'object' && model.properties && Object.keys(model.properties).length === 0) { + return true; + } else if (!model.type && model.properties && Object.keys(model.properties).length === 0) { + return true; + } else if (model.type && typeof model.type.valueOf() === 'string' && model.type === 'object' && !model.properties) { + return true; + } else { + return false; + } +} + +/** + * Relaxes/Transforms the given entities type from a specific JSON schema primitive type (http://json-schema.org/latest/json-schema-core.html#rfc.section.4.2) + * to an array of JSON schema primitve types (http://json-schema.org/latest/json-schema-validation.html#rfc.section.5.21). + * + * @param {object} entity - The entity to be relaxed. + * @param {boolean|undefined} [isRequired] - A boolean value that indicates whether the entity is required or not. + * If the entity is required then the primitve type "null" is not added. + * @returns {object} entity - The transformed entity if it is a pure object else the same entity is returned as-is. + */ +exports.relaxEntityType = function relaxEntityType(entity, isRequired) { + if (exports.isPureObject(entity)) { + entity.type = ['array', 'boolean', 'number', 'object', 'string']; + // if (!isRequired) { + // entity.type.push('null'); + // } + } + if (entity.additionalProperties && exports.isPureObject(entity.additionalProperties)) { + entity.additionalProperties.type = ['array', 'boolean', 'number', 'object', 'string']; + // if (!isRequired) { + // entity.additionalProperties.type.push('null'); + // } + } + return entity; +}; + +/** + * Relaxes/Transforms model definition like entities recursively + */ +exports.relaxModelLikeEntities = function relaxModelLikeEntities(model) { + model = exports.relaxEntityType(model); + if (model.properties) { + let modelProperties = model.properties; + for (let propName in modelProperties) { + let isPropRequired = model.required ? model.required.some((p) => { return p == propName; }) : false + if (modelProperties[propName].properties) { + modelProperties[propName] = exports.relaxModelLikeEntities(modelProperties[propName]); + } else { + modelProperties[propName] = exports.relaxEntityType(modelProperties[propName], isPropRequired); + } + } + } + return model; +} \ No newline at end of file diff --git a/lib/validate.js b/lib/validate.js new file mode 100644 index 00000000..9a5b8a10 --- /dev/null +++ b/lib/validate.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
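
A small illustration (not part of the diff) of how the isPureObject/relaxEntityType/relaxModelLikeEntities helpers above behave; the model literal is invented for the example and utils.js is assumed to be loaded from the repository root.

'use strict';
const utils = require('./lib/util/utils');

// "tags" is a pure (free-form) object, so isPureObject() returns true for it and
// relaxEntityType() widens its "type" to the list of JSON schema primitive types.
let model = {
  type: 'object',
  required: ['name'],
  properties: {
    name: { type: 'string' },
    tags: { type: 'object', properties: {} }
  }
};

model = utils.relaxModelLikeEntities(model);
console.log(model.properties.tags.type); // [ 'array', 'boolean', 'number', 'object', 'string' ]
console.log(model.properties.name.type); // 'string' - non-free-form properties are left as-is
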
+ +'use strict'; + +var fs = require('fs'), + log = require('./util/logging'), + utils = require('./util/utils'), + Constants = require('./util/constants'), + path = require('path'), + util = require('util'), + OpenApiDiff = require('./validators/openApiDiff'); + +exports = module.exports; + +exports.detectChanges = function detectChanges(oldSpec, newSpec, options) { + if (!options) options = {}; + log.consoleLogLevel = options.consoleLogLevel || log.consoleLogLevel; + log.filepath = options.logFilepath || log.filepath; + let openApiDiff = new OpenApiDiff(); + return openApiDiff.detectChanges(oldSpec, newSpec, options); +}; diff --git a/lib/validators/openApiDiff.js b/lib/validators/openApiDiff.js new file mode 100644 index 00000000..a030fc35 --- /dev/null +++ b/lib/validators/openApiDiff.js @@ -0,0 +1,171 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +'use strict'; + +const util = require('util'), + path = require('path'), + os = require('os'), + url = require('url'), + _ = require('lodash'), + glob = require('glob'), + log = require('../util/logging'), + utils = require('../util/utils'), + fs = require('fs'), + execSync = require('child_process').execSync, + exec = require('child_process').exec; + +/** + * @class + * Open API Diff class. + */ +class OpenApiDiff { + + constructor() { + log.silly(`Initializaing OpenApiDiff`); + } + + /** + * Initializes the Open API Diff class. + */ + initialize() { + } + + detectChanges(oldSwagger, newSwagger, options) { + let self = this; + log.debug(`Hello World`); + + self.processViaAutoRest(oldSwagger, 'old').then((oldSwaggerResult, error) => { + if (error) { + } + self.processViaAutoRest(newSwagger, 'new').then((newSwaggerResult, error) => { + if (error) { + } + + self.processViaOpenApiDiff(oldSwaggerResult, newSwaggerResult).then((result, error) => { + if (error) { + + } + console.log(result); + return; + }); + }); + }); + } + + dotNetPath() { + // try global installation directory + let result = path.join(os.homedir(), ".autorest", "frameworks", "dotnet") + if (fs.existsSync(result)) { + return result; + } + + result = path.join(os.homedir(), ".autorest", "frameworks", "dotnet.exe") + if (fs.existsSync(result)) { + return result; + } + + // hope there is one in the PATH + return "dotnet"; + } + + openApiDiffDll() { + // try global installation directory + let result = '../Users/vishrut/git-repos/openapi-diff/openapi-diff/src/core/OpenApiDiff/bin/Debug/netcoreapp1.0/OpenApiDiff.dll'; + + return result; + } + + processViaAutoRest(swaggerPath, outputFileName) { + if (swaggerPath === null || swaggerPath === undefined || typeof swaggerPath.valueOf() !== 'string' || !swaggerPath.trim().length) { + throw new Error('swaggerPath is a required parameter of type "string" and it cannot be an empty string.'); + } + + if (outputFileName === null || outputFileName === undefined || typeof outputFileName.valueOf() !== 'string' || !outputFileName.trim().length) { + throw new Error('outputFile is a required parameter of type "string" and it cannot be an empty string.'); + } + + log.debug(`swaggerPath = "${swaggerPath}"`); + log.debug(`outputFileName = "${outputFileName}"`); + + let autoRestPromise = new Promise((resolve, reject) => { + if (!fs.existsSync(swaggerPath)) { + reject(`File "${swaggerPath}" not found.`); + } + + let outputFolder = os.tmpdir(); + let outputFilePath = path.join(outputFolder, `${outputFileName}.json`); + let autoRestCmd = 
`autorest --input-file=${swaggerPath} --output-artifact=swagger-document.json --output-file=${outputFileName} --output-folder=${outputFolder}`; + + log.debug(`Executing: "${autoRestCmd}"`); + exec(autoRestCmd, { encoding: 'utf8', maxBuffer: 1024 * 1024 * 64 }, (err, stdout, stderr) => { + if (stderr) { + reject(stderr); + } + + log.debug(`outputFilePath: "${outputFilePath}"`); + resolve(outputFilePath); + }); + }); + + return autoRestPromise; + } + + processViaOpenApiDiff(oldSwagger, newSwagger) { + let self = this; + + if (oldSwagger === null || oldSwagger === undefined || typeof oldSwagger.valueOf() !== 'string' || !oldSwagger.trim().length) { + throw new Error('oldSwagger is a required parameter of type "string" and it cannot be an empty string.'); + } + + if (newSwagger === null || newSwagger === undefined || typeof newSwagger.valueOf() !== 'string' || !newSwagger.trim().length) { + throw new Error('newSwagger is a required parameter of type "string" and it cannot be an empty string.'); + } + + log.debug(`oldSwagger = "${oldSwagger}"`); + log.debug(`newSwagger = "${newSwagger}"`); + + let OpenApiDiffPromise = new Promise((resolve, reject) => { + if (!fs.existsSync(oldSwagger)) { + reject(`File "${oldSwagger}" not found.`); + } + + if (!fs.existsSync(newSwagger)) { + reject(`File "${newSwagger}" not found.`); + } + + let cmd = `${self.dotNetPath()} ${self.openApiDiffDll()} -o ${oldSwagger} -n ${newSwagger} -JsonValidationMessages`; + + log.debug(`Executing: "${cmd}"`); + exec(cmd, { encoding: 'utf8', maxBuffer: 1024 * 1024 * 64 }, (err, stdout, stderr) => { + if (err) { + reject(err); + } + + resolve(stdout); + }); + }); + + return OpenApiDiffPromise; + } +} + +module.exports = OpenApiDiff; + +// Testing +// let swagger = '/Users/vishrut/git-repos/azure-rest-api-specs/arm-network/2017-03-01/swagger/virtualNetworkGateway.json'; +// let obj = new OpenApiDiff(); +// // obj.processViaAutoRest(swagger, 'new').then((success, error) => { +// // console.log(success); +// // console.log(error); +// // }); +// console.log(obj.dotNetPath()); + +// let newSwagger = '/Users/vishrut/git-repos/autorest/generated/NewVirtualNetworkGateway.json'; +// let oldSwagger = '/Users/vishrut/git-repos/autorest/generated/VirtualNetworkGateway.json'; +// // obj.processViaOpenApiDiff(oldSwagger, newSwagger).then((success, error) => { +// // console.log(success); +// // console.log(error); +// // }); + +// obj.detectChanges(oldSwagger, newSwagger, {}); diff --git a/package.json b/package.json new file mode 100644 index 00000000..85272f92 --- /dev/null +++ b/package.json @@ -0,0 +1,47 @@ +{ + "name": "oad", + "version": "0.1.0", + "author": { + "name": "Microsoft Corporation", + "email": "azsdkteam@microsoft.com", + "url": "https://github.com/Azure/openapi-diff" + }, + "description": "OpenApi Specification Diff tool", + "license": "MIT", + "dependencies": { + "autorest": "^1.1.0", + "json-pointer": "^0.6.0", + "js-yaml": "^3.8.2", + "moment": "^2.14.1", + "request": "^2.79.0", + "winston": "^2.3.0", + "yargs": "^6.6.0", + "glob": "^5.0.14", + "uuid": "^3.0.1", + "recursive-readdir": "^2.1.0", + "swagger-parser": "^3.4.1", + "swagger-tools": "^0.10.1" + }, + "devDependencies": { + "jshint": "2.9.4", + "mocha": "^3.2.0", + "should": "5.2.0", + "@types/mocha": "^2.2.40", + "@types/should": "^8.1.30" + }, + "homepage": "https://github.com/Azure/openapi-diff", + "repository": { + "type": "git", + "url": "https://github.com/Azure/openapi-diff.git" + }, + "bugs": { + "url": 
"https://github.com/Azure/openapi-diff/issues" + }, + "main": "./index.js", + "bin": { "oad": "./cli.js" }, + "scripts": { + "jshint": "jshint index.js --reporter=jslint", + "test": "npm -s run-script jshint && mocha -t 50000", + "postinstall": "node ./lib/scripts/postInstall.js" + } +}