diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..1625e63a --- /dev/null +++ b/.editorconfig @@ -0,0 +1,17 @@ +# http://editorconfig.org + +root = true + +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true + +[*.{js,json,jsonld,yaml,yml}] +indent_style = space +indent_size = 2 + +[*.idl] +indent_style = space +indent_size = 4 diff --git a/.eslintrc b/.eslintrc new file mode 100644 index 00000000..1b5de0d0 --- /dev/null +++ b/.eslintrc @@ -0,0 +1,10 @@ +{ + "env": { + "amd": true, + "browser": true, + "jquery": true + }, + "rules": { + "indent": ["error", 2, {"outerIIFEBody": 0}] + } +} diff --git a/.gitignore b/.gitignore index 166004f0..bd8d1861 100644 --- a/.gitignore +++ b/.gitignore @@ -1,12 +1,17 @@ -*.sw[op] -*~ -.cproject -.project *.sublime-project *.sublime-workspace +*.sw[nop] +*~ .DS_Store +.cdtproject +.classpath +.cproject +.project .settings +TAGS coverage +dist node_modules -v8.log npm-debug.log +tests/webidl/*-new +v8.log diff --git a/.travis.yml b/.travis.yml index 4198e95e..c80dc8d5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,19 +1,28 @@ language: node_js node_js: - - "0.10" - - "0.12" - - "4" - "6" + - "8" - "node" sudo: false -# download test suite and run tests... submodule? meta testing project with -# all of the reference implementations? -script: - - git clone https://github.com/json-ld/json-ld.org.git _json-ld.org - - make test-suite JSONLD_TEST_SUITE=./_json-ld.org/test-suite - - git clone https://github.com/json-ld/normalization.git _normalization - - make test-suite JSONLD_TEST_SUITE=./_normalization/tests - - make test-local-node test-local-browser +addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - g++-4.9 +install: + - CC=gcc-4.9 CXX=g++-4.9 npm install + - npm run fetch-test-suites +script: + - if [ "x$BUNDLER" = "x" ]; then npm run test; fi + - if [ "x$BUNDLER" != "x" ]; then npm run test-karma; fi +# only run karma tests for one node version +matrix: + include: + - node_js: "6" + env: BUNDLER=webpack +# - node_js: "6" +# env: BUNDLER=browserify notifications: email: on_success: change diff --git a/CHANGELOG.md b/CHANGELOG.md index 19995ca6..098250c3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,179 @@ # jsonld ChangeLog -## Unreleased +## 0.5.16 - 2018-01-25 + +### Removed +- **BREAKING**: Remove `jsonld.version` API and `pkginfo` dependency. This + feature added complexity and browser issues and the use case is likely + handled by semantic versioning and using a proper dependency. + +### Fixed +- Do not use native types to create IRIs in value expansion. +- Improved error detection for @container variations. +- Handle empty and relative `@base`. +- Remove shortcut from compactIri when IRI is a keyword (fixes compact-0073). + +### Changed +- Set processingMode from options or first encountered context. +- Use array representation of @container in processing. +- **BREAKING**: Check for keys in term definition outside that expected: + `@container`, `@id`, `@language`, `@reverse`, and `@type`. This also sets up + for additional keywords in 1.1. + +## 0.5.15 - 2017-10-16 + +### Changed +- **BREAKING**: Use RDF JS (rdf.js.org) interfaces for internal + representation of dataset and quads. This should only break + code that was using undocumented internal datastructures, + backwards-compat code exists to handle external RDF parsers. +- Update `rdf-canonize` to dependency with native support. 
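For callers, the representation change above means `toRDF()` now resolves to quads shaped after the rdf.js.org term interfaces rather than the old internal structures. A minimal sketch of what that looks like; the exact term shape and the `doc` variable are illustrative assumptions, not an excerpt from the release:

```js
// Rough sketch: without a `format` option, toRDF is assumed to yield an
// array of quad objects whose terms are plain RDF/JS-style objects,
// e.g. {termType: 'NamedNode', value: 'http://example.org/...'}.
const jsonld = require('jsonld');

jsonld.toRDF(doc, (err, dataset) => {
  if(err) {
    return console.error(err);
  }
  for(const quad of dataset) {
    console.log(
      quad.subject.termType, quad.subject.value,
      quad.predicate.value,
      quad.object.value);
  }
});
```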
+ +## 0.5.14 - 2017-10-11 + +### Fixed +- Allow empty lists to be compacted to any @list container term. Fixes + compact-0074 test. + +## 0.5.13 - 2017-10-05 + +### Fixed +- Remote context retrieval bug. + +### Removed +- **BREAKING**: Remove `promisify` API. + +## 0.5.12 - 2017-10-05 + +### Changed +- **BREAKING**: Remove top-layer errors. + +## 0.5.11 - 2017-09-28 + +### Removed +- **BREAKING**: Remove deprecated extensions API, including `jsonld.request`. + +## 0.5.10 - 2017-09-21 + +### Added +- Add `expansionMap` and `compactionMap` options. These + functions may be provided that will be called when an + unmapped value or property will be dropped during expansion + or compaction, respectively. The function map return either + `undefined` to cause the default behavior, some other + value to use instead of the default expanded/compacted value, + or it may throw an error to stop expansion/compaction. + +### Removed +- **BREAKING**: Remove deprecated `objectify` and `prependBase` APIs. Now + `objectify` can be achieved via the `@link` option in framing and + `prependBase` can be found via `url.prependBase`. +- **BREAKING**: Remove deprecated `namer` option from all public APIs, use + `issuer` instead. +- **BREAKING**: Last active context used is no longer returned as an optional + parameter to the `compact` callback. +- **BREAKING**: Do not expose deprecated `DocumentCache`. + +### Changed +- **BREAKING**: Change default canonicalization algorithm to `URDNA2015`. + +## 0.5.9 - 2017-09-21 + +### Fixed +- Callbackify bugs. +- Document loaders. +- Request queue. +- Handling of exceptios in callbacks. + +### Added +- Various toRDF tests. + +### Changed +- Move tests from test/ to tests/. + +## 0.5.8 - 2017-09-20 + +### Changed +- Run all test-suite tests with promises and callbacks. + +### Fixed +- Use Node.js "global" or webpack polyfill. + +## 0.5.7 - 2017-09-20 + +### Fixed +- Distribute all js files, for real this time. + +## 0.5.6 - 2017-09-20 + +### Fixed +- Fix `toRDF()`. + +## 0.5.5 - 2017-09-20 + +### Fixed +- Distribute all js files. + +## 0.5.4 - 2017-09-20 + +### Fixed +- Generate all js files for Node.js 6. + +## 0.5.3 - 2017-09-20 + +### Changed +- Significant code reorganization and splitting into multiple files. + +### Removed +- **BREAKING**: Explicit IE8 support. Webpack, babel, and/or polyfills may be + of help if support is still needed. +- **BREAKING**: jQuery document loader. Use the XHR loader. +- `Object.keys` polyfill. Other tools can provide this. + +### Fixed +- Handling of "global". + +## 0.5.2 - 2017-09-19 + +### Fixed +- Distribute browser files. + +## 0.5.1 - 2017-09-19 + +### Fixed +- Distribute unminified bundle. + +## 0.5.0 - 2017-09-18 + +### Added +- Add .editorconfig support. +- `fetch-test-suites` and related `fetch-*-test-suite` NPM scripts. +- Support for `@graph` `@container`. + +### Removed +- Bower support. Use NPM, a NPM proxy site, or build your own bundle. +- Makefile. Use NPM script targets. ### Changed - Update url parser to remove default ports from URLs. - Skip spec version 1.1 tests. +- **BREAKING**: Only support Node.js 6.x and later with ES2015 features. +- Build and use custom Node.js 6.x output so async/await/etc can be used. +- **BREAKING**: Move `js/jsonld.js` to `lib/jsonld.js`. +- **BREAKING**: Switch to CommonJS. +- **BREAKING**: Fixes to allow RFC3986 tests to pass. Some URI edge cases and + certain base URIs with dot segments may cause different URI outputs. +- Switch to Karma for browser testing. 
+- Switch to webpack to build browser bundles. +- Add explicit feature compatibility libs to browser bundles. +- Use async APIs for test generation. + - Done to allow testing in Node.js and browsers. + - Required major testing changes to make everything async. + - Workarounds added to get async generated mocha tests working. +- Improved support for loading various types of tests. + - Can load local files, test manifests, or plain js files (in Node.js). +- Use ES2015 in tests and babel/webpack to support older platforms. +- Use rdf-canonize library, remove local implementation. ## 0.4.12 - 2017-04-24 @@ -24,7 +193,7 @@ ## 0.4.9 - 2016-04-23 ### Changed -- Add optimizations for _compactIri. +- Add optimizations for \_compactIri. ## 0.4.8 - 2016-04-14 diff --git a/Makefile b/Makefile deleted file mode 100644 index 44047b48..00000000 --- a/Makefile +++ /dev/null @@ -1,70 +0,0 @@ -TESTS = tests/test.js -LOCAL_TESTS = test/*.js -REPORTER = spec - -all: - -test: test-local test-node test-browser test-local-node test-local-browser test-normalization-node test-normalization-browser - -test-suite: test-suite-node test-suite-browser - -test-suite-node: - @if [ "x$(JSONLD_TEST_SUITE)" = x ]; then \ - echo "Error: JSONLD_TEST_SUITE env var not set"; \ - exit 1; \ - fi - @if [ -d $(JSONLD_TEST_SUITE) ]; then \ - NODE_ENV=test ./node_modules/.bin/mocha -t 30000 -A -R $(REPORTER) $(TESTS); \ - else \ - echo "Error: tests not found at $(JSONLD_TEST_SUITE)"; \ - exit 1; \ - fi - -test-suite-browser: - @if [ "x$(JSONLD_TEST_SUITE)" = x ]; then \ - echo "Error: JSONLD_TEST_SUITE env var not set"; \ - exit 1; \ - fi - @if [ -d $(JSONLD_TEST_SUITE) ]; then \ - NODE_ENV=test ./node_modules/.bin/phantomjs $(TESTS); \ - else \ - echo "Error: tests not found at $(JSONLD_TEST_SUITE)"; \ - exit 1; \ - fi - -test-node: - @JSONLD_TEST_SUITE=../json-ld.org/test-suite $(MAKE) test-suite-node - -test-browser: - @JSONLD_TEST_SUITE=../json-ld.org/test-suite $(MAKE) test-suite-browser - -test-local-node: - @JSONLD_TEST_SUITE=./tests/new-embed-api $(MAKE) test-suite-node - -test-local-browser: - @JSONLD_TEST_SUITE=./tests/new-embed-api $(MAKE) test-suite-browser - -test-normalization-node: - @JSONLD_TEST_SUITE=../normalization/tests $(MAKE) test-suite-node - -test-normalization-browser: - @JSONLD_TEST_SUITE=../normalization/tests $(MAKE) test-suite-browser - -test-coverage: - ./node_modules/.bin/istanbul cover ./node_modules/.bin/_mocha -- \ - -t 30000 -u exports -R $(REPORTER) $(TESTS) - -test-coverage-lcov: - ./node_modules/.bin/istanbul cover ./node_modules/.bin/_mocha \ - --report lcovonly -- -t 30000 -u exports -R $(REPORTER) $(TESTS) - -test-coverage-report: - ./node_modules/.bin/istanbul report - -test-local: - ./node_modules/.bin/mocha -t 30000 -R $(REPORTER) $(LOCAL_TESTS) - -clean: - rm -rf coverage - -.PHONY: test test-node test-browser test-local-node test-local-browser test-normalization-node test-normalization-browser test-coverage test-local clean diff --git a/README.md b/README.md index a89ffb34..00bb97f7 100644 --- a/README.md +++ b/README.md @@ -37,7 +37,8 @@ to JSON with added semantics. Finally, the format is intended to be fast to parse, fast to generate, stream-based and document-based processing compatible, and require a very small memory footprint in order to operate. 
-## Requiring jsonld.js: +Installation +------------ ### node.js + npm @@ -46,31 +47,21 @@ npm install jsonld ``` ```js -var jsonld = require('jsonld'); +const jsonld = require('jsonld'); ``` -### Browser (AMD) + bower +### Browser (AMD) + npm ``` -bower install jsonld +npm install jsonld ``` -```js -require.config({ - paths: { - jsonld: 'bower_components/jsonld/js/jsonld' - } -}); -define(['jsonld'], function(jsonld) { ... }); -``` +Use your favorite technology to load `node_modules/dist/jsonld.min.js`. ### Browser + script tag ```html - - - + ``` See https://cdnjs.com/libraries/jsonld for the the latest available cdnjs version. @@ -88,7 +79,8 @@ import {promises} from 'jsonld'; import {JsonLdProcessor} from 'jsonld'; ``` -## Quick Examples +Examples +-------- ```js var doc = { @@ -136,81 +128,73 @@ jsonld.expand('http://example.org/doc', ...); // flatten a document // see: http://json-ld.org/spec/latest/json-ld/#flattened-document-form -jsonld.flatten(doc, function(err, flattened) { +jsonld.flatten(doc, (err, flattened) => { // all deep-level trees flattened to the top-level }); // frame a document // see: http://json-ld.org/spec/latest/json-ld-framing/#introduction -jsonld.frame(doc, frame, function(err, framed) { +jsonld.frame(doc, frame, (err, framed) => { // document transformed into a particular tree structure per the given frame }); -// normalize a document using the RDF Dataset Normalization Algorithm +// canonize (normalize) a document using the RDF Dataset Normalization Algorithm // (URDNA2015), see: http://json-ld.github.io/normalization/spec/ -jsonld.normalize(doc, { +jsonld.canonize(doc, { algorithm: 'URDNA2015', format: 'application/nquads' -}, function(err, normalized) { - // normalized is a string that is a canonical representation of the document +}, (err, canonized) => { + // canonized is a string that is a canonical representation of the document // that can be used for hashing, comparison, etc. }); // serialize a document to N-Quads (RDF) -jsonld.toRDF(doc, {format: 'application/nquads'}, function(err, nquads) { +jsonld.toRDF(doc, {format: 'application/nquads'}, (err, nquads) => { // nquads is a string of nquads }); // deserialize N-Quads (RDF) to JSON-LD -jsonld.fromRDF(nquads, {format: 'application/nquads'}, function(err, doc) { +jsonld.fromRDF(nquads, {format: 'application/nquads'}, (err, doc) => { // doc is JSON-LD }); // register a custom async-callback-based RDF parser -jsonld.registerRDFParser = function(contentType, function(input, callback) { +jsonld.registerRDFParser(contentType, (input, callback) => { // parse input to a jsonld.js RDF dataset object... callback(err, dataset); }); // register a custom synchronous RDF parser -jsonld.registerRDFParser = function(contentType, function(input) { +jsonld.registerRDFParser(contentType, input => { // parse input to a jsonld.js RDF dataset object... 
and return it return dataset; }); -// use the promises API -var promises = jsonld.promises; +// use the promises API: // compaction -var promise = promises.compact(doc, context); -promise.then(function(compacted) {...}, function(err) {...}); +const compacted = await jsonld.compact(doc, context); // expansion -var promise = promises.expand(doc); -promise.then(function(expanded) {...}, function(err) {...}); +const expanded = await jsonld.expand(doc); // flattening -var promise = promises.flatten(doc); -promise.then(function(flattened) {...}, function(err) {...}); +const flattened = await jsonld.flatten(doc); // framing -var promise = promises.frame(doc, frame); -promise.then(function(framed) {...}, function(err) {...}); +const framed = await jsonld.frame(doc, frame); -// normalization -var promise = promises.normalize(doc, {format: 'application/nquads'}); -promise.then(function(normalized) {...}, function(err) {...}); +// canonicalization (normalization) +const canonized = await jsonld.canonize(doc, {format: 'application/nquads'}); // serialize to RDF -var promise = promises.toRDF(doc, {format: 'application/nquads'}); -promise.then(function(nquads) {...}, function(err) {...}); +const rdf = await jsonld.toRDF(doc, {format: 'application/nquads'}); // deserialize from RDF -var promise = promises.fromRDF(nquads, {format: 'application/nquads'}); -promise.then(function(doc) {...}, function(err) {...}); +const doc = await jsonld.fromRDF(nquads, {format: 'application/nquads'}); // register a custom promise-based RDF parser -jsonld.registerRDFParser = function(contentType, function(input) { +jsonld.registerRDFParser(contentType, async input => { // parse input into a jsonld.js RDF dataset object... return new Promise(...); }); @@ -219,21 +203,20 @@ jsonld.registerRDFParser = function(contentType, function(input) { // example, one that uses pre-loaded contexts: // define a mapping of context URL => context doc -var CONTEXTS = { +const CONTEXTS = { "http://example.com": { "@context": ... }, ... }; // grab the built-in node.js doc loader -var nodeDocumentLoader = jsonld.documentLoaders.node(); +const nodeDocumentLoader = jsonld.documentLoaders.node(); // or grab the XHR one: jsonld.documentLoaders.xhr() -// or grab the jquery one: jsonld.documentLoaders.jquery() // change the default document loader using the callback API // (you can also do this using the promise-based API, return a promise instead // of using a callback) -var customLoader = function(url, callback) { +const customLoader = (url, callback) => { if(url in CONTEXTS) { return callback( null, { @@ -253,11 +236,10 @@ var customLoader = function(url, callback) { jsonld.documentLoader = customLoader; // alternatively, pass the custom loader for just a specific call: -jsonld.compact(doc, context, {documentLoader: customLoader}, - function(err, compacted) { ... }); +const compacted = await jsonld.compact( +doc, context, {documentLoader: customLoader}); ``` - Related Modules --------------- @@ -292,44 +274,48 @@ the following: https://github.com/json-ld/json-ld.org https://github.com/json-ld/normalization -If the above directories are siblings of the jsonld.js directory you can run -all tests with a simple command: +They should be sibling directories of the jsonld.js directory or in a +`test-suites` dir. 
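The document-loader example above uses the callback style; as noted there, a loader may instead return a promise. A minimal sketch of that variant, reusing the illustrative `CONTEXTS` map from that example (error handling kept deliberately simple):

```js
// Rough sketch: promise-based custom document loader. An async function is
// enough here, since returning a promise takes the place of the callback.
jsonld.documentLoader = async url => {
  if(url in CONTEXTS) {
    return {
      contextUrl: null,         // context URL from a Link header, if any
      document: CONTEXTS[url],  // the pre-loaded context document
      documentUrl: url          // the URL the document was retrieved from
    };
  }
  // this sketch simply refuses anything that is not pre-loaded
  throw new Error('Refusing to load remote document: ' + url);
};
```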
To clone shallow copies into the `test-suites` dir you can +use the following: + + npm run fetch-test-suites + +Node.js tests can be run with a simple command: - make test + npm test -If you installed the test suites elsewhere: +If you installed the test suites elsewhere, or wish to run other tests, use +the `JSONLD_TESTS` environment var: - make test-suite JSONLD_TEST_SUITE={PATH_TO_TEST_SUITE} + JSONLD_TESTS="/tmp/org/test-suites /tmp/norm/tests" npm test -See the `Makefile` for various individual test targets as well as split node -and browser targets. For instance, the json-ld.org test suite can be run -piecewise with: +Browser testing can be done with Karma: - make test-node - make test-browser + npm test-karma + npm test-karma -- --browsers Firefox,Chrome Code coverage of node tests can be generated in `coverage/`: - make test-coverage + npm run coverage The Mocha output reporter can be changed to min, dot, list, nyan, etc: - make test REPORTER=dot + REPORTER=dot npm test Remote context tests are also available: # run the context server in the background or another terminal node tests/remote-context-server.js - make test-suite JSONLD_TEST_SUITE=./tests + JSONLD_TESTS=./tests npm test To generate earl reports: # generate the earl report for node.js - ./node_modules/.bin/mocha -R spec tests/test.js --earl earl-node.jsonld + EARL=earl-node.jsonld npm test # generate the earl report for the browser - ./node_modules/.bin/phantomjs tests/test.js --earl earl-browser.jsonld + EARL=earl-firefox.jsonld npm test-karma -- --browser Firefox [Digital Bazaar]: http://digitalbazaar.com/ [JSON-LD]: http://json-ld.org/ diff --git a/bower.json b/bower.json deleted file mode 100644 index 04be6cc1..00000000 --- a/bower.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "name": "jsonld", - "description": "A JSON-LD Processor and API implementation in JavaScript.", - "authors": [ - "Digital Bazaar, Inc." - ], - "license": "BSD", - "main": ["js/jsonld.js"], - "dependencies": { - "es6-promise": "^2.0.0" - }, - "ignore": [ - "node_modules", - "bower_components" - ] -} diff --git a/browser/ignore.js b/browser/ignore.js index c0a4260f..4d6a5124 100644 --- a/browser/ignore.js +++ b/browser/ignore.js @@ -1 +1 @@ -// Ignore module for browserify (see package.json) \ No newline at end of file +// Ignore module for browserify (see package.json) diff --git a/js/jsonld.js b/js/jsonld.js deleted file mode 100644 index c4593c7a..00000000 --- a/js/jsonld.js +++ /dev/null @@ -1,8236 +0,0 @@ -/** - * A JavaScript implementation of the JSON-LD API. - * - * @author Dave Longley - * - * @license BSD 3-Clause License - * Copyright (c) 2011-2015 Digital Bazaar, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * - * Neither the name of the Digital Bazaar, Inc. nor the names of its - * contributors may be used to endorse or promote products derived from - * this software without specific prior written permission. 
- * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS - * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED - * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A - * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED - * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR - * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF - * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -(function() { - -// determine if in-browser or using node.js -var _nodejs = ( - typeof process !== 'undefined' && process.versions && process.versions.node); -var _browser = !_nodejs && - (typeof window !== 'undefined' || typeof self !== 'undefined'); -if(_browser) { - if(typeof global === 'undefined') { - if(typeof window !== 'undefined') { - global = window; - } else if(typeof self !== 'undefined') { - global = self; - } else if(typeof $ !== 'undefined') { - global = $; - } - } -} - -// attaches jsonld API to the given object -var wrapper = function(jsonld) { - -/* Core API */ - -/** - * Performs JSON-LD compaction. - * - * @param input the JSON-LD input to compact. - * @param ctx the context to compact with. - * @param [options] options to use: - * [base] the base IRI to use. - * [compactArrays] true to compact arrays to single values when - * appropriate, false not to (default: true). - * [graph] true to always output a top-level graph (default: false). - * [expandContext] a context to expand with. - * [skipExpansion] true to assume the input is expanded and skip - * expansion, false not to, defaults to false. - * [documentLoader(url, callback(err, remoteDoc))] the document loader. - * @param callback(err, compacted, ctx) called once the operation completes. - */ -jsonld.compact = function(input, ctx, options, callback) { - if(arguments.length < 2) { - return jsonld.nextTick(function() { - callback(new TypeError('Could not compact, too few arguments.')); - }); - } - - // get arguments - if(typeof options === 'function') { - callback = options; - options = {}; - } - options = options || {}; - - if(ctx === null) { - return jsonld.nextTick(function() { - callback(new JsonLdError( - 'The compaction context must not be null.', - 'jsonld.CompactError', {code: 'invalid local context'})); - }); - } - - // nothing to compact - if(input === null) { - return jsonld.nextTick(function() { - callback(null, null); - }); - } - - // set default options - if(!('base' in options)) { - options.base = (typeof input === 'string') ? 
input : ''; - } - if(!('compactArrays' in options)) { - options.compactArrays = true; - } - if(!('graph' in options)) { - options.graph = false; - } - if(!('skipExpansion' in options)) { - options.skipExpansion = false; - } - if(!('documentLoader' in options)) { - options.documentLoader = jsonld.loadDocument; - } - if(!('link' in options)) { - options.link = false; - } - if(options.link) { - // force skip expansion when linking, "link" is not part of the public - // API, it should only be called from framing - options.skipExpansion = true; - } - - var expand = function(input, options, callback) { - if(options.skipExpansion) { - return jsonld.nextTick(function() { - callback(null, input); - }); - } - jsonld.expand(input, options, callback); - }; - - // expand input then do compaction - expand(input, options, function(err, expanded) { - if(err) { - return callback(new JsonLdError( - 'Could not expand input before compaction.', - 'jsonld.CompactError', {cause: err})); - } - - // process context - var activeCtx = _getInitialContext(options); - jsonld.processContext(activeCtx, ctx, options, function(err, activeCtx) { - if(err) { - return callback(new JsonLdError( - 'Could not process context before compaction.', - 'jsonld.CompactError', {cause: err})); - } - - var compacted; - try { - // do compaction - compacted = new Processor().compact(activeCtx, null, expanded, options); - } catch(ex) { - return callback(ex); - } - - cleanup(null, compacted, activeCtx, options); - }); - }); - - // performs clean up after compaction - function cleanup(err, compacted, activeCtx, options) { - if(err) { - return callback(err); - } - - if(options.compactArrays && !options.graph && _isArray(compacted)) { - if(compacted.length === 1) { - // simplify to a single item - compacted = compacted[0]; - } else if(compacted.length === 0) { - // simplify to an empty object - compacted = {}; - } - } else if(options.graph && _isObject(compacted)) { - // always use array if graph option is on - compacted = [compacted]; - } - - // follow @context key - if(_isObject(ctx) && '@context' in ctx) { - ctx = ctx['@context']; - } - - // build output context - ctx = _clone(ctx); - if(!_isArray(ctx)) { - ctx = [ctx]; - } - // remove empty contexts - var tmp = ctx; - ctx = []; - for(var i = 0; i < tmp.length; ++i) { - if(!_isObject(tmp[i]) || Object.keys(tmp[i]).length > 0) { - ctx.push(tmp[i]); - } - } - - // remove array if only one context - var hasContext = (ctx.length > 0); - if(ctx.length === 1) { - ctx = ctx[0]; - } - - // add context and/or @graph - if(_isArray(compacted)) { - // use '@graph' keyword - var kwgraph = _compactIri(activeCtx, '@graph'); - var graph = compacted; - compacted = {}; - if(hasContext) { - compacted['@context'] = ctx; - } - compacted[kwgraph] = graph; - } else if(_isObject(compacted) && hasContext) { - // reorder keys so @context is first - var graph = compacted; - compacted = {'@context': ctx}; - for(var key in graph) { - compacted[key] = graph[key]; - } - } - - callback(null, compacted, activeCtx); - } -}; - -/** - * Performs JSON-LD expansion. - * - * @param input the JSON-LD input to expand. - * @param [options] the options to use: - * [base] the base IRI to use. - * [expandContext] a context to expand with. - * [keepFreeFloatingNodes] true to keep free-floating nodes, - * false not to, defaults to false. - * [documentLoader(url, callback(err, remoteDoc))] the document loader. - * @param callback(err, expanded) called once the operation completes. 
- */ -jsonld.expand = function(input, options, callback) { - if(arguments.length < 1) { - return jsonld.nextTick(function() { - callback(new TypeError('Could not expand, too few arguments.')); - }); - } - - // get arguments - if(typeof options === 'function') { - callback = options; - options = {}; - } - options = options || {}; - - // set default options - if(!('documentLoader' in options)) { - options.documentLoader = jsonld.loadDocument; - } - if(!('keepFreeFloatingNodes' in options)) { - options.keepFreeFloatingNodes = false; - } - - jsonld.nextTick(function() { - // if input is a string, attempt to dereference remote document - if(typeof input === 'string') { - var done = function(err, remoteDoc) { - if(err) { - return callback(err); - } - try { - if(!remoteDoc.document) { - throw new JsonLdError( - 'No remote document found at the given URL.', - 'jsonld.NullRemoteDocument'); - } - if(typeof remoteDoc.document === 'string') { - remoteDoc.document = JSON.parse(remoteDoc.document); - } - } catch(ex) { - return callback(new JsonLdError( - 'Could not retrieve a JSON-LD document from the URL. URL ' + - 'dereferencing not implemented.', 'jsonld.LoadDocumentError', { - code: 'loading document failed', - cause: ex, - remoteDoc: remoteDoc - })); - } - expand(remoteDoc); - }; - var promise = options.documentLoader(input, done); - if(promise && 'then' in promise) { - promise.then(done.bind(null, null), done); - } - return; - } - // nothing to load - expand({contextUrl: null, documentUrl: null, document: input}); - }); - - function expand(remoteDoc) { - // set default base - if(!('base' in options)) { - options.base = remoteDoc.documentUrl || ''; - } - // build meta-object and retrieve all @context URLs - var input = { - document: _clone(remoteDoc.document), - remoteContext: {'@context': remoteDoc.contextUrl} - }; - if('expandContext' in options) { - var expandContext = _clone(options.expandContext); - if(typeof expandContext === 'object' && '@context' in expandContext) { - input.expandContext = expandContext; - } else { - input.expandContext = {'@context': expandContext}; - } - } - _retrieveContextUrls(input, options, function(err, input) { - if(err) { - return callback(err); - } - - var expanded; - try { - var processor = new Processor(); - var activeCtx = _getInitialContext(options); - var document = input.document; - var remoteContext = input.remoteContext['@context']; - - // process optional expandContext - if(input.expandContext) { - activeCtx = processor.processContext( - activeCtx, input.expandContext['@context'], options); - } - - // process remote context from HTTP Link Header - if(remoteContext) { - activeCtx = processor.processContext( - activeCtx, remoteContext, options); - } - - // expand document - expanded = processor.expand( - activeCtx, null, document, options, false); - - // optimize away @graph with no other properties - if(_isObject(expanded) && ('@graph' in expanded) && - Object.keys(expanded).length === 1) { - expanded = expanded['@graph']; - } else if(expanded === null) { - expanded = []; - } - - // normalize to an array - if(!_isArray(expanded)) { - expanded = [expanded]; - } - } catch(ex) { - return callback(ex); - } - callback(null, expanded); - }); - } -}; - -/** - * Performs JSON-LD flattening. - * - * @param input the JSON-LD to flatten. - * @param ctx the context to use to compact the flattened output, or null. - * @param [options] the options to use: - * [base] the base IRI to use. - * [expandContext] a context to expand with. 
- * [documentLoader(url, callback(err, remoteDoc))] the document loader. - * @param callback(err, flattened) called once the operation completes. - */ -jsonld.flatten = function(input, ctx, options, callback) { - if(arguments.length < 1) { - return jsonld.nextTick(function() { - callback(new TypeError('Could not flatten, too few arguments.')); - }); - } - - // get arguments - if(typeof options === 'function') { - callback = options; - options = {}; - } else if(typeof ctx === 'function') { - callback = ctx; - ctx = null; - options = {}; - } - options = options || {}; - - // set default options - if(!('base' in options)) { - options.base = (typeof input === 'string') ? input : ''; - } - if(!('documentLoader' in options)) { - options.documentLoader = jsonld.loadDocument; - } - - // expand input - jsonld.expand(input, options, function(err, _input) { - if(err) { - return callback(new JsonLdError( - 'Could not expand input before flattening.', - 'jsonld.FlattenError', {cause: err})); - } - - var flattened; - try { - // do flattening - flattened = new Processor().flatten(_input); - } catch(ex) { - return callback(ex); - } - - if(ctx === null) { - return callback(null, flattened); - } - - // compact result (force @graph option to true, skip expansion) - options.graph = true; - options.skipExpansion = true; - jsonld.compact(flattened, ctx, options, function(err, compacted) { - if(err) { - return callback(new JsonLdError( - 'Could not compact flattened output.', - 'jsonld.FlattenError', {cause: err})); - } - callback(null, compacted); - }); - }); -}; - -/** - * Performs JSON-LD framing. - * - * @param input the JSON-LD input to frame. - * @param frame the JSON-LD frame to use. - * @param [options] the framing options. - * [base] the base IRI to use. - * [expandContext] a context to expand with. - * [embed] default @embed flag: '@last', '@always', '@never', '@link' - * (default: '@last'). - * [explicit] default @explicit flag (default: false). - * [requireAll] default @requireAll flag (default: true). - * [omitDefault] default @omitDefault flag (default: false). - * [documentLoader(url, callback(err, remoteDoc))] the document loader. - * @param callback(err, framed) called once the operation completes. - */ -jsonld.frame = function(input, frame, options, callback) { - if(arguments.length < 2) { - return jsonld.nextTick(function() { - callback(new TypeError('Could not frame, too few arguments.')); - }); - } - - // get arguments - if(typeof options === 'function') { - callback = options; - options = {}; - } - options = options || {}; - - // set default options - if(!('base' in options)) { - options.base = (typeof input === 'string') ? 
input : ''; - } - if(!('documentLoader' in options)) { - options.documentLoader = jsonld.loadDocument; - } - if(!('embed' in options)) { - options.embed = '@last'; - } - options.explicit = options.explicit || false; - if(!('requireAll' in options)) { - options.requireAll = true; - } - options.omitDefault = options.omitDefault || false; - - jsonld.nextTick(function() { - // if frame is a string, attempt to dereference remote document - if(typeof frame === 'string') { - var done = function(err, remoteDoc) { - if(err) { - return callback(err); - } - try { - if(!remoteDoc.document) { - throw new JsonLdError( - 'No remote document found at the given URL.', - 'jsonld.NullRemoteDocument'); - } - if(typeof remoteDoc.document === 'string') { - remoteDoc.document = JSON.parse(remoteDoc.document); - } - } catch(ex) { - return callback(new JsonLdError( - 'Could not retrieve a JSON-LD document from the URL. URL ' + - 'dereferencing not implemented.', 'jsonld.LoadDocumentError', { - code: 'loading document failed', - cause: ex, - remoteDoc: remoteDoc - })); - } - doFrame(remoteDoc); - }; - var promise = options.documentLoader(frame, done); - if(promise && 'then' in promise) { - promise.then(done.bind(null, null), done); - } - return; - } - // nothing to load - doFrame({contextUrl: null, documentUrl: null, document: frame}); - }); - - function doFrame(remoteFrame) { - // preserve frame context and add any Link header context - var frame = remoteFrame.document; - var ctx; - if(frame) { - ctx = frame['@context']; - if(remoteFrame.contextUrl) { - if(!ctx) { - ctx = remoteFrame.contextUrl; - } else if(_isArray(ctx)) { - ctx.push(remoteFrame.contextUrl); - } else { - ctx = [ctx, remoteFrame.contextUrl]; - } - frame['@context'] = ctx; - } else { - ctx = ctx || {}; - } - } else { - ctx = {}; - } - - // expand input - jsonld.expand(input, options, function(err, expanded) { - if(err) { - return callback(new JsonLdError( - 'Could not expand input before framing.', - 'jsonld.FrameError', {cause: err})); - } - - // expand frame - var opts = _clone(options); - opts.isFrame = true; - opts.keepFreeFloatingNodes = true; - jsonld.expand(frame, opts, function(err, expandedFrame) { - if(err) { - return callback(new JsonLdError( - 'Could not expand frame before framing.', - 'jsonld.FrameError', {cause: err})); - } - - var framed; - try { - // do framing - framed = new Processor().frame(expanded, expandedFrame, opts); - } catch(ex) { - return callback(ex); - } - - // compact result (force @graph option to true, skip expansion, - // check for linked embeds) - opts.graph = true; - opts.skipExpansion = true; - opts.link = {}; - jsonld.compact(framed, ctx, opts, function(err, compacted, ctx) { - if(err) { - return callback(new JsonLdError( - 'Could not compact framed output.', - 'jsonld.FrameError', {cause: err})); - } - // get graph alias - var graph = _compactIri(ctx, '@graph'); - // remove @preserve from results - opts.link = {}; - compacted[graph] = _removePreserve(ctx, compacted[graph], opts); - callback(null, compacted); - }); - }); - }); - } -}; - -/** - * **Experimental** - * - * Links a JSON-LD document's nodes in memory. - * - * @param input the JSON-LD document to link. - * @param ctx the JSON-LD context to apply. - * @param [options] the options to use: - * [base] the base IRI to use. - * [expandContext] a context to expand with. - * [documentLoader(url, callback(err, remoteDoc))] the document loader. - * @param callback(err, linked) called once the operation completes. 
- */ -jsonld.link = function(input, ctx, options, callback) { - // API matches running frame with a wildcard frame and embed: '@link' - // get arguments - var frame = {}; - if(ctx) { - frame['@context'] = ctx; - } - frame['@embed'] = '@link'; - jsonld.frame(input, frame, options, callback); -}; - -/** - * **Deprecated** - * - * Performs JSON-LD objectification. - * - * @param input the JSON-LD document to objectify. - * @param ctx the JSON-LD context to apply. - * @param [options] the options to use: - * [base] the base IRI to use. - * [expandContext] a context to expand with. - * [documentLoader(url, callback(err, remoteDoc))] the document loader. - * @param callback(err, linked) called once the operation completes. - */ -jsonld.objectify = function(input, ctx, options, callback) { - if(typeof options === 'function') { - callback = options; - options = {}; - } - options = options || {}; - - // set default options - if(!('base' in options)) { - options.base = (typeof input === 'string') ? input : ''; - } - if(!('documentLoader' in options)) { - options.documentLoader = jsonld.loadDocument; - } - - // expand input - jsonld.expand(input, options, function(err, _input) { - if(err) { - return callback(new JsonLdError( - 'Could not expand input before linking.', - 'jsonld.LinkError', {cause: err})); - } - - var flattened; - try { - // flatten the graph - flattened = new Processor().flatten(_input); - } catch(ex) { - return callback(ex); - } - - // compact result (force @graph option to true, skip expansion) - options.graph = true; - options.skipExpansion = true; - jsonld.compact(flattened, ctx, options, function(err, compacted, ctx) { - if(err) { - return callback(new JsonLdError( - 'Could not compact flattened output before linking.', - 'jsonld.LinkError', {cause: err})); - } - // get graph alias - var graph = _compactIri(ctx, '@graph'); - var top = compacted[graph][0]; - - var recurse = function(subject) { - // can't replace just a string - if(!_isObject(subject) && !_isArray(subject)) { - return; - } - - // bottom out recursion on re-visit - if(_isObject(subject)) { - if(recurse.visited[subject['@id']]) { - return; - } - recurse.visited[subject['@id']] = true; - } - - // each array element *or* object key - for(var k in subject) { - var obj = subject[k]; - var isid = (jsonld.getContextValue(ctx, k, '@type') === '@id'); - - // can't replace a non-object or non-array unless it's an @id - if(!_isArray(obj) && !_isObject(obj) && !isid) { - continue; - } - - if(_isString(obj) && isid) { - subject[k] = obj = top[obj]; - recurse(obj); - } else if(_isArray(obj)) { - for(var i = 0; i < obj.length; ++i) { - if(_isString(obj[i]) && isid) { - obj[i] = top[obj[i]]; - } else if(_isObject(obj[i]) && '@id' in obj[i]) { - obj[i] = top[obj[i]['@id']]; - } - recurse(obj[i]); - } - } else if(_isObject(obj)) { - var sid = obj['@id']; - subject[k] = obj = top[sid]; - recurse(obj); - } - } - }; - recurse.visited = {}; - recurse(top); - - compacted.of_type = {}; - for(var s in top) { - if(!('@type' in top[s])) { - continue; - } - var types = top[s]['@type']; - if(!_isArray(types)) { - types = [types]; - } - for(var t = 0; t < types.length; ++t) { - if(!(types[t] in compacted.of_type)) { - compacted.of_type[types[t]] = []; - } - compacted.of_type[types[t]].push(top[s]); - } - } - callback(null, compacted); - }); - }); -}; - -/** - * Performs RDF dataset normalization on the given input. The input is JSON-LD - * unless the 'inputFormat' option is used. 
The output is an RDF dataset - * unless the 'format' option is used. - * - * @param input the input to normalize as JSON-LD or as a format specified by - * the 'inputFormat' option. - * @param [options] the options to use: - * [algorithm] the normalization algorithm to use, `URDNA2015` or - * `URGNA2012` (default: `URGNA2012`). - * [base] the base IRI to use. - * [expandContext] a context to expand with. - * [inputFormat] the format if input is not JSON-LD: - * 'application/nquads' for N-Quads. - * [format] the format if output is a string: - * 'application/nquads' for N-Quads. - * [documentLoader(url, callback(err, remoteDoc))] the document loader. - * @param callback(err, normalized) called once the operation completes. - */ -jsonld.normalize = function(input, options, callback) { - if(arguments.length < 1) { - return jsonld.nextTick(function() { - callback(new TypeError('Could not normalize, too few arguments.')); - }); - } - - // get arguments - if(typeof options === 'function') { - callback = options; - options = {}; - } - options = options || {}; - - // set default options - if(!('algorithm' in options)) { - options.algorithm = 'URGNA2012'; - } - if(!('base' in options)) { - options.base = (typeof input === 'string') ? input : ''; - } - if(!('documentLoader' in options)) { - options.documentLoader = jsonld.loadDocument; - } - - if('inputFormat' in options) { - if(options.inputFormat !== 'application/nquads') { - return callback(new JsonLdError( - 'Unknown normalization input format.', - 'jsonld.NormalizeError')); - } - var parsedInput = _parseNQuads(input); - // do normalization - new Processor().normalize(parsedInput, options, callback); - } else { - // convert to RDF dataset then do normalization - var opts = _clone(options); - delete opts.format; - opts.produceGeneralizedRdf = false; - jsonld.toRDF(input, opts, function(err, dataset) { - if(err) { - return callback(new JsonLdError( - 'Could not convert input to RDF dataset before normalization.', - 'jsonld.NormalizeError', {cause: err})); - } - // do normalization - new Processor().normalize(dataset, options, callback); - }); - } -}; - -/** - * Converts an RDF dataset to JSON-LD. - * - * @param dataset a serialized string of RDF in a format specified by the - * format option or an RDF dataset to convert. - * @param [options] the options to use: - * [format] the format if dataset param must first be parsed: - * 'application/nquads' for N-Quads (default). - * [rdfParser] a custom RDF-parser to use to parse the dataset. - * [useRdfType] true to use rdf:type, false to use @type - * (default: false). - * [useNativeTypes] true to convert XSD types into native types - * (boolean, integer, double), false not to (default: false). - * @param callback(err, output) called once the operation completes. 
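For reference against the options documented above, a minimal call exercising `format` and `useNativeTypes` might look like the following sketch; the `nquads` string is assumed to hold N-Quads, as in the README examples:

```js
// Rough sketch: parse N-Quads into JSON-LD, converting recognized XSD
// boolean/integer/double literals into native JSON types.
const jsonld = require('jsonld');

jsonld.fromRDF(nquads, {
  format: 'application/nquads',
  useNativeTypes: true
}, (err, doc) => {
  if(err) {
    return console.error(err);
  }
  console.log(JSON.stringify(doc, null, 2));
});
```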
- */ -jsonld.fromRDF = function(dataset, options, callback) { - if(arguments.length < 1) { - return jsonld.nextTick(function() { - callback(new TypeError('Could not convert from RDF, too few arguments.')); - }); - } - - // get arguments - if(typeof options === 'function') { - callback = options; - options = {}; - } - options = options || {}; - - // set default options - if(!('useRdfType' in options)) { - options.useRdfType = false; - } - if(!('useNativeTypes' in options)) { - options.useNativeTypes = false; - } - - if(!('format' in options) && _isString(dataset)) { - // set default format to nquads - if(!('format' in options)) { - options.format = 'application/nquads'; - } - } - - jsonld.nextTick(function() { - // handle special format - var rdfParser; - if(options.format) { - // check supported formats - rdfParser = options.rdfParser || _rdfParsers[options.format]; - if(!rdfParser) { - return callback(new JsonLdError( - 'Unknown input format.', - 'jsonld.UnknownFormat', {format: options.format})); - } - } else { - // no-op parser, assume dataset already parsed - rdfParser = function() { - return dataset; - }; - } - - var callbackCalled = false; - try { - // rdf parser may be async or sync, always pass callback - dataset = rdfParser(dataset, function(err, dataset) { - callbackCalled = true; - if(err) { - return callback(err); - } - fromRDF(dataset, options, callback); - }); - } catch(e) { - if(!callbackCalled) { - return callback(e); - } - throw e; - } - // handle synchronous or promise-based parser - if(dataset) { - // if dataset is actually a promise - if('then' in dataset) { - return dataset.then(function(dataset) { - fromRDF(dataset, options, callback); - }, callback); - } - // parser is synchronous - fromRDF(dataset, options, callback); - } - - function fromRDF(dataset, options, callback) { - // convert from RDF - new Processor().fromRDF(dataset, options, callback); - } - }); -}; - -/** - * Outputs the RDF dataset found in the given JSON-LD object. - * - * @param input the JSON-LD input. - * @param [options] the options to use: - * [base] the base IRI to use. - * [expandContext] a context to expand with. - * [format] the format to use to output a string: - * 'application/nquads' for N-Quads. - * [produceGeneralizedRdf] true to output generalized RDF, false - * to produce only standard RDF (default: false). - * [documentLoader(url, callback(err, remoteDoc))] the document loader. - * @param callback(err, dataset) called once the operation completes. - */ -jsonld.toRDF = function(input, options, callback) { - if(arguments.length < 1) { - return jsonld.nextTick(function() { - callback(new TypeError('Could not convert to RDF, too few arguments.')); - }); - } - - // get arguments - if(typeof options === 'function') { - callback = options; - options = {}; - } - options = options || {}; - - // set default options - if(!('base' in options)) { - options.base = (typeof input === 'string') ? 
input : ''; - } - if(!('documentLoader' in options)) { - options.documentLoader = jsonld.loadDocument; - } - - // expand input - jsonld.expand(input, options, function(err, expanded) { - if(err) { - return callback(new JsonLdError( - 'Could not expand input before serialization to RDF.', - 'jsonld.RdfError', {cause: err})); - } - - var dataset; - try { - // output RDF dataset - dataset = Processor.prototype.toRDF(expanded, options); - if(options.format) { - if(options.format === 'application/nquads') { - return callback(null, _toNQuads(dataset)); - } - throw new JsonLdError( - 'Unknown output format.', - 'jsonld.UnknownFormat', {format: options.format}); - } - } catch(ex) { - return callback(ex); - } - callback(null, dataset); - }); -}; - -/** - * **Experimental** - * - * Recursively flattens the nodes in the given JSON-LD input into a map of - * node ID => node. - * - * @param input the JSON-LD input. - * @param [options] the options to use: - * [base] the base IRI to use. - * [expandContext] a context to expand with. - * [issuer] a jsonld.IdentifierIssuer to use to label blank nodes. - * [namer] (deprecated) - * [documentLoader(url, callback(err, remoteDoc))] the document loader. - * @param callback(err, nodeMap) called once the operation completes. - */ -jsonld.createNodeMap = function(input, options, callback) { - if(arguments.length < 1) { - return jsonld.nextTick(function() { - callback(new TypeError('Could not create node map, too few arguments.')); - }); - } - - // get arguments - if(typeof options === 'function') { - callback = options; - options = {}; - } - options = options || {}; - - // set default options - if(!('base' in options)) { - options.base = (typeof input === 'string') ? input : ''; - } - if(!('documentLoader' in options)) { - options.documentLoader = jsonld.loadDocument; - } - - // expand input - jsonld.expand(input, options, function(err, _input) { - if(err) { - return callback(new JsonLdError( - 'Could not expand input before creating node map.', - 'jsonld.CreateNodeMapError', {cause: err})); - } - - var nodeMap; - try { - nodeMap = new Processor().createNodeMap(_input, options); - } catch(ex) { - return callback(ex); - } - - callback(null, nodeMap); - }); -}; - -/** - * **Experimental** - * - * Merges two or more JSON-LD documents into a single flattened document. - * - * @param docs the JSON-LD documents to merge together. - * @param ctx the context to use to compact the merged result, or null. - * @param [options] the options to use: - * [base] the base IRI to use. - * [expandContext] a context to expand with. - * [issuer] a jsonld.IdentifierIssuer to use to label blank nodes. - * [namer] (deprecated). - * [mergeNodes] true to merge properties for nodes with the same ID, - * false to ignore new properties for nodes with the same ID once - * the ID has been defined; note that this may not prevent merging - * new properties where a node is in the `object` position - * (default: true). - * [documentLoader(url, callback(err, remoteDoc))] the document loader. - * @param callback(err, merged) called once the operation completes. 
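A minimal sketch of the experimental merge call documented above; `docA`, `docB`, and `context` are placeholders:

```js
// Rough sketch: merge two documents into a single flattened document and
// compact the result with a shared context. Per the docs above, mergeNodes
// defaults to true, so it is shown here only for clarity.
const jsonld = require('jsonld');

jsonld.merge([docA, docB], context, {mergeNodes: true}, (err, merged) => {
  if(err) {
    return console.error(err);
  }
  // merged is one flattened document compacted with `context`
});
```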
- */ -jsonld.merge = function(docs, ctx, options, callback) { - if(arguments.length < 1) { - return jsonld.nextTick(function() { - callback(new TypeError('Could not merge, too few arguments.')); - }); - } - if(!_isArray(docs)) { - return jsonld.nextTick(function() { - callback(new TypeError('Could not merge, "docs" must be an array.')); - }); - } - - // get arguments - if(typeof options === 'function') { - callback = options; - options = {}; - } else if(typeof ctx === 'function') { - callback = ctx; - ctx = null; - options = {}; - } - options = options || {}; - - // expand all documents - var expanded = []; - var error = null; - var count = docs.length; - for(var i = 0; i < docs.length; ++i) { - var opts = {}; - for(var key in options) { - opts[key] = options[key]; - } - jsonld.expand(docs[i], opts, expandComplete); - } - - function expandComplete(err, _input) { - if(error) { - return; - } - if(err) { - error = err; - return callback(new JsonLdError( - 'Could not expand input before flattening.', - 'jsonld.FlattenError', {cause: err})); - } - expanded.push(_input); - if(--count === 0) { - merge(expanded); - } - } - - function merge(expanded) { - var mergeNodes = true; - if('mergeNodes' in options) { - mergeNodes = options.mergeNodes; - } - - var issuer = options.namer || options.issuer || new IdentifierIssuer('_:b'); - var graphs = {'@default': {}}; - - var defaultGraph; - try { - for(var i = 0; i < expanded.length; ++i) { - // uniquely relabel blank nodes - var doc = expanded[i]; - doc = jsonld.relabelBlankNodes(doc, { - issuer: new IdentifierIssuer('_:b' + i + '-') - }); - - // add nodes to the shared node map graphs if merging nodes, to a - // separate graph set if not - var _graphs = (mergeNodes || i === 0) ? graphs : {'@default': {}}; - _createNodeMap(doc, _graphs, '@default', issuer); - - if(_graphs !== graphs) { - // merge document graphs but don't merge existing nodes - for(var graphName in _graphs) { - var _nodeMap = _graphs[graphName]; - if(!(graphName in graphs)) { - graphs[graphName] = _nodeMap; - continue; - } - var nodeMap = graphs[graphName]; - for(var key in _nodeMap) { - if(!(key in nodeMap)) { - nodeMap[key] = _nodeMap[key]; - } - } - } - } - } - - // add all non-default graphs to default graph - defaultGraph = _mergeNodeMaps(graphs); - } catch(ex) { - return callback(ex); - } - - // produce flattened output - var flattened = []; - var keys = Object.keys(defaultGraph).sort(); - for(var ki = 0; ki < keys.length; ++ki) { - var node = defaultGraph[keys[ki]]; - // only add full subjects to top-level - if(!_isSubjectReference(node)) { - flattened.push(node); - } - } - - if(ctx === null) { - return callback(null, flattened); - } - - // compact result (force @graph option to true, skip expansion) - options.graph = true; - options.skipExpansion = true; - jsonld.compact(flattened, ctx, options, function(err, compacted) { - if(err) { - return callback(new JsonLdError( - 'Could not compact merged output.', - 'jsonld.MergeError', {cause: err})); - } - callback(null, compacted); - }); - } -}; - -/** - * Relabels all blank nodes in the given JSON-LD input. - * - * @param input the JSON-LD input. - * @param [options] the options to use: - * [issuer] a jsonld.IdentifierIssuer to use to label blank nodes. - * [namer] (deprecated). 
- */ -jsonld.relabelBlankNodes = function(input, options) { - options = options || {}; - var issuer = options.namer || options.issuer || new IdentifierIssuer('_:b'); - return _labelBlankNodes(issuer, input); -}; - -/** - * Prepends a base IRI to the given relative IRI. - * - * @param base the base IRI. - * @param iri the relative IRI. - * - * @return the absolute IRI. - */ -jsonld.prependBase = function(base, iri) { - return _prependBase(base, iri); -}; - -/** - * The default document loader for external documents. If the environment - * is node.js, a callback-continuation-style document loader is used; otherwise, - * a promises-style document loader is used. - * - * @param url the URL to load. - * @param callback(err, remoteDoc) called once the operation completes, - * if using a non-promises API. - * - * @return a promise, if using a promises API. - */ -jsonld.documentLoader = function(url, callback) { - var err = new JsonLdError( - 'Could not retrieve a JSON-LD document from the URL. URL ' + - 'dereferencing not implemented.', 'jsonld.LoadDocumentError', - {code: 'loading document failed'}); - if(_nodejs) { - return callback(err, {contextUrl: null, documentUrl: url, document: null}); - } - return jsonld.promisify(function(callback) { - callback(err); - }); -}; - -/** - * Deprecated default document loader. Use or override jsonld.documentLoader - * instead. - */ -jsonld.loadDocument = function(url, callback) { - var promise = jsonld.documentLoader(url, callback); - if(promise && 'then' in promise) { - promise.then(callback.bind(null, null), callback); - } -}; - -/* Promises API */ - -/** - * Creates a new promises API object. - * - * @param [options] the options to use: - * [api] an object to attach the API to. - * [version] 'json-ld-1.0' to output a standard JSON-LD 1.0 promises - * API, 'jsonld.js' to output the same with augmented proprietary - * methods (default: 'jsonld.js') - * - * @return the promises API object. - */ -jsonld.promises = function(options) { - options = options || {}; - var slice = Array.prototype.slice; - var promisify = jsonld.promisify; - - // handle 'api' option as version, set defaults - var api = options.api || {}; - var version = options.version || 'jsonld.js'; - if(typeof options.api === 'string') { - if(!options.version) { - version = options.api; - } - api = {}; - } - - // The Web IDL test harness will check the number of parameters defined in - // the functions below. 
The number of parameters must exactly match the - // required (non-optional) parameters of the JsonLdProcessor interface as - // defined here: - // https://www.w3.org/TR/json-ld-api/#the-jsonldprocessor-interface - - api.expand = function(input) { - if(arguments.length < 1) { - throw new TypeError('Could not expand, too few arguments.'); - } - return promisify.apply(null, [jsonld.expand].concat(slice.call(arguments))); - }; - api.compact = function(input, ctx) { - if(arguments.length < 2) { - throw new TypeError('Could not compact, too few arguments.'); - } - var compact = function(input, ctx, options, callback) { - if(typeof options === 'function') { - callback = options; - options = {}; - } - options = options || {}; - // ensure only one value is returned in callback - jsonld.compact(input, ctx, options, function(err, compacted) { - callback(err, compacted); - }); - }; - return promisify.apply(null, [compact].concat(slice.call(arguments))); - }; - api.flatten = function(input) { - if(arguments.length < 1) { - throw new TypeError('Could not flatten, too few arguments.'); - } - return promisify.apply( - null, [jsonld.flatten].concat(slice.call(arguments))); - }; - api.frame = function(input, frame) { - if(arguments.length < 2) { - throw new TypeError('Could not frame, too few arguments.'); - } - return promisify.apply(null, [jsonld.frame].concat(slice.call(arguments))); - }; - api.fromRDF = function(dataset) { - if(arguments.length < 1) { - throw new TypeError('Could not convert from RDF, too few arguments.'); - } - return promisify.apply( - null, [jsonld.fromRDF].concat(slice.call(arguments))); - }; - api.toRDF = function(input) { - if(arguments.length < 1) { - throw new TypeError('Could not convert to RDF, too few arguments.'); - } - return promisify.apply(null, [jsonld.toRDF].concat(slice.call(arguments))); - }; - api.normalize = function(input) { - if(arguments.length < 1) { - throw new TypeError('Could not normalize, too few arguments.'); - } - return promisify.apply( - null, [jsonld.normalize].concat(slice.call(arguments))); - }; - - if(version === 'jsonld.js') { - api.link = function(input, ctx) { - if(arguments.length < 2) { - throw new TypeError('Could not link, too few arguments.'); - } - return promisify.apply( - null, [jsonld.link].concat(slice.call(arguments))); - }; - api.objectify = function(input) { - return promisify.apply( - null, [jsonld.objectify].concat(slice.call(arguments))); - }; - api.createNodeMap = function(input) { - return promisify.apply( - null, [jsonld.createNodeMap].concat(slice.call(arguments))); - }; - api.merge = function(input) { - return promisify.apply( - null, [jsonld.merge].concat(slice.call(arguments))); - }; - } - - try { - jsonld.Promise = global.Promise || require('es6-promise').Promise; - } catch(e) { - var f = function() { - throw new Error('Unable to find a Promise implementation.'); - }; - for(var method in api) { - api[method] = f; - } - } - - return api; -}; - -/** - * Converts a node.js async op into a promise w/boxed resolved value(s). - * - * @param op the operation to convert. - * - * @return the promise. 
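Worth noting alongside the docs above: the 0.5.13 entry earlier in this changelog removes the `promisify` helper, and the updated README relies on the promises returned directly, roughly as in this sketch (`doc` and `context` are placeholders):

```js
// Rough sketch: the core calls return promises when no callback is given,
// so plain async/await replaces jsonld.promisify(...) wrappers.
const jsonld = require('jsonld');

async function example(doc, context) {
  const expanded = await jsonld.expand(doc);
  const compacted = await jsonld.compact(doc, context);
  return {expanded, compacted};
}
```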
- */ -jsonld.promisify = function(op) { - if(!jsonld.Promise) { - try { - jsonld.Promise = global.Promise || require('es6-promise').Promise; - } catch(e) { - throw new Error('Unable to find a Promise implementation.'); - } - } - var args = Array.prototype.slice.call(arguments, 1); - return new jsonld.Promise(function(resolve, reject) { - op.apply(null, args.concat(function(err, value) { - if(!err) { - resolve(value); - } else { - reject(err); - } - })); - }); -}; - -// extend jsonld.promises w/jsonld.js methods -jsonld.promises({api: jsonld.promises}); - -/* WebIDL API */ - -function JsonLdProcessor() {} -JsonLdProcessor.prototype = jsonld.promises({version: 'json-ld-1.0'}); -JsonLdProcessor.prototype.toString = function() { - if(this instanceof JsonLdProcessor) { - return '[object JsonLdProcessor]'; - } - return '[object JsonLdProcessorPrototype]'; -}; -jsonld.JsonLdProcessor = JsonLdProcessor; - -// IE8 has Object.defineProperty but it only -// works on DOM nodes -- so feature detection -// requires try/catch :-( -var canDefineProperty = !!Object.defineProperty; -if(canDefineProperty) { - try { - Object.defineProperty({}, 'x', {}); - } catch(e) { - canDefineProperty = false; - } -} - -if(canDefineProperty) { - Object.defineProperty(JsonLdProcessor, 'prototype', { - writable: false, - enumerable: false - }); - Object.defineProperty(JsonLdProcessor.prototype, 'constructor', { - writable: true, - enumerable: false, - configurable: true, - value: JsonLdProcessor - }); -} - -// setup browser global JsonLdProcessor -if(_browser && typeof global.JsonLdProcessor === 'undefined') { - if(canDefineProperty) { - Object.defineProperty(global, 'JsonLdProcessor', { - writable: true, - enumerable: false, - configurable: true, - value: JsonLdProcessor - }); - } else { - global.JsonLdProcessor = JsonLdProcessor; - } -} - -/* Utility API */ - -// define setImmediate and nextTick -//// nextTick implementation with browser-compatible fallback //// -// from https://github.com/caolan/async/blob/master/lib/async.js - -// capture the global reference to guard against fakeTimer mocks -var _setImmediate = typeof setImmediate === 'function' && setImmediate; - -var _delay = _setImmediate ? function(fn) { - // not a direct alias (for IE10 compatibility) - _setImmediate(fn); -} : function(fn) { - setTimeout(fn, 0); -}; - -if(typeof process === 'object' && typeof process.nextTick === 'function') { - jsonld.nextTick = process.nextTick; -} else { - jsonld.nextTick = _delay; -} -jsonld.setImmediate = _setImmediate ? _delay : jsonld.nextTick; - -/** - * Parses a link header. The results will be key'd by the value of "rel". - * - * Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json" - * - * Parses as: { - * 'http://www.w3.org/ns/json-ld#context': { - * target: http://json-ld.org/contexts/person.jsonld, - * type: 'application/ld+json' - * } - * } - * - * If there is more than one "rel" with the same IRI, then entries in the - * resulting map for that "rel" will be arrays. - * - * @param header the link header to parse. 
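 *
 * A concrete call mirroring the example above (the angle brackets around the
 * target URL are part of the Link header syntax):
 *
 *   var links = jsonld.parseLinkHeader(
 *     '<http://json-ld.org/contexts/person.jsonld>; ' +
 *     'rel="http://www.w3.org/ns/json-ld#context"; ' +
 *     'type="application/ld+json"');
 *   // links['http://www.w3.org/ns/json-ld#context'].target ===
 *   //   'http://json-ld.org/contexts/person.jsonld'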
- */ -jsonld.parseLinkHeader = function(header) { - var rval = {}; - // split on unbracketed/unquoted commas - var entries = header.match(/(?:<[^>]*?>|"[^"]*?"|[^,])+/g); - var rLinkHeader = /\s*<([^>]*?)>\s*(?:;\s*(.*))?/; - for(var i = 0; i < entries.length; ++i) { - var match = entries[i].match(rLinkHeader); - if(!match) { - continue; - } - var result = {target: match[1]}; - var params = match[2]; - var rParams = /(.*?)=(?:(?:"([^"]*?)")|([^"]*?))\s*(?:(?:;\s*)|$)/g; - while(match = rParams.exec(params)) { - result[match[1]] = (match[2] === undefined) ? match[3] : match[2]; - } - var rel = result['rel'] || ''; - if(_isArray(rval[rel])) { - rval[rel].push(result); - } else if(rel in rval) { - rval[rel] = [rval[rel], result]; - } else { - rval[rel] = result; - } - } - return rval; -}; - -/** - * Creates a simple queue for requesting documents. - */ -jsonld.RequestQueue = function() { - this._requests = {}; -}; -jsonld.RequestQueue.prototype.wrapLoader = function(loader) { - this._loader = loader; - this._usePromise = (loader.length === 1); - return this.add.bind(this); -}; -jsonld.RequestQueue.prototype.add = function(url, callback) { - var self = this; - - // callback must be given if not using promises - if(!callback && !self._usePromise) { - throw new Error('callback must be specified.'); - } - - // Promise-based API - if(self._usePromise) { - return new jsonld.Promise(function(resolve, reject) { - var load = self._requests[url]; - if(!load) { - // load URL then remove from queue - load = self._requests[url] = self._loader(url) - .then(function(remoteDoc) { - delete self._requests[url]; - return remoteDoc; - }).catch(function(err) { - delete self._requests[url]; - throw err; - }); - } - // resolve/reject promise once URL has been loaded - load.then(function(remoteDoc) { - resolve(remoteDoc); - }).catch(function(err) { - reject(err); - }); - }); - } - - // callback-based API - if(url in self._requests) { - self._requests[url].push(callback); - } else { - self._requests[url] = [callback]; - self._loader(url, function(err, remoteDoc) { - var callbacks = self._requests[url]; - delete self._requests[url]; - for(var i = 0; i < callbacks.length; ++i) { - callbacks[i](err, remoteDoc); - } - }); - } -}; - -/** - * Creates a simple document cache that retains documents for a short - * period of time. - * - * FIXME: Implement simple HTTP caching instead. - * - * @param size the maximum size of the cache. - */ -jsonld.DocumentCache = function(size) { - this.order = []; - this.cache = {}; - this.size = size || 50; - this.expires = 30 * 1000; -}; -jsonld.DocumentCache.prototype.get = function(url) { - if(url in this.cache) { - var entry = this.cache[url]; - if(entry.expires >= +new Date()) { - return entry.ctx; - } - delete this.cache[url]; - this.order.splice(this.order.indexOf(url), 1); - } - return null; -}; -jsonld.DocumentCache.prototype.set = function(url, ctx) { - if(this.order.length === this.size) { - delete this.cache[this.order.shift()]; - } - this.order.push(url); - this.cache[url] = {ctx: ctx, expires: (+new Date() + this.expires)}; -}; - -/** - * Creates an active context cache. - * - * @param size the maximum size of the cache. 
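 *
 * A minimal usage sketch (`activeCtx`, `localCtx`, and `result` stand for the
 * context objects produced during context processing):
 *
 *   var cache = new jsonld.ActiveContextCache(100);
 *   cache.set(activeCtx, localCtx, result);
 *   cache.get(activeCtx, localCtx); // a clone of `result`, or null on a miss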
- */ -jsonld.ActiveContextCache = function(size) { - this.order = []; - this.cache = {}; - this.size = size || 100; -}; -jsonld.ActiveContextCache.prototype.get = function(activeCtx, localCtx) { - var key1 = JSON.stringify(activeCtx); - var key2 = JSON.stringify(localCtx); - var level1 = this.cache[key1]; - if(level1 && key2 in level1) { - return level1[key2]; - } - return null; -}; -jsonld.ActiveContextCache.prototype.set = function( - activeCtx, localCtx, result) { - if(this.order.length === this.size) { - var entry = this.order.shift(); - delete this.cache[entry.activeCtx][entry.localCtx]; - } - var key1 = JSON.stringify(activeCtx); - var key2 = JSON.stringify(localCtx); - this.order.push({activeCtx: key1, localCtx: key2}); - if(!(key1 in this.cache)) { - this.cache[key1] = {}; - } - this.cache[key1][key2] = _clone(result); -}; - -/** - * Default JSON-LD cache. - */ -jsonld.cache = { - activeCtx: new jsonld.ActiveContextCache() -}; - -/** - * Accept header. - */ -var _defaults = { - headers: { - accept: 'application/ld+json, application/json' - } -}; - -/** - * Build an headers object from custom headers and assert `accept` header isn't overridden. - * - * @param {Object} optionsHeaders an object (map) of headers - * with key as header name and value as header value. - * @return {Object} an object (map) of headers with a valid `accept` header. - */ -function buildHeaders(optionsHeaders) { - optionsHeaders = optionsHeaders || {}; - - var hasAccept = Object.keys(optionsHeaders).some(function(h) { - return h.toLowerCase() === 'accept'; - }); - - if(hasAccept) { - throw new RangeError( - 'Accept header may not be specified as an option; only "' + - _defaults.headers.accept + '" is supported.'); - } - - var headers = {'Accept': _defaults.headers.accept}; - for(var k in optionsHeaders) { headers[k] = optionsHeaders[k]; } - - return headers; -} - -/** - * Document loaders. - */ -jsonld.documentLoaders = {}; - -/** - * Creates a built-in jquery document loader. - * - * @param $ the jquery instance to use. - * @param options the options to use: - * secure: require all URLs to use HTTPS. - * headers: an object (map) of headers which will be passed as request - * headers for the requested document. Accept is not allowed. - * usePromise: true to use a promises API, false for a - * callback-continuation-style API; defaults to true if Promise - * is globally defined, false if not. - * - * @return the jquery document loader. - */ -jsonld.documentLoaders.jquery = function($, options) { - options = options || {}; - var queue = new jsonld.RequestQueue(); - var headers = buildHeaders(options.headers); - - // use option or, by default, use Promise when its defined - var usePromise = ('usePromise' in options ? 
- options.usePromise : (typeof Promise !== 'undefined')); - if(usePromise) { - return queue.wrapLoader(function(url) { - return jsonld.promisify(loader, url); - }); - } - return queue.wrapLoader(loader); - - function loader(url, callback) { - if(url.indexOf('http:') !== 0 && url.indexOf('https:') !== 0) { - return callback(new JsonLdError( - 'URL could not be dereferenced; only "http" and "https" URLs are ' + - 'supported.', - 'jsonld.InvalidUrl', {code: 'loading document failed', url: url}), - {contextUrl: null, documentUrl: url, document: null}); - } - if(options.secure && url.indexOf('https') !== 0) { - return callback(new JsonLdError( - 'URL could not be dereferenced; secure mode is enabled and ' + - 'the URL\'s scheme is not "https".', - 'jsonld.InvalidUrl', {code: 'loading document failed', url: url}), - {contextUrl: null, documentUrl: url, document: null}); - } - $.ajax({ - url: url, - accepts: { - json: _defaults.headers.accept - }, - headers: headers, - dataType: 'json', - crossDomain: true, - success: function(data, textStatus, jqXHR) { - var doc = {contextUrl: null, documentUrl: url, document: data}; - - // handle Link Header - var contentType = jqXHR.getResponseHeader('Content-Type'); - var linkHeader = jqXHR.getResponseHeader('Link'); - if(linkHeader && contentType !== 'application/ld+json') { - // only 1 related link header permitted - linkHeader = jsonld.parseLinkHeader(linkHeader)[LINK_HEADER_REL]; - if(_isArray(linkHeader)) { - return callback(new JsonLdError( - 'URL could not be dereferenced, it has more than one ' + - 'associated HTTP Link Header.', - 'jsonld.InvalidUrl', - {code: 'multiple context link headers', url: url}), doc); - } - if(linkHeader) { - doc.contextUrl = linkHeader.target; - } - } - - callback(null, doc); - }, - error: function(jqXHR, textStatus, err) { - callback(new JsonLdError( - 'URL could not be dereferenced, an error occurred.', - 'jsonld.LoadDocumentError', - {code: 'loading document failed', url: url, cause: err}), - {contextUrl: null, documentUrl: url, document: null}); - } - }); - } -}; - -/** - * Creates a built-in node document loader. - * - * @param options the options to use: - * secure: require all URLs to use HTTPS. - * strictSSL: true to require SSL certificates to be valid, - * false not to (default: true). - * maxRedirects: the maximum number of redirects to permit, none by - * default. - * request: the object which will make the request, default is - * provided by `https://www.npmjs.com/package/request`. - * headers: an object (map) of headers which will be passed as request - * headers for the requested document. Accept is not allowed. - * usePromise: true to use a promises API, false for a - * callback-continuation-style API; false by default. - * - * @return the node document loader. - */ -jsonld.documentLoaders.node = function(options) { - options = options || {}; - var headers = buildHeaders(options.headers); - var strictSSL = ('strictSSL' in options) ? options.strictSSL : true; - var maxRedirects = ('maxRedirects' in options) ? options.maxRedirects : -1; - var request = ('request' in options) ? 
options.request : require('request'); - var http = require('http'); - // TODO: disable cache until HTTP caching implemented - //var cache = new jsonld.DocumentCache(); - - var queue = new jsonld.RequestQueue(); - if(options.usePromise) { - return queue.wrapLoader(function(url) { - return jsonld.promisify(loadDocument, url, []); - }); - } - - return queue.wrapLoader(function(url, callback) { - loadDocument(url, [], callback); - }); - - function loadDocument(url, redirects, callback) { - if(url.indexOf('http:') !== 0 && url.indexOf('https:') !== 0) { - return callback(new JsonLdError( - 'URL could not be dereferenced; only "http" and "https" URLs are ' + - 'supported.', - 'jsonld.InvalidUrl', {code: 'loading document failed', url: url}), - {contextUrl: null, documentUrl: url, document: null}); - } - if(options.secure && url.indexOf('https') !== 0) { - return callback(new JsonLdError( - 'URL could not be dereferenced; secure mode is enabled and ' + - 'the URL\'s scheme is not "https".', - 'jsonld.InvalidUrl', {code: 'loading document failed', url: url}), - {contextUrl: null, documentUrl: url, document: null}); - } - // TODO: disable cache until HTTP caching implemented - var doc = null;//cache.get(url); - if(doc !== null) { - return callback(null, doc); - } - - request({ - url: url, - headers: headers, - strictSSL: strictSSL, - followRedirect: false - }, handleResponse); - - function handleResponse(err, res, body) { - doc = {contextUrl: null, documentUrl: url, document: body || null}; - - // handle error - if(err) { - return callback(new JsonLdError( - 'URL could not be dereferenced, an error occurred.', - 'jsonld.LoadDocumentError', - {code: 'loading document failed', url: url, cause: err}), doc); - } - var statusText = http.STATUS_CODES[res.statusCode]; - if(res.statusCode >= 400) { - return callback(new JsonLdError( - 'URL could not be dereferenced: ' + statusText, - 'jsonld.InvalidUrl', { - code: 'loading document failed', - url: url, - httpStatusCode: res.statusCode - }), doc); - } - - // handle Link Header - if(res.headers.link && - res.headers['content-type'] !== 'application/ld+json') { - // only 1 related link header permitted - var linkHeader = jsonld.parseLinkHeader( - res.headers.link)[LINK_HEADER_REL]; - if(_isArray(linkHeader)) { - return callback(new JsonLdError( - 'URL could not be dereferenced, it has more than one associated ' + - 'HTTP Link Header.', - 'jsonld.InvalidUrl', - {code: 'multiple context link headers', url: url}), doc); - } - if(linkHeader) { - doc.contextUrl = linkHeader.target; - } - } - - // handle redirect - if(res.statusCode >= 300 && res.statusCode < 400 && - res.headers.location) { - if(redirects.length === maxRedirects) { - return callback(new JsonLdError( - 'URL could not be dereferenced; there were too many redirects.', - 'jsonld.TooManyRedirects', { - code: 'loading document failed', - url: url, - httpStatusCode: res.statusCode, - redirects: redirects - }), doc); - } - if(redirects.indexOf(url) !== -1) { - return callback(new JsonLdError( - 'URL could not be dereferenced; infinite redirection was detected.', - 'jsonld.InfiniteRedirectDetected', { - code: 'recursive context inclusion', - url: url, - httpStatusCode: res.statusCode, - redirects: redirects - }), doc); - } - redirects.push(url); - return loadDocument(res.headers.location, redirects, callback); - } - // cache for each redirected URL - redirects.push(url); - // TODO: disable cache until HTTP caching implemented - /*for(var i = 0; i < redirects.length; ++i) { - cache.set( - redirects[i], - 
{contextUrl: null, documentUrl: redirects[i], document: body}); - }*/ - callback(err, doc); - } - } -}; - -/** - * Creates a built-in XMLHttpRequest document loader. - * - * @param options the options to use: - * secure: require all URLs to use HTTPS. - * headers: an object (map) of headers which will be passed as request - * headers for the requested document. Accept is not allowed. - * usePromise: true to use a promises API, false for a - * callback-continuation-style API; defaults to true if Promise - * is globally defined, false if not. - * [xhr]: the XMLHttpRequest API to use. - * - * @return the XMLHttpRequest document loader. - */ -jsonld.documentLoaders.xhr = function(options) { - options = options || {}; - var rlink = /(^|(\r\n))link:/i; - var queue = new jsonld.RequestQueue(); - var headers = buildHeaders(options.headers); - - // use option or, by default, use Promise when its defined - var usePromise = ('usePromise' in options ? - options.usePromise : (typeof Promise !== 'undefined')); - if(usePromise) { - return queue.wrapLoader(function(url) { - return jsonld.promisify(loader, url); - }); - } - return queue.wrapLoader(loader); - - function loader(url, callback) { - if(url.indexOf('http:') !== 0 && url.indexOf('https:') !== 0) { - return callback(new JsonLdError( - 'URL could not be dereferenced; only "http" and "https" URLs are ' + - 'supported.', - 'jsonld.InvalidUrl', {code: 'loading document failed', url: url}), - {contextUrl: null, documentUrl: url, document: null}); - } - if(options.secure && url.indexOf('https') !== 0) { - return callback(new JsonLdError( - 'URL could not be dereferenced; secure mode is enabled and ' + - 'the URL\'s scheme is not "https".', - 'jsonld.InvalidUrl', {code: 'loading document failed', url: url}), - {contextUrl: null, documentUrl: url, document: null}); - } - var xhr = options.xhr || XMLHttpRequest; - var req = new xhr(); - req.onload = function() { - if(req.status >= 400) { - return callback(new JsonLdError( - 'URL could not be dereferenced: ' + req.statusText, - 'jsonld.LoadDocumentError', { - code: 'loading document failed', - url: url, - httpStatusCode: req.status - }), {contextUrl: null, documentUrl: url, document: null}); - } - - var doc = {contextUrl: null, documentUrl: url, document: req.response}; - - // handle Link Header (avoid unsafe header warning by existence testing) - var contentType = req.getResponseHeader('Content-Type'); - var linkHeader; - if(rlink.test(req.getAllResponseHeaders())) { - linkHeader = req.getResponseHeader('Link'); - } - if(linkHeader && contentType !== 'application/ld+json') { - // only 1 related link header permitted - linkHeader = jsonld.parseLinkHeader(linkHeader)[LINK_HEADER_REL]; - if(_isArray(linkHeader)) { - return callback(new JsonLdError( - 'URL could not be dereferenced, it has more than one ' + - 'associated HTTP Link Header.', - 'jsonld.InvalidUrl', - {code: 'multiple context link headers', url: url}), doc); - } - if(linkHeader) { - doc.contextUrl = linkHeader.target; - } - } - - callback(null, doc); - }; - req.onerror = function() { - callback(new JsonLdError( - 'URL could not be dereferenced, an error occurred.', - 'jsonld.LoadDocumentError', - {code: 'loading document failed', url: url}), - {contextUrl: null, documentUrl: url, document: null}); - }; - req.open('GET', url, true); - - for(var k in headers) { - req.setRequestHeader(k, headers[k]); - } - - req.send(); - } -}; - -/** - * Assigns the default document loader for external document URLs to a built-in - * default. 
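 *
 * A minimal selection sketch, using options documented on the loaders above:
 *
 *   jsonld.useDocumentLoader('node', {secure: true, strictSSL: true});
 *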
Supported types currently include: 'jquery' and 'node'. - * - * To use the jquery document loader, the first parameter must be a reference - * to the main jquery object. - * - * @param type the type to set. - * @param [params] the parameters required to use the document loader. - */ -jsonld.useDocumentLoader = function(type) { - if(!(type in jsonld.documentLoaders)) { - throw new JsonLdError( - 'Unknown document loader type: "' + type + '"', - 'jsonld.UnknownDocumentLoader', - {type: type}); - } - - // set document loader - jsonld.documentLoader = jsonld.documentLoaders[type].apply( - jsonld, Array.prototype.slice.call(arguments, 1)); -}; - -/** - * Processes a local context, resolving any URLs as necessary, and returns a - * new active context in its callback. - * - * @param activeCtx the current active context. - * @param localCtx the local context to process. - * @param [options] the options to use: - * [documentLoader(url, callback(err, remoteDoc))] the document loader. - * @param callback(err, ctx) called once the operation completes. - */ -jsonld.processContext = function(activeCtx, localCtx) { - // get arguments - var options = {}; - var callbackArg = 2; - if(arguments.length > 3) { - options = arguments[2] || {}; - callbackArg += 1; - } - var callback = arguments[callbackArg]; - - // set default options - if(!('base' in options)) { - options.base = ''; - } - if(!('documentLoader' in options)) { - options.documentLoader = jsonld.loadDocument; - } - - // return initial context early for null context - if(localCtx === null) { - return callback(null, _getInitialContext(options)); - } - - // retrieve URLs in localCtx - localCtx = _clone(localCtx); - if(!(_isObject(localCtx) && '@context' in localCtx)) { - localCtx = {'@context': localCtx}; - } - _retrieveContextUrls(localCtx, options, function(err, ctx) { - if(err) { - return callback(err); - } - try { - // process context - ctx = new Processor().processContext(activeCtx, ctx, options); - } catch(ex) { - return callback(ex); - } - callback(null, ctx); - }); -}; - -/** - * Returns true if the given subject has the given property. - * - * @param subject the subject to check. - * @param property the property to look for. - * - * @return true if the subject has the given property, false if not. - */ -jsonld.hasProperty = function(subject, property) { - var rval = false; - if(property in subject) { - var value = subject[property]; - rval = (!_isArray(value) || value.length > 0); - } - return rval; -}; - -/** - * Determines if the given value is a property of the given subject. - * - * @param subject the subject to check. - * @param property the property to check. - * @param value the value to check. - * - * @return true if the value exists, false if not. - */ -jsonld.hasValue = function(subject, property, value) { - var rval = false; - if(jsonld.hasProperty(subject, property)) { - var val = subject[property]; - var isList = _isList(val); - if(_isArray(val) || isList) { - if(isList) { - val = val['@list']; - } - for(var i = 0; i < val.length; ++i) { - if(jsonld.compareValues(value, val[i])) { - rval = true; - break; - } - } - } else if(!_isArray(value)) { - // avoid matching the set of values with an array value parameter - rval = jsonld.compareValues(value, val); - } - } - return rval; -}; - -/** - * Adds a value to a subject. If the value is an array, all values in the - * array will be added. - * - * @param subject the subject to add the value to. - * @param property the property that relates the value to the subject. 
- * @param value the value to add. - * @param [options] the options to use: - * [propertyIsArray] true if the property is always an array, false - * if not (default: false). - * [allowDuplicate] true to allow duplicates, false not to (uses a - * simple shallow comparison of subject ID or value) (default: true). - */ -jsonld.addValue = function(subject, property, value, options) { - options = options || {}; - if(!('propertyIsArray' in options)) { - options.propertyIsArray = false; - } - if(!('allowDuplicate' in options)) { - options.allowDuplicate = true; - } - - if(_isArray(value)) { - if(value.length === 0 && options.propertyIsArray && - !(property in subject)) { - subject[property] = []; - } - for(var i = 0; i < value.length; ++i) { - jsonld.addValue(subject, property, value[i], options); - } - } else if(property in subject) { - // check if subject already has value if duplicates not allowed - var hasValue = (!options.allowDuplicate && - jsonld.hasValue(subject, property, value)); - - // make property an array if value not present or always an array - if(!_isArray(subject[property]) && - (!hasValue || options.propertyIsArray)) { - subject[property] = [subject[property]]; - } - - // add new value - if(!hasValue) { - subject[property].push(value); - } - } else { - // add new value as set or single value - subject[property] = options.propertyIsArray ? [value] : value; - } -}; - -/** - * Gets all of the values for a subject's property as an array. - * - * @param subject the subject. - * @param property the property. - * - * @return all of the values for a subject's property as an array. - */ -jsonld.getValues = function(subject, property) { - var rval = subject[property] || []; - if(!_isArray(rval)) { - rval = [rval]; - } - return rval; -}; - -/** - * Removes a property from a subject. - * - * @param subject the subject. - * @param property the property. - */ -jsonld.removeProperty = function(subject, property) { - delete subject[property]; -}; - -/** - * Removes a value from a subject. - * - * @param subject the subject. - * @param property the property that relates the value to the subject. - * @param value the value to remove. - * @param [options] the options to use: - * [propertyIsArray] true if the property is always an array, false - * if not (default: false). - */ -jsonld.removeValue = function(subject, property, value, options) { - options = options || {}; - if(!('propertyIsArray' in options)) { - options.propertyIsArray = false; - } - - // filter out value - var values = jsonld.getValues(subject, property).filter(function(e) { - return !jsonld.compareValues(e, value); - }); - - if(values.length === 0) { - jsonld.removeProperty(subject, property); - } else if(values.length === 1 && !options.propertyIsArray) { - subject[property] = values[0]; - } else { - subject[property] = values; - } -}; - -/** - * Compares two JSON-LD values for equality. Two JSON-LD values will be - * considered equal if: - * - * 1. They are both primitives of the same type and value. - * 2. They are both @values with the same @value, @type, @language, - * and @index, OR - * 3. They both have @ids they are the same. - * - * @param v1 the first value. - * @param v2 the second value. - * - * @return true if v1 and v2 are considered equal, false if not. - */ -jsonld.compareValues = function(v1, v2) { - // 1. equal primitives - if(v1 === v2) { - return true; - } - - // 2. 
equal @values - if(_isValue(v1) && _isValue(v2) && - v1['@value'] === v2['@value'] && - v1['@type'] === v2['@type'] && - v1['@language'] === v2['@language'] && - v1['@index'] === v2['@index']) { - return true; - } - - // 3. equal @ids - if(_isObject(v1) && ('@id' in v1) && _isObject(v2) && ('@id' in v2)) { - return v1['@id'] === v2['@id']; - } - - return false; -}; - -/** - * Gets the value for the given active context key and type, null if none is - * set. - * - * @param ctx the active context. - * @param key the context key. - * @param [type] the type of value to get (eg: '@id', '@type'), if not - * specified gets the entire entry for a key, null if not found. - * - * @return the value. - */ -jsonld.getContextValue = function(ctx, key, type) { - var rval = null; - - // return null for invalid key - if(key === null) { - return rval; - } - - // get default language - if(type === '@language' && (type in ctx)) { - rval = ctx[type]; - } - - // get specific entry information - if(ctx.mappings[key]) { - var entry = ctx.mappings[key]; - - if(_isUndefined(type)) { - // return whole entry - rval = entry; - } else if(type in entry) { - // return entry value for type - rval = entry[type]; - } - } - - return rval; -}; - -/** Registered RDF dataset parsers hashed by content-type. */ -var _rdfParsers = {}; - -/** - * Registers an RDF dataset parser by content-type, for use with - * jsonld.fromRDF. An RDF dataset parser will always be given two parameters, - * a string of input and a callback. An RDF dataset parser can be synchronous - * or asynchronous. - * - * If the parser function returns undefined or null then it will be assumed to - * be asynchronous w/a continuation-passing style and the callback parameter - * given to the parser MUST be invoked. - * - * If it returns a Promise, then it will be assumed to be asynchronous, but the - * callback parameter MUST NOT be invoked. It should instead be ignored. - * - * If it returns an RDF dataset, it will be assumed to be synchronous and the - * callback parameter MUST NOT be invoked. It should instead be ignored. - * - * @param contentType the content-type for the parser. - * @param parser(input, callback(err, dataset)) the parser function (takes a - * string as a parameter and either returns null/undefined and uses - * the given callback, returns a Promise, or returns an RDF dataset). - */ -jsonld.registerRDFParser = function(contentType, parser) { - _rdfParsers[contentType] = parser; -}; - -/** - * Unregisters an RDF dataset parser by content-type. - * - * @param contentType the content-type for the parser. 
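 *
 * A minimal registration/unregistration sketch (`parseNQuads` is a
 * hypothetical synchronous parser that returns an RDF dataset, matching the
 * contract described for jsonld.registerRDFParser above):
 *
 *   jsonld.registerRDFParser('application/nquads', function(input) {
 *     return parseNQuads(input);
 *   });
 *   // ...and later, to remove it again:
 *   jsonld.unregisterRDFParser('application/nquads');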
- */ -jsonld.unregisterRDFParser = function(contentType) { - delete _rdfParsers[contentType]; -}; - -if(_nodejs) { - // needed for serialization of XML literals - if(typeof XMLSerializer === 'undefined') { - var XMLSerializer = null; - } - if(typeof Node === 'undefined') { - var Node = { - ELEMENT_NODE: 1, - ATTRIBUTE_NODE: 2, - TEXT_NODE: 3, - CDATA_SECTION_NODE: 4, - ENTITY_REFERENCE_NODE: 5, - ENTITY_NODE: 6, - PROCESSING_INSTRUCTION_NODE: 7, - COMMENT_NODE: 8, - DOCUMENT_NODE: 9, - DOCUMENT_TYPE_NODE: 10, - DOCUMENT_FRAGMENT_NODE: 11, - NOTATION_NODE:12 - }; - } -} - -// constants -var XSD_BOOLEAN = 'http://www.w3.org/2001/XMLSchema#boolean'; -var XSD_DOUBLE = 'http://www.w3.org/2001/XMLSchema#double'; -var XSD_INTEGER = 'http://www.w3.org/2001/XMLSchema#integer'; -var XSD_STRING = 'http://www.w3.org/2001/XMLSchema#string'; - -var RDF = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#'; -var RDF_LIST = RDF + 'List'; -var RDF_FIRST = RDF + 'first'; -var RDF_REST = RDF + 'rest'; -var RDF_NIL = RDF + 'nil'; -var RDF_TYPE = RDF + 'type'; -var RDF_PLAIN_LITERAL = RDF + 'PlainLiteral'; -var RDF_XML_LITERAL = RDF + 'XMLLiteral'; -var RDF_OBJECT = RDF + 'object'; -var RDF_LANGSTRING = RDF + 'langString'; - -var LINK_HEADER_REL = 'http://www.w3.org/ns/json-ld#context'; -var MAX_CONTEXT_URLS = 10; - -/** - * A JSON-LD Error. - * - * @param msg the error message. - * @param type the error type. - * @param details the error details. - */ -var JsonLdError = function(msg, type, details) { - if(_nodejs) { - Error.call(this); - Error.captureStackTrace(this, this.constructor); - } else if(typeof Error !== 'undefined') { - this.stack = (new Error()).stack; - } - this.name = type || 'jsonld.Error'; - this.message = msg || 'An unspecified JSON-LD error occurred.'; - this.details = details || {}; -}; -if(_nodejs) { - require('util').inherits(JsonLdError, Error); -} else if(typeof Error !== 'undefined') { - JsonLdError.prototype = new Error(); -} - -/** - * Constructs a new JSON-LD Processor. - */ -var Processor = function() {}; - -/** - * Recursively compacts an element using the given active context. All values - * must be in expanded form before this method is called. - * - * @param activeCtx the active context to use. - * @param activeProperty the compacted property associated with the element - * to compact, null for none. - * @param element the element to compact. - * @param options the compaction options. - * - * @return the compacted value. 
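 *
 * A small sketch of the public entry point that drives this method
 * (`expanded` stands for a document already in expanded form):
 *
 *   jsonld.compact(expanded, {'@context': {name: 'http://schema.org/name'}},
 *     function(err, compacted) {
 *       // compacted uses the term "name" in place of the full IRI
 *     });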
- */ -Processor.prototype.compact = function( - activeCtx, activeProperty, element, options) { - // recursively compact array - if(_isArray(element)) { - var rval = []; - for(var i = 0; i < element.length; ++i) { - // compact, dropping any null values - var compacted = this.compact( - activeCtx, activeProperty, element[i], options); - if(compacted !== null) { - rval.push(compacted); - } - } - if(options.compactArrays && rval.length === 1) { - // use single element if no container is specified - var container = jsonld.getContextValue( - activeCtx, activeProperty, '@container'); - if(container === null) { - rval = rval[0]; - } - } - return rval; - } - - // recursively compact object - if(_isObject(element)) { - if(options.link && '@id' in element && element['@id'] in options.link) { - // check for a linked element to reuse - var linked = options.link[element['@id']]; - for(var i = 0; i < linked.length; ++i) { - if(linked[i].expanded === element) { - return linked[i].compacted; - } - } - } - - // do value compaction on @values and subject references - if(_isValue(element) || _isSubjectReference(element)) { - var rval = _compactValue(activeCtx, activeProperty, element); - if(options.link && _isSubjectReference(element)) { - // store linked element - if(!(element['@id'] in options.link)) { - options.link[element['@id']] = []; - } - options.link[element['@id']].push({expanded: element, compacted: rval}); - } - return rval; - } - - // FIXME: avoid misuse of active property as an expanded property? - var insideReverse = (activeProperty === '@reverse'); - - var rval = {}; - - if(options.link && '@id' in element) { - // store linked element - if(!(element['@id'] in options.link)) { - options.link[element['@id']] = []; - } - options.link[element['@id']].push({expanded: element, compacted: rval}); - } - - // process element keys in order - var keys = Object.keys(element).sort(); - for(var ki = 0; ki < keys.length; ++ki) { - var expandedProperty = keys[ki]; - var expandedValue = element[expandedProperty]; - - // compact @id and @type(s) - if(expandedProperty === '@id' || expandedProperty === '@type') { - var compactedValue; - - // compact single @id - if(_isString(expandedValue)) { - compactedValue = _compactIri( - activeCtx, expandedValue, null, - {vocab: (expandedProperty === '@type')}); - } else { - // expanded value must be a @type array - compactedValue = []; - for(var vi = 0; vi < expandedValue.length; ++vi) { - compactedValue.push(_compactIri( - activeCtx, expandedValue[vi], null, {vocab: true})); - } - } - - // use keyword alias and add value - var alias = _compactIri(activeCtx, expandedProperty); - var isArray = (_isArray(compactedValue) && expandedValue.length === 0); - jsonld.addValue( - rval, alias, compactedValue, {propertyIsArray: isArray}); - continue; - } - - // handle @reverse - if(expandedProperty === '@reverse') { - // recursively compact expanded value - var compactedValue = this.compact( - activeCtx, '@reverse', expandedValue, options); - - // handle double-reversed properties - for(var compactedProperty in compactedValue) { - if(activeCtx.mappings[compactedProperty] && - activeCtx.mappings[compactedProperty].reverse) { - var value = compactedValue[compactedProperty]; - var container = jsonld.getContextValue( - activeCtx, compactedProperty, '@container'); - var useArray = (container === '@set' || !options.compactArrays); - jsonld.addValue( - rval, compactedProperty, value, {propertyIsArray: useArray}); - delete compactedValue[compactedProperty]; - } - } - - 
if(Object.keys(compactedValue).length > 0) { - // use keyword alias and add value - var alias = _compactIri(activeCtx, expandedProperty); - jsonld.addValue(rval, alias, compactedValue); - } - - continue; - } - - // handle @index property - if(expandedProperty === '@index') { - // drop @index if inside an @index container - var container = jsonld.getContextValue( - activeCtx, activeProperty, '@container'); - if(container === '@index') { - continue; - } - - // use keyword alias and add value - var alias = _compactIri(activeCtx, expandedProperty); - jsonld.addValue(rval, alias, expandedValue); - continue; - } - - // skip array processing for keywords that aren't @graph or @list - if(expandedProperty !== '@graph' && expandedProperty !== '@list' && - _isKeyword(expandedProperty)) { - // use keyword alias and add value as is - var alias = _compactIri(activeCtx, expandedProperty); - jsonld.addValue(rval, alias, expandedValue); - continue; - } - - // Note: expanded value must be an array due to expansion algorithm. - - // preserve empty arrays - if(expandedValue.length === 0) { - var itemActiveProperty = _compactIri( - activeCtx, expandedProperty, expandedValue, {vocab: true}, - insideReverse); - jsonld.addValue( - rval, itemActiveProperty, expandedValue, {propertyIsArray: true}); - } - - // recusively process array values - for(var vi = 0; vi < expandedValue.length; ++vi) { - var expandedItem = expandedValue[vi]; - - // compact property and get container type - var itemActiveProperty = _compactIri( - activeCtx, expandedProperty, expandedItem, {vocab: true}, - insideReverse); - var container = jsonld.getContextValue( - activeCtx, itemActiveProperty, '@container'); - - // get @list value if appropriate - var isList = _isList(expandedItem); - var list = null; - if(isList) { - list = expandedItem['@list']; - } - - // recursively compact expanded item - var compactedItem = this.compact( - activeCtx, itemActiveProperty, isList ? list : expandedItem, options); - - // handle @list - if(isList) { - // ensure @list value is an array - if(!_isArray(compactedItem)) { - compactedItem = [compactedItem]; - } - - if(container !== '@list') { - // wrap using @list alias - var wrapper = {}; - wrapper[_compactIri(activeCtx, '@list')] = compactedItem; - compactedItem = wrapper; - - // include @index from expanded @list, if any - if('@index' in expandedItem) { - compactedItem[_compactIri(activeCtx, '@index')] = - expandedItem['@index']; - } - } else if(itemActiveProperty in rval) { - // can't use @list container for more than 1 list - throw new JsonLdError( - 'JSON-LD compact error; property has a "@list" @container ' + - 'rule but there is more than a single @list that matches ' + - 'the compacted term in the document. 
Compaction might mix ' + - 'unwanted items into the list.', - 'jsonld.SyntaxError', {code: 'compaction to list of lists'}); - } - } - - // handle language and index maps - if(container === '@language' || container === '@index') { - // get or create the map object - var mapObject; - if(itemActiveProperty in rval) { - mapObject = rval[itemActiveProperty]; - } else { - rval[itemActiveProperty] = mapObject = {}; - } - - // if container is a language map, simplify compacted value to - // a simple string - if(container === '@language' && _isValue(compactedItem)) { - compactedItem = compactedItem['@value']; - } - - // add compact value to map object using key from expanded value - // based on the container type - jsonld.addValue(mapObject, expandedItem[container], compactedItem); - } else { - // use an array if: compactArrays flag is false, - // @container is @set or @list , value is an empty - // array, or key is @graph - var isArray = (!options.compactArrays || container === '@set' || - container === '@list' || - (_isArray(compactedItem) && compactedItem.length === 0) || - expandedProperty === '@list' || expandedProperty === '@graph'); - - // add compact value - jsonld.addValue( - rval, itemActiveProperty, compactedItem, - {propertyIsArray: isArray}); - } - } - } - - return rval; - } - - // only primitives remain which are already compact - return element; -}; - -/** - * Recursively expands an element using the given context. Any context in - * the element will be removed. All context URLs must have been retrieved - * before calling this method. - * - * @param activeCtx the context to use. - * @param activeProperty the property for the element, null for none. - * @param element the element to expand. - * @param options the expansion options. - * @param insideList true if the element is a list, false if not. - * - * @return the expanded value. 
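 *
 * A small input/output sketch via the public wrapper (values illustrative):
 *
 *   jsonld.expand(
 *     {'@context': {name: 'http://schema.org/name'}, name: 'Jane'},
 *     function(err, expanded) {
 *       // expanded: [{'http://schema.org/name': [{'@value': 'Jane'}]}]
 *     });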
- */ -Processor.prototype.expand = function( - activeCtx, activeProperty, element, options, insideList) { - var self = this; - - // nothing to expand - if(element === null || element === undefined) { - return null; - } - - if(!_isArray(element) && !_isObject(element)) { - // drop free-floating scalars that are not in lists - if(!insideList && (activeProperty === null || - _expandIri(activeCtx, activeProperty, {vocab: true}) === '@graph')) { - return null; - } - - // expand element according to value expansion rules - return _expandValue(activeCtx, activeProperty, element); - } - - // recursively expand array - if(_isArray(element)) { - var rval = []; - var container = jsonld.getContextValue( - activeCtx, activeProperty, '@container'); - insideList = insideList || container === '@list'; - for(var i = 0; i < element.length; ++i) { - // expand element - var e = self.expand(activeCtx, activeProperty, element[i], options); - if(insideList && (_isArray(e) || _isList(e))) { - // lists of lists are illegal - throw new JsonLdError( - 'Invalid JSON-LD syntax; lists of lists are not permitted.', - 'jsonld.SyntaxError', {code: 'list of lists'}); - } - // drop null values - if(e !== null) { - if(_isArray(e)) { - rval = rval.concat(e); - } else { - rval.push(e); - } - } - } - return rval; - } - - // recursively expand object: - - // if element has a context, process it - if('@context' in element) { - activeCtx = self.processContext(activeCtx, element['@context'], options); - } - - // expand the active property - var expandedActiveProperty = _expandIri( - activeCtx, activeProperty, {vocab: true}); - - var rval = {}; - var keys = Object.keys(element).sort(); - for(var ki = 0; ki < keys.length; ++ki) { - var key = keys[ki]; - var value = element[key]; - var expandedValue; - - // skip @context - if(key === '@context') { - continue; - } - - // expand property - var expandedProperty = _expandIri(activeCtx, key, {vocab: true}); - - // drop non-absolute IRI keys that aren't keywords - if(expandedProperty === null || - !(_isAbsoluteIri(expandedProperty) || _isKeyword(expandedProperty))) { - continue; - } - - if(_isKeyword(expandedProperty)) { - if(expandedActiveProperty === '@reverse') { - throw new JsonLdError( - 'Invalid JSON-LD syntax; a keyword cannot be used as a @reverse ' + - 'property.', 'jsonld.SyntaxError', - {code: 'invalid reverse property map', value: value}); - } - if(expandedProperty in rval) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; colliding keywords detected.', - 'jsonld.SyntaxError', - {code: 'colliding keywords', keyword: expandedProperty}); - } - } - - // syntax error if @id is not a string - if(expandedProperty === '@id' && !_isString(value)) { - if(!options.isFrame) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; "@id" value must a string.', - 'jsonld.SyntaxError', {code: 'invalid @id value', value: value}); - } - if(!_isObject(value)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; "@id" value must be a string or an ' + - 'object.', 'jsonld.SyntaxError', - {code: 'invalid @id value', value: value}); - } - } - - if(expandedProperty === '@type') { - _validateTypeValue(value); - } - - // @graph must be an array or an object - if(expandedProperty === '@graph' && - !(_isObject(value) || _isArray(value))) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; "@graph" value must not be an ' + - 'object or an array.', - 'jsonld.SyntaxError', {code: 'invalid @graph value', value: value}); - } - - // @value must not be an object or an array - if(expandedProperty === '@value' 
&& - (_isObject(value) || _isArray(value))) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; "@value" value must not be an ' + - 'object or an array.', - 'jsonld.SyntaxError', - {code: 'invalid value object value', value: value}); - } - - // @language must be a string - if(expandedProperty === '@language') { - if(value === null) { - // drop null @language values, they expand as if they didn't exist - continue; - } - if(!_isString(value)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; "@language" value must be a string.', - 'jsonld.SyntaxError', - {code: 'invalid language-tagged string', value: value}); - } - // ensure language value is lowercase - value = value.toLowerCase(); - } - - // @index must be a string - if(expandedProperty === '@index') { - if(!_isString(value)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; "@index" value must be a string.', - 'jsonld.SyntaxError', - {code: 'invalid @index value', value: value}); - } - } - - // @reverse must be an object - if(expandedProperty === '@reverse') { - if(!_isObject(value)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; "@reverse" value must be an object.', - 'jsonld.SyntaxError', {code: 'invalid @reverse value', value: value}); - } - - expandedValue = self.expand(activeCtx, '@reverse', value, options); - - // properties double-reversed - if('@reverse' in expandedValue) { - for(var property in expandedValue['@reverse']) { - jsonld.addValue( - rval, property, expandedValue['@reverse'][property], - {propertyIsArray: true}); - } - } - - // FIXME: can this be merged with code below to simplify? - // merge in all reversed properties - var reverseMap = rval['@reverse'] || null; - for(var property in expandedValue) { - if(property === '@reverse') { - continue; - } - if(reverseMap === null) { - reverseMap = rval['@reverse'] = {}; - } - jsonld.addValue(reverseMap, property, [], {propertyIsArray: true}); - var items = expandedValue[property]; - for(var ii = 0; ii < items.length; ++ii) { - var item = items[ii]; - if(_isValue(item) || _isList(item)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; "@reverse" value must not be a ' + - '@value or an @list.', 'jsonld.SyntaxError', - {code: 'invalid reverse property value', value: expandedValue}); - } - jsonld.addValue( - reverseMap, property, item, {propertyIsArray: true}); - } - } - - continue; - } - - var container = jsonld.getContextValue(activeCtx, key, '@container'); - - if(container === '@language' && _isObject(value)) { - // handle language map container (skip if value is not an object) - expandedValue = _expandLanguageMap(value); - } else if(container === '@index' && _isObject(value)) { - // handle index container (skip if value is not an object) - expandedValue = (function _expandIndexMap(activeProperty) { - var rval = []; - var keys = Object.keys(value).sort(); - for(var ki = 0; ki < keys.length; ++ki) { - var key = keys[ki]; - var val = value[key]; - if(!_isArray(val)) { - val = [val]; - } - val = self.expand(activeCtx, activeProperty, val, options, false); - for(var vi = 0; vi < val.length; ++vi) { - var item = val[vi]; - if(!('@index' in item)) { - item['@index'] = key; - } - rval.push(item); - } - } - return rval; - })(key); - } else { - // recurse into @list or @set - var isList = (expandedProperty === '@list'); - if(isList || expandedProperty === '@set') { - var nextActiveProperty = activeProperty; - if(isList && expandedActiveProperty === '@graph') { - nextActiveProperty = null; - } - expandedValue = self.expand( - activeCtx, nextActiveProperty, value, 
options, isList); - if(isList && _isList(expandedValue)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; lists of lists are not permitted.', - 'jsonld.SyntaxError', {code: 'list of lists'}); - } - } else { - // recursively expand value with key as new active property - expandedValue = self.expand(activeCtx, key, value, options, false); - } - } - - // drop null values if property is not @value - if(expandedValue === null && expandedProperty !== '@value') { - continue; - } - - // convert expanded value to @list if container specifies it - if(expandedProperty !== '@list' && !_isList(expandedValue) && - container === '@list') { - // ensure expanded value is an array - expandedValue = (_isArray(expandedValue) ? - expandedValue : [expandedValue]); - expandedValue = {'@list': expandedValue}; - } - - // FIXME: can this be merged with code above to simplify? - // merge in reverse properties - if(activeCtx.mappings[key] && activeCtx.mappings[key].reverse) { - var reverseMap = rval['@reverse'] = rval['@reverse'] || {}; - if(!_isArray(expandedValue)) { - expandedValue = [expandedValue]; - } - for(var ii = 0; ii < expandedValue.length; ++ii) { - var item = expandedValue[ii]; - if(_isValue(item) || _isList(item)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; "@reverse" value must not be a ' + - '@value or an @list.', 'jsonld.SyntaxError', - {code: 'invalid reverse property value', value: expandedValue}); - } - jsonld.addValue( - reverseMap, expandedProperty, item, {propertyIsArray: true}); - } - continue; - } - - // add value for property - // use an array except for certain keywords - var useArray = - ['@index', '@id', '@type', '@value', '@language'].indexOf( - expandedProperty) === -1; - jsonld.addValue( - rval, expandedProperty, expandedValue, {propertyIsArray: useArray}); - } - - // get property count on expanded output - keys = Object.keys(rval); - var count = keys.length; - - if('@value' in rval) { - // @value must only have @language or @type - if('@type' in rval && '@language' in rval) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; an element containing "@value" may not ' + - 'contain both "@type" and "@language".', - 'jsonld.SyntaxError', {code: 'invalid value object', element: rval}); - } - var validCount = count - 1; - if('@type' in rval) { - validCount -= 1; - } - if('@index' in rval) { - validCount -= 1; - } - if('@language' in rval) { - validCount -= 1; - } - if(validCount !== 0) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; an element containing "@value" may only ' + - 'have an "@index" property and at most one other property ' + - 'which can be "@type" or "@language".', - 'jsonld.SyntaxError', {code: 'invalid value object', element: rval}); - } - // drop null @values - if(rval['@value'] === null) { - rval = null; - } else if('@language' in rval && !_isString(rval['@value'])) { - // if @language is present, @value must be a string - throw new JsonLdError( - 'Invalid JSON-LD syntax; only strings may be language-tagged.', - 'jsonld.SyntaxError', - {code: 'invalid language-tagged value', element: rval}); - } else if('@type' in rval && (!_isAbsoluteIri(rval['@type']) || - rval['@type'].indexOf('_:') === 0)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; an element containing "@value" and "@type" ' + - 'must have an absolute IRI for the value of "@type".', - 'jsonld.SyntaxError', {code: 'invalid typed value', element: rval}); - } - } else if('@type' in rval && !_isArray(rval['@type'])) { - // convert @type to an array - rval['@type'] = [rval['@type']]; - } 
else if('@set' in rval || '@list' in rval) { - // handle @set and @list - if(count > 1 && !(count === 2 && '@index' in rval)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; if an element has the property "@set" ' + - 'or "@list", then it can have at most one other property that is ' + - '"@index".', 'jsonld.SyntaxError', - {code: 'invalid set or list object', element: rval}); - } - // optimize away @set - if('@set' in rval) { - rval = rval['@set']; - keys = Object.keys(rval); - count = keys.length; - } - } else if(count === 1 && '@language' in rval) { - // drop objects with only @language - rval = null; - } - - // drop certain top-level objects that do not occur in lists - if(_isObject(rval) && - !options.keepFreeFloatingNodes && !insideList && - (activeProperty === null || expandedActiveProperty === '@graph')) { - // drop empty object, top-level @value/@list, or object with only @id - if(count === 0 || '@value' in rval || '@list' in rval || - (count === 1 && '@id' in rval)) { - rval = null; - } - } - - return rval; -}; - -/** - * Creates a JSON-LD node map (node ID => node). - * - * @param input the expanded JSON-LD to create a node map of. - * @param [options] the options to use: - * [issuer] a jsonld.IdentifierIssuer to use to label blank nodes. - * [namer] (deprecated). - * - * @return the node map. - */ -Processor.prototype.createNodeMap = function(input, options) { - options = options || {}; - - // produce a map of all subjects and name each bnode - var issuer = options.namer || options.issuer || new IdentifierIssuer('_:b'); - var graphs = {'@default': {}}; - _createNodeMap(input, graphs, '@default', issuer); - - // add all non-default graphs to default graph - return _mergeNodeMaps(graphs); -}; - -/** - * Performs JSON-LD flattening. - * - * @param input the expanded JSON-LD to flatten. - * - * @return the flattened output. - */ -Processor.prototype.flatten = function(input) { - var defaultGraph = this.createNodeMap(input); - - // produce flattened output - var flattened = []; - var keys = Object.keys(defaultGraph).sort(); - for(var ki = 0; ki < keys.length; ++ki) { - var node = defaultGraph[keys[ki]]; - // only add full subjects to top-level - if(!_isSubjectReference(node)) { - flattened.push(node); - } - } - return flattened; -}; - -/** - * Performs JSON-LD framing. - * - * @param input the expanded JSON-LD to frame. - * @param frame the expanded JSON-LD frame to use. - * @param options the framing options. - * - * @return the framed output. - */ -Processor.prototype.frame = function(input, frame, options) { - // create framing state - var state = { - options: options, - graphs: {'@default': {}, '@merged': {}}, - subjectStack: [], - link: {} - }; - - // produce a map of all graphs and name each bnode - // FIXME: currently uses subjects from @merged graph only - var issuer = new IdentifierIssuer('_:b'); - _createNodeMap(input, state.graphs, '@merged', issuer); - state.subjects = state.graphs['@merged']; - - // frame the subjects - var framed = []; - _frame(state, Object.keys(state.subjects).sort(), frame, framed, null); - return framed; -}; - -/** - * Performs normalization on the given RDF dataset. - * - * @param dataset the RDF dataset to normalize. - * @param options the normalization options. - * @param callback(err, normalized) called once the operation completes. 
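 *
 * A minimal sketch of the public wrapper that feeds this method (`doc` stands
 * for any JSON-LD document; the options shown are the commonly used ones):
 *
 *   jsonld.normalize(doc, {
 *     algorithm: 'URDNA2015',
 *     format: 'application/nquads'
 *   }, function(err, canonical) {
 *     // canonical is a canonical N-Quads serialization of doc
 *   });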
- */ -Processor.prototype.normalize = function(dataset, options, callback) { - if(options.algorithm === 'URDNA2015') { - return new URDNA2015(options).main(dataset, callback); - } - if(options.algorithm === 'URGNA2012') { - return new URGNA2012(options).main(dataset, callback); - } - callback(new Error( - 'Invalid RDF Dataset Normalization algorithm: ' + options.algorithm)); -}; - -/** - * Converts an RDF dataset to JSON-LD. - * - * @param dataset the RDF dataset. - * @param options the RDF serialization options. - * @param callback(err, output) called once the operation completes. - */ -Processor.prototype.fromRDF = function(dataset, options, callback) { - var defaultGraph = {}; - var graphMap = {'@default': defaultGraph}; - var referencedOnce = {}; - - for(var name in dataset) { - var graph = dataset[name]; - if(!(name in graphMap)) { - graphMap[name] = {}; - } - if(name !== '@default' && !(name in defaultGraph)) { - defaultGraph[name] = {'@id': name}; - } - var nodeMap = graphMap[name]; - for(var ti = 0; ti < graph.length; ++ti) { - var triple = graph[ti]; - - // get subject, predicate, object - var s = triple.subject.value; - var p = triple.predicate.value; - var o = triple.object; - - if(!(s in nodeMap)) { - nodeMap[s] = {'@id': s}; - } - var node = nodeMap[s]; - - var objectIsId = (o.type === 'IRI' || o.type === 'blank node'); - if(objectIsId && !(o.value in nodeMap)) { - nodeMap[o.value] = {'@id': o.value}; - } - - if(p === RDF_TYPE && !options.useRdfType && objectIsId) { - jsonld.addValue(node, '@type', o.value, {propertyIsArray: true}); - continue; - } - - var value = _RDFToObject(o, options.useNativeTypes); - jsonld.addValue(node, p, value, {propertyIsArray: true}); - - // object may be an RDF list/partial list node but we can't know easily - // until all triples are read - if(objectIsId) { - if(o.value === RDF_NIL) { - // track rdf:nil uniquely per graph - var object = nodeMap[o.value]; - if(!('usages' in object)) { - object.usages = []; - } - object.usages.push({ - node: node, - property: p, - value: value - }); - } else if(o.value in referencedOnce) { - // object referenced more than once - referencedOnce[o.value] = false; - } else { - // keep track of single reference - referencedOnce[o.value] = { - node: node, - property: p, - value: value - }; - } - } - } - } - - // convert linked lists to @list arrays - for(var name in graphMap) { - var graphObject = graphMap[name]; - - // no @lists to be converted, continue - if(!(RDF_NIL in graphObject)) { - continue; - } - - // iterate backwards through each RDF list - var nil = graphObject[RDF_NIL]; - for(var i = 0; i < nil.usages.length; ++i) { - var usage = nil.usages[i]; - var node = usage.node; - var property = usage.property; - var head = usage.value; - var list = []; - var listNodes = []; - - // ensure node is a well-formed list node; it must: - // 1. Be referenced only once. - // 2. Have an array for rdf:first that has 1 item. - // 3. Have an array for rdf:rest that has 1 item. - // 4. Have no keys other than: @id, rdf:first, rdf:rest, and, - // optionally, @type where the value is rdf:List. 
- var nodeKeyCount = Object.keys(node).length; - while(property === RDF_REST && - _isObject(referencedOnce[node['@id']]) && - _isArray(node[RDF_FIRST]) && node[RDF_FIRST].length === 1 && - _isArray(node[RDF_REST]) && node[RDF_REST].length === 1 && - (nodeKeyCount === 3 || (nodeKeyCount === 4 && _isArray(node['@type']) && - node['@type'].length === 1 && node['@type'][0] === RDF_LIST))) { - list.push(node[RDF_FIRST][0]); - listNodes.push(node['@id']); - - // get next node, moving backwards through list - usage = referencedOnce[node['@id']]; - node = usage.node; - property = usage.property; - head = usage.value; - nodeKeyCount = Object.keys(node).length; - - // if node is not a blank node, then list head found - if(node['@id'].indexOf('_:') !== 0) { - break; - } - } - - // the list is nested in another list - if(property === RDF_FIRST) { - // empty list - if(node['@id'] === RDF_NIL) { - // can't convert rdf:nil to a @list object because it would - // result in a list of lists which isn't supported - continue; - } - - // preserve list head - head = graphObject[head['@id']][RDF_REST][0]; - list.pop(); - listNodes.pop(); - } - - // transform list into @list object - delete head['@id']; - head['@list'] = list.reverse(); - for(var j = 0; j < listNodes.length; ++j) { - delete graphObject[listNodes[j]]; - } - } - - delete nil.usages; - } - - var result = []; - var subjects = Object.keys(defaultGraph).sort(); - for(var i = 0; i < subjects.length; ++i) { - var subject = subjects[i]; - var node = defaultGraph[subject]; - if(subject in graphMap) { - var graph = node['@graph'] = []; - var graphObject = graphMap[subject]; - var subjects_ = Object.keys(graphObject).sort(); - for(var si = 0; si < subjects_.length; ++si) { - var node_ = graphObject[subjects_[si]]; - // only add full subjects to top-level - if(!_isSubjectReference(node_)) { - graph.push(node_); - } - } - } - // only add full subjects to top-level - if(!_isSubjectReference(node)) { - result.push(node); - } - } - - callback(null, result); -}; - -/** - * Outputs an RDF dataset for the expanded JSON-LD input. - * - * @param input the expanded JSON-LD input. - * @param options the RDF serialization options. - * - * @return the RDF dataset. - */ -Processor.prototype.toRDF = function(input, options) { - // create node map for default graph (and any named graphs) - var issuer = new IdentifierIssuer('_:b'); - var nodeMap = {'@default': {}}; - _createNodeMap(input, nodeMap, '@default', issuer); - - var dataset = {}; - var graphNames = Object.keys(nodeMap).sort(); - for(var i = 0; i < graphNames.length; ++i) { - var graphName = graphNames[i]; - // skip relative IRIs - if(graphName === '@default' || _isAbsoluteIri(graphName)) { - dataset[graphName] = _graphToRDF(nodeMap[graphName], issuer, options); - } - } - return dataset; -}; - -/** - * Processes a local context and returns a new active context. - * - * @param activeCtx the current active context. - * @param localCtx the local context to process. - * @param options the context processing options. - * - * @return the new active context. - */ -Processor.prototype.processContext = function(activeCtx, localCtx, options) { - // normalize local context to an array of @context objects - if(_isObject(localCtx) && '@context' in localCtx && - _isArray(localCtx['@context'])) { - localCtx = localCtx['@context']; - } - var ctxs = _isArray(localCtx) ? 
localCtx : [localCtx]; - - // no contexts in array, clone existing context - if(ctxs.length === 0) { - return activeCtx.clone(); - } - - // process each context in order, update active context - // on each iteration to ensure proper caching - var rval = activeCtx; - for(var i = 0; i < ctxs.length; ++i) { - var ctx = ctxs[i]; - - // reset to initial context - if(ctx === null) { - rval = activeCtx = _getInitialContext(options); - continue; - } - - // dereference @context key if present - if(_isObject(ctx) && '@context' in ctx) { - ctx = ctx['@context']; - } - - // context must be an object by now, all URLs retrieved before this call - if(!_isObject(ctx)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; @context must be an object.', - 'jsonld.SyntaxError', {code: 'invalid local context', context: ctx}); - } - - // get context from cache if available - if(jsonld.cache.activeCtx) { - var cached = jsonld.cache.activeCtx.get(activeCtx, ctx); - if(cached) { - rval = activeCtx = cached; - continue; - } - } - - // update active context and clone new one before updating - activeCtx = rval; - rval = rval.clone(); - - // define context mappings for keys in local context - var defined = {}; - - // handle @base - if('@base' in ctx) { - var base = ctx['@base']; - - // clear base - if(base === null) { - base = null; - } else if(!_isString(base)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; the value of "@base" in a ' + - '@context must be a string or null.', - 'jsonld.SyntaxError', {code: 'invalid base IRI', context: ctx}); - } else if(base !== '' && !_isAbsoluteIri(base)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; the value of "@base" in a ' + - '@context must be an absolute IRI or the empty string.', - 'jsonld.SyntaxError', {code: 'invalid base IRI', context: ctx}); - } - - if(base !== null) { - base = jsonld.url.parse(base || ''); - } - rval['@base'] = base; - defined['@base'] = true; - } - - // handle @vocab - if('@vocab' in ctx) { - var value = ctx['@vocab']; - if(value === null) { - delete rval['@vocab']; - } else if(!_isString(value)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; the value of "@vocab" in a ' + - '@context must be a string or null.', - 'jsonld.SyntaxError', {code: 'invalid vocab mapping', context: ctx}); - } else if(!_isAbsoluteIri(value)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; the value of "@vocab" in a ' + - '@context must be an absolute IRI.', - 'jsonld.SyntaxError', {code: 'invalid vocab mapping', context: ctx}); - } else { - rval['@vocab'] = value; - } - defined['@vocab'] = true; - } - - // handle @language - if('@language' in ctx) { - var value = ctx['@language']; - if(value === null) { - delete rval['@language']; - } else if(!_isString(value)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; the value of "@language" in a ' + - '@context must be a string or null.', - 'jsonld.SyntaxError', - {code: 'invalid default language', context: ctx}); - } else { - rval['@language'] = value.toLowerCase(); - } - defined['@language'] = true; - } - - // process all other keys - for(var key in ctx) { - _createTermDefinition(rval, ctx, key, defined); - } - - // cache result - if(jsonld.cache.activeCtx) { - jsonld.cache.activeCtx.set(activeCtx, ctx, rval); - } - } - - return rval; -}; - -/** - * Expands a language map. - * - * @param languageMap the language map to expand. - * - * @return the expanded language map. 
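// Example (hedged sketch assuming the public `jsonld.expand` API): the @vocab
// and @language handling in processContext above is what lets this context
// expand a bare term to a full IRI and attach a default language tag. The
// vocabulary IRI is made up.
var jsonld = require('jsonld');
var doc = {
  '@context': {'@vocab': 'http://example.com/vocab#', '@language': 'en'},
  'name': 'Jane'
};
jsonld.expand(doc, function(err, expanded) {
  // expected shape (approximately):
  // [{"http://example.com/vocab#name": [{"@value": "Jane", "@language": "en"}]}]
  console.log(JSON.stringify(expanded));
});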
- */ -function _expandLanguageMap(languageMap) { - var rval = []; - var keys = Object.keys(languageMap).sort(); - for(var ki = 0; ki < keys.length; ++ki) { - var key = keys[ki]; - var val = languageMap[key]; - if(!_isArray(val)) { - val = [val]; - } - for(var vi = 0; vi < val.length; ++vi) { - var item = val[vi]; - if(item === null) { - // null values are allowed (8.5) but ignored (3.1) - continue; - } - if(!_isString(item)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; language map values must be strings.', - 'jsonld.SyntaxError', - {code: 'invalid language map value', languageMap: languageMap}); - } - rval.push({ - '@value': item, - '@language': key.toLowerCase() - }); - } - } - return rval; -} - -/** - * Labels the blank nodes in the given value using the given IdentifierIssuer. - * - * @param issuer the IdentifierIssuer to use. - * @param element the element with blank nodes to rename. - * - * @return the element. - */ -function _labelBlankNodes(issuer, element) { - if(_isArray(element)) { - for(var i = 0; i < element.length; ++i) { - element[i] = _labelBlankNodes(issuer, element[i]); - } - } else if(_isList(element)) { - element['@list'] = _labelBlankNodes(issuer, element['@list']); - } else if(_isObject(element)) { - // relabel blank node - if(_isBlankNode(element)) { - element['@id'] = issuer.getId(element['@id']); - } - - // recursively apply to all keys - var keys = Object.keys(element).sort(); - for(var ki = 0; ki < keys.length; ++ki) { - var key = keys[ki]; - if(key !== '@id') { - element[key] = _labelBlankNodes(issuer, element[key]); - } - } - } - - return element; -} - -/** - * Expands the given value by using the coercion and keyword rules in the - * given context. - * - * @param activeCtx the active context to use. - * @param activeProperty the active property the value is associated with. - * @param value the value to expand. - * - * @return the expanded value. - */ -function _expandValue(activeCtx, activeProperty, value) { - // nothing to expand - if(value === null || value === undefined) { - return null; - } - - // special-case expand @id and @type (skips '@id' expansion) - var expandedProperty = _expandIri(activeCtx, activeProperty, {vocab: true}); - if(expandedProperty === '@id') { - return _expandIri(activeCtx, value, {base: true}); - } else if(expandedProperty === '@type') { - return _expandIri(activeCtx, value, {vocab: true, base: true}); - } - - // get type definition from context - var type = jsonld.getContextValue(activeCtx, activeProperty, '@type'); - - // do @id expansion (automatic for @graph) - if(type === '@id' || (expandedProperty === '@graph' && _isString(value))) { - return {'@id': _expandIri(activeCtx, value, {base: true})}; - } - // do @id expansion w/vocab - if(type === '@vocab') { - return {'@id': _expandIri(activeCtx, value, {vocab: true, base: true})}; - } - - // do not expand keyword values - if(_isKeyword(expandedProperty)) { - return value; - } - - var rval = {}; - - if(type !== null) { - // other type - rval['@type'] = type; - } else if(_isString(value)) { - // check for language tagging for strings - var language = jsonld.getContextValue( - activeCtx, activeProperty, '@language'); - if(language !== null) { - rval['@language'] = language; - } - } - // do conversion of values that aren't basic JSON types to strings - if(['boolean', 'number', 'string'].indexOf(typeof value) === -1) { - value = value.toString(); - } - rval['@value'] = value; - - return rval; -} - -/** - * Creates an array of RDF triples for the given graph. 
- * - * @param graph the graph to create RDF triples for. - * @param issuer a IdentifierIssuer for assigning blank node names. - * @param options the RDF serialization options. - * - * @return the array of RDF triples for the given graph. - */ -function _graphToRDF(graph, issuer, options) { - var rval = []; - - var ids = Object.keys(graph).sort(); - for(var i = 0; i < ids.length; ++i) { - var id = ids[i]; - var node = graph[id]; - var properties = Object.keys(node).sort(); - for(var pi = 0; pi < properties.length; ++pi) { - var property = properties[pi]; - var items = node[property]; - if(property === '@type') { - property = RDF_TYPE; - } else if(_isKeyword(property)) { - continue; - } - - for(var ii = 0; ii < items.length; ++ii) { - var item = items[ii]; - - // RDF subject - var subject = {}; - subject.type = (id.indexOf('_:') === 0) ? 'blank node' : 'IRI'; - subject.value = id; - - // skip relative IRI subjects - if(!_isAbsoluteIri(id)) { - continue; - } - - // RDF predicate - var predicate = {}; - predicate.type = (property.indexOf('_:') === 0) ? 'blank node' : 'IRI'; - predicate.value = property; - - // skip relative IRI predicates - if(!_isAbsoluteIri(property)) { - continue; - } - - // skip blank node predicates unless producing generalized RDF - if(predicate.type === 'blank node' && !options.produceGeneralizedRdf) { - continue; - } - - // convert @list to triples - if(_isList(item)) { - _listToRDF(item['@list'], issuer, subject, predicate, rval); - } else { - // convert value or node object to triple - var object = _objectToRDF(item); - // skip null objects (they are relative IRIs) - if(object) { - rval.push({subject: subject, predicate: predicate, object: object}); - } - } - } - } - } - - return rval; -} - -/** - * Converts a @list value into linked list of blank node RDF triples - * (an RDF collection). - * - * @param list the @list value. - * @param issuer a IdentifierIssuer for assigning blank node names. - * @param subject the subject for the head of the list. - * @param predicate the predicate for the head of the list. - * @param triples the array of triples to append to. - */ -function _listToRDF(list, issuer, subject, predicate, triples) { - var first = {type: 'IRI', value: RDF_FIRST}; - var rest = {type: 'IRI', value: RDF_REST}; - var nil = {type: 'IRI', value: RDF_NIL}; - - for(var i = 0; i < list.length; ++i) { - var item = list[i]; - - var blankNode = {type: 'blank node', value: issuer.getId()}; - triples.push({subject: subject, predicate: predicate, object: blankNode}); - - subject = blankNode; - predicate = first; - var object = _objectToRDF(item); - - // skip null objects (they are relative IRIs) - if(object) { - triples.push({subject: subject, predicate: predicate, object: object}); - } - - predicate = rest; - } - - triples.push({subject: subject, predicate: predicate, object: nil}); -} - -/** - * Converts a JSON-LD value object to an RDF literal or a JSON-LD string or - * node object to an RDF resource. - * - * @param item the JSON-LD value or node object. - * - * @return the RDF literal or RDF resource. 
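// Example (hedged sketch assuming the public `jsonld.toRDF` API and its
// 'application/nquads' serializer): the @list handling in _listToRDF above is
// what turns the array below into an rdf:first/rdf:rest chain ending in rdf:nil.
var jsonld = require('jsonld');
var doc = {
  '@id': 'http://example.com/s',
  'http://example.com/p': {'@list': ['one', 'two']}
};
jsonld.toRDF(doc, {format: 'application/nquads'}, function(err, nquads) {
  console.log(nquads); // blank-node collection triples plus the rdf:nil tail
});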
- */ -function _objectToRDF(item) { - var object = {}; - - // convert value object to RDF - if(_isValue(item)) { - object.type = 'literal'; - var value = item['@value']; - var datatype = item['@type'] || null; - - // convert to XSD datatypes as appropriate - if(_isBoolean(value)) { - object.value = value.toString(); - object.datatype = datatype || XSD_BOOLEAN; - } else if(_isDouble(value) || datatype === XSD_DOUBLE) { - if(!_isDouble(value)) { - value = parseFloat(value); - } - // canonical double representation - object.value = value.toExponential(15).replace(/(\d)0*e\+?/, '$1E'); - object.datatype = datatype || XSD_DOUBLE; - } else if(_isNumber(value)) { - object.value = value.toFixed(0); - object.datatype = datatype || XSD_INTEGER; - } else if('@language' in item) { - object.value = value; - object.datatype = datatype || RDF_LANGSTRING; - object.language = item['@language']; - } else { - object.value = value; - object.datatype = datatype || XSD_STRING; - } - } else { - // convert string/node object to RDF - var id = _isObject(item) ? item['@id'] : item; - object.type = (id.indexOf('_:') === 0) ? 'blank node' : 'IRI'; - object.value = id; - } - - // skip relative IRIs - if(object.type === 'IRI' && !_isAbsoluteIri(object.value)) { - return null; - } - - return object; -} - -/** - * Converts an RDF triple object to a JSON-LD object. - * - * @param o the RDF triple object to convert. - * @param useNativeTypes true to output native types, false not to. - * - * @return the JSON-LD object. - */ -function _RDFToObject(o, useNativeTypes) { - // convert IRI/blank node object to JSON-LD - if(o.type === 'IRI' || o.type === 'blank node') { - return {'@id': o.value}; - } - - // convert literal to JSON-LD - var rval = {'@value': o.value}; - - // add language - if(o.language) { - rval['@language'] = o.language; - } else { - var type = o.datatype; - if(!type) { - type = XSD_STRING; - } - // use native types for certain xsd types - if(useNativeTypes) { - if(type === XSD_BOOLEAN) { - if(rval['@value'] === 'true') { - rval['@value'] = true; - } else if(rval['@value'] === 'false') { - rval['@value'] = false; - } - } else if(_isNumeric(rval['@value'])) { - if(type === XSD_INTEGER) { - var i = parseInt(rval['@value'], 10); - if(i.toFixed(0) === rval['@value']) { - rval['@value'] = i; - } - } else if(type === XSD_DOUBLE) { - rval['@value'] = parseFloat(rval['@value']); - } - } - // do not add native type - if([XSD_BOOLEAN, XSD_INTEGER, XSD_DOUBLE, XSD_STRING] - .indexOf(type) === -1) { - rval['@type'] = type; - } - } else if(type !== XSD_STRING) { - rval['@type'] = type; - } - } - - return rval; -} - -/** - * Compares two RDF triples for equality. - * - * @param t1 the first triple. - * @param t2 the second triple. - * - * @return true if the triples are the same, false if not. 
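// Example (plain JavaScript, no assumptions beyond standard Number methods):
// the canonical xsd:double form produced by _objectToRDF above.
var canonicalDouble = (1.1).toExponential(15).replace(/(\d)0*e\+?/, '$1E');
console.log(canonicalDouble); // "1.1E0"
// Integers are serialized with toFixed(0) instead:
console.log((42).toFixed(0)); // "42"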
- */ -function _compareRDFTriples(t1, t2) { - var attrs = ['subject', 'predicate', 'object']; - for(var i = 0; i < attrs.length; ++i) { - var attr = attrs[i]; - if(t1[attr].type !== t2[attr].type || t1[attr].value !== t2[attr].value) { - return false; - } - } - if(t1.object.language !== t2.object.language) { - return false; - } - if(t1.object.datatype !== t2.object.datatype) { - return false; - } - return true; -} - -/////////////////////////////// DEFINE URDNA2015 ////////////////////////////// - -var URDNA2015 = (function() { - -var POSITIONS = {'subject': 's', 'object': 'o', 'name': 'g'}; - -var Normalize = function(options) { - options = options || {}; - this.name = 'URDNA2015'; - this.options = options; - this.blankNodeInfo = {}; - this.hashToBlankNodes = {}; - this.canonicalIssuer = new IdentifierIssuer('_:c14n'); - this.quads = []; - this.schedule = {}; - if('maxCallStackDepth' in options) { - this.schedule.MAX_DEPTH = options.maxCallStackDepth; - } else { - this.schedule.MAX_DEPTH = 500; - } - if('maxTotalCallStackDepth' in options) { - this.schedule.MAX_TOTAL_DEPTH = options.maxCallStackDepth; - } else { - this.schedule.MAX_TOTAL_DEPTH = 0xFFFFFFFF; - } - this.schedule.depth = 0; - this.schedule.totalDepth = 0; - if('timeSlice' in options) { - this.schedule.timeSlice = options.timeSlice; - } else { - // milliseconds - this.schedule.timeSlice = 10; - } -}; - -// do some work in a time slice, but in serial -Normalize.prototype.doWork = function(fn, callback) { - var schedule = this.schedule; - - if(schedule.totalDepth >= schedule.MAX_TOTAL_DEPTH) { - return callback(new Error( - 'Maximum total call stack depth exceeded; normalization aborting.')); - } - - (function work() { - if(schedule.depth === schedule.MAX_DEPTH) { - // stack too deep, run on next tick - schedule.depth = 0; - schedule.running = false; - return jsonld.nextTick(work); - } - - // if not yet running, force run - var now = new Date().getTime(); - if(!schedule.running) { - schedule.start = new Date().getTime(); - schedule.deadline = schedule.start + schedule.timeSlice; - } - - // TODO: should also include an estimate of expectedWorkTime - if(now < schedule.deadline) { - schedule.running = true; - schedule.depth++; - schedule.totalDepth++; - return fn(function(err, result) { - schedule.depth--; - schedule.totalDepth--; - callback(err, result); - }); - } - - // not enough time left in this slice, run after letting browser - // do some other things - schedule.depth = 0; - schedule.running = false; - jsonld.setImmediate(work); - })(); -}; - -// asynchronously loop -Normalize.prototype.forEach = function(iterable, fn, callback) { - var self = this; - var iterator; - var idx = 0; - var length; - if(_isArray(iterable)) { - length = iterable.length; - iterator = function() { - if(idx === length) { - return false; - } - iterator.value = iterable[idx++]; - iterator.key = idx; - return true; - }; - } else { - var keys = Object.keys(iterable); - length = keys.length; - iterator = function() { - if(idx === length) { - return false; - } - iterator.key = keys[idx++]; - iterator.value = iterable[iterator.key]; - return true; - }; - } - - (function iterate(err, result) { - if(err) { - return callback(err); - } - if(iterator()) { - return self.doWork(function() { - fn(iterator.value, iterator.key, iterate); - }); - } - callback(); - })(); -}; - -// asynchronous waterfall -Normalize.prototype.waterfall = function(fns, callback) { - var self = this; - self.forEach(fns, function(fn, idx, callback) { - self.doWork(fn, callback); - }, 
callback); -}; - -// asynchronous while -Normalize.prototype.whilst = function(condition, fn, callback) { - var self = this; - (function loop(err) { - if(err) { - return callback(err); - } - if(!condition()) { - return callback(); - } - self.doWork(fn, loop); - })(); -}; - -// 4.4) Normalization Algorithm -Normalize.prototype.main = function(dataset, callback) { - var self = this; - self.schedule.start = new Date().getTime(); - var result; - - // handle invalid output format - if(self.options.format) { - if(self.options.format !== 'application/nquads') { - return callback(new JsonLdError( - 'Unknown output format.', - 'jsonld.UnknownFormat', {format: self.options.format})); - } - } - - // 1) Create the normalization state. - - // Note: Optimize by generating non-normalized blank node map concurrently. - var nonNormalized = {}; - - self.waterfall([ - function(callback) { - // 2) For every quad in input dataset: - self.forEach(dataset, function(triples, graphName, callback) { - if(graphName === '@default') { - graphName = null; - } - self.forEach(triples, function(quad, idx, callback) { - if(graphName !== null) { - if(graphName.indexOf('_:') === 0) { - quad.name = {type: 'blank node', value: graphName}; - } else { - quad.name = {type: 'IRI', value: graphName}; - } - } - self.quads.push(quad); - - // 2.1) For each blank node that occurs in the quad, add a reference - // to the quad using the blank node identifier in the blank node to - // quads map, creating a new entry if necessary. - self.forEachComponent(quad, function(component) { - if(component.type !== 'blank node') { - return; - } - var id = component.value; - if(id in self.blankNodeInfo) { - self.blankNodeInfo[id].quads.push(quad); - } else { - nonNormalized[id] = true; - self.blankNodeInfo[id] = {quads: [quad]}; - } - }); - callback(); - }, callback); - }, callback); - }, - function(callback) { - // 3) Create a list of non-normalized blank node identifiers - // non-normalized identifiers and populate it using the keys from the - // blank node to quads map. - // Note: We use a map here and it was generated during step 2. - - // 4) Initialize simple, a boolean flag, to true. - var simple = true; - - // 5) While simple is true, issue canonical identifiers for blank nodes: - self.whilst(function() { return simple; }, function(callback) { - // 5.1) Set simple to false. - simple = false; - - // 5.2) Clear hash to blank nodes map. - self.hashToBlankNodes = {}; - - self.waterfall([ - function(callback) { - // 5.3) For each blank node identifier identifier in non-normalized - // identifiers: - self.forEach(nonNormalized, function(value, id, callback) { - // 5.3.1) Create a hash, hash, according to the Hash First Degree - // Quads algorithm. - self.hashFirstDegreeQuads(id, function(err, hash) { - if(err) { - return callback(err); - } - // 5.3.2) Add hash and identifier to hash to blank nodes map, - // creating a new entry if necessary. - if(hash in self.hashToBlankNodes) { - self.hashToBlankNodes[hash].push(id); - } else { - self.hashToBlankNodes[hash] = [id]; - } - callback(); - }); - }, callback); - }, - function(callback) { - // 5.4) For each hash to identifier list mapping in hash to blank - // nodes map, lexicographically-sorted by hash: - var hashes = Object.keys(self.hashToBlankNodes).sort(); - self.forEach(hashes, function(hash, i, callback) { - // 5.4.1) If the length of identifier list is greater than 1, - // continue to the next mapping. 
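// Example (hedged sketch assuming the public `jsonld.normalize` API): the
// algorithm above is selected via the `algorithm` option, and blank nodes come
// back relabeled with the canonical issuer's _:c14n prefix.
var jsonld = require('jsonld');
var doc = {'@id': '_:b0', 'http://example.com/p': {'@id': '_:b1'}};
jsonld.normalize(
  doc, {algorithm: 'URDNA2015', format: 'application/nquads'},
  function(err, canonical) {
    // a single N-Quad with both blank nodes relabeled to _:c14n identifiers
    console.log(canonical);
  });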
- var idList = self.hashToBlankNodes[hash]; - if(idList.length > 1) { - return callback(); - } - - // 5.4.2) Use the Issue Identifier algorithm, passing canonical - // issuer and the single blank node identifier in identifier - // list, identifier, to issue a canonical replacement identifier - // for identifier. - // TODO: consider changing `getId` to `issue` - var id = idList[0]; - self.canonicalIssuer.getId(id); - - // 5.4.3) Remove identifier from non-normalized identifiers. - delete nonNormalized[id]; - - // 5.4.4) Remove hash from the hash to blank nodes map. - delete self.hashToBlankNodes[hash]; - - // 5.4.5) Set simple to true. - simple = true; - callback(); - }, callback); - } - ], callback); - }, callback); - }, - function(callback) { - // 6) For each hash to identifier list mapping in hash to blank nodes map, - // lexicographically-sorted by hash: - var hashes = Object.keys(self.hashToBlankNodes).sort(); - self.forEach(hashes, function(hash, idx, callback) { - // 6.1) Create hash path list where each item will be a result of - // running the Hash N-Degree Quads algorithm. - var hashPathList = []; - - // 6.2) For each blank node identifier identifier in identifier list: - var idList = self.hashToBlankNodes[hash]; - self.waterfall([ - function(callback) { - self.forEach(idList, function(id, idx, callback) { - // 6.2.1) If a canonical identifier has already been issued for - // identifier, continue to the next identifier. - if(self.canonicalIssuer.hasId(id)) { - return callback(); - } - - // 6.2.2) Create temporary issuer, an identifier issuer - // initialized with the prefix _:b. - var issuer = new IdentifierIssuer('_:b'); - - // 6.2.3) Use the Issue Identifier algorithm, passing temporary - // issuer and identifier, to issue a new temporary blank node - // identifier for identifier. - issuer.getId(id); - - // 6.2.4) Run the Hash N-Degree Quads algorithm, passing - // temporary issuer, and append the result to the hash path list. - self.hashNDegreeQuads(id, issuer, function(err, result) { - if(err) { - return callback(err); - } - hashPathList.push(result); - callback(); - }); - }, callback); - }, - function(callback) { - // 6.3) For each result in the hash path list, - // lexicographically-sorted by the hash in result: - hashPathList.sort(function(a, b) { - return (a.hash < b.hash) ? -1 : ((a.hash > b.hash) ? 1 : 0); - }); - self.forEach(hashPathList, function(result, idx, callback) { - // 6.3.1) For each blank node identifier, existing identifier, - // that was issued a temporary identifier by identifier issuer - // in result, issue a canonical identifier, in the same order, - // using the Issue Identifier algorithm, passing canonical - // issuer and existing identifier. - for(var existing in result.issuer.existing) { - self.canonicalIssuer.getId(existing); - } - callback(); - }, callback); - } - ], callback); - }, callback); - }, function(callback) { - /* Note: At this point all blank nodes in the set of RDF quads have been - assigned canonical identifiers, which have been stored in the canonical - issuer. Here each quad is updated by assigning each of its blank nodes - its new identifier. */ - - // 7) For each quad, quad, in input dataset: - var normalized = []; - self.waterfall([ - function(callback) { - self.forEach(self.quads, function(quad, idx, callback) { - // 7.1) Create a copy, quad copy, of quad and replace any existing - // blank node identifiers using the canonical identifiers - // previously issued by canonical issuer. - // Note: We optimize away the copy here. 
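// Example (hedged sketch assuming the public `jsonld.normalize` API) of input
// that actually reaches step 6 above: both blank nodes get identical
// first-degree hashes, so the Hash N-Degree Quads algorithm breaks the tie.
var jsonld = require('jsonld');
var doc = [
  {'@id': '_:a', 'http://example.com/p': [{'@id': '_:b'}]},
  {'@id': '_:b', 'http://example.com/p': [{'@id': '_:a'}]}
];
jsonld.normalize(
  doc, {algorithm: 'URDNA2015', format: 'application/nquads'},
  function(err, canonical) {
    console.log(canonical); // deterministic _:c14n0/_:c14n1 labels either way
  });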
- self.forEachComponent(quad, function(component) { - if(component.type === 'blank node' && - component.value.indexOf(self.canonicalIssuer.prefix) !== 0) { - component.value = self.canonicalIssuer.getId(component.value); - } - }); - // 7.2) Add quad copy to the normalized dataset. - normalized.push(_toNQuad(quad)); - callback(); - }, callback); - }, - function(callback) { - // sort normalized output - normalized.sort(); - - // 8) Return the normalized dataset. - if(self.options.format === 'application/nquads') { - result = normalized.join(''); - return callback(); - } - - result = _parseNQuads(normalized.join('')); - callback(); - } - ], callback); - } - ], function(err) { - callback(err, result); - }); -}; - -// 4.6) Hash First Degree Quads -Normalize.prototype.hashFirstDegreeQuads = function(id, callback) { - var self = this; - - // return cached hash - var info = self.blankNodeInfo[id]; - if('hash' in info) { - return callback(null, info.hash); - } - - // 1) Initialize nquads to an empty list. It will be used to store quads in - // N-Quads format. - var nquads = []; - - // 2) Get the list of quads quads associated with the reference blank node - // identifier in the blank node to quads map. - var quads = info.quads; - - // 3) For each quad quad in quads: - self.forEach(quads, function(quad, idx, callback) { - // 3.1) Serialize the quad in N-Quads format with the following special - // rule: - - // 3.1.1) If any component in quad is an blank node, then serialize it - // using a special identifier as follows: - var copy = {predicate: quad.predicate}; - self.forEachComponent(quad, function(component, key) { - // 3.1.2) If the blank node's existing blank node identifier matches the - // reference blank node identifier then use the blank node identifier _:a, - // otherwise, use the blank node identifier _:z. - copy[key] = self.modifyFirstDegreeComponent(id, component, key); - }); - nquads.push(_toNQuad(copy)); - callback(); - }, function(err) { - if(err) { - return callback(err); - } - // 4) Sort nquads in lexicographical order. - nquads.sort(); - - // 5) Return the hash that results from passing the sorted, joined nquads - // through the hash algorithm. - info.hash = NormalizeHash.hashNQuads(self.name, nquads); - callback(null, info.hash); - }); -}; - -// helper for modifying component during Hash First Degree Quads -Normalize.prototype.modifyFirstDegreeComponent = function(id, component) { - if(component.type !== 'blank node') { - return component; - } - component = _clone(component); - component.value = (component.value === id ? '_:a' : '_:z'); - return component; -}; - -// 4.7) Hash Related Blank Node -Normalize.prototype.hashRelatedBlankNode = function( - related, quad, issuer, position, callback) { - var self = this; - - // 1) Set the identifier to use for related, preferring first the canonical - // identifier for related if issued, second the identifier issued by issuer - // if issued, and last, if necessary, the result of the Hash First Degree - // Quads algorithm, passing related. - var id; - self.waterfall([ - function(callback) { - if(self.canonicalIssuer.hasId(related)) { - id = self.canonicalIssuer.getId(related); - return callback(); - } - if(issuer.hasId(related)) { - id = issuer.getId(related); - return callback(); - } - self.hashFirstDegreeQuads(related, function(err, hash) { - if(err) { - return callback(err); - } - id = hash; - callback(); - }); - } - ], function(err) { - if(err) { - return callback(err); - } - - // 2) Initialize a string input to the value of position. 
- // Note: We use a hash object instead. - var md = new NormalizeHash(self.name); - md.update(position); - - // 3) If position is not g, append <, the value of the predicate in quad, - // and > to input. - if(position !== 'g') { - md.update(self.getRelatedPredicate(quad)); - } - - // 4) Append identifier to input. - md.update(id); - - // 5) Return the hash that results from passing input through the hash - // algorithm. - return callback(null, md.digest()); - }); -}; - -// helper for getting a related predicate -Normalize.prototype.getRelatedPredicate = function(quad) { - return '<' + quad.predicate.value + '>'; -}; - -// 4.8) Hash N-Degree Quads -Normalize.prototype.hashNDegreeQuads = function(id, issuer, callback) { - var self = this; - - // 1) Create a hash to related blank nodes map for storing hashes that - // identify related blank nodes. - // Note: 2) and 3) handled within `createHashToRelated` - var hashToRelated; - var md = new NormalizeHash(self.name); - self.waterfall([ - function(callback) { - self.createHashToRelated(id, issuer, function(err, result) { - if(err) { - return callback(err); - } - hashToRelated = result; - callback(); - }); - }, - function(callback) { - // 4) Create an empty string, data to hash. - // Note: We created a hash object `md` above instead. - - // 5) For each related hash to blank node list mapping in hash to related - // blank nodes map, sorted lexicographically by related hash: - var hashes = Object.keys(hashToRelated).sort(); - self.forEach(hashes, function(hash, idx, callback) { - // 5.1) Append the related hash to the data to hash. - md.update(hash); - - // 5.2) Create a string chosen path. - var chosenPath = ''; - - // 5.3) Create an unset chosen issuer variable. - var chosenIssuer; - - // 5.4) For each permutation of blank node list: - var permutator = new Permutator(hashToRelated[hash]); - self.whilst( - function() { return permutator.hasNext(); }, - function(nextPermutation) { - var permutation = permutator.next(); - - // 5.4.1) Create a copy of issuer, issuer copy. - var issuerCopy = issuer.clone(); - - // 5.4.2) Create a string path. - var path = ''; - - // 5.4.3) Create a recursion list, to store blank node identifiers - // that must be recursively processed by this algorithm. - var recursionList = []; - - self.waterfall([ - function(callback) { - // 5.4.4) For each related in permutation: - self.forEach(permutation, function(related, idx, callback) { - // 5.4.4.1) If a canonical identifier has been issued for - // related, append it to path. - if(self.canonicalIssuer.hasId(related)) { - path += self.canonicalIssuer.getId(related); - } else { - // 5.4.4.2) Otherwise: - // 5.4.4.2.1) If issuer copy has not issued an identifier for - // related, append related to recursion list. - if(!issuerCopy.hasId(related)) { - recursionList.push(related); - } - // 5.4.4.2.2) Use the Issue Identifier algorithm, passing - // issuer copy and related and append the result to path. - path += issuerCopy.getId(related); - } - - // 5.4.4.3) If chosen path is not empty and the length of path - // is greater than or equal to the length of chosen path and - // path is lexicographically greater than chosen path, then - // skip to the next permutation. 
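// Illustration (plain Node.js crypto, assuming the SHA-256 digest that
// URDNA2015 specifies): hashRelatedBlankNode above effectively hashes the
// concatenation of position, '<predicate>' (unless position is 'g'), and the
// chosen identifier, streamed through a NormalizeHash object.
var crypto = require('crypto');
var input = 'o' + '<http://example.com/p>' + '_:c14n0';
console.log(crypto.createHash('sha256').update(input).digest('hex'));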
- if(chosenPath.length !== 0 && - path.length >= chosenPath.length && path > chosenPath) { - // FIXME: may cause inaccurate total depth calculation - return nextPermutation(); - } - callback(); - }, callback); - }, - function(callback) { - // 5.4.5) For each related in recursion list: - self.forEach(recursionList, function(related, idx, callback) { - // 5.4.5.1) Set result to the result of recursively executing - // the Hash N-Degree Quads algorithm, passing related for - // identifier and issuer copy for path identifier issuer. - self.hashNDegreeQuads( - related, issuerCopy, function(err, result) { - if(err) { - return callback(err); - } - - // 5.4.5.2) Use the Issue Identifier algorithm, passing issuer - // copy and related and append the result to path. - path += issuerCopy.getId(related); - - // 5.4.5.3) Append <, the hash in result, and > to path. - path += '<' + result.hash + '>'; - - // 5.4.5.4) Set issuer copy to the identifier issuer in - // result. - issuerCopy = result.issuer; - - // 5.4.5.5) If chosen path is not empty and the length of path - // is greater than or equal to the length of chosen path and - // path is lexicographically greater than chosen path, then - // skip to the next permutation. - if(chosenPath.length !== 0 && - path.length >= chosenPath.length && path > chosenPath) { - // FIXME: may cause inaccurate total depth calculation - return nextPermutation(); - } - callback(); - }); - }, callback); - }, - function(callback) { - // 5.4.6) If chosen path is empty or path is lexicographically - // less than chosen path, set chosen path to path and chosen - // issuer to issuer copy. - if(chosenPath.length === 0 || path < chosenPath) { - chosenPath = path; - chosenIssuer = issuerCopy; - } - callback(); - } - ], nextPermutation); - }, function(err) { - if(err) { - return callback(err); - } - - // 5.5) Append chosen path to data to hash. - md.update(chosenPath); - - // 5.6) Replace issuer, by reference, with chosen issuer. - issuer = chosenIssuer; - callback(); - }); - }, callback); - } - ], function(err) { - // 6) Return issuer and the hash that results from passing data to hash - // through the hash algorithm. - callback(err, {hash: md.digest(), issuer: issuer}); - }); -}; - -// helper for creating hash to related blank nodes map -Normalize.prototype.createHashToRelated = function(id, issuer, callback) { - var self = this; - - // 1) Create a hash to related blank nodes map for storing hashes that - // identify related blank nodes. - var hashToRelated = {}; - - // 2) Get a reference, quads, to the list of quads in the blank node to - // quads map for the key identifier. - var quads = self.blankNodeInfo[id].quads; - - // 3) For each quad in quads: - self.forEach(quads, function(quad, idx, callback) { - // 3.1) For each component in quad, if component is the subject, object, - // and graph name and it is a blank node that is not identified by - // identifier: - self.forEach(quad, function(component, key, callback) { - if(key === 'predicate' || - !(component.type === 'blank node' && component.value !== id)) { - return callback(); - } - // 3.1.1) Set hash to the result of the Hash Related Blank Node - // algorithm, passing the blank node identifier for component as - // related, quad, path identifier issuer as issuer, and position as - // either s, o, or g based on whether component is a subject, object, - // graph name, respectively. 
- var related = component.value; - var position = POSITIONS[key]; - self.hashRelatedBlankNode( - related, quad, issuer, position, function(err, hash) { - if(err) { - return callback(err); - } - // 3.1.2) Add a mapping of hash to the blank node identifier for - // component to hash to related blank nodes map, adding an entry as - // necessary. - if(hash in hashToRelated) { - hashToRelated[hash].push(related); - } else { - hashToRelated[hash] = [related]; - } - callback(); - }); - }, callback); - }, function(err) { - callback(err, hashToRelated); - }); -}; - -// helper that iterates over quad components (skips predicate) -Normalize.prototype.forEachComponent = function(quad, op) { - for(var key in quad) { - // skip `predicate` - if(key === 'predicate') { - continue; - } - op(quad[key], key, quad); - } -}; - -return Normalize; - -})(); // end of define URDNA2015 - -/////////////////////////////// DEFINE URGNA2012 ////////////////////////////// - -var URGNA2012 = (function() { - -var Normalize = function(options) { - URDNA2015.call(this, options); - this.name = 'URGNA2012'; -}; -Normalize.prototype = new URDNA2015(); - -// helper for modifying component during Hash First Degree Quads -Normalize.prototype.modifyFirstDegreeComponent = function(id, component, key) { - if(component.type !== 'blank node') { - return component; - } - component = _clone(component); - if(key === 'name') { - component.value = '_:g'; - } else { - component.value = (component.value === id ? '_:a' : '_:z'); - } - return component; -}; - -// helper for getting a related predicate -Normalize.prototype.getRelatedPredicate = function(quad) { - return quad.predicate.value; -}; - -// helper for creating hash to related blank nodes map -Normalize.prototype.createHashToRelated = function(id, issuer, callback) { - var self = this; - - // 1) Create a hash to related blank nodes map for storing hashes that - // identify related blank nodes. - var hashToRelated = {}; - - // 2) Get a reference, quads, to the list of quads in the blank node to - // quads map for the key identifier. - var quads = self.blankNodeInfo[id].quads; - - // 3) For each quad in quads: - self.forEach(quads, function(quad, idx, callback) { - // 3.1) If the quad's subject is a blank node that does not match - // identifier, set hash to the result of the Hash Related Blank Node - // algorithm, passing the blank node identifier for subject as related, - // quad, path identifier issuer as issuer, and p as position. - var position; - var related; - if(quad.subject.type === 'blank node' && quad.subject.value !== id) { - related = quad.subject.value; - position = 'p'; - } else if(quad.object.type === 'blank node' && quad.object.value !== id) { - // 3.2) Otherwise, if quad's object is a blank node that does not match - // identifier, to the result of the Hash Related Blank Node algorithm, - // passing the blank node identifier for object as related, quad, path - // identifier issuer as issuer, and r as position. - related = quad.object.value; - position = 'r'; - } else { - // 3.3) Otherwise, continue to the next quad. - return callback(); - } - // 3.4) Add a mapping of hash to the blank node identifier for the - // component that matched (subject or object) to hash to related blank - // nodes map, adding an entry as necessary. 
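// Example (hedged sketch assuming the public `jsonld.normalize` API): the
// overrides above are all that differ for the legacy URGNA2012 algorithm, which
// is selected with the same `algorithm` option.
var jsonld = require('jsonld');
jsonld.normalize(
  {'@id': '_:b0', 'http://example.com/p': 'x'},
  {algorithm: 'URGNA2012', format: 'application/nquads'},
  function(err, canonical) {
    console.log(canonical);
  });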
- self.hashRelatedBlankNode( - related, quad, issuer, position, function(err, hash) { - if(hash in hashToRelated) { - hashToRelated[hash].push(related); - } else { - hashToRelated[hash] = [related]; - } - callback(); - }); - }, function(err) { - callback(err, hashToRelated); - }); -}; - -return Normalize; - -})(); // end of define URGNA2012 - -/** - * Recursively flattens the subjects in the given JSON-LD expanded input - * into a node map. - * - * @param input the JSON-LD expanded input. - * @param graphs a map of graph name to subject map. - * @param graph the name of the current graph. - * @param issuer the blank node identifier issuer. - * @param name the name assigned to the current input if it is a bnode. - * @param list the list to append to, null for none. - */ -function _createNodeMap(input, graphs, graph, issuer, name, list) { - // recurse through array - if(_isArray(input)) { - for(var i = 0; i < input.length; ++i) { - _createNodeMap(input[i], graphs, graph, issuer, undefined, list); - } - return; - } - - // add non-object to list - if(!_isObject(input)) { - if(list) { - list.push(input); - } - return; - } - - // add values to list - if(_isValue(input)) { - if('@type' in input) { - var type = input['@type']; - // rename @type blank node - if(type.indexOf('_:') === 0) { - input['@type'] = type = issuer.getId(type); - } - } - if(list) { - list.push(input); - } - return; - } - - // Note: At this point, input must be a subject. - - // spec requires @type to be named first, so assign names early - if('@type' in input) { - var types = input['@type']; - for(var i = 0; i < types.length; ++i) { - var type = types[i]; - if(type.indexOf('_:') === 0) { - issuer.getId(type); - } - } - } - - // get name for subject - if(_isUndefined(name)) { - name = _isBlankNode(input) ? issuer.getId(input['@id']) : input['@id']; - } - - // add subject reference to list - if(list) { - list.push({'@id': name}); - } - - // create new subject or merge into existing one - var subjects = graphs[graph]; - var subject = subjects[name] = subjects[name] || {}; - subject['@id'] = name; - var properties = Object.keys(input).sort(); - for(var pi = 0; pi < properties.length; ++pi) { - var property = properties[pi]; - - // skip @id - if(property === '@id') { - continue; - } - - // handle reverse properties - if(property === '@reverse') { - var referencedNode = {'@id': name}; - var reverseMap = input['@reverse']; - for(var reverseProperty in reverseMap) { - var items = reverseMap[reverseProperty]; - for(var ii = 0; ii < items.length; ++ii) { - var item = items[ii]; - var itemName = item['@id']; - if(_isBlankNode(item)) { - itemName = issuer.getId(itemName); - } - _createNodeMap(item, graphs, graph, issuer, itemName); - jsonld.addValue( - subjects[itemName], reverseProperty, referencedNode, - {propertyIsArray: true, allowDuplicate: false}); - } - } - continue; - } - - // recurse into graph - if(property === '@graph') { - // add graph subjects map entry - if(!(name in graphs)) { - graphs[name] = {}; - } - var g = (graph === '@merged') ? 
graph : name; - _createNodeMap(input[property], graphs, g, issuer); - continue; - } - - // copy non-@type keywords - if(property !== '@type' && _isKeyword(property)) { - if(property === '@index' && property in subject && - (input[property] !== subject[property] || - input[property]['@id'] !== subject[property]['@id'])) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; conflicting @index property detected.', - 'jsonld.SyntaxError', - {code: 'conflicting indexes', subject: subject}); - } - subject[property] = input[property]; - continue; - } - - // iterate over objects - var objects = input[property]; - - // if property is a bnode, assign it a new id - if(property.indexOf('_:') === 0) { - property = issuer.getId(property); - } - - // ensure property is added for empty arrays - if(objects.length === 0) { - jsonld.addValue(subject, property, [], {propertyIsArray: true}); - continue; - } - for(var oi = 0; oi < objects.length; ++oi) { - var o = objects[oi]; - - if(property === '@type') { - // rename @type blank nodes - o = (o.indexOf('_:') === 0) ? issuer.getId(o) : o; - } - - // handle embedded subject or subject reference - if(_isSubject(o) || _isSubjectReference(o)) { - // relabel blank node @id - var id = _isBlankNode(o) ? issuer.getId(o['@id']) : o['@id']; - - // add reference and recurse - jsonld.addValue( - subject, property, {'@id': id}, - {propertyIsArray: true, allowDuplicate: false}); - _createNodeMap(o, graphs, graph, issuer, id); - } else if(_isList(o)) { - // handle @list - var _list = []; - _createNodeMap(o['@list'], graphs, graph, issuer, name, _list); - o = {'@list': _list}; - jsonld.addValue( - subject, property, o, - {propertyIsArray: true, allowDuplicate: false}); - } else { - // handle @value - _createNodeMap(o, graphs, graph, issuer, name); - jsonld.addValue( - subject, property, o, {propertyIsArray: true, allowDuplicate: false}); - } - } - } -} - -function _mergeNodeMaps(graphs) { - // add all non-default graphs to default graph - var defaultGraph = graphs['@default']; - var graphNames = Object.keys(graphs).sort(); - for(var i = 0; i < graphNames.length; ++i) { - var graphName = graphNames[i]; - if(graphName === '@default') { - continue; - } - var nodeMap = graphs[graphName]; - var subject = defaultGraph[graphName]; - if(!subject) { - defaultGraph[graphName] = subject = { - '@id': graphName, - '@graph': [] - }; - } else if(!('@graph' in subject)) { - subject['@graph'] = []; - } - var graph = subject['@graph']; - var ids = Object.keys(nodeMap).sort(); - for(var ii = 0; ii < ids.length; ++ii) { - var node = nodeMap[ids[ii]]; - // only add full subjects - if(!_isSubjectReference(node)) { - graph.push(node); - } - } - } - return defaultGraph; -} - -/** - * Frames subjects according to the given frame. - * - * @param state the current framing state. - * @param subjects the subjects to filter. - * @param frame the frame. - * @param parent the parent subject or top-level array. - * @param property the parent property, initialized to null. 
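// Example (hedged sketch assuming the public `jsonld.flatten` API, which builds
// on the node-map construction above; passing a null context is assumed to keep
// the output in expanded form):
var jsonld = require('jsonld');
var doc = {
  '@context': {'knows': {'@id': 'http://xmlns.com/foaf/0.1/knows', '@type': '@id'}},
  '@id': 'http://example.com/a',
  'knows': {'@id': 'http://example.com/b', 'knows': 'http://example.com/a'}
};
jsonld.flatten(doc, null, function(err, flattened) {
  // every node appears once at the top level; nested nodes are replaced by
  // {"@id": ...} references
  console.log(JSON.stringify(flattened, null, 2));
});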
- */ -function _frame(state, subjects, frame, parent, property) { - // validate the frame - _validateFrame(frame); - frame = frame[0]; - - // get flags for current frame - var options = state.options; - var flags = { - embed: _getFrameFlag(frame, options, 'embed'), - explicit: _getFrameFlag(frame, options, 'explicit'), - requireAll: _getFrameFlag(frame, options, 'requireAll') - }; - - // filter out subjects that match the frame - var matches = _filterSubjects(state, subjects, frame, flags); - - // add matches to output - var ids = Object.keys(matches).sort(); - for(var idx = 0; idx < ids.length; ++idx) { - var id = ids[idx]; - var subject = matches[id]; - - if(flags.embed === '@link' && id in state.link) { - // TODO: may want to also match an existing linked subject against - // the current frame ... so different frames could produce different - // subjects that are only shared in-memory when the frames are the same - - // add existing linked subject - _addFrameOutput(parent, property, state.link[id]); - continue; - } - - /* Note: In order to treat each top-level match as a compartmentalized - result, clear the unique embedded subjects map when the property is null, - which only occurs at the top-level. */ - if(property === null) { - state.uniqueEmbeds = {}; - } - - // start output for subject - var output = {}; - output['@id'] = id; - state.link[id] = output; - - // if embed is @never or if a circular reference would be created by an - // embed, the subject cannot be embedded, just add the reference; - // note that a circular reference won't occur when the embed flag is - // `@link` as the above check will short-circuit before reaching this point - if(flags.embed === '@never' || - _createsCircularReference(subject, state.subjectStack)) { - _addFrameOutput(parent, property, output); - continue; - } - - // if only the last match should be embedded - if(flags.embed === '@last') { - // remove any existing embed - if(id in state.uniqueEmbeds) { - _removeEmbed(state, id); - } - state.uniqueEmbeds[id] = {parent: parent, property: property}; - } - - // push matching subject onto stack to enable circular embed checks - state.subjectStack.push(subject); - - // iterate over subject properties - var props = Object.keys(subject).sort(); - for(var i = 0; i < props.length; i++) { - var prop = props[i]; - - // copy keywords to output - if(_isKeyword(prop)) { - output[prop] = _clone(subject[prop]); - continue; - } - - // explicit is on and property isn't in the frame, skip processing - if(flags.explicit && !(prop in frame)) { - continue; - } - - // add objects - var objects = subject[prop]; - for(var oi = 0; oi < objects.length; ++oi) { - var o = objects[oi]; - - // recurse into list - if(_isList(o)) { - // add empty list - var list = {'@list': []}; - _addFrameOutput(output, prop, list); - - // add list objects - var src = o['@list']; - for(var n in src) { - o = src[n]; - if(_isSubjectReference(o)) { - var subframe = (prop in frame ? - frame[prop][0]['@list'] : _createImplicitFrame(flags)); - // recurse into subject reference - _frame(state, [o['@id']], subframe, list, '@list'); - } else { - // include other values automatically - _addFrameOutput(list, '@list', _clone(o)); - } - } - continue; - } - - if(_isSubjectReference(o)) { - // recurse into subject reference - var subframe = (prop in frame ? 
- frame[prop] : _createImplicitFrame(flags)); - _frame(state, [o['@id']], subframe, output, prop); - } else { - // include other values automatically - _addFrameOutput(output, prop, _clone(o)); - } - } - } - - // handle defaults - var props = Object.keys(frame).sort(); - for(var i = 0; i < props.length; ++i) { - var prop = props[i]; - - // skip keywords - if(_isKeyword(prop)) { - continue; - } - - // if omit default is off, then include default values for properties - // that appear in the next frame but are not in the matching subject - var next = frame[prop][0]; - var omitDefaultOn = _getFrameFlag(next, options, 'omitDefault'); - if(!omitDefaultOn && !(prop in output)) { - var preserve = '@null'; - if('@default' in next) { - preserve = _clone(next['@default']); - } - if(!_isArray(preserve)) { - preserve = [preserve]; - } - output[prop] = [{'@preserve': preserve}]; - } - } - - // add output to parent - _addFrameOutput(parent, property, output); - - // pop matching subject from circular ref-checking stack - state.subjectStack.pop(); - } -} - -/** - * Creates an implicit frame when recursing through subject matches. If - * a frame doesn't have an explicit frame for a particular property, then - * a wildcard child frame will be created that uses the same flags that the - * parent frame used. - * - * @param flags the current framing flags. - * - * @return the implicit frame. - */ -function _createImplicitFrame(flags) { - var frame = {}; - for(var key in flags) { - if(flags[key] !== undefined) { - frame['@' + key] = [flags[key]]; - } - } - return [frame]; -} - -/** - * Checks the current subject stack to see if embedding the given subject - * would cause a circular reference. - * - * @param subjectToEmbed the subject to embed. - * @param subjectStack the current stack of subjects. - * - * @return true if a circular reference would be created, false if not. - */ -function _createsCircularReference(subjectToEmbed, subjectStack) { - for(var i = subjectStack.length - 1; i >= 0; --i) { - if(subjectStack[i]['@id'] === subjectToEmbed['@id']) { - return true; - } - } - return false; -} - -/** - * Gets the frame flag value for the given flag name. - * - * @param frame the frame. - * @param options the framing options. - * @param name the flag name. - * - * @return the flag value. - */ -function _getFrameFlag(frame, options, name) { - var flag = '@' + name; - var rval = (flag in frame ? frame[flag][0] : options[name]); - if(name === 'embed') { - // default is "@last" - // backwards-compatibility support for "embed" maps: - // true => "@last" - // false => "@never" - if(rval === true) { - rval = '@last'; - } else if(rval === false) { - rval = '@never'; - } else if(rval !== '@always' && rval !== '@never' && rval !== '@link') { - rval = '@last'; - } - } - return rval; -} - -/** - * Validates a JSON-LD frame, throwing an exception if the frame is invalid. - * - * @param frame the frame to validate. - */ -function _validateFrame(frame) { - if(!_isArray(frame) || frame.length !== 1 || !_isObject(frame[0])) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; a JSON-LD frame must be a single object.', - 'jsonld.SyntaxError', {frame: frame}); - } -} - -/** - * Returns a map of all of the subjects that match a parsed frame. - * - * @param state the current framing state. - * @param subjects the set of subjects to filter. - * @param frame the parsed frame. - * @param flags the frame flags. - * - * @return all of the matched subjects. 
- */ -function _filterSubjects(state, subjects, frame, flags) { - // filter subjects in @id order - var rval = {}; - for(var i = 0; i < subjects.length; ++i) { - var id = subjects[i]; - var subject = state.subjects[id]; - if(_filterSubject(subject, frame, flags)) { - rval[id] = subject; - } - } - return rval; -} - -/** - * Returns true if the given subject matches the given frame. - * - * @param subject the subject to check. - * @param frame the frame to check. - * @param flags the frame flags. - * - * @return true if the subject matches, false if not. - */ -function _filterSubject(subject, frame, flags) { - // check @type (object value means 'any' type, fall through to ducktyping) - if('@type' in frame && - !(frame['@type'].length === 1 && _isObject(frame['@type'][0]))) { - var types = frame['@type']; - for(var i = 0; i < types.length; ++i) { - // any matching @type is a match - if(jsonld.hasValue(subject, '@type', types[i])) { - return true; - } - } - return false; - } - - // check ducktype - var wildcard = true; - var matchesSome = false; - for(var key in frame) { - if(_isKeyword(key)) { - // skip non-@id and non-@type - if(key !== '@id' && key !== '@type') { - continue; - } - wildcard = false; - - // check @id for a specific @id value - if(key === '@id' && _isString(frame[key])) { - if(subject[key] !== frame[key]) { - return false; - } - matchesSome = true; - continue; - } - } - - wildcard = false; - - if(key in subject) { - // frame[key] === [] means do not match if property is present - if(_isArray(frame[key]) && frame[key].length === 0 && - subject[key] !== undefined) { - return false; - } - matchesSome = true; - continue; - } - - // all properties must match to be a duck unless a @default is specified - var hasDefault = (_isArray(frame[key]) && _isObject(frame[key][0]) && - '@default' in frame[key][0]); - if(flags.requireAll && !hasDefault) { - return false; - } - } - - // return true if wildcard or subject matches some properties - return wildcard || matchesSome; -} - -/** - * Removes an existing embed. - * - * @param state the current framing state. - * @param id the @id of the embed to remove. - */ -function _removeEmbed(state, id) { - // get existing embed - var embeds = state.uniqueEmbeds; - var embed = embeds[id]; - var parent = embed.parent; - var property = embed.property; - - // create reference to replace embed - var subject = {'@id': id}; - - // remove existing embed - if(_isArray(parent)) { - // replace subject with reference - for(var i = 0; i < parent.length; ++i) { - if(jsonld.compareValues(parent[i], subject)) { - parent[i] = subject; - break; - } - } - } else { - // replace subject with reference - var useArray = _isArray(parent[property]); - jsonld.removeValue(parent, property, subject, {propertyIsArray: useArray}); - jsonld.addValue(parent, property, subject, {propertyIsArray: useArray}); - } - - // recursively remove dependent dangling embeds - var removeDependents = function(id) { - // get embed keys as a separate array to enable deleting keys in map - var ids = Object.keys(embeds); - for(var i = 0; i < ids.length; ++i) { - var next = ids[i]; - if(next in embeds && _isObject(embeds[next].parent) && - embeds[next].parent['@id'] === id) { - delete embeds[next]; - removeDependents(next); - } - } - }; - removeDependents(id); -} - -/** - * Adds framing output to the given parent. - * - * @param parent the parent to add to. - * @param property the parent property. - * @param output the output to add. 
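// Example (hedged sketch assuming the public `jsonld.frame` API): the matching
// and embedding rules above are driven by the frame below; the @embed flag
// values handled here ('@last', '@always', '@never', '@link') may come from the
// frame itself or from the framing options.
var jsonld = require('jsonld');
var doc = {
  '@context': {'@vocab': 'http://example.com/#'},
  '@graph': [
    {'@id': 'http://example.com/a', 'child': {'@id': 'http://example.com/b'}},
    {'@id': 'http://example.com/b', 'name': 'B'}
  ]
};
var frame = {'@context': {'@vocab': 'http://example.com/#'}, 'child': {}};
jsonld.frame(doc, frame, function(err, framed) {
  console.log(JSON.stringify(framed, null, 2)); // node "b" embedded under "child"
});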
- */ -function _addFrameOutput(parent, property, output) { - if(_isObject(parent)) { - jsonld.addValue(parent, property, output, {propertyIsArray: true}); - } else { - parent.push(output); - } -} - -/** - * Removes the @preserve keywords as the last step of the framing algorithm. - * - * @param ctx the active context used to compact the input. - * @param input the framed, compacted output. - * @param options the compaction options used. - * - * @return the resulting output. - */ -function _removePreserve(ctx, input, options) { - // recurse through arrays - if(_isArray(input)) { - var output = []; - for(var i = 0; i < input.length; ++i) { - var result = _removePreserve(ctx, input[i], options); - // drop nulls from arrays - if(result !== null) { - output.push(result); - } - } - input = output; - } else if(_isObject(input)) { - // remove @preserve - if('@preserve' in input) { - if(input['@preserve'] === '@null') { - return null; - } - return input['@preserve']; - } - - // skip @values - if(_isValue(input)) { - return input; - } - - // recurse through @lists - if(_isList(input)) { - input['@list'] = _removePreserve(ctx, input['@list'], options); - return input; - } - - // handle in-memory linked nodes - var idAlias = _compactIri(ctx, '@id'); - if(idAlias in input) { - var id = input[idAlias]; - if(id in options.link) { - var idx = options.link[id].indexOf(input); - if(idx === -1) { - // prevent circular visitation - options.link[id].push(input); - } else { - // already visited - return options.link[id][idx]; - } - } else { - // prevent circular visitation - options.link[id] = [input]; - } - } - - // recurse through properties - for(var prop in input) { - var result = _removePreserve(ctx, input[prop], options); - var container = jsonld.getContextValue(ctx, prop, '@container'); - if(options.compactArrays && _isArray(result) && result.length === 1 && - container === null) { - result = result[0]; - } - input[prop] = result; - } - } - return input; -} - -/** - * Compares two strings first based on length and then lexicographically. - * - * @param a the first string. - * @param b the second string. - * - * @return -1 if a < b, 1 if a > b, 0 if a == b. - */ -function _compareShortestLeast(a, b) { - if(a.length < b.length) { - return -1; - } - if(b.length < a.length) { - return 1; - } - if(a === b) { - return 0; - } - return (a < b) ? -1 : 1; -} - -/** - * Picks the preferred compaction term from the given inverse context entry. - * - * @param activeCtx the active context. - * @param iri the IRI to pick the term for. - * @param value the value to pick the term for. - * @param containers the preferred containers. - * @param typeOrLanguage either '@type' or '@language'. - * @param typeOrLanguageValue the preferred value for '@type' or '@language'. - * - * @return the preferred term. 
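// Illustration (plain JavaScript) of the shortest-least ordering defined above,
// as used when picking between candidate terms and CURIEs during compaction:
var candidates = ['ex:name', 'name', 'foaf:name'];
candidates.sort(function(a, b) {
  if(a.length < b.length) { return -1; }
  if(b.length < a.length) { return 1; }
  return (a < b) ? -1 : ((a > b) ? 1 : 0);
});
console.log(candidates); // [ 'name', 'ex:name', 'foaf:name' ]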
- */ -function _selectTerm( - activeCtx, iri, value, containers, typeOrLanguage, typeOrLanguageValue) { - if(typeOrLanguageValue === null) { - typeOrLanguageValue = '@null'; - } - - // preferences for the value of @type or @language - var prefs = []; - - // determine prefs for @id based on whether or not value compacts to a term - if((typeOrLanguageValue === '@id' || typeOrLanguageValue === '@reverse') && - _isSubjectReference(value)) { - // prefer @reverse first - if(typeOrLanguageValue === '@reverse') { - prefs.push('@reverse'); - } - // try to compact value to a term - var term = _compactIri(activeCtx, value['@id'], null, {vocab: true}); - if(term in activeCtx.mappings && - activeCtx.mappings[term] && - activeCtx.mappings[term]['@id'] === value['@id']) { - // prefer @vocab - prefs.push.apply(prefs, ['@vocab', '@id']); - } else { - // prefer @id - prefs.push.apply(prefs, ['@id', '@vocab']); - } - } else { - prefs.push(typeOrLanguageValue); - } - prefs.push('@none'); - - var containerMap = activeCtx.inverse[iri]; - for(var ci = 0; ci < containers.length; ++ci) { - // if container not available in the map, continue - var container = containers[ci]; - if(!(container in containerMap)) { - continue; - } - - var typeOrLanguageValueMap = containerMap[container][typeOrLanguage]; - for(var pi = 0; pi < prefs.length; ++pi) { - // if type/language option not available in the map, continue - var pref = prefs[pi]; - if(!(pref in typeOrLanguageValueMap)) { - continue; - } - - // select term - return typeOrLanguageValueMap[pref]; - } - } - - return null; -} - -/** - * Compacts an IRI or keyword into a term or prefix if it can be. If the - * IRI has an associated value it may be passed. - * - * @param activeCtx the active context to use. - * @param iri the IRI to compact. - * @param value the value to check or null. - * @param relativeTo options for how to compact IRIs: - * vocab: true to split after @vocab, false not to. - * @param reverse true if a reverse property is being compacted, false if not. - * - * @return the compacted term, prefix, keyword alias, or the original IRI. - */ -function _compactIri(activeCtx, iri, value, relativeTo, reverse) { - // can't compact null - if(iri === null) { - return iri; - } - - // default value and parent to null - if(_isUndefined(value)) { - value = null; - } - // default reverse to false - if(_isUndefined(reverse)) { - reverse = false; - } - relativeTo = relativeTo || {}; - - var inverseCtx = activeCtx.getInverse(); - - // if term is a keyword, it can only be compacted to a simple alias - if(_isKeyword(iri)) { - if(iri in inverseCtx) { - return inverseCtx[iri]['@none']['@type']['@none']; - } - return iri; - } - - // use inverse context to pick a term if iri is relative to vocab - if(relativeTo.vocab && iri in inverseCtx) { - var defaultLanguage = activeCtx['@language'] || '@none'; - - // prefer @index if available in value - var containers = []; - if(_isObject(value) && '@index' in value) { - containers.push('@index'); - } - - // defaults for term selection based on type/language - var typeOrLanguage = '@language'; - var typeOrLanguageValue = '@null'; - - if(reverse) { - typeOrLanguage = '@type'; - typeOrLanguageValue = '@reverse'; - containers.push('@set'); - } else if(_isList(value)) { - // choose the most specific term that works for all elements in @list - // only select @list containers if @index is NOT in value - if(!('@index' in value)) { - containers.push('@list'); - } - var list = value['@list']; - var commonLanguage = (list.length === 0) ? 
defaultLanguage : null; - var commonType = null; - for(var i = 0; i < list.length; ++i) { - var item = list[i]; - var itemLanguage = '@none'; - var itemType = '@none'; - if(_isValue(item)) { - if('@language' in item) { - itemLanguage = item['@language']; - } else if('@type' in item) { - itemType = item['@type']; - } else { - // plain literal - itemLanguage = '@null'; - } - } else { - itemType = '@id'; - } - if(commonLanguage === null) { - commonLanguage = itemLanguage; - } else if(itemLanguage !== commonLanguage && _isValue(item)) { - commonLanguage = '@none'; - } - if(commonType === null) { - commonType = itemType; - } else if(itemType !== commonType) { - commonType = '@none'; - } - // there are different languages and types in the list, so choose - // the most generic term, no need to keep iterating the list - if(commonLanguage === '@none' && commonType === '@none') { - break; - } - } - commonLanguage = commonLanguage || '@none'; - commonType = commonType || '@none'; - if(commonType !== '@none') { - typeOrLanguage = '@type'; - typeOrLanguageValue = commonType; - } else { - typeOrLanguageValue = commonLanguage; - } - } else { - if(_isValue(value)) { - if('@language' in value && !('@index' in value)) { - containers.push('@language'); - typeOrLanguageValue = value['@language']; - } else if('@type' in value) { - typeOrLanguage = '@type'; - typeOrLanguageValue = value['@type']; - } - } else { - typeOrLanguage = '@type'; - typeOrLanguageValue = '@id'; - } - containers.push('@set'); - } - - // do term selection - containers.push('@none'); - var term = _selectTerm( - activeCtx, iri, value, containers, typeOrLanguage, typeOrLanguageValue); - if(term !== null) { - return term; - } - } - - // no term match, use @vocab if available - if(relativeTo.vocab) { - if('@vocab' in activeCtx) { - // determine if vocab is a prefix of the iri - var vocab = activeCtx['@vocab']; - if(iri.indexOf(vocab) === 0 && iri !== vocab) { - // use suffix as relative iri if it is not a term in the active context - var suffix = iri.substr(vocab.length); - if(!(suffix in activeCtx.mappings)) { - return suffix; - } - } - } - } - - // no term or @vocab match, check for possible CURIEs - var choice = null; - var idx = 0; - var partialMatches = []; - var iriMap = activeCtx.fastCurieMap; - // check for partial matches of against `iri`, which means look until - // iri.length - 1, not full length - var maxPartialLength = iri.length - 1; - for(; idx < maxPartialLength && iri[idx] in iriMap; ++idx) { - iriMap = iriMap[iri[idx]]; - if('' in iriMap) { - partialMatches.push(iriMap[''][0]); - } - } - // check partial matches in reverse order to prefer longest ones first - for(var i = partialMatches.length - 1; i >= 0; --i) { - var entry = partialMatches[i]; - var terms = entry.terms; - for(var ti = 0; ti < terms.length; ++ti) { - // a CURIE is usable if: - // 1. it has no mapping, OR - // 2. 
value is null, which means we're not compacting an @value, AND - // the mapping matches the IRI - var curie = terms[ti] + ':' + iri.substr(entry.iri.length); - var isUsableCurie = (!(curie in activeCtx.mappings) || - (value === null && activeCtx.mappings[curie]['@id'] === iri)); - - // select curie if it is shorter or the same length but lexicographically - // less than the current choice - if(isUsableCurie && (choice === null || - _compareShortestLeast(curie, choice) < 0)) { - choice = curie; - } - } - } - - // return chosen curie - if(choice !== null) { - return choice; - } - - // compact IRI relative to base - if(!relativeTo.vocab) { - return _removeBase(activeCtx['@base'], iri); - } - - // return IRI as is - return iri; -} - -/** - * Performs value compaction on an object with '@value' or '@id' as the only - * property. - * - * @param activeCtx the active context. - * @param activeProperty the active property that points to the value. - * @param value the value to compact. - * - * @return the compaction result. - */ -function _compactValue(activeCtx, activeProperty, value) { - // value is a @value - if(_isValue(value)) { - // get context rules - var type = jsonld.getContextValue(activeCtx, activeProperty, '@type'); - var language = jsonld.getContextValue( - activeCtx, activeProperty, '@language'); - var container = jsonld.getContextValue( - activeCtx, activeProperty, '@container'); - - // whether or not the value has an @index that must be preserved - var preserveIndex = (('@index' in value) && - container !== '@index'); - - // if there's no @index to preserve ... - if(!preserveIndex) { - // matching @type or @language specified in context, compact value - if(value['@type'] === type || value['@language'] === language) { - return value['@value']; - } - } - - // return just the value of @value if all are true: - // 1. @value is the only key or @index isn't being preserved - // 2. 
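// Hedged usage sketch of the IRI compaction path above, driven through the
// public compact API (context and IRIs are hypothetical). With only a prefix
// mapping available, no term matches, @vocab is absent, and the CURIE branch
// produces "foaf:name"; a full term definition would win instead.
var doc = {'http://xmlns.com/foaf/0.1/name': 'Jane Doe'};
var context = {'foaf': 'http://xmlns.com/foaf/0.1/'};
jsonld.compact(doc, context, function(err, compacted) {
  // expected shape: {"@context": {...}, "foaf:name": "Jane Doe"}
  console.log(err || JSON.stringify(compacted));
});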
there is no default language or @value is not a string or - // the key has a mapping with a null @language - var keyCount = Object.keys(value).length; - var isValueOnlyKey = (keyCount === 1 || - (keyCount === 2 && ('@index' in value) && !preserveIndex)); - var hasDefaultLanguage = ('@language' in activeCtx); - var isValueString = _isString(value['@value']); - var hasNullMapping = (activeCtx.mappings[activeProperty] && - activeCtx.mappings[activeProperty]['@language'] === null); - if(isValueOnlyKey && - (!hasDefaultLanguage || !isValueString || hasNullMapping)) { - return value['@value']; - } - - var rval = {}; - - // preserve @index - if(preserveIndex) { - rval[_compactIri(activeCtx, '@index')] = value['@index']; - } - - if('@type' in value) { - // compact @type IRI - rval[_compactIri(activeCtx, '@type')] = _compactIri( - activeCtx, value['@type'], null, {vocab: true}); - } else if('@language' in value) { - // alias @language - rval[_compactIri(activeCtx, '@language')] = value['@language']; - } - - // alias @value - rval[_compactIri(activeCtx, '@value')] = value['@value']; - - return rval; - } - - // value is a subject reference - var expandedProperty = _expandIri(activeCtx, activeProperty, {vocab: true}); - var type = jsonld.getContextValue(activeCtx, activeProperty, '@type'); - var compacted = _compactIri( - activeCtx, value['@id'], null, {vocab: type === '@vocab'}); - - // compact to scalar - if(type === '@id' || type === '@vocab' || expandedProperty === '@graph') { - return compacted; - } - - var rval = {}; - rval[_compactIri(activeCtx, '@id')] = compacted; - return rval; -} - -/** - * Creates a term definition during context processing. - * - * @param activeCtx the current active context. - * @param localCtx the local context being processed. - * @param term the term in the local context to define the mapping for. - * @param defined a map of defining/defined keys to detect cycles and prevent - * double definitions. 
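// Hedged usage sketch of the value compaction rules above: when a term's
// @language matches the @value object's language, the object collapses to a
// plain string (term and IRIs below are hypothetical).
var langContext = {'label': {'@id': 'http://example.com/label', '@language': 'en'}};
var langDoc = {'http://example.com/label': {'@value': 'hi', '@language': 'en'}};
jsonld.compact(langDoc, langContext, function(err, compacted) {
  // expected shape: {"@context": {...}, "label": "hi"}
  console.log(err || JSON.stringify(compacted));
});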
- */ -function _createTermDefinition(activeCtx, localCtx, term, defined) { - if(term in defined) { - // term already defined - if(defined[term]) { - return; - } - // cycle detected - throw new JsonLdError( - 'Cyclical context definition detected.', - 'jsonld.CyclicalContext', - {code: 'cyclic IRI mapping', context: localCtx, term: term}); - } - - // now defining term - defined[term] = false; - - if(_isKeyword(term)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; keywords cannot be overridden.', - 'jsonld.SyntaxError', - {code: 'keyword redefinition', context: localCtx, term: term}); - } - - if(term === '') { - throw new JsonLdError( - 'Invalid JSON-LD syntax; a term cannot be an empty string.', - 'jsonld.SyntaxError', - {code: 'invalid term definition', context: localCtx}); - } - - // remove old mapping - if(activeCtx.mappings[term]) { - delete activeCtx.mappings[term]; - } - - // get context term value - var value = localCtx[term]; - - // clear context entry - if(value === null || (_isObject(value) && value['@id'] === null)) { - activeCtx.mappings[term] = null; - defined[term] = true; - return; - } - - // convert short-hand value to object w/@id - if(_isString(value)) { - value = {'@id': value}; - } - - if(!_isObject(value)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; @context property values must be ' + - 'strings or objects.', - 'jsonld.SyntaxError', - {code: 'invalid term definition', context: localCtx}); - } - - // create new mapping - var mapping = activeCtx.mappings[term] = {}; - mapping.reverse = false; - - if('@reverse' in value) { - if('@id' in value) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; a @reverse term definition must not ' + - 'contain @id.', 'jsonld.SyntaxError', - {code: 'invalid reverse property', context: localCtx}); - } - var reverse = value['@reverse']; - if(!_isString(reverse)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; a @context @reverse value must be a string.', - 'jsonld.SyntaxError', {code: 'invalid IRI mapping', context: localCtx}); - } - - // expand and add @id mapping - var id = _expandIri( - activeCtx, reverse, {vocab: true, base: false}, localCtx, defined); - if(!_isAbsoluteIri(id)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; a @context @reverse value must be an ' + - 'absolute IRI or a blank node identifier.', - 'jsonld.SyntaxError', {code: 'invalid IRI mapping', context: localCtx}); - } - mapping['@id'] = id; - mapping.reverse = true; - } else if('@id' in value) { - var id = value['@id']; - if(!_isString(id)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; a @context @id value must be an array ' + - 'of strings or a string.', - 'jsonld.SyntaxError', {code: 'invalid IRI mapping', context: localCtx}); - } - if(id !== term) { - // expand and add @id mapping - id = _expandIri( - activeCtx, id, {vocab: true, base: false}, localCtx, defined); - if(!_isAbsoluteIri(id) && !_isKeyword(id)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; a @context @id value must be an ' + - 'absolute IRI, a blank node identifier, or a keyword.', - 'jsonld.SyntaxError', - {code: 'invalid IRI mapping', context: localCtx}); - } - mapping['@id'] = id; - } - } - - // always compute whether term has a colon as an optimization for - // _compactIri - var colon = term.indexOf(':'); - mapping._termHasColon = (colon !== -1); - - if(!('@id' in mapping)) { - // see if the term has a prefix - if(mapping._termHasColon) { - var prefix = term.substr(0, colon); - if(prefix in localCtx) { - // define parent prefix - 
_createTermDefinition(activeCtx, localCtx, prefix, defined); - } - - if(activeCtx.mappings[prefix]) { - // set @id based on prefix parent - var suffix = term.substr(colon + 1); - mapping['@id'] = activeCtx.mappings[prefix]['@id'] + suffix; - } else { - // term is an absolute IRI - mapping['@id'] = term; - } - } else { - // non-IRIs *must* define @ids if @vocab is not available - if(!('@vocab' in activeCtx)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; @context terms must define an @id.', - 'jsonld.SyntaxError', - {code: 'invalid IRI mapping', context: localCtx, term: term}); - } - // prepend vocab to term - mapping['@id'] = activeCtx['@vocab'] + term; - } - } - - // IRI mapping now defined - defined[term] = true; - - if('@type' in value) { - var type = value['@type']; - if(!_isString(type)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; an @context @type values must be a string.', - 'jsonld.SyntaxError', - {code: 'invalid type mapping', context: localCtx}); - } - - if(type !== '@id' && type !== '@vocab') { - // expand @type to full IRI - type = _expandIri( - activeCtx, type, {vocab: true, base: false}, localCtx, defined); - if(!_isAbsoluteIri(type)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; an @context @type value must be an ' + - 'absolute IRI.', - 'jsonld.SyntaxError', - {code: 'invalid type mapping', context: localCtx}); - } - if(type.indexOf('_:') === 0) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; an @context @type values must be an IRI, ' + - 'not a blank node identifier.', - 'jsonld.SyntaxError', - {code: 'invalid type mapping', context: localCtx}); - } - } - - // add @type to mapping - mapping['@type'] = type; - } - - if('@container' in value) { - var container = value['@container']; - if(container !== '@list' && container !== '@set' && - container !== '@index' && container !== '@language') { - throw new JsonLdError( - 'Invalid JSON-LD syntax; @context @container value must be ' + - 'one of the following: @list, @set, @index, or @language.', - 'jsonld.SyntaxError', - {code: 'invalid container mapping', context: localCtx}); - } - if(mapping.reverse && container !== '@index' && container !== '@set' && - container !== null) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; @context @container value for a @reverse ' + - 'type definition must be @index or @set.', 'jsonld.SyntaxError', - {code: 'invalid reverse property', context: localCtx}); - } - - // add @container to mapping - mapping['@container'] = container; - } - - if('@language' in value && !('@type' in value)) { - var language = value['@language']; - if(language !== null && !_isString(language)) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; @context @language value must be ' + - 'a string or null.', 'jsonld.SyntaxError', - {code: 'invalid language mapping', context: localCtx}); - } - - // add @language to mapping - if(language !== null) { - language = language.toLowerCase(); - } - mapping['@language'] = language; - } - - // disallow aliasing @context and @preserve - var id = mapping['@id']; - if(id === '@context' || id === '@preserve') { - throw new JsonLdError( - 'Invalid JSON-LD syntax; @context and @preserve cannot be aliased.', - 'jsonld.SyntaxError', {code: 'invalid keyword alias', context: localCtx}); - } -} - -/** - * Expands a string to a full IRI. The string may be a term, a prefix, a - * relative IRI, or an absolute IRI. The associated absolute IRI will be - * returned. - * - * @param activeCtx the current active context. - * @param value the string to expand. 
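// Hedged sketch of the term definitions the code above would build for a
// small local context; the mapping shapes below are approximate and use the
// internal fields seen here (@id, @type, @container, reverse, _termHasColon).
var exampleLocalCtx = {
  'foaf': 'http://xmlns.com/foaf/0.1/',
  'knows': {'@id': 'foaf:knows', '@type': '@id', '@container': '@set'},
  'knownBy': {'@reverse': 'foaf:knows'}
};
// roughly yields:
//   mappings['foaf']    = {reverse: false, '@id': 'http://xmlns.com/foaf/0.1/', _termHasColon: false}
//   mappings['knows']   = {reverse: false, '@id': 'http://xmlns.com/foaf/0.1/knows',
//                          '@type': '@id', '@container': '@set', _termHasColon: false}
//   mappings['knownBy'] = {reverse: true, '@id': 'http://xmlns.com/foaf/0.1/knows', _termHasColon: false}
// while a null value (or {'@id': null}) clears a term and redefining a
// keyword raises a 'keyword redefinition' error.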
- * @param relativeTo options for how to resolve relative IRIs: - * base: true to resolve against the base IRI, false not to. - * vocab: true to concatenate after @vocab, false not to. - * @param localCtx the local context being processed (only given if called - * during context processing). - * @param defined a map for tracking cycles in context definitions (only given - * if called during context processing). - * - * @return the expanded value. - */ -function _expandIri(activeCtx, value, relativeTo, localCtx, defined) { - // already expanded - if(value === null || _isKeyword(value)) { - return value; - } - - // ensure value is interpreted as a string - value = String(value); - - // define term dependency if not defined - if(localCtx && value in localCtx && defined[value] !== true) { - _createTermDefinition(activeCtx, localCtx, value, defined); - } - - relativeTo = relativeTo || {}; - if(relativeTo.vocab) { - var mapping = activeCtx.mappings[value]; - - // value is explicitly ignored with a null mapping - if(mapping === null) { - return null; - } - - if(mapping) { - // value is a term - return mapping['@id']; - } - } - - // split value into prefix:suffix - var colon = value.indexOf(':'); - if(colon !== -1) { - var prefix = value.substr(0, colon); - var suffix = value.substr(colon + 1); - - // do not expand blank nodes (prefix of '_') or already-absolute - // IRIs (suffix of '//') - if(prefix === '_' || suffix.indexOf('//') === 0) { - return value; - } - - // prefix dependency not defined, define it - if(localCtx && prefix in localCtx) { - _createTermDefinition(activeCtx, localCtx, prefix, defined); - } - - // use mapping if prefix is defined - var mapping = activeCtx.mappings[prefix]; - if(mapping) { - return mapping['@id'] + suffix; - } - - // already absolute IRI - return value; - } - - // prepend vocab - if(relativeTo.vocab && '@vocab' in activeCtx) { - return activeCtx['@vocab'] + value; - } - - // prepend base - var rval = value; - if(relativeTo.base) { - rval = jsonld.prependBase(activeCtx['@base'], rval); - } - - return rval; -} - -function _prependBase(base, iri) { - // skip IRI processing - if(base === null) { - return iri; - } - // already an absolute IRI - if(iri.indexOf(':') !== -1) { - return iri; - } - - // parse base if it is a string - if(_isString(base)) { - base = jsonld.url.parse(base || ''); - } - - // parse given IRI - var rel = jsonld.url.parse(iri); - - // per RFC3986 5.2.2 - var transform = { - protocol: base.protocol || '' - }; - - if(rel.authority !== null) { - transform.authority = rel.authority; - transform.path = rel.path; - transform.query = rel.query; - } else { - transform.authority = base.authority; - - if(rel.path === '') { - transform.path = base.path; - if(rel.query !== null) { - transform.query = rel.query; - } else { - transform.query = base.query; - } - } else { - if(rel.path.indexOf('/') === 0) { - // IRI represents an absolute path - transform.path = rel.path; - } else { - // merge paths - var path = base.path; - - // append relative path to the end of the last directory from base - if(rel.path !== '') { - path = path.substr(0, path.lastIndexOf('/') + 1); - if(path.length > 0 && path.substr(-1) !== '/') { - path += '/'; - } - path += rel.path; - } - - transform.path = path; - } - transform.query = rel.query; - } - } - - // remove slashes and dots in path - transform.path = _removeDotSegments(transform.path, !!transform.authority); - - // construct URL - var rval = transform.protocol; - if(transform.authority !== null) { - rval += '//' + 
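// Hedged usage sketch of the expansion rules above, seen through the public
// expand API: terms and prefixes resolve through the context, plain keys go
// through @vocab, @id values resolve against @base, and blank node
// identifiers are left alone (all values below are hypothetical).
var toExpand = {
  '@context': {
    '@base': 'http://example.com/doc/',
    '@vocab': 'http://example.com/vocab#',
    'ex': 'http://example.org/'
  },
  '@id': 'child',
  'title': 'A title',
  'ex:item': {'@id': '_:b0'}
};
jsonld.expand(toExpand, function(err, expanded) {
  // expected: @id -> http://example.com/doc/child, title -> http://example.com/vocab#title,
  // ex:item -> http://example.org/item, and _:b0 unchanged
  console.log(err || JSON.stringify(expanded));
});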
transform.authority; - } - rval += transform.path; - if(transform.query !== null) { - rval += '?' + transform.query; - } - if(rel.fragment !== null) { - rval += '#' + rel.fragment; - } - - // handle empty base - if(rval === '') { - rval = './'; - } - - return rval; -} - -/** - * Removes a base IRI from the given absolute IRI. - * - * @param base the base IRI. - * @param iri the absolute IRI. - * - * @return the relative IRI if relative to base, otherwise the absolute IRI. - */ -function _removeBase(base, iri) { - // skip IRI processing - if(base === null) { - return iri; - } - - if(_isString(base)) { - base = jsonld.url.parse(base || ''); - } - - // establish base root - var root = ''; - if(base.href !== '') { - root += (base.protocol || '') + '//' + (base.authority || ''); - } else if(iri.indexOf('//')) { - // support network-path reference with empty base - root += '//'; - } - - // IRI not relative to base - if(iri.indexOf(root) !== 0) { - return iri; - } - - // remove root from IRI and parse remainder - var rel = jsonld.url.parse(iri.substr(root.length)); - - // remove path segments that match (do not remove last segment unless there - // is a hash or query) - var baseSegments = base.normalizedPath.split('/'); - var iriSegments = rel.normalizedPath.split('/'); - var last = (rel.fragment || rel.query) ? 0 : 1; - while(baseSegments.length > 0 && iriSegments.length > last) { - if(baseSegments[0] !== iriSegments[0]) { - break; - } - baseSegments.shift(); - iriSegments.shift(); - } - - // use '../' for each non-matching base segment - var rval = ''; - if(baseSegments.length > 0) { - // don't count the last segment (if it ends with '/' last path doesn't - // count and if it doesn't end with '/' it isn't a path) - baseSegments.pop(); - for(var i = 0; i < baseSegments.length; ++i) { - rval += '../'; - } - } - - // prepend remaining segments - rval += iriSegments.join('/'); - - // add query and hash - if(rel.query !== null) { - rval += '?' + rel.query; - } - if(rel.fragment !== null) { - rval += '#' + rel.fragment; - } - - // handle empty base - if(rval === '') { - rval = './'; - } - - return rval; -} - -/** - * Gets the initial context. - * - * @param options the options to use: - * [base] the document base IRI. - * - * @return the initial context. - */ -function _getInitialContext(options) { - var base = jsonld.url.parse(options.base || ''); - return { - '@base': base, - mappings: {}, - inverse: null, - getInverse: _createInverseContext, - clone: _cloneActiveContext - }; - - /** - * Generates an inverse context for use in the compaction algorithm, if - * not already generated for the given active context. - * - * @return the inverse context. 
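// Hedged sketch of the RFC 3986 reference resolution implemented above. In
// this legacy code the forward direction is reachable as jsonld.prependBase;
// the values are illustrative.
console.log(jsonld.prependBase('http://example.com/a/b/c', 'd/../e'));
// -> 'http://example.com/a/b/e'   (merge paths, then remove dot segments)
//
// _removeBase() is the inverse mapping used when compacting @id values, e.g.
//   _removeBase('http://example.com/a/b/c', 'http://example.com/a/x/y')
//   -> '../x/y'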
- */ - function _createInverseContext() { - var activeCtx = this; - - // lazily create inverse - if(activeCtx.inverse) { - return activeCtx.inverse; - } - var inverse = activeCtx.inverse = {}; - - // variables for building fast CURIE map - var fastCurieMap = activeCtx.fastCurieMap = {}; - var irisToTerms = {}; - - // handle default language - var defaultLanguage = activeCtx['@language'] || '@none'; - - // create term selections for each mapping in the context, ordered by - // shortest and then lexicographically least - var mappings = activeCtx.mappings; - var terms = Object.keys(mappings).sort(_compareShortestLeast); - for(var i = 0; i < terms.length; ++i) { - var term = terms[i]; - var mapping = mappings[term]; - if(mapping === null) { - continue; - } - - var container = mapping['@container'] || '@none'; - - // iterate over every IRI in the mapping - var ids = mapping['@id']; - if(!_isArray(ids)) { - ids = [ids]; - } - for(var ii = 0; ii < ids.length; ++ii) { - var iri = ids[ii]; - var entry = inverse[iri]; - var isKeyword = _isKeyword(iri); - - if(!entry) { - // initialize entry - inverse[iri] = entry = {}; - - if(!isKeyword && !mapping._termHasColon) { - // init IRI to term map and fast CURIE prefixes - irisToTerms[iri] = [term]; - var fastCurieEntry = {iri: iri, terms: irisToTerms[iri]}; - if(iri[0] in fastCurieMap) { - fastCurieMap[iri[0]].push(fastCurieEntry); - } else { - fastCurieMap[iri[0]] = [fastCurieEntry]; - } - } - } else if(!isKeyword && !mapping._termHasColon) { - // add IRI to term match - irisToTerms[iri].push(term); - } - - // add new entry - if(!entry[container]) { - entry[container] = { - '@language': {}, - '@type': {} - }; - } - entry = entry[container]; - - if(mapping.reverse) { - // term is preferred for values using @reverse - _addPreferredTerm(mapping, term, entry['@type'], '@reverse'); - } else if('@type' in mapping) { - // term is preferred for values using specific type - _addPreferredTerm(mapping, term, entry['@type'], mapping['@type']); - } else if('@language' in mapping) { - // term is preferred for values using specific language - var language = mapping['@language'] || '@null'; - _addPreferredTerm(mapping, term, entry['@language'], language); - } else { - // term is preferred for values w/default language or no type and - // no language - // add an entry for the default language - _addPreferredTerm(mapping, term, entry['@language'], defaultLanguage); - - // add entries for no type and no language - _addPreferredTerm(mapping, term, entry['@type'], '@none'); - _addPreferredTerm(mapping, term, entry['@language'], '@none'); - } - } - } - - // build fast CURIE map - for(var key in fastCurieMap) { - _buildIriMap(fastCurieMap, key, 1); - } - - return inverse; - } - - /** - * Runs a recursive algorithm to build a lookup map for quickly finding - * potential CURIEs. - * - * @param iriMap the map to build. - * @param key the current key in the map to work on. - * @param idx the index into the IRI to compare. - */ - function _buildIriMap(iriMap, key, idx) { - var entries = iriMap[key]; - var next = iriMap[key] = {}; - - var iri; - var letter; - for(var i = 0; i < entries.length; ++i) { - iri = entries[i].iri; - if(idx >= iri.length) { - letter = ''; - } else { - letter = iri[idx]; - } - if(letter in next) { - next[letter].push(entries[i]); - } else { - next[letter] = [entries[i]]; - } - } - - for(var key in next) { - if(key === '') { - continue; - } - _buildIriMap(next, key, idx + 1); - } - } - - /** - * Adds the term for the given entry if not already added. 
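// Hedged sketch of the inverse context built above for a hypothetical context
// {"name": {"@id": "http://xmlns.com/foaf/0.1/name", "@language": "en"}}:
// each IRI maps container -> (@language|@type) -> value -> preferred term.
var exampleInverse = {
  'http://xmlns.com/foaf/0.1/name': {
    '@none': {                        // the term's @container ('@none' when absent)
      '@language': {'en': 'name'},
      '@type': {}
    }
  }
};
// _selectTerm() walks the preferred containers, then the @type/@language
// preferences, and returns the first term it finds in this structure.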
- * - * @param mapping the term mapping. - * @param term the term to add. - * @param entry the inverse context typeOrLanguage entry to add to. - * @param typeOrLanguageValue the key in the entry to add to. - */ - function _addPreferredTerm(mapping, term, entry, typeOrLanguageValue) { - if(!(typeOrLanguageValue in entry)) { - entry[typeOrLanguageValue] = term; - } - } - - /** - * Clones an active context, creating a child active context. - * - * @return a clone (child) of the active context. - */ - function _cloneActiveContext() { - var child = {}; - child['@base'] = this['@base']; - child.mappings = _clone(this.mappings); - child.clone = this.clone; - child.inverse = null; - child.getInverse = this.getInverse; - if('@language' in this) { - child['@language'] = this['@language']; - } - if('@vocab' in this) { - child['@vocab'] = this['@vocab']; - } - return child; - } -} - -/** - * Returns whether or not the given value is a keyword. - * - * @param v the value to check. - * - * @return true if the value is a keyword, false if not. - */ -function _isKeyword(v) { - if(!_isString(v)) { - return false; - } - switch(v) { - case '@base': - case '@context': - case '@container': - case '@default': - case '@embed': - case '@explicit': - case '@graph': - case '@id': - case '@index': - case '@language': - case '@list': - case '@omitDefault': - case '@preserve': - case '@requireAll': - case '@reverse': - case '@set': - case '@type': - case '@value': - case '@vocab': - return true; - } - return false; -} - -/** - * Returns true if the given value is an Object. - * - * @param v the value to check. - * - * @return true if the value is an Object, false if not. - */ -function _isObject(v) { - return (Object.prototype.toString.call(v) === '[object Object]'); -} - -/** - * Returns true if the given value is an empty Object. - * - * @param v the value to check. - * - * @return true if the value is an empty Object, false if not. - */ -function _isEmptyObject(v) { - return _isObject(v) && Object.keys(v).length === 0; -} - -/** - * Returns true if the given value is an Array. - * - * @param v the value to check. - * - * @return true if the value is an Array, false if not. - */ -function _isArray(v) { - return Array.isArray(v); -} - -/** - * Throws an exception if the given value is not a valid @type value. - * - * @param v the value to check. - */ -function _validateTypeValue(v) { - // can be a string or an empty object - if(_isString(v) || _isEmptyObject(v)) { - return; - } - - // must be an array - var isValid = false; - if(_isArray(v)) { - // must contain only strings - isValid = true; - for(var i = 0; i < v.length; ++i) { - if(!(_isString(v[i]))) { - isValid = false; - break; - } - } - } - - if(!isValid) { - throw new JsonLdError( - 'Invalid JSON-LD syntax; "@type" value must a string, an array of ' + - 'strings, or an empty object.', 'jsonld.SyntaxError', - {code: 'invalid type value', value: v}); - } -} - -/** - * Returns true if the given value is a String. - * - * @param v the value to check. - * - * @return true if the value is a String, false if not. - */ -function _isString(v) { - return (typeof v === 'string' || - Object.prototype.toString.call(v) === '[object String]'); -} - -/** - * Returns true if the given value is a Number. - * - * @param v the value to check. - * - * @return true if the value is a Number, false if not. 
- */ -function _isNumber(v) { - return (typeof v === 'number' || - Object.prototype.toString.call(v) === '[object Number]'); -} - -/** - * Returns true if the given value is a double. - * - * @param v the value to check. - * - * @return true if the value is a double, false if not. - */ -function _isDouble(v) { - return _isNumber(v) && String(v).indexOf('.') !== -1; -} - -/** - * Returns true if the given value is numeric. - * - * @param v the value to check. - * - * @return true if the value is numeric, false if not. - */ -function _isNumeric(v) { - return !isNaN(parseFloat(v)) && isFinite(v); -} - -/** - * Returns true if the given value is a Boolean. - * - * @param v the value to check. - * - * @return true if the value is a Boolean, false if not. - */ -function _isBoolean(v) { - return (typeof v === 'boolean' || - Object.prototype.toString.call(v) === '[object Boolean]'); -} - -/** - * Returns true if the given value is undefined. - * - * @param v the value to check. - * - * @return true if the value is undefined, false if not. - */ -function _isUndefined(v) { - return (typeof v === 'undefined'); -} - -/** - * Returns true if the given value is a subject with properties. - * - * @param v the value to check. - * - * @return true if the value is a subject with properties, false if not. - */ -function _isSubject(v) { - // Note: A value is a subject if all of these hold true: - // 1. It is an Object. - // 2. It is not a @value, @set, or @list. - // 3. It has more than 1 key OR any existing key is not @id. - var rval = false; - if(_isObject(v) && - !(('@value' in v) || ('@set' in v) || ('@list' in v))) { - var keyCount = Object.keys(v).length; - rval = (keyCount > 1 || !('@id' in v)); - } - return rval; -} - -/** - * Returns true if the given value is a subject reference. - * - * @param v the value to check. - * - * @return true if the value is a subject reference, false if not. - */ -function _isSubjectReference(v) { - // Note: A value is a subject reference if all of these hold true: - // 1. It is an Object. - // 2. It has a single key: @id. - return (_isObject(v) && Object.keys(v).length === 1 && ('@id' in v)); -} - -/** - * Returns true if the given value is a @value. - * - * @param v the value to check. - * - * @return true if the value is a @value, false if not. - */ -function _isValue(v) { - // Note: A value is a @value if all of these hold true: - // 1. It is an Object. - // 2. It has the @value property. - return _isObject(v) && ('@value' in v); -} - -/** - * Returns true if the given value is a @list. - * - * @param v the value to check. - * - * @return true if the value is a @list, false if not. - */ -function _isList(v) { - // Note: A value is a @list if all of these hold true: - // 1. It is an Object. - // 2. It has the @list property. - return _isObject(v) && ('@list' in v); -} - -/** - * Returns true if the given value is a blank node. - * - * @param v the value to check. - * - * @return true if the value is a blank node, false if not. - */ -function _isBlankNode(v) { - // Note: A value is a blank node if all of these hold true: - // 1. It is an Object. - // 2. If it has an @id key its value begins with '_:'. - // 3. It has no keys OR is not a @value, @set, or @list. - var rval = false; - if(_isObject(v)) { - if('@id' in v) { - rval = (v['@id'].indexOf('_:') === 0); - } else { - rval = (Object.keys(v).length === 0 || - !(('@value' in v) || ('@set' in v) || ('@list' in v))); - } - } - return rval; -} - -/** - * Returns true if the given value is an absolute IRI, false if not. 
- * - * @param v the value to check. - * - * @return true if the value is an absolute IRI, false if not. - */ -function _isAbsoluteIri(v) { - return _isString(v) && v.indexOf(':') !== -1; -} - -/** - * Clones an object, array, or string/number. If a typed JavaScript object - * is given, such as a Date, it will be converted to a string. - * - * @param value the value to clone. - * - * @return the cloned value. - */ -function _clone(value) { - if(value && typeof value === 'object') { - var rval; - if(_isArray(value)) { - rval = []; - for(var i = 0; i < value.length; ++i) { - rval[i] = _clone(value[i]); - } - } else if(_isObject(value)) { - rval = {}; - for(var key in value) { - rval[key] = _clone(value[key]); - } - } else { - rval = value.toString(); - } - return rval; - } - return value; -} - -/** - * Finds all @context URLs in the given JSON-LD input. - * - * @param input the JSON-LD input. - * @param urls a map of URLs (url => false/@contexts). - * @param replace true to replace the URLs in the given input with the - * @contexts from the urls map, false not to. - * @param base the base IRI to use to resolve relative IRIs. - * - * @return true if new URLs to retrieve were found, false if not. - */ -function _findContextUrls(input, urls, replace, base) { - var count = Object.keys(urls).length; - if(_isArray(input)) { - for(var i = 0; i < input.length; ++i) { - _findContextUrls(input[i], urls, replace, base); - } - return (count < Object.keys(urls).length); - } else if(_isObject(input)) { - for(var key in input) { - if(key !== '@context') { - _findContextUrls(input[key], urls, replace, base); - continue; - } - - // get @context - var ctx = input[key]; - - // array @context - if(_isArray(ctx)) { - var length = ctx.length; - for(var i = 0; i < length; ++i) { - var _ctx = ctx[i]; - if(_isString(_ctx)) { - _ctx = jsonld.prependBase(base, _ctx); - // replace w/@context if requested - if(replace) { - _ctx = urls[_ctx]; - if(_isArray(_ctx)) { - // add flattened context - Array.prototype.splice.apply(ctx, [i, 1].concat(_ctx)); - i += _ctx.length - 1; - length = ctx.length; - } else { - ctx[i] = _ctx; - } - } else if(!(_ctx in urls)) { - // @context URL found - urls[_ctx] = false; - } - } - } - } else if(_isString(ctx)) { - // string @context - ctx = jsonld.prependBase(base, ctx); - // replace w/@context if requested - if(replace) { - input[key] = urls[ctx]; - } else if(!(ctx in urls)) { - // @context URL found - urls[ctx] = false; - } - } - } - return (count < Object.keys(urls).length); - } - return false; -} - -/** - * Retrieves external @context URLs using the given document loader. Every - * instance of @context in the input that refers to a URL will be replaced - * with the JSON @context found at that URL. - * - * @param input the JSON-LD input with possible contexts. - * @param options the options to use: - * documentLoader(url, callback(err, remoteDoc)) the document loader. - * @param callback(err, input) called once the operation completes. 
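// Hedged usage sketch of the document loader contract consumed below: a
// loader gets a URL and yields a remote document with `document` (the JSON,
// parsed or as a string) and an optional `contextUrl` from an HTTP Link
// header. The URL and context are hypothetical.
var exampleLoader = function(url, callback) {
  if(url === 'http://example.com/context.jsonld') {
    return callback(null, {
      contextUrl: null,
      documentUrl: url,
      document: {'@context': {'name': 'http://xmlns.com/foaf/0.1/name'}}
    });
  }
  callback(new Error('Refusing to load: ' + url));
};
jsonld.expand(
  {'@context': 'http://example.com/context.jsonld', 'name': 'Jane Doe'},
  {documentLoader: exampleLoader},
  function(err, expanded) {
    // the remote @context URL is retrieved via the loader and replaced inline
    console.log(err || JSON.stringify(expanded));
  });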
- */ -function _retrieveContextUrls(input, options, callback) { - // if any error occurs during URL resolution, quit - var error = null; - - // recursive document loader - var documentLoader = options.documentLoader; - var retrieve = function(input, cycles, documentLoader, base, callback) { - if(Object.keys(cycles).length > MAX_CONTEXT_URLS) { - error = new JsonLdError( - 'Maximum number of @context URLs exceeded.', - 'jsonld.ContextUrlError', - {code: 'loading remote context failed', max: MAX_CONTEXT_URLS}); - return callback(error); - } - - // for tracking the URLs to retrieve - var urls = {}; - - // finished will be called once the URL queue is empty - var finished = function() { - // replace all URLs in the input - _findContextUrls(input, urls, true, base); - callback(null, input); - }; - - // find all URLs in the given input - if(!_findContextUrls(input, urls, false, base)) { - // no new URLs in input - return finished(); - } - - // queue all unretrieved URLs - var queue = []; - for(var url in urls) { - if(urls[url] === false) { - queue.push(url); - } - } - - // retrieve URLs in queue - var count = queue.length; - for(var i = 0; i < queue.length; ++i) { - (function(url) { - // check for context URL cycle - if(url in cycles) { - error = new JsonLdError( - 'Cyclical @context URLs detected.', - 'jsonld.ContextUrlError', - {code: 'recursive context inclusion', url: url}); - return callback(error); - } - var _cycles = _clone(cycles); - _cycles[url] = true; - var done = function(err, remoteDoc) { - // short-circuit if there was an error with another URL - if(error) { - return; - } - - var ctx = remoteDoc ? remoteDoc.document : null; - - // parse string context as JSON - if(!err && _isString(ctx)) { - try { - ctx = JSON.parse(ctx); - } catch(ex) { - err = ex; - } - } - - // ensure ctx is an object - if(err) { - err = new JsonLdError( - 'Dereferencing a URL did not result in a valid JSON-LD object. ' + - 'Possible causes are an inaccessible URL perhaps due to ' + - 'a same-origin policy (ensure the server uses CORS if you are ' + - 'using client-side JavaScript), too many redirects, a ' + - 'non-JSON response, or more than one HTTP Link Header was ' + - 'provided for a remote context.', - 'jsonld.InvalidUrl', - {code: 'loading remote context failed', url: url, cause: err}); - } else if(!_isObject(ctx)) { - err = new JsonLdError( - 'Dereferencing a URL did not result in a JSON object. 
The ' + - 'response was valid JSON, but it was not a JSON object.', - 'jsonld.InvalidUrl', - {code: 'invalid remote context', url: url, cause: err}); - } - if(err) { - error = err; - return callback(error); - } - - // use empty context if no @context key is present - if(!('@context' in ctx)) { - ctx = {'@context': {}}; - } else { - ctx = {'@context': ctx['@context']}; - } - - // append context URL to context if given - if(remoteDoc.contextUrl) { - if(!_isArray(ctx['@context'])) { - ctx['@context'] = [ctx['@context']]; - } - ctx['@context'].push(remoteDoc.contextUrl); - } - - // recurse - retrieve(ctx, _cycles, documentLoader, url, function(err, ctx) { - if(err) { - return callback(err); - } - urls[url] = ctx['@context']; - count -= 1; - if(count === 0) { - finished(); - } - }); - }; - var promise = documentLoader(url, done); - if(promise && 'then' in promise) { - promise.then(done.bind(null, null), done); - } - }(queue[i])); - } - }; - retrieve(input, {}, documentLoader, options.base, callback); -} - -// define js 1.8.5 Object.keys method if not present -if(!Object.keys) { - Object.keys = function(o) { - if(o !== Object(o)) { - throw new TypeError('Object.keys called on non-object'); - } - var rval = []; - for(var p in o) { - if(Object.prototype.hasOwnProperty.call(o, p)) { - rval.push(p); - } - } - return rval; - }; -} - -/** - * Parses RDF in the form of N-Quads. - * - * @param input the N-Quads input to parse. - * - * @return an RDF dataset. - */ -function _parseNQuads(input) { - // define partial regexes - var iri = '(?:<([^:]+:[^>]*)>)'; - var bnode = '(_:(?:[A-Za-z0-9]+))'; - var plain = '"([^"\\\\]*(?:\\\\.[^"\\\\]*)*)"'; - var datatype = '(?:\\^\\^' + iri + ')'; - var language = '(?:@([a-z]+(?:-[a-z0-9]+)*))'; - var literal = '(?:' + plain + '(?:' + datatype + '|' + language + ')?)'; - var comment = '(?:#.*)?'; - var ws = '[ \\t]+'; - var wso = '[ \\t]*'; - var eoln = /(?:\r\n)|(?:\n)|(?:\r)/g; - var empty = new RegExp('^' + wso + comment + '$'); - - // define quad part regexes - var subject = '(?:' + iri + '|' + bnode + ')' + ws; - var property = iri + ws; - var object = '(?:' + iri + '|' + bnode + '|' + literal + ')' + wso; - var graphName = '(?:\\.|(?:(?:' + iri + '|' + bnode + ')' + wso + '\\.))'; - - // full quad regex - var quad = new RegExp( - '^' + wso + subject + property + object + graphName + wso + comment + '$'); - - // build RDF dataset - var dataset = {}; - - // split N-Quad input into lines - var lines = input.split(eoln); - var lineNumber = 0; - for(var li = 0; li < lines.length; ++li) { - var line = lines[li]; - lineNumber++; - - // skip empty lines - if(empty.test(line)) { - continue; - } - - // parse quad - var match = line.match(quad); - if(match === null) { - throw new JsonLdError( - 'Error while parsing N-Quads; invalid quad.', - 'jsonld.ParseError', {line: lineNumber}); - } - - // create RDF triple - var triple = {}; - - // get subject - if(!_isUndefined(match[1])) { - triple.subject = {type: 'IRI', value: match[1]}; - } else { - triple.subject = {type: 'blank node', value: match[2]}; - } - - // get predicate - triple.predicate = {type: 'IRI', value: match[3]}; - - // get object - if(!_isUndefined(match[4])) { - triple.object = {type: 'IRI', value: match[4]}; - } else if(!_isUndefined(match[5])) { - triple.object = {type: 'blank node', value: match[5]}; - } else { - triple.object = {type: 'literal'}; - if(!_isUndefined(match[7])) { - triple.object.datatype = match[7]; - } else if(!_isUndefined(match[8])) { - triple.object.datatype = RDF_LANGSTRING; - 
triple.object.language = match[8]; - } else { - triple.object.datatype = XSD_STRING; - } - var unescaped = match[6] - .replace(/\\"/g, '"') - .replace(/\\t/g, '\t') - .replace(/\\n/g, '\n') - .replace(/\\r/g, '\r') - .replace(/\\\\/g, '\\'); - triple.object.value = unescaped; - } - - // get graph name ('@default' is used for the default graph) - var name = '@default'; - if(!_isUndefined(match[9])) { - name = match[9]; - } else if(!_isUndefined(match[10])) { - name = match[10]; - } - - // initialize graph in dataset - if(!(name in dataset)) { - dataset[name] = [triple]; - } else { - // add triple if unique to its graph - var unique = true; - var triples = dataset[name]; - for(var ti = 0; unique && ti < triples.length; ++ti) { - if(_compareRDFTriples(triples[ti], triple)) { - unique = false; - } - } - if(unique) { - triples.push(triple); - } - } - } - - return dataset; -} - -// register the N-Quads RDF parser -jsonld.registerRDFParser('application/nquads', _parseNQuads); - -/** - * Converts an RDF dataset to N-Quads. - * - * @param dataset the RDF dataset to convert. - * - * @return the N-Quads string. - */ -function _toNQuads(dataset) { - var quads = []; - for(var graphName in dataset) { - var triples = dataset[graphName]; - for(var ti = 0; ti < triples.length; ++ti) { - var triple = triples[ti]; - if(graphName === '@default') { - graphName = null; - } - quads.push(_toNQuad(triple, graphName)); - } - } - return quads.sort().join(''); -} - -/** - * Converts an RDF triple and graph name to an N-Quad string (a single quad). - * - * @param triple the RDF triple or quad to convert (a triple or quad may be - * passed, if a triple is passed then `graphName` should be given - * to specify the name of the graph the triple is in, `null` for - * the default graph). - * @param graphName the name of the graph containing the triple, null for - * the default graph. - * - * @return the N-Quad string. - */ -function _toNQuad(triple, graphName) { - var s = triple.subject; - var p = triple.predicate; - var o = triple.object; - var g = graphName || null; - if('name' in triple && triple.name) { - g = triple.name.value; - } - - var quad = ''; - - // subject is an IRI - if(s.type === 'IRI') { - quad += '<' + s.value + '>'; - } else { - quad += s.value; - } - quad += ' '; - - // predicate is an IRI - if(p.type === 'IRI') { - quad += '<' + p.value + '>'; - } else { - quad += p.value; - } - quad += ' '; - - // object is IRI, bnode, or literal - if(o.type === 'IRI') { - quad += '<' + o.value + '>'; - } else if(o.type === 'blank node') { - quad += o.value; - } else { - var escaped = o.value - .replace(/\\/g, '\\\\') - .replace(/\t/g, '\\t') - .replace(/\n/g, '\\n') - .replace(/\r/g, '\\r') - .replace(/\"/g, '\\"'); - quad += '"' + escaped + '"'; - if(o.datatype === RDF_LANGSTRING) { - if(o.language) { - quad += '@' + o.language; - } - } else if(o.datatype !== XSD_STRING) { - quad += '^^<' + o.datatype + '>'; - } - } - - // graph - if(g !== null && g !== undefined) { - if(g.indexOf('_:') !== 0) { - quad += ' <' + g + '>'; - } else { - quad += ' ' + g; - } - } - - quad += ' .\n'; - return quad; -} - -/** - * Parses the RDF dataset found via the data object from the RDFa API. - * - * @param data the RDFa API data object. - * - * @return the RDF dataset. 
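// Hedged usage sketch of the N-Quads parser/serializer registered above,
// driven through the public fromRDF/toRDF APIs (the quad is illustrative).
var nquads =
  '<http://example.com/s> <http://xmlns.com/foaf/0.1/name> "Jane Doe"@en .\n';
jsonld.fromRDF(nquads, {format: 'application/nquads'}, function(err, doc) {
  if(err) {
    return console.log(err);
  }
  // doc is expanded JSON-LD; serializing it again should yield the same quad
  jsonld.toRDF(doc, {format: 'application/nquads'}, function(err, serialized) {
    console.log(err || serialized);
  });
});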
- */ -function _parseRdfaApiData(data) { - var dataset = {}; - dataset['@default'] = []; - - var subjects = data.getSubjects(); - for(var si = 0; si < subjects.length; ++si) { - var subject = subjects[si]; - if(subject === null) { - continue; - } - - // get all related triples - var triples = data.getSubjectTriples(subject); - if(triples === null) { - continue; - } - var predicates = triples.predicates; - for(var predicate in predicates) { - // iterate over objects - var objects = predicates[predicate].objects; - for(var oi = 0; oi < objects.length; ++oi) { - var object = objects[oi]; - - // create RDF triple - var triple = {}; - - // add subject - if(subject.indexOf('_:') === 0) { - triple.subject = {type: 'blank node', value: subject}; - } else { - triple.subject = {type: 'IRI', value: subject}; - } - - // add predicate - if(predicate.indexOf('_:') === 0) { - triple.predicate = {type: 'blank node', value: predicate}; - } else { - triple.predicate = {type: 'IRI', value: predicate}; - } - - // serialize XML literal - var value = object.value; - if(object.type === RDF_XML_LITERAL) { - // initialize XMLSerializer - if(!XMLSerializer) { - _defineXMLSerializer(); - } - var serializer = new XMLSerializer(); - value = ''; - for(var x = 0; x < object.value.length; x++) { - if(object.value[x].nodeType === Node.ELEMENT_NODE) { - value += serializer.serializeToString(object.value[x]); - } else if(object.value[x].nodeType === Node.TEXT_NODE) { - value += object.value[x].nodeValue; - } - } - } - - // add object - triple.object = {}; - - // object is an IRI - if(object.type === RDF_OBJECT) { - if(object.value.indexOf('_:') === 0) { - triple.object.type = 'blank node'; - } else { - triple.object.type = 'IRI'; - } - } else { - // object is a literal - triple.object.type = 'literal'; - if(object.type === RDF_PLAIN_LITERAL) { - if(object.language) { - triple.object.datatype = RDF_LANGSTRING; - triple.object.language = object.language; - } else { - triple.object.datatype = XSD_STRING; - } - } else { - triple.object.datatype = object.type; - } - } - triple.object.value = value; - - // add triple to dataset in default graph - dataset['@default'].push(triple); - } - } - } - - return dataset; -} - -// register the RDFa API RDF parser -jsonld.registerRDFParser('rdfa-api', _parseRdfaApiData); - -/** - * Creates a new IdentifierIssuer. A IdentifierIssuer issues unique - * identifiers, keeping track of any previously issued identifiers. - * - * @param prefix the prefix to use (''). - */ -function IdentifierIssuer(prefix) { - this.prefix = prefix; - this.counter = 0; - this.existing = {}; -} -jsonld.IdentifierIssuer = IdentifierIssuer; -// backwards-compability -jsonld.UniqueNamer = IdentifierIssuer; - -/** - * Copies this IdentifierIssuer. - * - * @return a copy of this IdentifierIssuer. - */ -IdentifierIssuer.prototype.clone = function() { - var copy = new IdentifierIssuer(this.prefix); - copy.counter = this.counter; - copy.existing = _clone(this.existing); - return copy; -}; - -/** - * Gets the new identifier for the given old identifier, where if no old - * identifier is given a new identifier will be generated. - * - * @param [old] the old identifier to get the new identifier for. - * - * @return the new identifier. 
- */
-IdentifierIssuer.prototype.getId = function(old) {
-  // return existing old identifier
-  if(old && old in this.existing) {
-    return this.existing[old];
-  }
-
-  // get next identifier
-  var identifier = this.prefix + this.counter;
-  this.counter += 1;
-
-  // save mapping
-  if(old) {
-    this.existing[old] = identifier;
-  }
-
-  return identifier;
-};
-// alias
-IdentifierIssuer.prototype.getName = IdentifierIssuer.prototype.getId;
-
-/**
- * Returns true if the given old identifier has already been assigned a new
- * identifier.
- *
- * @param old the old identifier to check.
- *
- * @return true if the old identifier has been assigned a new identifier, false
- *   if not.
- */
-IdentifierIssuer.prototype.hasId = function(old) {
-  return (old in this.existing);
-};
-// alias
-IdentifierIssuer.prototype.isNamed = IdentifierIssuer.prototype.hasId;
-
-/**
- * A Permutator iterates over all possible permutations of the given array
- * of elements.
- *
- * @param list the array of elements to iterate over.
- */
-var Permutator = function(list) {
-  // original array
-  this.list = list.sort();
-  // indicates whether there are more permutations
-  this.done = false;
-  // directional info for permutation algorithm
-  this.left = {};
-  for(var i = 0; i < list.length; ++i) {
-    this.left[list[i]] = true;
-  }
-};
-
-/**
- * Returns true if there is another permutation.
- *
- * @return true if there is another permutation, false if not.
- */
-Permutator.prototype.hasNext = function() {
-  return !this.done;
-};
-
-/**
- * Gets the next permutation. Call hasNext() to ensure there is another one
- * first.
- *
- * @return the next permutation.
- */
-Permutator.prototype.next = function() {
-  // copy current permutation
-  var rval = this.list.slice();
-
-  /* Calculate the next permutation using the Steinhaus-Johnson-Trotter
-   permutation algorithm. */
-
-  // get largest mobile element k
-  // (mobile: element is greater than the one it is looking at)
-  var k = null;
-  var pos = 0;
-  var length = this.list.length;
-  for(var i = 0; i < length; ++i) {
-    var element = this.list[i];
-    var left = this.left[element];
-    if((k === null || element > k) &&
-      ((left && i > 0 && element > this.list[i - 1]) ||
-      (!left && i < (length - 1) && element > this.list[i + 1]))) {
-      k = element;
-      pos = i;
-    }
-  }
-
-  // no more permutations
-  if(k === null) {
-    this.done = true;
-  } else {
-    // swap k and the element it is looking at
-    var swap = this.left[k] ? pos - 1 : pos + 1;
-    this.list[pos] = this.list[swap];
-    this.list[swap] = k;
-
-    // reverse the direction of all elements larger than k
-    for(var i = 0; i < length; ++i) {
-      if(this.list[i] > k) {
-        this.left[this.list[i]] = !this.left[this.list[i]];
-      }
-    }
-  }
-
-  return rval;
-};
-
-//////////////////////// DEFINE NORMALIZATION HASH API ////////////////////////
-
-/**
- * Creates a new NormalizeHash for use by the given normalization algorithm.
- *
- * @param algorithm the RDF Dataset Normalization algorithm to use:
- *   'URDNA2015' or 'URGNA2012'.
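// Hedged sketch of how the two helpers above behave: IdentifierIssuer hands
// out stable, reusable blank node identifiers, and Permutator enumerates
// permutations in Steinhaus-Johnson-Trotter order (values are illustrative).
var issuer = new IdentifierIssuer('_:b');
console.log(issuer.getId('nodeA')); // '_:b0'
console.log(issuer.getId('nodeB')); // '_:b1'
console.log(issuer.getId('nodeA')); // '_:b0' again -- already issued
console.log(issuer.hasId('nodeC')); // false

var permutator = new Permutator(['a', 'b', 'c']);
var perms = [];
while(permutator.hasNext()) {
  perms.push(permutator.next().join(''));
}
console.log(perms); // ['abc', 'acb', 'cab', 'cba', 'bca', 'bac']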
- */ -var NormalizeHash = function(algorithm) { - if(!(this instanceof NormalizeHash)) { - return new NormalizeHash(algorithm); - } - if(['URDNA2015', 'URGNA2012'].indexOf(algorithm) === -1) { - throw new Error( - 'Invalid RDF Dataset Normalization algorithm: ' + algorithm); - } - NormalizeHash._init.call(this, algorithm); -}; -NormalizeHash.hashNQuads = function(algorithm, nquads) { - var md = new NormalizeHash(algorithm); - for(var i = 0; i < nquads.length; ++i) { - md.update(nquads[i]); - } - return md.digest(); -}; - -// switch definition of NormalizeHash based on environment -(function(_nodejs) { - -if(_nodejs) { - // define NormalizeHash using native crypto lib - var crypto = require('crypto'); - NormalizeHash._init = function(algorithm) { - if(algorithm === 'URDNA2015') { - algorithm = 'sha256'; - } else { - // assume URGNA2012 - algorithm = 'sha1'; - } - this.md = crypto.createHash(algorithm); - }; - NormalizeHash.prototype.update = function(msg) { - return this.md.update(msg, 'utf8'); - }; - NormalizeHash.prototype.digest = function() { - return this.md.digest('hex'); - }; - return; -} - -// define NormalizeHash using JavaScript -NormalizeHash._init = function(algorithm) { - if(algorithm === 'URDNA2015') { - algorithm = new sha256.Algorithm(); - } else { - // assume URGNA2012 - algorithm = new sha1.Algorithm(); - } - this.md = new MessageDigest(algorithm); -}; -NormalizeHash.prototype.update = function(msg) { - return this.md.update(msg); -}; -NormalizeHash.prototype.digest = function() { - return this.md.digest().toHex(); -}; - -/////////////////////////// DEFINE MESSAGE DIGEST API ///////////////////////// - -/** - * Creates a new MessageDigest. - * - * @param algorithm the algorithm to use. - */ -var MessageDigest = function(algorithm) { - if(!(this instanceof MessageDigest)) { - return new MessageDigest(algorithm); - } - - this._algorithm = algorithm; - - // create shared padding as needed - if(!MessageDigest._padding || - MessageDigest._padding.length < this._algorithm.blockSize) { - MessageDigest._padding = String.fromCharCode(128); - var c = String.fromCharCode(0x00); - var n = 64; - while(n > 0) { - if(n & 1) { - MessageDigest._padding += c; - } - n >>>= 1; - if(n > 0) { - c += c; - } - } - } - - // start digest automatically for first time - this.start(); -}; - -/** - * Starts the digest. - * - * @return this digest object. - */ -MessageDigest.prototype.start = function() { - // up to 56-bit message length for convenience - this.messageLength = 0; - - // full message length - this.fullMessageLength = []; - var int32s = this._algorithm.messageLengthSize / 4; - for(var i = 0; i < int32s; ++i) { - this.fullMessageLength.push(0); - } - - // input buffer - this._input = new MessageDigest.ByteBuffer(); - - // get starting state - this.state = this._algorithm.start(); - - return this; -}; - -/** - * Updates the digest with the given message input. The input must be - * a string of characters. - * - * @param msg the message input to update with (ByteBuffer or string). - * - * @return this digest object. 
- */ -MessageDigest.prototype.update = function(msg) { - // encode message as a UTF-8 encoded binary string - msg = new MessageDigest.ByteBuffer(unescape(encodeURIComponent(msg))); - - // update message length - this.messageLength += msg.length(); - var len = msg.length(); - len = [(len / 0x100000000) >>> 0, len >>> 0]; - for(var i = this.fullMessageLength.length - 1; i >= 0; --i) { - this.fullMessageLength[i] += len[1]; - len[1] = len[0] + ((this.fullMessageLength[i] / 0x100000000) >>> 0); - this.fullMessageLength[i] = this.fullMessageLength[i] >>> 0; - len[0] = ((len[1] / 0x100000000) >>> 0); - } - - // add bytes to input buffer - this._input.putBytes(msg.bytes()); - - // digest blocks - while(this._input.length() >= this._algorithm.blockSize) { - this.state = this._algorithm.digest(this.state, this._input); - } - - // compact input buffer every 2K or if empty - if(this._input.read > 2048 || this._input.length() === 0) { - this._input.compact(); - } - - return this; -}; - -/** - * Produces the digest. - * - * @return a byte buffer containing the digest value. - */ -MessageDigest.prototype.digest = function() { - /* Note: Here we copy the remaining bytes in the input buffer and add the - appropriate padding. Then we do the final update on a copy of the state so - that if the user wants to get intermediate digests they can do so. */ - - /* Determine the number of bytes that must be added to the message to - ensure its length is appropriately congruent. In other words, the data to - be digested must be a multiple of `blockSize`. This data includes the - message, some padding, and the length of the message. Since the length of - the message will be encoded as `messageLengthSize` bytes, that means that - the last segment of the data must have `blockSize` - `messageLengthSize` - bytes of message and padding. Therefore, the length of the message plus the - padding must be congruent to X mod `blockSize` because - `blockSize` - `messageLengthSize` = X. - - For example, SHA-1 is congruent to 448 mod 512 and SHA-512 is congruent to - 896 mod 1024. SHA-1 uses a `blockSize` of 64 bytes (512 bits) and a - `messageLengthSize` of 8 bytes (64 bits). SHA-512 uses a `blockSize` of - 128 bytes (1024 bits) and a `messageLengthSize` of 16 bytes (128 bits). - - In order to fill up the message length it must be filled with padding that - begins with 1 bit followed by all 0 bits. Padding must *always* be present, - so if the message length is already congruent, then `blockSize` padding bits - must be added. 
*/ - - // create final block - var finalBlock = new MessageDigest.ByteBuffer(); - finalBlock.putBytes(this._input.bytes()); - - // compute remaining size to be digested (include message length size) - var remaining = ( - this.fullMessageLength[this.fullMessageLength.length - 1] + - this._algorithm.messageLengthSize); - - // add padding for overflow blockSize - overflow - // _padding starts with 1 byte with first bit is set (byte value 128), then - // there may be up to (blockSize - 1) other pad bytes - var overflow = remaining & (this._algorithm.blockSize - 1); - finalBlock.putBytes(MessageDigest._padding.substr( - 0, this._algorithm.blockSize - overflow)); - - // serialize message length in bits in big-endian order; since length - // is stored in bytes we multiply by 8 (left shift by 3 and merge in - // remainder from ) - var messageLength = new MessageDigest.ByteBuffer(); - for(var i = 0; i < this.fullMessageLength.length; ++i) { - messageLength.putInt32((this.fullMessageLength[i] << 3) | - (this.fullMessageLength[i + 1] >>> 28)); - } - - // write the length of the message (algorithm-specific) - this._algorithm.writeMessageLength(finalBlock, messageLength); - - // digest final block - var state = this._algorithm.digest(this.state.copy(), finalBlock); - - // write state to buffer - var rval = new MessageDigest.ByteBuffer(); - state.write(rval); - return rval; -}; - -/** - * Creates a simple byte buffer for message digest operations. - * - * @param data the data to put in the buffer. - */ -MessageDigest.ByteBuffer = function(data) { - if(typeof data === 'string') { - this.data = data; - } else { - this.data = ''; - } - this.read = 0; -}; - -/** - * Puts a 32-bit integer into this buffer in big-endian order. - * - * @param i the 32-bit integer. - */ -MessageDigest.ByteBuffer.prototype.putInt32 = function(i) { - this.data += ( - String.fromCharCode(i >> 24 & 0xFF) + - String.fromCharCode(i >> 16 & 0xFF) + - String.fromCharCode(i >> 8 & 0xFF) + - String.fromCharCode(i & 0xFF)); -}; - -/** - * Gets a 32-bit integer from this buffer in big-endian order and - * advances the read pointer by 4. - * - * @return the word. - */ -MessageDigest.ByteBuffer.prototype.getInt32 = function() { - var rval = ( - this.data.charCodeAt(this.read) << 24 ^ - this.data.charCodeAt(this.read + 1) << 16 ^ - this.data.charCodeAt(this.read + 2) << 8 ^ - this.data.charCodeAt(this.read + 3)); - this.read += 4; - return rval; -}; - -/** - * Puts the given bytes into this buffer. - * - * @param bytes the bytes as a binary-encoded string. - */ -MessageDigest.ByteBuffer.prototype.putBytes = function(bytes) { - this.data += bytes; -}; - -/** - * Gets the bytes in this buffer. - * - * @return a string full of UTF-8 encoded characters. - */ -MessageDigest.ByteBuffer.prototype.bytes = function() { - return this.data.slice(this.read); -}; - -/** - * Gets the number of bytes in this buffer. - * - * @return the number of bytes in this buffer. - */ -MessageDigest.ByteBuffer.prototype.length = function() { - return this.data.length - this.read; -}; - -/** - * Compacts this buffer. - */ -MessageDigest.ByteBuffer.prototype.compact = function() { - this.data = this.data.slice(this.read); - this.read = 0; -}; - -/** - * Converts this buffer to a hexadecimal string. - * - * @return a hexadecimal string. 
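// Worked example (hedged, sizes illustrative) of the padding arithmetic in
// digest() above for SHA-1/SHA-256, i.e. blockSize 64 and messageLengthSize 8:
//
//   10-byte message: remaining = 10 + 8 = 18; overflow = 18 & 63 = 18;
//                    padding   = 64 - 18 = 46 -> 10 + 46 + 8 = 64 (one block)
//   60-byte message: remaining = 60 + 8 = 68; overflow = 68 & 63 = 4;
//                    padding   = 64 - 4  = 60 -> 60 + 60 + 8 = 128 (two blocks)
//
// And the big-endian ByteBuffer round trip those routines rely on:
var buf = new MessageDigest.ByteBuffer();
buf.putInt32(0x01020304);
console.log(buf.getInt32() === 0x01020304); // true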
- */ -MessageDigest.ByteBuffer.prototype.toHex = function() { - var rval = ''; - for(var i = this.read; i < this.data.length; ++i) { - var b = this.data.charCodeAt(i); - if(b < 16) { - rval += '0'; - } - rval += b.toString(16); - } - return rval; -}; - -///////////////////////////// DEFINE SHA-1 ALGORITHM ////////////////////////// - -var sha1 = { - // used for word storage - _w: null -}; - -sha1.Algorithm = function() { - this.name = 'sha1', - this.blockSize = 64; - this.digestLength = 20; - this.messageLengthSize = 8; -}; - -sha1.Algorithm.prototype.start = function() { - if(!sha1._w) { - sha1._w = new Array(80); - } - return sha1._createState(); -}; - -sha1.Algorithm.prototype.writeMessageLength = function( - finalBlock, messageLength) { - // message length is in bits and in big-endian order; simply append - finalBlock.putBytes(messageLength.bytes()); -}; - -sha1.Algorithm.prototype.digest = function(s, input) { - // consume 512 bit (64 byte) chunks - var t, a, b, c, d, e, f, i; - var len = input.length(); - var _w = sha1._w; - while(len >= 64) { - // initialize hash value for this chunk - a = s.h0; - b = s.h1; - c = s.h2; - d = s.h3; - e = s.h4; - - // the _w array will be populated with sixteen 32-bit big-endian words - // and then extended into 80 32-bit words according to SHA-1 algorithm - // and for 32-79 using Max Locktyukhin's optimization - - // round 1 - for(i = 0; i < 16; ++i) { - t = input.getInt32(); - _w[i] = t; - f = d ^ (b & (c ^ d)); - t = ((a << 5) | (a >>> 27)) + f + e + 0x5A827999 + t; - e = d; - d = c; - c = (b << 30) | (b >>> 2); - b = a; - a = t; - } - for(; i < 20; ++i) { - t = (_w[i - 3] ^ _w[i - 8] ^ _w[i - 14] ^ _w[i - 16]); - t = (t << 1) | (t >>> 31); - _w[i] = t; - f = d ^ (b & (c ^ d)); - t = ((a << 5) | (a >>> 27)) + f + e + 0x5A827999 + t; - e = d; - d = c; - c = (b << 30) | (b >>> 2); - b = a; - a = t; - } - // round 2 - for(; i < 32; ++i) { - t = (_w[i - 3] ^ _w[i - 8] ^ _w[i - 14] ^ _w[i - 16]); - t = (t << 1) | (t >>> 31); - _w[i] = t; - f = b ^ c ^ d; - t = ((a << 5) | (a >>> 27)) + f + e + 0x6ED9EBA1 + t; - e = d; - d = c; - c = (b << 30) | (b >>> 2); - b = a; - a = t; - } - for(; i < 40; ++i) { - t = (_w[i - 6] ^ _w[i - 16] ^ _w[i - 28] ^ _w[i - 32]); - t = (t << 2) | (t >>> 30); - _w[i] = t; - f = b ^ c ^ d; - t = ((a << 5) | (a >>> 27)) + f + e + 0x6ED9EBA1 + t; - e = d; - d = c; - c = (b << 30) | (b >>> 2); - b = a; - a = t; - } - // round 3 - for(; i < 60; ++i) { - t = (_w[i - 6] ^ _w[i - 16] ^ _w[i - 28] ^ _w[i - 32]); - t = (t << 2) | (t >>> 30); - _w[i] = t; - f = (b & c) | (d & (b ^ c)); - t = ((a << 5) | (a >>> 27)) + f + e + 0x8F1BBCDC + t; - e = d; - d = c; - c = (b << 30) | (b >>> 2); - b = a; - a = t; - } - // round 4 - for(; i < 80; ++i) { - t = (_w[i - 6] ^ _w[i - 16] ^ _w[i - 28] ^ _w[i - 32]); - t = (t << 2) | (t >>> 30); - _w[i] = t; - f = b ^ c ^ d; - t = ((a << 5) | (a >>> 27)) + f + e + 0xCA62C1D6 + t; - e = d; - d = c; - c = (b << 30) | (b >>> 2); - b = a; - a = t; - } - - // update hash state - s.h0 = (s.h0 + a) | 0; - s.h1 = (s.h1 + b) | 0; - s.h2 = (s.h2 + c) | 0; - s.h3 = (s.h3 + d) | 0; - s.h4 = (s.h4 + e) | 0; - - len -= 64; - } - - return s; -}; - -sha1._createState = function() { - var state = { - h0: 0x67452301, - h1: 0xEFCDAB89, - h2: 0x98BADCFE, - h3: 0x10325476, - h4: 0xC3D2E1F0 - }; - state.copy = function() { - var rval = sha1._createState(); - rval.h0 = state.h0; - rval.h1 = state.h1; - rval.h2 = state.h2; - rval.h3 = state.h3; - rval.h4 = state.h4; - return rval; - }; - state.write = function(buffer) { - 
buffer.putInt32(state.h0); - buffer.putInt32(state.h1); - buffer.putInt32(state.h2); - buffer.putInt32(state.h3); - buffer.putInt32(state.h4); - }; - return state; -}; - -//////////////////////////// DEFINE SHA-256 ALGORITHM ///////////////////////// - -var sha256 = { - // shared state - _k: null, - _w: null -}; - -sha256.Algorithm = function() { - this.name = 'sha256', - this.blockSize = 64; - this.digestLength = 32; - this.messageLengthSize = 8; -}; - -sha256.Algorithm.prototype.start = function() { - if(!sha256._k) { - sha256._init(); - } - return sha256._createState(); -}; - -sha256.Algorithm.prototype.writeMessageLength = function( - finalBlock, messageLength) { - // message length is in bits and in big-endian order; simply append - finalBlock.putBytes(messageLength.bytes()); -}; - -sha256.Algorithm.prototype.digest = function(s, input) { - // consume 512 bit (64 byte) chunks - var t1, t2, s0, s1, ch, maj, i, a, b, c, d, e, f, g, h; - var len = input.length(); - var _k = sha256._k; - var _w = sha256._w; - while(len >= 64) { - // the w array will be populated with sixteen 32-bit big-endian words - // and then extended into 64 32-bit words according to SHA-256 - for(i = 0; i < 16; ++i) { - _w[i] = input.getInt32(); - } - for(; i < 64; ++i) { - // XOR word 2 words ago rot right 17, rot right 19, shft right 10 - t1 = _w[i - 2]; - t1 = - ((t1 >>> 17) | (t1 << 15)) ^ - ((t1 >>> 19) | (t1 << 13)) ^ - (t1 >>> 10); - // XOR word 15 words ago rot right 7, rot right 18, shft right 3 - t2 = _w[i - 15]; - t2 = - ((t2 >>> 7) | (t2 << 25)) ^ - ((t2 >>> 18) | (t2 << 14)) ^ - (t2 >>> 3); - // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^32 - _w[i] = (t1 + _w[i - 7] + t2 + _w[i - 16]) | 0; - } - - // initialize hash value for this chunk - a = s.h0; - b = s.h1; - c = s.h2; - d = s.h3; - e = s.h4; - f = s.h5; - g = s.h6; - h = s.h7; - - // round function - for(i = 0; i < 64; ++i) { - // Sum1(e) - s1 = - ((e >>> 6) | (e << 26)) ^ - ((e >>> 11) | (e << 21)) ^ - ((e >>> 25) | (e << 7)); - // Ch(e, f, g) (optimized the same way as SHA-1) - ch = g ^ (e & (f ^ g)); - // Sum0(a) - s0 = - ((a >>> 2) | (a << 30)) ^ - ((a >>> 13) | (a << 19)) ^ - ((a >>> 22) | (a << 10)); - // Maj(a, b, c) (optimized the same way as SHA-1) - maj = (a & b) | (c & (a ^ b)); - - // main algorithm - t1 = h + s1 + ch + _k[i] + _w[i]; - t2 = s0 + maj; - h = g; - g = f; - f = e; - e = (d + t1) | 0; - d = c; - c = b; - b = a; - a = (t1 + t2) | 0; - } - - // update hash state - s.h0 = (s.h0 + a) | 0; - s.h1 = (s.h1 + b) | 0; - s.h2 = (s.h2 + c) | 0; - s.h3 = (s.h3 + d) | 0; - s.h4 = (s.h4 + e) | 0; - s.h5 = (s.h5 + f) | 0; - s.h6 = (s.h6 + g) | 0; - s.h7 = (s.h7 + h) | 0; - len -= 64; - } - - return s; -}; - -sha256._createState = function() { - var state = { - h0: 0x6A09E667, - h1: 0xBB67AE85, - h2: 0x3C6EF372, - h3: 0xA54FF53A, - h4: 0x510E527F, - h5: 0x9B05688C, - h6: 0x1F83D9AB, - h7: 0x5BE0CD19 - }; - state.copy = function() { - var rval = sha256._createState(); - rval.h0 = state.h0; - rval.h1 = state.h1; - rval.h2 = state.h2; - rval.h3 = state.h3; - rval.h4 = state.h4; - rval.h5 = state.h5; - rval.h6 = state.h6; - rval.h7 = state.h7; - return rval; - }; - state.write = function(buffer) { - buffer.putInt32(state.h0); - buffer.putInt32(state.h1); - buffer.putInt32(state.h2); - buffer.putInt32(state.h3); - buffer.putInt32(state.h4); - buffer.putInt32(state.h5); - buffer.putInt32(state.h6); - buffer.putInt32(state.h7); - }; - return state; -}; - -sha256._init = function() { - // create K table for SHA-256 - sha256._k = [ - 0x428a2f98, 
0x71374491, 0xb5c0fbcf, 0xe9b5dba5, - 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, - 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, - 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, - 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, - 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, - 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, - 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, - 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, - 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, - 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, - 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, - 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, - 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, - 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, - 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2]; - - // used for word storage - sha256._w = new Array(64); -}; - -})(_nodejs); // end definition of NormalizeHash - -if(!XMLSerializer) { - -var _defineXMLSerializer = function() { - XMLSerializer = require('xmldom').XMLSerializer; -}; - -} // end _defineXMLSerializer - -// define URL parser -// parseUri 1.2.2 -// (c) Steven Levithan -// MIT License -// with local jsonld.js modifications -jsonld.url = {}; -jsonld.url.parsers = { - simple: { - // RFC 3986 basic parts - keys: ['href','scheme','authority','path','query','fragment'], - regex: /^(?:([^:\/?#]+):)?(?:\/\/([^\/?#]*))?([^?#]*)(?:\?([^#]*))?(?:#(.*))?/ - }, - full: { - keys: ['href','protocol','scheme','authority','auth','user','password','hostname','port','path','directory','file','query','fragment'], - regex: /^(([^:\/?#]+):)?(?:\/\/((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?))?(?:(((?:[^?#\/]*\/)*)([^?#]*))(?:\?([^#]*))?(?:#(.*))?)/ - } -}; -jsonld.url.parse = function(str, parser) { - var parsed = {}; - var o = jsonld.url.parsers[parser || 'full']; - var m = o.regex.exec(str); - var i = o.keys.length; - while(i--) { - parsed[o.keys[i]] = (m[i] === undefined) ? null : m[i]; - } - - // remove default ports in found in URLs - if((parsed.scheme === 'https' && parsed.port === '443') || - (parsed.scheme === 'http' && parsed.port === '80')) { - parsed.href = parsed.href.replace(':' + parsed.port, ''); - parsed.authority = parsed.authority.replace(':' + parsed.port, ''); - parsed.port = null; - } - - parsed.normalizedPath = _removeDotSegments(parsed.path, !!parsed.authority); - return parsed; -}; - -/** - * Removes dot segments from a URL path. - * - * @param path the path to remove dot segments from. - * @param hasAuthority true if the URL has an authority, false if not. - */ -function _removeDotSegments(path, hasAuthority) { - var rval = ''; - - if(path.indexOf('/') === 0) { - rval = '/'; - } - - // RFC 3986 5.2.4 (reworked) - var input = path.split('/'); - var output = []; - while(input.length > 0) { - if(input[0] === '.' || (input[0] === '' && input.length > 1)) { - input.shift(); - continue; - } - if(input[0] === '..') { - input.shift(); - if(hasAuthority || - (output.length > 0 && output[output.length - 1] !== '..')) { - output.pop(); - } else { - // leading relative URL '..' - output.push('..'); - } - continue; - } - output.push(input.shift()); - } - - return rval + output.join('/'); -} - -if(_nodejs) { - // use node document loader by default - jsonld.useDocumentLoader('node'); -} else if(typeof XMLHttpRequest !== 'undefined') { - // use xhr document loader by default - jsonld.useDocumentLoader('xhr'); -} - -if(_nodejs) { - jsonld.use = function(extension) { - switch(extension) { - // TODO: Deprecated as of 0.4.0. Remove at some point. 
- case 'request': - // use node JSON-LD request extension - jsonld.request = require('jsonld-request'); - break; - default: - throw new JsonLdError( - 'Unknown extension.', - 'jsonld.UnknownExtension', {extension: extension}); - } - }; - - // expose version - var _module = {exports: {}, filename: __dirname}; - require('pkginfo')(_module, 'version'); - jsonld.version = _module.exports.version; -} - -// end of jsonld API factory -return jsonld; -}; - -// external APIs: - -// used to generate a new jsonld API instance -var factory = function() { - return wrapper(function() { - return factory(); - }); -}; - -if(!_nodejs && (typeof define === 'function' && define.amd)) { - // export AMD API - define([], function() { - // now that module is defined, wrap main jsonld API instance - wrapper(factory); - return factory; - }); -} else { - // wrap the main jsonld API instance - wrapper(factory); - - if(typeof require === 'function' && - typeof module !== 'undefined' && module.exports) { - // export CommonJS/nodejs API - module.exports = factory; - } - - if(_browser) { - // export simple browser API - if(typeof jsonld === 'undefined') { - jsonld = jsonldjs = factory; - } else { - jsonldjs = factory; - } - } -} - -return factory; - -})(); diff --git a/karma.conf.js b/karma.conf.js new file mode 100644 index 00000000..701e0a39 --- /dev/null +++ b/karma.conf.js @@ -0,0 +1,171 @@ +/** + * Karma configuration for jsonld.js. + * + * Set dirs, manifests, or js to run: + * JSONLD_TESTS="f1 f2 ..." + * Output an EARL report: + * EARL=filename + * Bail when tests fail: + * BAIL=true + * + * @author Dave Longley + * @author David I. Lehn + * + * Copyright (c) 2011-2017 Digital Bazaar, Inc. All rights reserved. + */ +const webpack = require('webpack'); + +module.exports = function(config) { + // bundler to test: webpack, browserify + var bundler = process.env.BUNDLER || 'webpack'; + + var frameworks = ['mocha', 'server-side']; + // main bundle preprocessors + var preprocessors = ['babel']; + + if(bundler === 'browserify') { + frameworks.push(bundler); + preprocessors.push(bundler); + } else if(bundler === 'webpack') { + preprocessors.push(bundler); + preprocessors.push('sourcemap'); + } else { + throw Error('Unknown bundler'); + } + + config.set({ + // base path that will be used to resolve all patterns (eg.
files, exclude) + basePath: '', + + // frameworks to use + // available frameworks: https://npmjs.org/browse/keyword/karma-adapter + frameworks: frameworks, + + // list of files / patterns to load in the browser + files: [ + { + pattern: 'tests/test-karma.js', + watched: false, served: true, included: true + } + ], + + // list of files to exclude + exclude: [ + ], + + // preprocess matching files before serving them to the browser + // available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor + preprocessors: { + //'tests/*.js': ['webpack', 'babel'] //preprocessors + 'tests/*.js': preprocessors + }, + + webpack: { + devtool: 'inline-source-map', + plugins: [ + new webpack.DefinePlugin({ + 'process.env.JSONLD_TESTS': JSON.stringify(process.env.JSONLD_TESTS), + 'process.env.TEST_ROOT_DIR': JSON.stringify(__dirname), + 'process.env.EARL': JSON.stringify(process.env.EARL), + 'process.env.BAIL': JSON.stringify(process.env.BAIL) + }) + ], + module: { + rules: [ + { + test: /\.js$/, + include: [{ + // exclude node_modules by default + exclude: /(node_modules)/ + }, { + // include rdf-canonize + include: /(node_modules\/rdf-canonize)/ + }], + use: { + loader: 'babel-loader', + options: { + presets: ['env'], + plugins: [ + ['transform-object-rest-spread', {useBuiltIns: true}] + ] + } + } + } + ] + }, + node: { + Buffer: false, + process: false, + crypto: false, + setImmediate: false + } + }, + + browserify: { + debug: true + //transform: ['uglifyify'] + }, + + // test results reporter to use + // possible values: 'dots', 'progress' + // available reporters: https://npmjs.org/browse/keyword/karma-reporter + //reporters: ['progress'], + reporters: ['mocha'], + + // web server port + port: 9876, + + // enable / disable colors in the output (reporters and logs) + colors: true, + + // level of logging + // possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG + logLevel: config.LOG_INFO, + + // enable / disable watching file and executing tests whenever any file changes + autoWatch: false, + + // start these browsers + // available browser launchers: https://npmjs.org/browse/keyword/karma-launcher + //browsers: ['PhantomJS', 'Chrome', 'Firefox', 'Safari'], + browsers: ['PhantomJS'], + + customLaunchers: { + IE9: { + base: 'IE', + 'x-ua-compatible': 'IE=EmulateIE9' + }, + IE8: { + base: 'IE', + 'x-ua-compatible': 'IE=EmulateIE8' + } + }, + + phantomjsLauncher: { + // Have phantomjs exit if a ResourceError is encountered (useful if karma + // exits without killing phantom) + exitOnResourceError: true + }, + + // Continuous Integration mode + // if true, Karma captures browsers, runs the tests and exits + singleRun: true, + + // Concurrency level + // how many browser should be started simultaneous + concurrency: Infinity, + + // Mocha + client: { + mocha: { + // increase from default 2s + timeout: 10000, + reporter: 'html', + delay: true + } + }, + + // Proxied paths + proxies: {} + }); +}; diff --git a/lib/ActiveContextCache.js b/lib/ActiveContextCache.js new file mode 100644 index 00000000..35c9893f --- /dev/null +++ b/lib/ActiveContextCache.js @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +const {clone} = require('./util'); + +module.exports = class ActiveContextCache { + /** + * Creates an active context cache. + * + * @param size the maximum size of the cache. 
+ */ + constructor(size = 100) { + this.order = []; + this.cache = {}; + this.size = size; + } + + get(activeCtx, localCtx) { + const key1 = JSON.stringify(activeCtx); + const key2 = JSON.stringify(localCtx); + const level1 = this.cache[key1]; + if(level1 && key2 in level1) { + return level1[key2]; + } + return null; + } + + set(activeCtx, localCtx, result) { + if(this.order.length === this.size) { + const entry = this.order.shift(); + delete this.cache[entry.activeCtx][entry.localCtx]; + } + const key1 = JSON.stringify(activeCtx); + const key2 = JSON.stringify(localCtx); + this.order.push({activeCtx: key1, localCtx: key2}); + if(!(key1 in this.cache)) { + this.cache[key1] = {}; + } + this.cache[key1][key2] = clone(result); + } +}; diff --git a/lib/DocumentCache.js b/lib/DocumentCache.js new file mode 100644 index 00000000..b29ad3e6 --- /dev/null +++ b/lib/DocumentCache.js @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +module.exports = class DocumentCache { + /** + * Creates a simple document cache that retains documents for a short + * period of time. + * + * FIXME: Implement simple HTTP caching instead. + * + * @param options the options to use: + * [size] the maximum size of the cache (default: 50). + * [expires] expiration time for each entry in ms (default: 30000). + */ + constructor({size = 50, expires = 30000}) { + this.order = []; + this.cache = {}; + this.size = size; + this.expires = expires; + } + + get(url) { + if(url in this.cache) { + const entry = this.cache[url]; + if(entry.expires >= Date.now()) { + return entry.ctx; + } + delete this.cache[url]; + this.order.splice(this.order.indexOf(url), 1); + } + return null; + } + + set(url, ctx) { + if(this.order.length === this.size) { + delete this.cache[this.order.shift()]; + } + this.order.push(url); + this.cache[url] = {ctx: ctx, expires: (Date.now() + this.expires)}; + } +}; diff --git a/lib/JsonLdError.js b/lib/JsonLdError.js new file mode 100644 index 00000000..5972d158 --- /dev/null +++ b/lib/JsonLdError.js @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +module.exports = class JsonLdError extends Error { + /** + * Creates a JSON-LD Error. + * + * @param message the error message. + * @param name the error name (type). + * @param details the error details. + */ + constructor( + message = 'An unspecified JSON-LD error occurred.', + name = 'jsonld.Error', + details = {}) { + super(message); + this.name = name; + this.message = message; + this.details = details; + } +}; diff --git a/lib/JsonLdProcessor.js b/lib/JsonLdProcessor.js new file mode 100644 index 00000000..9b1c1239 --- /dev/null +++ b/lib/JsonLdProcessor.js @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +module.exports = jsonld => { + class JsonLdProcessor { + toString() { + return '[object JsonLdProcessor]'; + } + } + Object.defineProperty(JsonLdProcessor, 'prototype', { + writable: false, + enumerable: false + }); + Object.defineProperty(JsonLdProcessor.prototype, 'constructor', { + writable: true, + enumerable: false, + configurable: true, + value: JsonLdProcessor + }); + + // The Web IDL test harness will check the number of parameters defined in + // the functions below.
The number of parameters must exactly match the + // required (non-optional) parameters of the JsonLdProcessor interface as + // defined here: + // https://www.w3.org/TR/json-ld-api/#the-jsonldprocessor-interface + + JsonLdProcessor.compact = function(input, ctx) { + if(arguments.length < 2) { + return Promise.reject( + new TypeError('Could not compact, too few arguments.')); + } + return jsonld.compact(input, ctx); + }; + JsonLdProcessor.expand = function(input) { + if(arguments.length < 1) { + return Promise.reject( + new TypeError('Could not expand, too few arguments.')); + } + return jsonld.expand(input); + }; + JsonLdProcessor.flatten = function(input) { + if(arguments.length < 1) { + return Promise.reject( + new TypeError('Could not flatten, too few arguments.')); + } + return jsonld.flatten(input); + }; + + return JsonLdProcessor; +}; diff --git a/lib/NQuads.js b/lib/NQuads.js new file mode 100644 index 00000000..9c4d03e6 --- /dev/null +++ b/lib/NQuads.js @@ -0,0 +1,7 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +// TODO: move `NQuads` to its own package +module.exports = require('rdf-canonize').NQuads; diff --git a/lib/Rdfa.js b/lib/Rdfa.js new file mode 100644 index 00000000..ae7b8ca6 --- /dev/null +++ b/lib/Rdfa.js @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +/* global Node, XMLSerializer */ +'use strict'; + +const { + RDF_LANGSTRING, + RDF_PLAIN_LITERAL, + RDF_OBJECT, + RDF_XML_LITERAL, + XSD_STRING, +} = require('./constants'); + +let _Node; +if(typeof Node !== 'undefined') { + _Node = Node; +} else { + _Node = { + ELEMENT_NODE: 1, + ATTRIBUTE_NODE: 2, + TEXT_NODE: 3, + CDATA_SECTION_NODE: 4, + ENTITY_REFERENCE_NODE: 5, + ENTITY_NODE: 6, + PROCESSING_INSTRUCTION_NODE: 7, + COMMENT_NODE: 8, + DOCUMENT_NODE: 9, + DOCUMENT_TYPE_NODE: 10, + DOCUMENT_FRAGMENT_NODE: 11, + NOTATION_NODE: 12 + }; +} + +module.exports = class Rdfa { + /** + * Parses the RDF dataset found via the data object from the RDFa API. + * + * @param data the RDFa API data object. + * + * @return the RDF dataset. 
+ */ + parse(data) { + const dataset = {}; + dataset['@default'] = []; + + const subjects = data.getSubjects(); + for(let si = 0; si < subjects.length; ++si) { + const subject = subjects[si]; + if(subject === null) { + continue; + } + + // get all related triples + const triples = data.getSubjectTriples(subject); + if(triples === null) { + continue; + } + const predicates = triples.predicates; + for(let predicate in predicates) { + // iterate over objects + const objects = predicates[predicate].objects; + for(let oi = 0; oi < objects.length; ++oi) { + const object = objects[oi]; + + // create RDF triple + const triple = {}; + + // add subject + if(subject.indexOf('_:') === 0) { + triple.subject = {type: 'blank node', value: subject}; + } else { + triple.subject = {type: 'IRI', value: subject}; + } + + // add predicate + if(predicate.indexOf('_:') === 0) { + triple.predicate = {type: 'blank node', value: predicate}; + } else { + triple.predicate = {type: 'IRI', value: predicate}; + } + + // serialize XML literal + let value = object.value; + if(object.type === RDF_XML_LITERAL) { + // initialize XMLSerializer + const XMLSerializer = getXMLSerializerClass(); + const serializer = new XMLSerializer(); + value = ''; + for(let x = 0; x < object.value.length; x++) { + if(object.value[x].nodeType === _Node.ELEMENT_NODE) { + value += serializer.serializeToString(object.value[x]); + } else if(object.value[x].nodeType === _Node.TEXT_NODE) { + value += object.value[x].nodeValue; + } + } + } + + // add object + triple.object = {}; + + // object is an IRI + if(object.type === RDF_OBJECT) { + if(object.value.indexOf('_:') === 0) { + triple.object.type = 'blank node'; + } else { + triple.object.type = 'IRI'; + } + } else { + // object is a literal + triple.object.type = 'literal'; + if(object.type === RDF_PLAIN_LITERAL) { + if(object.language) { + triple.object.datatype = RDF_LANGSTRING; + triple.object.language = object.language; + } else { + triple.object.datatype = XSD_STRING; + } + } else { + triple.object.datatype = object.type; + } + } + triple.object.value = value; + + // add triple to dataset in default graph + dataset['@default'].push(triple); + } + } + } + + return dataset; + } +}; + +function getXMLSerializerClass() { + if(typeof XMLSerializer === 'undefined') { + return require('xmldom').XMLSerializer; + } + return XMLSerializer; +} diff --git a/lib/RequestQueue.js b/lib/RequestQueue.js new file mode 100644 index 00000000..efa928bc --- /dev/null +++ b/lib/RequestQueue.js @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +const {callbackify, normalizeDocumentLoader} = require('./util'); + +module.exports = class RequestQueue { + /** + * Creates a simple queue for requesting documents. 
+ */ + constructor() { + this._requests = {}; + this.add = callbackify(this.add.bind(this)); + } + + wrapLoader(loader) { + const self = this; + self._loader = normalizeDocumentLoader(loader); + return function(url) { + return self.add.apply(self, arguments); + }; + } + + async add(url) { + const self = this; + + let promise = self._requests[url]; + if(promise) { + // URL already queued, wait for it to load + return Promise.resolve(promise); + } + + // queue URL and load it + promise = self._requests[url] = self._loader(url); + + try { + return await promise; + } finally { + delete self._requests[url]; + } + } +}; diff --git a/lib/compact.js b/lib/compact.js new file mode 100644 index 00000000..1b2de753 --- /dev/null +++ b/lib/compact.js @@ -0,0 +1,800 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +const JsonLdError = require('./JsonLdError'); + +const { + isArray: _isArray, + isObject: _isObject, + isString: _isString +} = require('./types'); + +const { + isList: _isList, + isValue: _isValue, + isSimpleGraph: _isSimpleGraph, + isSubjectReference: _isSubjectReference +} = require('./graphTypes'); + +const { + expandIri: _expandIri, + getContextValue: _getContextValue, + isKeyword: _isKeyword +} = require('./context'); + +const { + removeBase: _removeBase +} = require('./url'); + +const { + addValue: _addValue, + compareShortestLeast: _compareShortestLeast +} = require('./util'); + +const api = {}; +module.exports = api; + +/** + * Recursively compacts an element using the given active context. All values + * must be in expanded form before this method is called. + * + * @param activeCtx the active context to use. + * @param activeProperty the compacted property associated with the element + * to compact, null for none. + * @param element the element to compact. + * @param options the compaction options. + * @param compactionMap the compaction map to use. + * + * @return the compacted value. 
+ */ +api.compact = ({ + activeCtx, + activeProperty = null, + element, + options = {}, + compactionMap = () => undefined +}) => { + // recursively compact array + if(_isArray(element)) { + let rval = []; + for(let i = 0; i < element.length; ++i) { + // compact, dropping any null values unless custom mapped + let compacted = api.compact({ + activeCtx, + activeProperty, + element: element[i], + options, + compactionMap + }); + if(compacted === null) { + // TODO: use `await` to support async + compacted = compactionMap({ + unmappedValue: element[i], + activeCtx, + activeProperty, + parent: element, + index: i, + options + }); + if(compacted === undefined) { + continue; + } + } + rval.push(compacted); + } + if(options.compactArrays && rval.length === 1) { + // use single element if no container is specified + const container = _getContextValue( + activeCtx, activeProperty, '@container') || []; + if(container.length === 0) { + rval = rval[0]; + } + } + return rval; + } + + // recursively compact object + if(_isObject(element)) { + if(options.link && '@id' in element && element['@id'] in options.link) { + // check for a linked element to reuse + const linked = options.link[element['@id']]; + for(let i = 0; i < linked.length; ++i) { + if(linked[i].expanded === element) { + return linked[i].compacted; + } + } + } + + // do value compaction on @values and subject references + if(_isValue(element) || _isSubjectReference(element)) { + const rval = api.compactValue({activeCtx, activeProperty, value: element}); + if(options.link && _isSubjectReference(element)) { + // store linked element + if(!(element['@id'] in options.link)) { + options.link[element['@id']] = []; + } + options.link[element['@id']].push({expanded: element, compacted: rval}); + } + return rval; + } + + // FIXME: avoid misuse of active property as an expanded property? 
+ const insideReverse = (activeProperty === '@reverse'); + + const rval = {}; + + if(options.link && '@id' in element) { + // store linked element + if(!(element['@id'] in options.link)) { + options.link[element['@id']] = []; + } + options.link[element['@id']].push({expanded: element, compacted: rval}); + } + + // process element keys in order + const keys = Object.keys(element).sort(); + for(let ki = 0; ki < keys.length; ++ki) { + const expandedProperty = keys[ki]; + const expandedValue = element[expandedProperty]; + + // compact @id and @type(s) + if(expandedProperty === '@id' || expandedProperty === '@type') { + let compactedValue; + + // compact single @id + if(_isString(expandedValue)) { + compactedValue = api.compactIri({ + activeCtx, + iri: expandedValue, + relativeTo: {vocab: (expandedProperty === '@type')} + }); + } else { + // expanded value must be a @type array + compactedValue = []; + for(let vi = 0; vi < expandedValue.length; ++vi) { + compactedValue.push(api.compactIri( + {activeCtx, iri: expandedValue[vi], relativeTo: {vocab: true}})); + } + } + + // use keyword alias and add value + const alias = api.compactIri({activeCtx, iri: expandedProperty, relativeTo: {vocab: true}}); + const isArray = _isArray(compactedValue) && expandedValue.length === 0; + _addValue(rval, alias, compactedValue, {propertyIsArray: isArray}); + continue; + } + + // handle @reverse + if(expandedProperty === '@reverse') { + // recursively compact expanded value + const compactedValue = api.compact({ + activeCtx, + activeProperty: '@reverse', + element: expandedValue, + options, + compactionMap + }); + + // handle double-reversed properties + for(let compactedProperty in compactedValue) { + if(activeCtx.mappings[compactedProperty] && + activeCtx.mappings[compactedProperty].reverse) { + const value = compactedValue[compactedProperty]; + const container = _getContextValue( + activeCtx, compactedProperty, '@container') || []; + const useArray = ( + container.includes('@set') || !options.compactArrays); + _addValue( + rval, compactedProperty, value, {propertyIsArray: useArray}); + delete compactedValue[compactedProperty]; + } + } + + if(Object.keys(compactedValue).length > 0) { + // use keyword alias and add value + const alias = api.compactIri({activeCtx, iri: expandedProperty, relativeTo: {vocab: true}}); + _addValue(rval, alias, compactedValue); + } + + continue; + } + + // handle @index property + if(expandedProperty === '@index') { + // drop @index if inside an @index container + const container = _getContextValue( + activeCtx, activeProperty, '@container') || []; + if(container.includes('@index')) { + continue; + } + + // use keyword alias and add value + const alias = api.compactIri({activeCtx, iri: expandedProperty, relativeTo: {vocab: true}}); + _addValue(rval, alias, expandedValue); + continue; + } + + // skip array processing for keywords that aren't @graph or @list + if(expandedProperty !== '@graph' && expandedProperty !== '@list' && + _isKeyword(expandedProperty)) { + // use keyword alias and add value as is + const alias = api.compactIri({activeCtx, iri: expandedProperty, relativeTo: {vocab: true}}); + _addValue(rval, alias, expandedValue); + continue; + } + + // Note: expanded value must be an array due to expansion algorithm. 
+ if(!_isArray(expandedValue)) { + throw new JsonLdError( + 'JSON-LD expansion error; expanded value must be an array.', + 'jsonld.SyntaxError'); + } + + // preserve empty arrays + if(expandedValue.length === 0) { + const itemActiveProperty = api.compactIri({ + activeCtx, + iri: expandedProperty, + value: expandedValue, + relativeTo: {vocab: true}, + reverse: insideReverse + }); + _addValue( + rval, itemActiveProperty, expandedValue, {propertyIsArray: true}); + } + + // recursively process array values + for(let vi = 0; vi < expandedValue.length; ++vi) { + const expandedItem = expandedValue[vi]; + + // compact property and get container type + const itemActiveProperty = api.compactIri({ + activeCtx, + iri: expandedProperty, + value: expandedItem, + relativeTo: {vocab: true}, + reverse: insideReverse + }); + const container = _getContextValue( + activeCtx, itemActiveProperty, '@container') || []; + + // get simple @graph or @list value if appropriate + const isSimpleGraph = _isSimpleGraph(expandedItem); + const isList = _isList(expandedItem); + let inner; + if(isList) { + inner = expandedItem['@list']; + } else if(isSimpleGraph) { + inner = expandedItem['@graph']; + } + + // recursively compact expanded item + let compactedItem = api.compact({ + activeCtx, + activeProperty: itemActiveProperty, + element: (isList || isSimpleGraph) ? inner : expandedItem, + options, + compactionMap + }); + + // handle @list + if(isList) { + // ensure @list value is an array + if(!_isArray(compactedItem)) { + compactedItem = [compactedItem]; + } + + if(!container.includes('@list')) { + // wrap using @list alias + compactedItem = { + [api.compactIri({activeCtx, iri: '@list', relativeTo: {vocab: true}})]: compactedItem + }; + + // include @index from expanded @list, if any + if('@index' in expandedItem) { + compactedItem[api.compactIri({activeCtx, iri: '@index', relativeTo: {vocab: true}})] = + expandedItem['@index']; + } + } else if(itemActiveProperty in rval) { + // can't use @list container for more than 1 list + throw new JsonLdError( + 'JSON-LD compact error; property has a "@list" @container ' + + 'rule but there is more than a single @list that matches ' + + 'the compacted term in the document. Compaction might mix ' + + 'unwanted items into the list.', + 'jsonld.SyntaxError', {code: 'compaction to list of lists'}); + } + } + + // handle simple @graph + if(isSimpleGraph && !container.includes('@graph')) { + // wrap using @graph alias + compactedItem = { + [api.compactIri({activeCtx, iri: '@graph', relativeTo: {vocab: true}})]: compactedItem + }; + + // include @index from expanded @graph, if any + if('@index' in expandedItem) { + compactedItem[api.compactIri({activeCtx, iri: '@index', relativeTo: {vocab: true}})] = + expandedItem['@index']; + } + } + + // handle language and index maps + if(container.includes('@language') || container.includes('@index')) { + // get or create the map object + let mapObject; + if(itemActiveProperty in rval) { + mapObject = rval[itemActiveProperty]; + } else { + rval[itemActiveProperty] = mapObject = {}; + } + + // if container is a language map, simplify compacted value to + // a simple string + if(container.includes('@language') && _isValue(compactedItem)) { + compactedItem = compactedItem['@value']; + } + + // add compact value to map object using key from expanded value + // based on the container type + const c = container.includes('@language') ?
'@language' : '@index'; + _addValue(mapObject, expandedItem[c], compactedItem); + } else { + // use an array if: compactArrays flag is false, + // @container is @set or @list , value is an empty + // array, or key is @graph + const isArray = (!options.compactArrays || + container.includes('@set') || container.includes('@list') || + (_isArray(compactedItem) && compactedItem.length === 0) || + expandedProperty === '@list' || expandedProperty === '@graph'); + + // add compact value + _addValue( + rval, itemActiveProperty, compactedItem, + {propertyIsArray: isArray}); + } + } + } + + return rval; + } + + // only primitives remain which are already compact + return element; +}; + +/** + * Compacts an IRI or keyword into a term or prefix if it can be. If the + * IRI has an associated value it may be passed. + * + * @param activeCtx the active context to use. + * @param iri the IRI to compact. + * @param value the value to check or null. + * @param relativeTo options for how to compact IRIs: + * vocab: true to split after @vocab, false not to. + * @param reverse true if a reverse property is being compacted, false if not. + * + * @return the compacted term, prefix, keyword alias, or the original IRI. + */ +api.compactIri = ({ + activeCtx, + iri, + value = null, + relativeTo = {vocab: false}, + reverse = false +}) => { + // can't compact null + if(iri === null) { + return iri; + } + + const inverseCtx = activeCtx.getInverse(); + + // if term is a keyword, it may be compacted to a simple alias + if(_isKeyword(iri) && + iri in inverseCtx && + '@none' in inverseCtx[iri] && + '@type' in inverseCtx[iri]['@none'] && + '@none' in inverseCtx[iri]['@none']['@type']) { + return inverseCtx[iri]['@none']['@type']['@none']; + } + + // use inverse context to pick a term if iri is relative to vocab + if(relativeTo.vocab && iri in inverseCtx) { + const defaultLanguage = activeCtx['@language'] || '@none'; + + // prefer @index if available in value + const containers = []; + if(_isObject(value) && '@index' in value) { + containers.push('@index'); + } + + // prefer `['@graph', '@set']` and then `@graph` if value is a simple graph + // TODO: support `@graphId`? + if(_isSimpleGraph(value)) { + containers.push('@graph@set'); + containers.push('@graph'); + } + + // defaults for term selection based on type/language + let typeOrLanguage = '@language'; + let typeOrLanguageValue = '@null'; + + if(reverse) { + typeOrLanguage = '@type'; + typeOrLanguageValue = '@reverse'; + containers.push('@set'); + } else if(_isList(value)) { + // choose the most specific term that works for all elements in @list + // only select @list containers if @index is NOT in value + if(!('@index' in value)) { + containers.push('@list'); + } + const list = value['@list']; + if(list.length === 0) { + // any empty list can be matched against any term that uses the + // @list container regardless of @type or @language + typeOrLanguage = '@any'; + typeOrLanguageValue = '@none'; + } else { + let commonLanguage = (list.length === 0) ? 
defaultLanguage : null; + let commonType = null; + for(let i = 0; i < list.length; ++i) { + const item = list[i]; + let itemLanguage = '@none'; + let itemType = '@none'; + if(_isValue(item)) { + if('@language' in item) { + itemLanguage = item['@language']; + } else if('@type' in item) { + itemType = item['@type']; + } else { + // plain literal + itemLanguage = '@null'; + } + } else { + itemType = '@id'; + } + if(commonLanguage === null) { + commonLanguage = itemLanguage; + } else if(itemLanguage !== commonLanguage && _isValue(item)) { + commonLanguage = '@none'; + } + if(commonType === null) { + commonType = itemType; + } else if(itemType !== commonType) { + commonType = '@none'; + } + // there are different languages and types in the list, so choose + // the most generic term, no need to keep iterating the list + if(commonLanguage === '@none' && commonType === '@none') { + break; + } + } + commonLanguage = commonLanguage || '@none'; + commonType = commonType || '@none'; + if(commonType !== '@none') { + typeOrLanguage = '@type'; + typeOrLanguageValue = commonType; + } else { + typeOrLanguageValue = commonLanguage; + } + } + } else { + if(_isValue(value)) { + if('@language' in value && !('@index' in value)) { + containers.push('@language'); + typeOrLanguageValue = value['@language']; + } else if('@type' in value) { + typeOrLanguage = '@type'; + typeOrLanguageValue = value['@type']; + } + } else { + typeOrLanguage = '@type'; + typeOrLanguageValue = '@id'; + } + containers.push('@set'); + } + + // do term selection + containers.push('@none'); + const term = _selectTerm( + activeCtx, iri, value, containers, typeOrLanguage, typeOrLanguageValue); + if(term !== null) { + return term; + } + } + + // no term match, use @vocab if available + if(relativeTo.vocab) { + if('@vocab' in activeCtx) { + // determine if vocab is a prefix of the iri + const vocab = activeCtx['@vocab']; + if(iri.indexOf(vocab) === 0 && iri !== vocab) { + // use suffix as relative iri if it is not a term in the active context + const suffix = iri.substr(vocab.length); + if(!(suffix in activeCtx.mappings)) { + return suffix; + } + } + } + } + + // no term or @vocab match, check for possible CURIEs + let choice = null; + // TODO: make FastCurieMap a class with a method to do this lookup + const partialMatches = []; + let iriMap = activeCtx.fastCurieMap; + // check for partial matches of against `iri`, which means look until + // iri.length - 1, not full length + const maxPartialLength = iri.length - 1; + for(let i = 0; i < maxPartialLength && iri[i] in iriMap; ++i) { + iriMap = iriMap[iri[i]]; + if('' in iriMap) { + partialMatches.push(iriMap[''][0]); + } + } + // check partial matches in reverse order to prefer longest ones first + for(let i = partialMatches.length - 1; i >= 0; --i) { + const entry = partialMatches[i]; + const terms = entry.terms; + for(let ti = 0; ti < terms.length; ++ti) { + // a CURIE is usable if: + // 1. it has no mapping, OR + // 2. 
value is null, which means we're not compacting an @value, AND + // the mapping matches the IRI + const curie = terms[ti] + ':' + iri.substr(entry.iri.length); + const isUsableCurie = (!(curie in activeCtx.mappings) || + (value === null && activeCtx.mappings[curie]['@id'] === iri)); + + // select curie if it is shorter or the same length but lexicographically + // less than the current choice + if(isUsableCurie && (choice === null || + _compareShortestLeast(curie, choice) < 0)) { + choice = curie; + } + } + } + + // return chosen curie + if(choice !== null) { + return choice; + } + + // compact IRI relative to base + if(!relativeTo.vocab) { + return _removeBase(activeCtx['@base'], iri); + } + + // return IRI as is + return iri; +}; + +/** + * Performs value compaction on an object with '@value' or '@id' as the only + * property. + * + * @param activeCtx the active context. + * @param activeProperty the active property that points to the value. + * @param value the value to compact. + * + * @return the compaction result. + */ +api.compactValue = ({activeCtx, activeProperty, value}) => { + // value is a @value + if(_isValue(value)) { + // get context rules + const type = _getContextValue(activeCtx, activeProperty, '@type'); + const language = _getContextValue(activeCtx, activeProperty, '@language'); + const container = _getContextValue(activeCtx, activeProperty, '@container') || []; + + // whether or not the value has an @index that must be preserved + const preserveIndex = '@index' in value && !container.includes('@index'); + + // if there's no @index to preserve ... + if(!preserveIndex) { + // matching @type or @language specified in context, compact value + if(value['@type'] === type || value['@language'] === language) { + return value['@value']; + } + } + + // return just the value of @value if all are true: + // 1. @value is the only key or @index isn't being preserved + // 2. 
there is no default language or @value is not a string or + // the key has a mapping with a null @language + const keyCount = Object.keys(value).length; + const isValueOnlyKey = (keyCount === 1 || + (keyCount === 2 && '@index' in value && !preserveIndex)); + const hasDefaultLanguage = ('@language' in activeCtx); + const isValueString = _isString(value['@value']); + const hasNullMapping = (activeCtx.mappings[activeProperty] && + activeCtx.mappings[activeProperty]['@language'] === null); + if(isValueOnlyKey && + (!hasDefaultLanguage || !isValueString || hasNullMapping)) { + return value['@value']; + } + + const rval = {}; + + // preserve @index + if(preserveIndex) { + rval[api.compactIri({activeCtx, iri: '@index', relativeTo: {vocab: true}})] = value['@index']; + } + + if('@type' in value) { + // compact @type IRI + rval[api.compactIri({activeCtx, iri: '@type', relativeTo: {vocab: true}})] = api.compactIri( + {activeCtx, iri: value['@type'], relativeTo: {vocab: true}}); + } else if('@language' in value) { + // alias @language + rval[api.compactIri({activeCtx, iri: '@language', relativeTo: {vocab: true}})] = value['@language']; + } + + // alias @value + rval[api.compactIri({activeCtx, iri: '@value', relativeTo: {vocab: true}})] = value['@value']; + + return rval; + } + + // value is a subject reference + const expandedProperty = _expandIri(activeCtx, activeProperty, {vocab: true}); + const type = _getContextValue(activeCtx, activeProperty, '@type'); + const compacted = api.compactIri( + {activeCtx, iri: value['@id'], relativeTo: {vocab: type === '@vocab'}}); + + // compact to scalar + if(type === '@id' || type === '@vocab' || expandedProperty === '@graph') { + return compacted; + } + + return { + [api.compactIri({activeCtx, iri: '@id', relativeTo: {vocab: true}})]: compacted + }; +}; + +/** + * Removes the @preserve keywords as the last step of the compaction + * algorithm when it is running on framed output. + * + * @param ctx the active context used to compact the input. + * @param input the framed, compacted output. + * @param options the compaction options used. + * + * @return the resulting output. 
+ */ +api.removePreserve = (ctx, input, options) => { + // recurse through arrays + if(_isArray(input)) { + const output = []; + for(let i = 0; i < input.length; ++i) { + const result = api.removePreserve(ctx, input[i], options); + // drop nulls from arrays + if(result !== null) { + output.push(result); + } + } + input = output; + } else if(_isObject(input)) { + // remove @preserve + if('@preserve' in input) { + if(input['@preserve'] === '@null') { + return null; + } + return input['@preserve']; + } + + // skip @values + if(_isValue(input)) { + return input; + } + + // recurse through @lists + if(_isList(input)) { + input['@list'] = api.removePreserve(ctx, input['@list'], options); + return input; + } + + // handle in-memory linked nodes + const idAlias = api.compactIri({activeCtx: ctx, iri: '@id', relativeTo: {vocab: true}}); + if(idAlias in input) { + const id = input[idAlias]; + if(id in options.link) { + const idx = options.link[id].indexOf(input); + if(idx !== -1) { + // already visited + return options.link[id][idx]; + } + // prevent circular visitation + options.link[id].push(input); + } else { + // prevent circular visitation + options.link[id] = [input]; + } + } + + // recurse through properties + for(let prop in input) { + let result = api.removePreserve(ctx, input[prop], options); + const container = _getContextValue(ctx, prop, '@container') || []; + if(options.compactArrays && _isArray(result) && result.length === 1 && + container.length === 0) { + result = result[0]; + } + input[prop] = result; + } + } + return input; +}; + +/** + * Picks the preferred compaction term from the given inverse context entry. + * + * @param activeCtx the active context. + * @param iri the IRI to pick the term for. + * @param value the value to pick the term for. + * @param containers the preferred containers. + * @param typeOrLanguage either '@type' or '@language'. + * @param typeOrLanguageValue the preferred value for '@type' or '@language'. + * + * @return the preferred term. 
+ */ +function _selectTerm( + activeCtx, iri, value, containers, typeOrLanguage, typeOrLanguageValue) { + if(typeOrLanguageValue === null) { + typeOrLanguageValue = '@null'; + } + + // preferences for the value of @type or @language + const prefs = []; + + // determine prefs for @id based on whether or not value compacts to a term + if((typeOrLanguageValue === '@id' || typeOrLanguageValue === '@reverse') && + _isSubjectReference(value)) { + // prefer @reverse first + if(typeOrLanguageValue === '@reverse') { + prefs.push('@reverse'); + } + // try to compact value to a term + const term = api.compactIri( + {activeCtx, iri: value['@id'], relativeTo: {vocab: true}}); + if(term in activeCtx.mappings && + activeCtx.mappings[term] && + activeCtx.mappings[term]['@id'] === value['@id']) { + // prefer @vocab + prefs.push.apply(prefs, ['@vocab', '@id']); + } else { + // prefer @id + prefs.push.apply(prefs, ['@id', '@vocab']); + } + } else { + prefs.push(typeOrLanguageValue); + } + prefs.push('@none'); + + const containerMap = activeCtx.inverse[iri]; + for(let ci = 0; ci < containers.length; ++ci) { + // if container not available in the map, continue + const container = containers[ci]; + if(!(container in containerMap)) { + continue; + } + + const typeOrLanguageValueMap = containerMap[container][typeOrLanguage]; + for(let pi = 0; pi < prefs.length; ++pi) { + // if type/language option not available in the map, continue + const pref = prefs[pi]; + if(!(pref in typeOrLanguageValueMap)) { + continue; + } + + // select term + return typeOrLanguageValueMap[pref]; + } + } + + return null; +} diff --git a/lib/constants.js b/lib/constants.js new file mode 100644 index 00000000..17511c75 --- /dev/null +++ b/lib/constants.js @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +const RDF = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#'; +const XSD = 'http://www.w3.org/2001/XMLSchema#'; + +module.exports = { + LINK_HEADER_REL: 'http://www.w3.org/ns/json-ld#context', + + RDF, + RDF_LIST: RDF + 'List', + RDF_FIRST: RDF + 'first', + RDF_REST: RDF + 'rest', + RDF_NIL: RDF + 'nil', + RDF_TYPE: RDF + 'type', + RDF_PLAIN_LITERAL: RDF + 'PlainLiteral', + RDF_XML_LITERAL: RDF + 'XMLLiteral', + RDF_OBJECT: RDF + 'object', + RDF_LANGSTRING: RDF + 'langString', + + XSD, + XSD_BOOLEAN: XSD + 'boolean', + XSD_DOUBLE: XSD + 'double', + XSD_INTEGER: XSD + 'integer', + XSD_STRING: XSD + 'string', +}; diff --git a/lib/context.js b/lib/context.js new file mode 100644 index 00000000..f9097650 --- /dev/null +++ b/lib/context.js @@ -0,0 +1,1006 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +const util = require('./util'); +const ActiveContextCache = require('./ActiveContextCache'); +const JsonLdError = require('./JsonLdError'); + +const { + isArray: _isArray, + isObject: _isObject, + isString: _isString, + isUndefined: _isUndefined +} = require('./types'); + +const { + isAbsolute: _isAbsoluteIri, + isRelative: _isRelativeIri, + prependBase, + parse: parseUrl +} = require('./url'); + +const MAX_CONTEXT_URLS = 10; + +const api = {}; +module.exports = api; + +api.cache = new ActiveContextCache(); + +/** + * Processes a local context and returns a new active context. + * + * @param activeCtx the current active context. + * @param localCtx the local context to process. + * @param options the context processing options. + * + * @return the new active context. 
+ */ +api.process = ({activeCtx, localCtx, options}) => { + // normalize local context to an array of @context objects + if(_isObject(localCtx) && '@context' in localCtx && + _isArray(localCtx['@context'])) { + localCtx = localCtx['@context']; + } + const ctxs = _isArray(localCtx) ? localCtx : [localCtx]; + + // no contexts in array, clone existing context + if(ctxs.length === 0) { + return activeCtx.clone(); + } + + // process each context in order, update active context + // on each iteration to ensure proper caching + let rval = activeCtx; + for(let i = 0; i < ctxs.length; ++i) { + let ctx = ctxs[i]; + + // reset to initial context + if(ctx === null) { + rval = activeCtx = api.getInitialContext(options); + continue; + } + + // dereference @context key if present + if(_isObject(ctx) && '@context' in ctx) { + ctx = ctx['@context']; + } + + // context must be an object by now, all URLs retrieved before this call + if(!_isObject(ctx)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; @context must be an object.', + 'jsonld.SyntaxError', {code: 'invalid local context', context: ctx}); + } + + // get context from cache if available + if(api.cache) { + const cached = api.cache.get(activeCtx, ctx); + if(cached) { + rval = activeCtx = cached; + continue; + } + } + + // update active context and clone new one before updating + activeCtx = rval; + rval = rval.clone(); + + // define context mappings for keys in local context + const defined = {}; + + // handle @version + if('@version' in ctx) { + if(ctx['@version'] !== 1.1) { + throw new JsonLdError( + 'Unsupported JSON-LD version: ' + ctx['@version'], + 'jsonld.UnsupportedVersion', + {code: 'invalid @version value', context: ctx}); + } + if(activeCtx.processingMode && activeCtx.processingMode.indexOf('json-ld-1.1') !== 0) { + throw new JsonLdError( + '@version: ' + ctx['@version'] + ' not compatible with ' + activeCtx.processingMode, + 'jsonld.ProcessingModeConflict', + {code: 'processing mode conflict', context: ctx}); + } + rval.processingMode = 'json-ld-1.1'; + rval['@version'] = ctx['@version']; + defined['@version'] = true; + } + + // if not set explicitly, set processingMode to "json-ld-1.0" + rval.processingMode = rval.processingMode || activeCtx.processingMode || 'json-ld-1.0'; + + // handle @base + if('@base' in ctx) { + let base = ctx['@base']; + + if(base === null) { + // no action + } else if(_isAbsoluteIri(base)) { + base = parseUrl(base); + } else if(_isRelativeIri(base)) { + base = parseUrl(prependBase(activeCtx['@base'].href, base)); + } else { + throw new JsonLdError( + 'Invalid JSON-LD syntax; the value of "@base" in a ' + + '@context must be an absolute IRI, a relative IRI, or null.', + 'jsonld.SyntaxError', {code: 'invalid base IRI', context: ctx}); + } + + rval['@base'] = base; + defined['@base'] = true; + } + + // handle @vocab + if('@vocab' in ctx) { + const value = ctx['@vocab']; + if(value === null) { + delete rval['@vocab']; + } else if(!_isString(value)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; the value of "@vocab" in a ' + + '@context must be a string or null.', + 'jsonld.SyntaxError', {code: 'invalid vocab mapping', context: ctx}); + } else if(!_isAbsoluteIri(value)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; the value of "@vocab" in a ' + + '@context must be an absolute IRI.', + 'jsonld.SyntaxError', {code: 'invalid vocab mapping', context: ctx}); + } else { + rval['@vocab'] = value; + } + defined['@vocab'] = true; + } + + // handle @language + if('@language' in ctx) { + const value = 
ctx['@language']; + if(value === null) { + delete rval['@language']; + } else if(!_isString(value)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; the value of "@language" in a ' + + '@context must be a string or null.', + 'jsonld.SyntaxError', + {code: 'invalid default language', context: ctx}); + } else { + rval['@language'] = value.toLowerCase(); + } + defined['@language'] = true; + } + + // process all other keys + for(let key in ctx) { + api.createTermDefinition(rval, ctx, key, defined); + } + + // cache result + if(api.cache) { + api.cache.set(activeCtx, ctx, rval); + } + } + + return rval; +}; + +/** + * Creates a term definition during context processing. + * + * @param activeCtx the current active context. + * @param localCtx the local context being processed. + * @param term the term in the local context to define the mapping for. + * @param defined a map of defining/defined keys to detect cycles and prevent + * double definitions. + */ +api.createTermDefinition = (activeCtx, localCtx, term, defined) => { + if(term in defined) { + // term already defined + if(defined[term]) { + return; + } + // cycle detected + throw new JsonLdError( + 'Cyclical context definition detected.', + 'jsonld.CyclicalContext', + {code: 'cyclic IRI mapping', context: localCtx, term: term}); + } + + // now defining term + defined[term] = false; + + if(api.isKeyword(term)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; keywords cannot be overridden.', + 'jsonld.SyntaxError', + {code: 'keyword redefinition', context: localCtx, term: term}); + } + + if(term === '') { + throw new JsonLdError( + 'Invalid JSON-LD syntax; a term cannot be an empty string.', + 'jsonld.SyntaxError', + {code: 'invalid term definition', context: localCtx}); + } + + // remove old mapping + if(activeCtx.mappings[term]) { + delete activeCtx.mappings[term]; + } + + // get context term value + let value = localCtx[term]; + + // clear context entry + if(value === null || (_isObject(value) && value['@id'] === null)) { + activeCtx.mappings[term] = null; + defined[term] = true; + return; + } + + // convert short-hand value to object w/@id + if(_isString(value)) { + value = {'@id': value}; + } + + if(!_isObject(value)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; @context term values must be ' + + 'strings or objects.', + 'jsonld.SyntaxError', + {code: 'invalid term definition', context: localCtx}); + } + + // create new mapping + const mapping = activeCtx.mappings[term] = {}; + mapping.reverse = false; + + // make sure term definition only has expected keywords + const validKeys = ['@container', '@id', '@language', '@reverse', '@type']; + + for(let kw in value) { + if(!validKeys.includes(kw)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; a term definition must not contain ' + kw, + 'jsonld.SyntaxError', + {code: 'invalid term definition', context: localCtx}); + } + } + + if('@reverse' in value) { + if('@id' in value) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; a @reverse term definition must not ' + + 'contain @id.', 'jsonld.SyntaxError', + {code: 'invalid reverse property', context: localCtx}); + } + const reverse = value['@reverse']; + if(!_isString(reverse)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; a @context @reverse value must be a string.', + 'jsonld.SyntaxError', {code: 'invalid IRI mapping', context: localCtx}); + } + + // expand and add @id mapping + const id = api.expandIri( + activeCtx, reverse, {vocab: true, base: false}, localCtx, defined); + if(!_isAbsoluteIri(id)) { + throw 
new JsonLdError(
+        'Invalid JSON-LD syntax; a @context @reverse value must be an ' +
+        'absolute IRI or a blank node identifier.',
+        'jsonld.SyntaxError', {code: 'invalid IRI mapping', context: localCtx});
+    }
+    mapping['@id'] = id;
+    mapping.reverse = true;
+  } else if('@id' in value) {
+    let id = value['@id'];
+    if(!_isString(id)) {
+      throw new JsonLdError(
+        'Invalid JSON-LD syntax; a @context @id value must be a string.',
+        'jsonld.SyntaxError', {code: 'invalid IRI mapping', context: localCtx});
+    }
+    if(id !== term) {
+      // expand and add @id mapping
+      id = api.expandIri(
+        activeCtx, id, {vocab: true, base: false}, localCtx, defined);
+      if(!_isAbsoluteIri(id) && !api.isKeyword(id)) {
+        throw new JsonLdError(
+          'Invalid JSON-LD syntax; a @context @id value must be an ' +
+          'absolute IRI, a blank node identifier, or a keyword.',
+          'jsonld.SyntaxError',
+          {code: 'invalid IRI mapping', context: localCtx});
+      }
+      mapping['@id'] = id;
+    }
+  }
+
+  // always compute whether term has a colon as an optimization for
+  // _compactIri
+  const colon = term.indexOf(':');
+  mapping._termHasColon = (colon !== -1);
+
+  if(!('@id' in mapping)) {
+    // see if the term has a prefix
+    if(mapping._termHasColon) {
+      const prefix = term.substr(0, colon);
+      if(prefix in localCtx) {
+        // define parent prefix
+        api.createTermDefinition(activeCtx, localCtx, prefix, defined);
+      }
+
+      if(activeCtx.mappings[prefix]) {
+        // set @id based on prefix parent
+        const suffix = term.substr(colon + 1);
+        mapping['@id'] = activeCtx.mappings[prefix]['@id'] + suffix;
+      } else {
+        // term is an absolute IRI
+        mapping['@id'] = term;
+      }
+    } else {
+      // non-IRIs *must* define @ids if @vocab is not available
+      if(!('@vocab' in activeCtx)) {
+        throw new JsonLdError(
+          'Invalid JSON-LD syntax; @context terms must define an @id.',
+          'jsonld.SyntaxError',
+          {code: 'invalid IRI mapping', context: localCtx, term: term});
+      }
+      // prepend vocab to term
+      mapping['@id'] = activeCtx['@vocab'] + term;
+    }
+  }
+
+  // IRI mapping now defined
+  defined[term] = true;
+
+  if('@type' in value) {
+    let type = value['@type'];
+    if(!_isString(type)) {
+      throw new JsonLdError(
+        'Invalid JSON-LD syntax; a @context @type value must be a string.',
+        'jsonld.SyntaxError',
+        {code: 'invalid type mapping', context: localCtx});
+    }
+
+    if(type !== '@id' && type !== '@vocab') {
+      // expand @type to full IRI
+      type = api.expandIri(
+        activeCtx, type, {vocab: true, base: false}, localCtx, defined);
+      if(!_isAbsoluteIri(type)) {
+        throw new JsonLdError(
+          'Invalid JSON-LD syntax; a @context @type value must be an ' +
+          'absolute IRI.',
+          'jsonld.SyntaxError',
+          {code: 'invalid type mapping', context: localCtx});
+      }
+      if(type.indexOf('_:') === 0) {
+        throw new JsonLdError(
+          'Invalid JSON-LD syntax; a @context @type value must be an IRI, ' +
+          'not a blank node identifier.',
+          'jsonld.SyntaxError',
+          {code: 'invalid type mapping', context: localCtx});
+      }
+    }
+
+    // add @type to mapping
+    mapping['@type'] = type;
+  }
+
+  if('@container' in value) {
+    // normalize container to an array form
+    const container = _isString(value['@container']) ?
[value['@container']] : (value['@container'] || []); + const validContainers = ['@list', '@set', '@index', '@language']; + let isValid = true; + let hasSet = container.includes('@set'); + + // JSON-LD 1.1 support + if(activeCtx.processingMode && activeCtx.processingMode.indexOf('json-ld-1.1') === 0) { + // TODO: @id and @type + validContainers.push('@graph'); + + // check container length + isValid &= container.length <= (hasSet ? 2 : 1); + } else { + // in JSON-LD 1.0, container must not be an array (it must be a string, which is one of the validContainers) + isValid &= !_isArray(value['@container']); + + // check container length + isValid &= container.length <= 1; + } + + // check against valid containers + isValid &= container.every(c => validContainers.includes(c)); + + // @set not allowed with @list + isValid &= !(hasSet && container.includes('@list')); + + if(!isValid) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; @context @container value must be ' + + 'one of the following: ' + validContainers.join(', '), + 'jsonld.SyntaxError', + {code: 'invalid container mapping', context: localCtx}); + } + + if(mapping.reverse && !container.every(c => ['@index', '@set'].includes(c))) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; @context @container value for a @reverse ' + + 'type definition must be @index or @set.', 'jsonld.SyntaxError', + {code: 'invalid reverse property', context: localCtx}); + } + + // add @container to mapping + mapping['@container'] = container; + } + + if('@language' in value && !('@type' in value)) { + let language = value['@language']; + if(language !== null && !_isString(language)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; @context @language value must be ' + + 'a string or null.', 'jsonld.SyntaxError', + {code: 'invalid language mapping', context: localCtx}); + } + + // add @language to mapping + if(language !== null) { + language = language.toLowerCase(); + } + mapping['@language'] = language; + } + + // disallow aliasing @context and @preserve + const id = mapping['@id']; + if(id === '@context' || id === '@preserve') { + throw new JsonLdError( + 'Invalid JSON-LD syntax; @context and @preserve cannot be aliased.', + 'jsonld.SyntaxError', {code: 'invalid keyword alias', context: localCtx}); + } +}; + +/** + * Expands a string to a full IRI. The string may be a term, a prefix, a + * relative IRI, or an absolute IRI. The associated absolute IRI will be + * returned. + * + * @param activeCtx the current active context. + * @param value the string to expand. + * @param relativeTo options for how to resolve relative IRIs: + * base: true to resolve against the base IRI, false not to. + * vocab: true to concatenate after @vocab, false not to. + * @param localCtx the local context being processed (only given if called + * during context processing). + * @param defined a map for tracking cycles in context definitions (only given + * if called during context processing). + * + * @return the expanded value. 
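+ *
+ * For example (illustrative values): with an active context in which the
+ * term "ex" maps to "http://example.com/", expandIri(activeCtx, 'ex:foo',
+ * {vocab: true}) is expected to return "http://example.com/foo", while a
+ * keyword or null value is returned unchanged.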
+ */ +api.expandIri = (activeCtx, value, relativeTo, localCtx, defined) => { + // already expanded + if(value === null || api.isKeyword(value)) { + return value; + } + + // ensure value is interpreted as a string + value = String(value); + + // define term dependency if not defined + if(localCtx && value in localCtx && defined[value] !== true) { + api.createTermDefinition(activeCtx, localCtx, value, defined); + } + + relativeTo = relativeTo || {}; + if(relativeTo.vocab) { + const mapping = activeCtx.mappings[value]; + + // value is explicitly ignored with a null mapping + if(mapping === null) { + return null; + } + + if(mapping) { + // value is a term + return mapping['@id']; + } + } + + // split value into prefix:suffix + const colon = value.indexOf(':'); + if(colon !== -1) { + const prefix = value.substr(0, colon); + const suffix = value.substr(colon + 1); + + // do not expand blank nodes (prefix of '_') or already-absolute + // IRIs (suffix of '//') + if(prefix === '_' || suffix.indexOf('//') === 0) { + return value; + } + + // prefix dependency not defined, define it + if(localCtx && prefix in localCtx) { + api.createTermDefinition(activeCtx, localCtx, prefix, defined); + } + + // use mapping if prefix is defined + const mapping = activeCtx.mappings[prefix]; + if(mapping) { + return mapping['@id'] + suffix; + } + + // already absolute IRI + return value; + } + + // prepend vocab + if(relativeTo.vocab && '@vocab' in activeCtx) { + return activeCtx['@vocab'] + value; + } + + // prepend base + if(relativeTo.base) { + return prependBase(activeCtx['@base'], value); + } + + return value; +}; + +/** + * Gets the initial context. + * + * @param options the options to use: + * [base] the document base IRI. + * + * @return the initial context. + */ +api.getInitialContext = (options) => { + const base = parseUrl(options.base || ''); + return { + '@base': base, + processingMode: options.processingMode, + mappings: {}, + inverse: null, + getInverse: _createInverseContext, + clone: _cloneActiveContext + }; + + /** + * Generates an inverse context for use in the compaction algorithm, if + * not already generated for the given active context. + * + * @return the inverse context. 
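+ *
+ * As an illustration (the term and IRI are assumed example values): a
+ * context that maps the term "name" to "http://xmlns.com/foaf/0.1/name"
+ * with no @container, @type, or @language yields an entry of the form
+ * inverse['http://xmlns.com/foaf/0.1/name']['@none']['@language']['@none']
+ * === 'name', alongside matching '@type' and '@any' entries.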
+ */ + function _createInverseContext() { + const activeCtx = this; + + // lazily create inverse + if(activeCtx.inverse) { + return activeCtx.inverse; + } + const inverse = activeCtx.inverse = {}; + + // variables for building fast CURIE map + const fastCurieMap = activeCtx.fastCurieMap = {}; + const irisToTerms = {}; + + // handle default language + const defaultLanguage = activeCtx['@language'] || '@none'; + + // create term selections for each mapping in the context, ordered by + // shortest and then lexicographically least + const mappings = activeCtx.mappings; + const terms = Object.keys(mappings).sort(util.compareShortestLeast); + for(let i = 0; i < terms.length; ++i) { + const term = terms[i]; + const mapping = mappings[term]; + if(mapping === null) { + continue; + } + + let container = mapping['@container'] || '@none'; + container = [].concat(container).sort().join(''); + + // iterate over every IRI in the mapping + const ids = [].concat(mapping['@id']); + for(let ii = 0; ii < ids.length; ++ii) { + const iri = ids[ii]; + let entry = inverse[iri]; + const isKeyword = api.isKeyword(iri); + + if(!entry) { + // initialize entry + inverse[iri] = entry = {}; + + if(!isKeyword && !mapping._termHasColon) { + // init IRI to term map and fast CURIE prefixes + irisToTerms[iri] = [term]; + const fastCurieEntry = {iri: iri, terms: irisToTerms[iri]}; + if(iri[0] in fastCurieMap) { + fastCurieMap[iri[0]].push(fastCurieEntry); + } else { + fastCurieMap[iri[0]] = [fastCurieEntry]; + } + } + } else if(!isKeyword && !mapping._termHasColon) { + // add IRI to term match + irisToTerms[iri].push(term); + } + + // add new entry + if(!entry[container]) { + entry[container] = { + '@language': {}, + '@type': {}, + '@any': {} + }; + } + entry = entry[container]; + _addPreferredTerm(term, entry['@any'], '@none'); + + if(mapping.reverse) { + // term is preferred for values using @reverse + _addPreferredTerm(term, entry['@type'], '@reverse'); + } else if('@type' in mapping) { + // term is preferred for values using specific type + _addPreferredTerm(term, entry['@type'], mapping['@type']); + } else if('@language' in mapping) { + // term is preferred for values using specific language + const language = mapping['@language'] || '@null'; + _addPreferredTerm(term, entry['@language'], language); + } else { + // term is preferred for values w/default language or no type and + // no language + // add an entry for the default language + _addPreferredTerm(term, entry['@language'], defaultLanguage); + + // add entries for no type and no language + _addPreferredTerm(term, entry['@type'], '@none'); + _addPreferredTerm(term, entry['@language'], '@none'); + } + } + } + + // build fast CURIE map + for(let key in fastCurieMap) { + _buildIriMap(fastCurieMap, key, 1); + } + + return inverse; + } + + /** + * Runs a recursive algorithm to build a lookup map for quickly finding + * potential CURIEs. + * + * @param iriMap the map to build. + * @param key the current key in the map to work on. + * @param idx the index into the IRI to compare. 
+   */
+  function _buildIriMap(iriMap, key, idx) {
+    const entries = iriMap[key];
+    const next = iriMap[key] = {};
+
+    let iri;
+    let letter;
+    for(let i = 0; i < entries.length; ++i) {
+      iri = entries[i].iri;
+      if(idx >= iri.length) {
+        letter = '';
+      } else {
+        letter = iri[idx];
+      }
+      if(letter in next) {
+        next[letter].push(entries[i]);
+      } else {
+        next[letter] = [entries[i]];
+      }
+    }
+
+    for(let key in next) {
+      if(key === '') {
+        continue;
+      }
+      _buildIriMap(next, key, idx + 1);
+    }
+  }
+
+  /**
+   * Adds the term for the given entry if not already added.
+   *
+   * @param term the term to add.
+   * @param entry the inverse context typeOrLanguage entry to add to.
+   * @param typeOrLanguageValue the key in the entry to add to.
+   */
+  function _addPreferredTerm(term, entry, typeOrLanguageValue) {
+    if(!(typeOrLanguageValue in entry)) {
+      entry[typeOrLanguageValue] = term;
+    }
+  }
+
+  /**
+   * Clones an active context, creating a child active context.
+   *
+   * @return a clone (child) of the active context.
+   */
+  function _cloneActiveContext() {
+    const child = {};
+    child['@base'] = this['@base'];
+    child.mappings = util.clone(this.mappings);
+    child.clone = this.clone;
+    child.inverse = null;
+    child.getInverse = this.getInverse;
+    if('@language' in this) {
+      child['@language'] = this['@language'];
+    }
+    if('@vocab' in this) {
+      child['@vocab'] = this['@vocab'];
+    }
+    return child;
+  }
+};
+
+/**
+ * Gets the value for the given active context key and type, null if none is
+ * set.
+ *
+ * @param ctx the active context.
+ * @param key the context key.
+ * @param [type] the type of value to get (e.g. '@id' or '@type'); if not
+ *          specified, the entire entry for the key is returned.
+ *
+ * @return the value for the key (or its entire entry), null if none is set.
+ */
+api.getContextValue = (ctx, key, type) => {
+  // return null for invalid key
+  if(key === null) {
+    return null;
+  }
+
+  // get specific entry information
+  if(ctx.mappings[key]) {
+    const entry = ctx.mappings[key];
+
+    if(_isUndefined(type)) {
+      // return whole entry
+      return entry;
+    }
+    if(type in entry) {
+      // return entry value for type
+      return entry[type];
+    }
+  }
+
+  // get default language
+  if(type === '@language' && (type in ctx)) {
+    return ctx[type];
+  }
+
+  return null;
+};
+
+/**
+ * Retrieves external @context URLs using the given document loader. Every
+ * instance of @context in the input that refers to a URL will be replaced
+ * with the JSON @context found at that URL.
+ *
+ * @param input the JSON-LD input with possible contexts.
+ * @param options the options to use:
+ *          documentLoader(url, [callback(err, remoteDoc)]) the document
+ *            loader.
+ *
+ * @return a Promise that resolves to the input with all external @context
+ *           URLs replaced by the retrieved contexts.
+ */
+api.getAllContexts = async (input, options) => {
+  return _retrieveContextUrls(input, options);
+};
+
+/**
+ * Returns whether or not the given value is a keyword.
+ *
+ * @param v the value to check.
+ *
+ * @return true if the value is a keyword, false if not.
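+ *
+ * For example, isKeyword('@id') and isKeyword('@type') return true, while
+ * isKeyword('name'), isKeyword('@Foo'), and any non-string value return
+ * false.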
+ */ +api.isKeyword = v => { + if(!_isString(v)) { + return false; + } + switch(v) { + case '@base': + case '@container': + case '@context': + case '@default': + case '@embed': + case '@explicit': + case '@graph': + case '@id': + case '@index': + case '@language': + case '@list': + case '@omitDefault': + case '@preserve': + case '@requireAll': + case '@reverse': + case '@set': + case '@type': + case '@value': + case '@version': + case '@vocab': + return true; + } + return false; +}; + +async function _retrieveContextUrls(input, options) { + const documentLoader = util.normalizeDocumentLoader(options.documentLoader); + + // retrieve all @context URLs in input + const _urls = {}; + await retrieve(input, {}, documentLoader); + + return input; + + // recursive function that will retrieve all @context URLs in documents + async function retrieve(doc, cycles, documentLoader) { + if(Object.keys(cycles).length > MAX_CONTEXT_URLS) { + throw new JsonLdError( + 'Maximum number of @context URLs exceeded.', + 'jsonld.ContextUrlError', + {code: 'loading remote context failed', max: MAX_CONTEXT_URLS}); + } + + // find all URLs in the given document, reusing already retrieved URLs + const urls = {}; + Object.keys(_urls).forEach(url => { + if(_urls[url] !== false) { + urls[url] = _urls[url]; + } + }); + _findContextUrls(doc, urls, false, options.base); + + // queue all unretrieved URLs + const queue = Object.keys(urls).filter(u => urls[u] === false); + + // retrieve URLs in queue + return Promise.all(queue.map(async url => { + // check for context URL cycle + if(url in cycles) { + throw new JsonLdError( + 'Cyclical @context URLs detected.', + 'jsonld.ContextUrlError', + {code: 'recursive context inclusion', url: url}); + } + + const _cycles = util.clone(cycles); + _cycles[url] = true; + let remoteDoc; + let ctx; + + try { + remoteDoc = await documentLoader(url); + ctx = remoteDoc.document || null; + // parse string context as JSON + if(_isString(ctx)) { + ctx = JSON.parse(ctx); + } + } catch(e) { + throw new JsonLdError( + 'Dereferencing a URL did not result in a valid JSON-LD object. ' + + 'Possible causes are an inaccessible URL perhaps due to ' + + 'a same-origin policy (ensure the server uses CORS if you are ' + + 'using client-side JavaScript), too many redirects, a ' + + 'non-JSON response, or more than one HTTP Link Header was ' + + 'provided for a remote context.', + 'jsonld.InvalidUrl', + {code: 'loading remote context failed', url: url, cause: e}); + } + + // ensure ctx is an object + if(!_isObject(ctx)) { + throw new JsonLdError( + 'Dereferencing a URL did not result in a JSON object. The ' + + 'response was valid JSON, but it was not a JSON object.', + 'jsonld.InvalidUrl', + {code: 'invalid remote context', url: url}); + } + + // use empty context if no @context key is present + if(!('@context' in ctx)) { + ctx = {'@context': {}}; + } else { + ctx = {'@context': ctx['@context']}; + } + + // append @context URL to context if given + if(remoteDoc.contextUrl) { + if(!_isArray(ctx['@context'])) { + ctx['@context'] = [ctx['@context']]; + } + ctx['@context'].push(remoteDoc.contextUrl); + } + + // recurse + await retrieve(ctx, _cycles, documentLoader); + + // store retrieved context w/replaced @context URLs + urls[url] = ctx['@context']; + + // replace all @context URLs in the document + _findContextUrls(doc, urls, true, options.base); + })); + } +} + +/** + * Finds all @context URLs in the given JSON-LD input. + * + * @param input the JSON-LD input. 
+ * @param urls a map of URLs (url => false/@contexts). + * @param replace true to replace the URLs in the given input with the + * @contexts from the urls map, false not to. + * @param base the base IRI to use to resolve relative IRIs. + * + * @return true if new URLs to retrieve were found, false if not. + */ +function _findContextUrls(input, urls, replace, base) { + if(_isArray(input)) { + for(let i = 0; i < input.length; ++i) { + _findContextUrls(input[i], urls, replace, base); + } + return; + } + + if(!_isObject(input)) { + // no @context URLs can be found in non-object input + return; + } + + // input is an object + for(let key in input) { + if(key !== '@context') { + _findContextUrls(input[key], urls, replace, base); + continue; + } + + // get @context + let ctx = input[key]; + + if(_isArray(ctx)) { + // array @context + let length = ctx.length; + for(let i = 0; i < length; ++i) { + let _ctx = ctx[i]; + if(_isString(_ctx)) { + _ctx = prependBase(base, _ctx); + // replace w/@context if requested + if(replace) { + if(urls[_ctx] !== false) { + _ctx = urls[_ctx]; + if(_isArray(_ctx)) { + // add flattened context + Array.prototype.splice.apply(ctx, [i, 1].concat(_ctx)); + i += _ctx.length - 1; + length = ctx.length; + } else { + ctx[i] = _ctx; + } + } + } else if(!(_ctx in urls)) { + // @context URL found + urls[_ctx] = false; + } + } + } + } else if(_isString(ctx)) { + // string @context + ctx = prependBase(base, ctx); + // replace w/@context if requested + if(replace) { + if(urls[ctx] !== false) { + input[key] = urls[ctx]; + } + } else if(!(ctx in urls)) { + // @context URL found + urls[ctx] = false; + } + } + } +} diff --git a/lib/documentLoaders/node.js b/lib/documentLoaders/node.js new file mode 100644 index 00000000..31b9133d --- /dev/null +++ b/lib/documentLoaders/node.js @@ -0,0 +1,162 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +const {parseLinkHeader, buildHeaders} = require('../util'); +const {LINK_HEADER_REL} = require('../constants'); +const JsonLdError = require('../JsonLdError'); +const RequestQueue = require('../RequestQueue'); + +/** + * Creates a built-in node document loader. + * + * @param options the options to use: + * secure: require all URLs to use HTTPS. + * strictSSL: true to require SSL certificates to be valid, + * false not to (default: true). + * maxRedirects: the maximum number of redirects to permit, none by + * default. + * request: the object which will make the request, default is + * provided by `https://www.npmjs.com/package/request`. + * headers: an object (map) of headers which will be passed as request + * headers for the requested document. Accept is not allowed. + * + * @return the node document loader. 
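+ *
+ * Usage sketch (illustrative only; the path, URL, and options are example
+ * values):
+ *
+ *   const documentLoader = require('./lib/documentLoaders/node')({secure: true});
+ *   const remoteDoc = await documentLoader('https://example.com/context.jsonld');
+ *   // remoteDoc is expected to have the shape
+ *   // {contextUrl, documentUrl, document}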
+ */ +module.exports = ({ + secure, + strictSSL = true, + maxRedirects = -1, + request, + headers = {} +} = {strictSSL: true, maxRedirects: -1, headers: {}}) => { + headers = buildHeaders(headers); + // TODO: use `r2` + request = request || require('request'); + const http = require('http'); + // TODO: disable cache until HTTP caching implemented + //const cache = new DocumentCache(); + + const queue = new RequestQueue(); + return queue.wrapLoader(function(url) { + return loadDocument(url, []); + }); + + async function loadDocument(url, redirects) { + if(url.indexOf('http:') !== 0 && url.indexOf('https:') !== 0) { + throw new JsonLdError( + 'URL could not be dereferenced; only "http" and "https" URLs are ' + + 'supported.', + 'jsonld.InvalidUrl', {code: 'loading document failed', url: url}); + } + if(secure && url.indexOf('https') !== 0) { + throw new JsonLdError( + 'URL could not be dereferenced; secure mode is enabled and ' + + 'the URL\'s scheme is not "https".', + 'jsonld.InvalidUrl', {code: 'loading document failed', url: url}); + } + // TODO: disable cache until HTTP caching implemented + let doc = null;//cache.get(url); + if(doc !== null) { + return doc; + } + + let result; + try { + result = await _request(request, { + url: url, + headers: headers, + strictSSL: strictSSL, + followRedirect: false + }); + } catch(e) { + throw new JsonLdError( + 'URL could not be dereferenced, an error occurred.', + 'jsonld.LoadDocumentError', + {code: 'loading document failed', url: url, cause: e}); + } + + const {res, body} = result; + + doc = {contextUrl: null, documentUrl: url, document: body || null}; + + // handle error + const statusText = http.STATUS_CODES[res.statusCode]; + if(res.statusCode >= 400) { + throw new JsonLdError( + 'URL could not be dereferenced: ' + statusText, + 'jsonld.InvalidUrl', { + code: 'loading document failed', + url: url, + httpStatusCode: res.statusCode + }); + } + + // handle Link Header + if(res.headers.link && + res.headers['content-type'] !== 'application/ld+json') { + // only 1 related link header permitted + const linkHeader = parseLinkHeader(res.headers.link)[LINK_HEADER_REL]; + if(Array.isArray(linkHeader)) { + throw new JsonLdError( + 'URL could not be dereferenced, it has more than one associated ' + + 'HTTP Link Header.', + 'jsonld.InvalidUrl', + {code: 'multiple context link headers', url: url}); + } + if(linkHeader) { + doc.contextUrl = linkHeader.target; + } + } + + // handle redirect + if(res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) { + if(redirects.length === maxRedirects) { + throw new JsonLdError( + 'URL could not be dereferenced; there were too many redirects.', + 'jsonld.TooManyRedirects', { + code: 'loading document failed', + url: url, + httpStatusCode: res.statusCode, + redirects: redirects + }); + } + if(redirects.indexOf(url) !== -1) { + throw new JsonLdError( + 'URL could not be dereferenced; infinite redirection was detected.', + 'jsonld.InfiniteRedirectDetected', { + code: 'recursive context inclusion', + url: url, + httpStatusCode: res.statusCode, + redirects: redirects + }); + } + redirects.push(url); + return loadDocument(res.headers.location, redirects); + } + + // cache for each redirected URL + redirects.push(url); + // TODO: disable cache until HTTP caching implemented + /*for(let i = 0; i < redirects.length; ++i) { + cache.set( + redirects[i], + {contextUrl: null, documentUrl: redirects[i], document: body}); + }*/ + + return doc; + } +}; + +function _request(request, options) { + return new 
Promise((resolve, reject) => { + request(options, (err, res, body) => { + if(err) { + reject(err); + } else { + resolve({res: res, body: body}); + } + }); + }); +} diff --git a/lib/documentLoaders/xhr.js b/lib/documentLoaders/xhr.js new file mode 100644 index 00000000..be31c545 --- /dev/null +++ b/lib/documentLoaders/xhr.js @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +const {parseLinkHeader, buildHeaders} = require('../util'); +const {LINK_HEADER_REL} = require('../constants'); +const JsonLdError = require('../JsonLdError'); +const RequestQueue = require('../RequestQueue'); + +const REGEX_LINK_HEADER = /(^|(\r\n))link:/i; + +/** + * Creates a built-in XMLHttpRequest document loader. + * + * @param options the options to use: + * secure: require all URLs to use HTTPS. + * headers: an object (map) of headers which will be passed as request + * headers for the requested document. Accept is not allowed. + * [xhr]: the XMLHttpRequest API to use. + * + * @return the XMLHttpRequest document loader. + */ +module.exports = ({ + secure, + headers = {}, + xhr +} = {headers: {}}) => { + headers = buildHeaders(headers); + const queue = new RequestQueue(); + return queue.wrapLoader(loader); + + async function loader(url) { + if(url.indexOf('http:') !== 0 && url.indexOf('https:') !== 0) { + throw new JsonLdError( + 'URL could not be dereferenced; only "http" and "https" URLs are ' + + 'supported.', + 'jsonld.InvalidUrl', {code: 'loading document failed', url: url}); + } + if(secure && url.indexOf('https') !== 0) { + throw new JsonLdError( + 'URL could not be dereferenced; secure mode is enabled and ' + + 'the URL\'s scheme is not "https".', + 'jsonld.InvalidUrl', {code: 'loading document failed', url: url}); + } + + let req; + try { + req = await _get(xhr, url, headers); + } catch(e) { + throw new JsonLdError( + 'URL could not be dereferenced, an error occurred.', + 'jsonld.LoadDocumentError', + {code: 'loading document failed', url: url, cause: e}); + } + + if(req.status >= 400) { + throw new JsonLdError( + 'URL could not be dereferenced: ' + req.statusText, + 'jsonld.LoadDocumentError', { + code: 'loading document failed', + url: url, + httpStatusCode: req.status + }); + } + + const doc = {contextUrl: null, documentUrl: url, document: req.response}; + + // handle Link Header (avoid unsafe header warning by existence testing) + const contentType = req.getResponseHeader('Content-Type'); + let linkHeader; + if(REGEX_LINK_HEADER.test(req.getAllResponseHeaders())) { + linkHeader = req.getResponseHeader('Link'); + } + if(linkHeader && contentType !== 'application/ld+json') { + // only 1 related link header permitted + linkHeader = parseLinkHeader(linkHeader)[LINK_HEADER_REL]; + if(Array.isArray(linkHeader)) { + throw new JsonLdError( + 'URL could not be dereferenced, it has more than one ' + + 'associated HTTP Link Header.', + 'jsonld.InvalidUrl', + {code: 'multiple context link headers', url: url}); + } + if(linkHeader) { + doc.contextUrl = linkHeader.target; + } + } + + return doc; + } +}; + +function _get(xhr, url, headers) { + xhr = xhr || XMLHttpRequest; + const req = new xhr(); + return new Promise((resolve, reject) => { + req.onload = () => resolve(req); + req.onerror = err => reject(err); + req.open('GET', url, true); + for(let k in headers) { + req.setRequestHeader(k, headers[k]); + } + req.send(); + }); +} diff --git a/lib/expand.js b/lib/expand.js new file mode 100644 index 00000000..76637bb6 --- /dev/null +++ b/lib/expand.js @@ -0,0 
+1,675 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +const JsonLdError = require('./JsonLdError'); + +const { + isArray: _isArray, + isObject: _isObject, + isString: _isString +} = require('./types'); + +const { + isList: _isList, + isValue: _isValue +} = require('./graphTypes'); + +const { + expandIri: _expandIri, + getContextValue: _getContextValue, + isKeyword: _isKeyword, + process: _processContext +} = require('./context'); + +const { + isAbsolute: _isAbsoluteIri +} = require('./url'); + +const { + addValue: _addValue, + validateTypeValue: _validateTypeValue +} = require('./util'); + +const api = {}; +module.exports = api; + +/** + * Recursively expands an element using the given context. Any context in + * the element will be removed. All context URLs must have been retrieved + * before calling this method. + * + * @param activeCtx the context to use. + * @param activeProperty the property for the element, null for none. + * @param element the element to expand. + * @param options the expansion options. + * @param insideList true if the element is a list, false if not. + * @param expansionMap(info) a function that can be used to custom map + * unmappable values (or to throw an error when they are detected); + * if this function returns `undefined` then the default behavior + * will be used. + * + * @return a Promise that resolves to the expanded value. + */ +api.expand = ({ + activeCtx, + activeProperty = null, + element, + options = {}, + insideList = false, + expansionMap = () => undefined +}) => { + // nothing to expand + if(element === null || element === undefined) { + return null; + } + + if(!_isArray(element) && !_isObject(element)) { + // drop free-floating scalars that are not in lists unless custom mapped + if(!insideList && (activeProperty === null || + _expandIri(activeCtx, activeProperty, {vocab: true}) === '@graph')) { + // TODO: use `await` to support async + const mapped = expansionMap({ + unmappedValue: element, + activeCtx, + activeProperty, + options, + insideList + }); + if(mapped === undefined) { + return null; + } + return mapped; + } + + // expand element according to value expansion rules + return _expandValue({activeCtx, activeProperty, value: element}); + } + + // recursively expand array + if(_isArray(element)) { + let rval = []; + const container = _getContextValue( + activeCtx, activeProperty, '@container') || []; + insideList = insideList || container.includes('@list'); + for(let i = 0; i < element.length; ++i) { + // expand element + let e = api.expand({ + activeCtx, + activeProperty, + element: element[i], + options, + expansionMap + }); + if(insideList && (_isArray(e) || _isList(e))) { + // lists of lists are illegal + throw new JsonLdError( + 'Invalid JSON-LD syntax; lists of lists are not permitted.', + 'jsonld.SyntaxError', {code: 'list of lists'}); + } + + if(e === null) { + // TODO: add `await` for async support + e = expansionMap({ + unmappedValue: element[i], + activeCtx, + activeProperty, + parent: element, + index: i, + options, + expandedParent: rval, + insideList + }); + if(e === undefined) { + continue; + } + } + + if(_isArray(e)) { + rval = rval.concat(e); + } else { + rval.push(e); + } + } + return rval; + } + + // recursively expand object: + + // if element has a context, process it + if('@context' in element) { + activeCtx = _processContext( + {activeCtx, localCtx: element['@context'], options}); + } + + // expand the active property + const expandedActiveProperty = _expandIri( + 
    activeCtx, activeProperty, {vocab: true});
+
+  let rval = {};
+  let keys = Object.keys(element).sort();
+  for(let ki = 0; ki < keys.length; ++ki) {
+    const key = keys[ki];
+    let value = element[key];
+    let expandedValue;
+
+    // skip @context
+    if(key === '@context') {
+      continue;
+    }
+
+    // expand property
+    let expandedProperty = _expandIri(activeCtx, key, {vocab: true});
+
+    // drop non-absolute IRI keys that aren't keywords unless custom mapped
+    if(expandedProperty === null ||
+      !(_isAbsoluteIri(expandedProperty) || _isKeyword(expandedProperty))) {
+      // TODO: use `await` to support async
+      expandedProperty = expansionMap({
+        unmappedProperty: key,
+        activeCtx,
+        activeProperty,
+        parent: element,
+        options,
+        insideList,
+        value,
+        expandedParent: rval
+      });
+      if(expandedProperty === undefined) {
+        continue;
+      }
+    }
+
+    if(_isKeyword(expandedProperty)) {
+      if(expandedActiveProperty === '@reverse') {
+        throw new JsonLdError(
+          'Invalid JSON-LD syntax; a keyword cannot be used as a @reverse ' +
+          'property.', 'jsonld.SyntaxError',
+          {code: 'invalid reverse property map', value: value});
+      }
+      if(expandedProperty in rval) {
+        throw new JsonLdError(
+          'Invalid JSON-LD syntax; colliding keywords detected.',
+          'jsonld.SyntaxError',
+          {code: 'colliding keywords', keyword: expandedProperty});
+      }
+    }
+
+    // syntax error if @id is not a string
+    if(expandedProperty === '@id' && !_isString(value)) {
+      if(!options.isFrame) {
+        throw new JsonLdError(
+          'Invalid JSON-LD syntax; "@id" value must be a string.',
+          'jsonld.SyntaxError', {code: 'invalid @id value', value: value});
+      }
+      if(!_isObject(value)) {
+        throw new JsonLdError(
+          'Invalid JSON-LD syntax; "@id" value must be a string or an ' +
+          'object.', 'jsonld.SyntaxError',
+          {code: 'invalid @id value', value: value});
+      }
+    }
+
+    if(expandedProperty === '@type') {
+      _validateTypeValue(value);
+    }
+
+    // @graph must be an array or an object
+    if(expandedProperty === '@graph' &&
+      !(_isObject(value) || _isArray(value))) {
+      throw new JsonLdError(
+        'Invalid JSON-LD syntax; "@graph" value must be an ' +
+        'object or an array.',
+        'jsonld.SyntaxError', {code: 'invalid @graph value', value: value});
+    }
+
+    // @value must not be an object or an array
+    if(expandedProperty === '@value' &&
+      (_isObject(value) || _isArray(value))) {
+      throw new JsonLdError(
+        'Invalid JSON-LD syntax; "@value" value must not be an ' +
+        'object or an array.',
+        'jsonld.SyntaxError',
+        {code: 'invalid value object value', value: value});
+    }
+
+    // @language must be a string
+    if(expandedProperty === '@language') {
+      if(value === null) {
+        // drop null @language values, they expand as if they didn't exist
+        continue;
+      }
+      if(!_isString(value)) {
+        throw new JsonLdError(
+          'Invalid JSON-LD syntax; "@language" value must be a string.',
+          'jsonld.SyntaxError',
+          {code: 'invalid language-tagged string', value: value});
+      }
+      // ensure language value is lowercase
+      value = value.toLowerCase();
+    }
+
+    // @index must be a string
+    if(expandedProperty === '@index') {
+      if(!_isString(value)) {
+        throw new JsonLdError(
+          'Invalid JSON-LD syntax; "@index" value must be a string.',
+          'jsonld.SyntaxError',
+          {code: 'invalid @index value', value: value});
+      }
+    }
+
+    // @reverse must be an object
+    if(expandedProperty === '@reverse') {
+      if(!_isObject(value)) {
+        throw new JsonLdError(
+          'Invalid JSON-LD syntax; "@reverse" value must be an object.',
+          'jsonld.SyntaxError', {code: 'invalid @reverse value', value: value});
+      }
+
+      expandedValue = api.expand({
+        activeCtx,
+
activeProperty: + '@reverse', + element: value, + options, + expansionMap + }); + + // properties double-reversed + if('@reverse' in expandedValue) { + for(let property in expandedValue['@reverse']) { + _addValue( + rval, property, expandedValue['@reverse'][property], + {propertyIsArray: true}); + } + } + + // FIXME: can this be merged with code below to simplify? + // merge in all reversed properties + let reverseMap = rval['@reverse'] || null; + for(let property in expandedValue) { + if(property === '@reverse') { + continue; + } + if(reverseMap === null) { + reverseMap = rval['@reverse'] = {}; + } + _addValue(reverseMap, property, [], {propertyIsArray: true}); + const items = expandedValue[property]; + for(let ii = 0; ii < items.length; ++ii) { + const item = items[ii]; + if(_isValue(item) || _isList(item)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; "@reverse" value must not be a ' + + '@value or an @list.', 'jsonld.SyntaxError', + {code: 'invalid reverse property value', value: expandedValue}); + } + _addValue(reverseMap, property, item, {propertyIsArray: true}); + } + } + + continue; + } + + const container = _getContextValue(activeCtx, key, '@container') || []; + + if(container.includes('@language') && _isObject(value)) { + // handle language map container (skip if value is not an object) + expandedValue = _expandLanguageMap(value); + } else if(container.includes('@index') && _isObject(value)) { + // handle index container (skip if value is not an object) + expandedValue = _expandIndexMap({ + activeCtx, + options, + activeProperty: key, + value, + expansionMap + }); + } else { + // recurse into @list or @set + const isList = (expandedProperty === '@list'); + if(isList || expandedProperty === '@set') { + let nextActiveProperty = activeProperty; + if(isList && expandedActiveProperty === '@graph') { + nextActiveProperty = null; + } + expandedValue = api.expand({ + activeCtx, + activeProperty: nextActiveProperty, + element: value, + options, + insideList: isList, + expansionMap + }); + if(isList && _isList(expandedValue)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; lists of lists are not permitted.', + 'jsonld.SyntaxError', {code: 'list of lists'}); + } + } else { + // recursively expand value with key as new active property + expandedValue = api.expand({ + activeCtx, + activeProperty: key, + element: value, + options, + insideList: false, + expansionMap + }); + } + } + + // drop null values if property is not @value + if(expandedValue === null && expandedProperty !== '@value') { + // TODO: use `await` to support async + expandedValue = expansionMap({ + unmappedValue: value, + expandedProperty, + activeCtx, + activeProperty, + parent: element, + options, + insideList, + key: key, + expandedParent: rval + }); + if(expandedValue === undefined) { + continue; + } + } + + // convert expanded value to @list if container specifies it + if(expandedProperty !== '@list' && !_isList(expandedValue) && + container.includes('@list')) { + // ensure expanded value is an array + expandedValue = (_isArray(expandedValue) ? + expandedValue : [expandedValue]); + expandedValue = {'@list': expandedValue}; + } + + // convert expanded value to @graph if container specifies it + if(container.includes('@graph')) { + // ensure expanded value is an array + expandedValue = [].concat(expandedValue); + expandedValue = {'@graph': expandedValue}; + } + + // FIXME: can this be merged with code above to simplify? 
+ // merge in reverse properties + if(activeCtx.mappings[key] && activeCtx.mappings[key].reverse) { + const reverseMap = rval['@reverse'] = rval['@reverse'] || {}; + if(!_isArray(expandedValue)) { + expandedValue = [expandedValue]; + } + for(let ii = 0; ii < expandedValue.length; ++ii) { + const item = expandedValue[ii]; + if(_isValue(item) || _isList(item)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; "@reverse" value must not be a ' + + '@value or an @list.', 'jsonld.SyntaxError', + {code: 'invalid reverse property value', value: expandedValue}); + } + _addValue(reverseMap, expandedProperty, item, {propertyIsArray: true}); + } + continue; + } + + // add value for property + // use an array except for certain keywords + const useArray = + !['@index', '@id', '@type', '@value', '@language'].includes(expandedProperty); + _addValue( + rval, expandedProperty, expandedValue, {propertyIsArray: useArray}); + } + + // get property count on expanded output + keys = Object.keys(rval); + let count = keys.length; + + if('@value' in rval) { + // @value must only have @language or @type + if('@type' in rval && '@language' in rval) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; an element containing "@value" may not ' + + 'contain both "@type" and "@language".', + 'jsonld.SyntaxError', {code: 'invalid value object', element: rval}); + } + let validCount = count - 1; + if('@type' in rval) { + validCount -= 1; + } + if('@index' in rval) { + validCount -= 1; + } + if('@language' in rval) { + validCount -= 1; + } + if(validCount !== 0) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; an element containing "@value" may only ' + + 'have an "@index" property and at most one other property ' + + 'which can be "@type" or "@language".', + 'jsonld.SyntaxError', {code: 'invalid value object', element: rval}); + } + // drop null @values unless custom mapped + if(rval['@value'] === null) { + // TODO: use `await` to support async + const mapped = expansionMap({ + unmappedValue: rval, + activeCtx, + activeProperty, + element, + options, + insideList + }); + if(mapped !== undefined) { + rval = mapped; + } else { + rval = null; + } + } else if('@language' in rval && !_isString(rval['@value'])) { + // if @language is present, @value must be a string + throw new JsonLdError( + 'Invalid JSON-LD syntax; only strings may be language-tagged.', + 'jsonld.SyntaxError', + {code: 'invalid language-tagged value', element: rval}); + } else if('@type' in rval && (!_isAbsoluteIri(rval['@type']) || + rval['@type'].indexOf('_:') === 0)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; an element containing "@value" and "@type" ' + + 'must have an absolute IRI for the value of "@type".', + 'jsonld.SyntaxError', {code: 'invalid typed value', element: rval}); + } + } else if('@type' in rval && !_isArray(rval['@type'])) { + // convert @type to an array + rval['@type'] = [rval['@type']]; + } else if('@set' in rval || '@list' in rval) { + // handle @set and @list + if(count > 1 && !(count === 2 && '@index' in rval)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; if an element has the property "@set" ' + + 'or "@list", then it can have at most one other property that is ' + + '"@index".', 'jsonld.SyntaxError', + {code: 'invalid set or list object', element: rval}); + } + // optimize away @set + if('@set' in rval) { + rval = rval['@set']; + keys = Object.keys(rval); + count = keys.length; + } + } else if(count === 1 && '@language' in rval) { + // drop objects with only @language unless custom mapped + // TODO: 
use `await` to support async + const mapped = expansionMap(rval, { + unmappedValue: rval, + activeCtx, + activeProperty, + element, + options, + insideList + }); + if(mapped !== undefined) { + rval = mapped; + } else { + rval = null; + } + } + + // drop certain top-level objects that do not occur in lists, unless custom + // mapped + if(_isObject(rval) && + !options.keepFreeFloatingNodes && !insideList && + (activeProperty === null || expandedActiveProperty === '@graph')) { + // drop empty object, top-level @value/@list, or object with only @id + if(count === 0 || '@value' in rval || '@list' in rval || + (count === 1 && '@id' in rval)) { + // TODO: use `await` to support async + const mapped = expansionMap({ + unmappedValue: rval, + activeCtx, + activeProperty, + element, + options, + insideList + }); + if(mapped !== undefined) { + rval = mapped; + } else { + rval = null; + } + } + } + + return rval; +}; + +/** + * Expands the given value by using the coercion and keyword rules in the + * given context. + * + * @param activeCtx the active context to use. + * @param activeProperty the active property the value is associated with. + * @param value the value to expand. + * + * @return the expanded value. + */ +function _expandValue({activeCtx, activeProperty, value}) { + // nothing to expand + if(value === null || value === undefined) { + return null; + } + + // special-case expand @id and @type (skips '@id' expansion) + const expandedProperty = _expandIri(activeCtx, activeProperty, {vocab: true}); + if(expandedProperty === '@id') { + return _expandIri(activeCtx, value, {base: true}); + } else if(expandedProperty === '@type') { + return _expandIri(activeCtx, value, {vocab: true, base: true}); + } + + // get type definition from context + const type = _getContextValue(activeCtx, activeProperty, '@type'); + + // do @id expansion (automatic for @graph) + if((type === '@id' || expandedProperty === '@graph') && _isString(value)) { + return {'@id': _expandIri(activeCtx, value, {base: true})}; + } + // do @id expansion w/vocab + if(type === '@vocab' && _isString(value)) { + return {'@id': _expandIri(activeCtx, value, {vocab: true, base: true})}; + } + + // do not expand keyword values + if(_isKeyword(expandedProperty)) { + return value; + } + + const rval = {}; + + if(type && !['@id', '@vocab'].includes(type)) { + // other type + rval['@type'] = type; + } else if(_isString(value)) { + // check for language tagging for strings + const language = _getContextValue(activeCtx, activeProperty, '@language'); + if(language !== null) { + rval['@language'] = language; + } + } + // do conversion of values that aren't basic JSON types to strings + if(!['boolean', 'number', 'string'].includes(typeof value)) { + value = value.toString(); + } + rval['@value'] = value; + + return rval; +} + +/** + * Expands a language map. + * + * @param languageMap the language map to expand. + * + * @return the expanded language map. 
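+ *
+ * For example (illustrative data), the language map
+ *   {"de": "Die Königin", "en": ["The Queen", "Her Majesty"]}
+ * is expected to expand to
+ *   [{"@value": "Die Königin", "@language": "de"},
+ *    {"@value": "The Queen", "@language": "en"},
+ *    {"@value": "Her Majesty", "@language": "en"}],
+ * with keys processed in sorted order and language tags lowercased.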
+ */ +function _expandLanguageMap(languageMap) { + const rval = []; + const keys = Object.keys(languageMap).sort(); + for(let ki = 0; ki < keys.length; ++ki) { + const key = keys[ki]; + let val = languageMap[key]; + if(!_isArray(val)) { + val = [val]; + } + for(let vi = 0; vi < val.length; ++vi) { + const item = val[vi]; + if(item === null) { + // null values are allowed (8.5) but ignored (3.1) + continue; + } + if(!_isString(item)) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; language map values must be strings.', + 'jsonld.SyntaxError', + {code: 'invalid language map value', languageMap: languageMap}); + } + rval.push({ + '@value': item, + '@language': key.toLowerCase() + }); + } + } + return rval; +} + +function _expandIndexMap( + {activeCtx, options, activeProperty, value, expansionMap}) { + const rval = []; + const keys = Object.keys(value).sort(); + for(let ki = 0; ki < keys.length; ++ki) { + const key = keys[ki]; + let val = value[key]; + if(!_isArray(val)) { + val = [val]; + } + val = api.expand({ + activeCtx, + activeProperty, + element: val, + options, + insideList: false, + expansionMap + }); + for(let vi = 0; vi < val.length; ++vi) { + const item = val[vi]; + if(!('@index' in item)) { + item['@index'] = key; + } + rval.push(item); + } + } + return rval; +} diff --git a/lib/flatten.js b/lib/flatten.js new file mode 100644 index 00000000..d8b34bbe --- /dev/null +++ b/lib/flatten.js @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +const { + isSubjectReference: _isSubjectReference +} = require('./graphTypes'); + +const { + createMergedNodeMap: _createMergedNodeMap +} = require('./nodeMap'); + +const api = {}; +module.exports = api; + +/** + * Performs JSON-LD flattening. + * + * @param input the expanded JSON-LD to flatten. + * + * @return the flattened output. + */ +api.flatten = input => { + const defaultGraph = _createMergedNodeMap(input); + + // produce flattened output + const flattened = []; + const keys = Object.keys(defaultGraph).sort(); + for(let ki = 0; ki < keys.length; ++ki) { + const node = defaultGraph[keys[ki]]; + // only add full subjects to top-level + if(!_isSubjectReference(node)) { + flattened.push(node); + } + } + return flattened; +}; diff --git a/lib/frame.js b/lib/frame.js new file mode 100644 index 00000000..adc9c278 --- /dev/null +++ b/lib/frame.js @@ -0,0 +1,445 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +const {isKeyword} = require('./context'); +const graphTypes = require('./graphTypes'); +const types = require('./types'); +const util = require('./util'); +const JsonLdError = require('./JsonLdError'); +const { + createNodeMap: _createNodeMap +} = require('./nodeMap'); + +const api = {}; +module.exports = api; + +/** + * Performs JSON-LD `merged` framing. + * + * @param input the expanded JSON-LD to frame. + * @param frame the expanded JSON-LD frame to use. + * @param options the framing options. + * + * @return the framed output. 
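+ *
+ * For example (illustrative frame), calling frameMerged(expandedInput,
+ * [{'@type': ['http://example.org/Book']}], options) is expected to return
+ * only the node objects whose @type includes that IRI, embedded according
+ * to the frame flags in `options`.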
+ */ +api.frameMerged = (input, frame, options) => { + // create framing state + const state = { + options: options, + graphs: {'@default': {}, '@merged': {}}, + subjectStack: [], + link: {} + }; + + // produce a map of all graphs and name each bnode + // FIXME: currently uses subjects from @merged graph only + const issuer = new util.IdentifierIssuer('_:b'); + _createNodeMap(input, state.graphs, '@merged', issuer); + state.subjects = state.graphs['@merged']; + + // frame the subjects + const framed = []; + api.frame(state, Object.keys(state.subjects).sort(), frame, framed); + return framed; +}; + +/** + * Frames subjects according to the given frame. + * + * @param state the current framing state. + * @param subjects the subjects to filter. + * @param frame the frame. + * @param parent the parent subject or top-level array. + * @param property the parent property, initialized to null. + */ +api.frame = (state, subjects, frame, parent, property = null) => { + // validate the frame + _validateFrame(frame); + frame = frame[0]; + + // get flags for current frame + const options = state.options; + const flags = { + embed: _getFrameFlag(frame, options, 'embed'), + explicit: _getFrameFlag(frame, options, 'explicit'), + requireAll: _getFrameFlag(frame, options, 'requireAll') + }; + + // filter out subjects that match the frame + const matches = _filterSubjects(state, subjects, frame, flags); + + // add matches to output + const ids = Object.keys(matches).sort(); + for(let idx = 0; idx < ids.length; ++idx) { + const id = ids[idx]; + const subject = matches[id]; + + if(flags.embed === '@link' && id in state.link) { + // TODO: may want to also match an existing linked subject against + // the current frame ... so different frames could produce different + // subjects that are only shared in-memory when the frames are the same + + // add existing linked subject + _addFrameOutput(parent, property, state.link[id]); + continue; + } + + /* Note: In order to treat each top-level match as a compartmentalized + result, clear the unique embedded subjects map when the property is null, + which only occurs at the top-level. 
*/ + if(property === null) { + state.uniqueEmbeds = {}; + } + + // start output for subject + const output = {}; + output['@id'] = id; + state.link[id] = output; + + // if embed is @never or if a circular reference would be created by an + // embed, the subject cannot be embedded, just add the reference; + // note that a circular reference won't occur when the embed flag is + // `@link` as the above check will short-circuit before reaching this point + if(flags.embed === '@never' || + _createsCircularReference(subject, state.subjectStack)) { + _addFrameOutput(parent, property, output); + continue; + } + + // if only the last match should be embedded + if(flags.embed === '@last') { + // remove any existing embed + if(id in state.uniqueEmbeds) { + _removeEmbed(state, id); + } + state.uniqueEmbeds[id] = {parent: parent, property: property}; + } + + // push matching subject onto stack to enable circular embed checks + state.subjectStack.push(subject); + + // iterate over subject properties + let props = Object.keys(subject).sort(); + for(let i = 0; i < props.length; i++) { + const prop = props[i]; + + // copy keywords to output + if(isKeyword(prop)) { + output[prop] = util.clone(subject[prop]); + continue; + } + + // explicit is on and property isn't in the frame, skip processing + if(flags.explicit && !(prop in frame)) { + continue; + } + + // add objects + const objects = subject[prop]; + for(var oi = 0; oi < objects.length; ++oi) { + let o = objects[oi]; + + // recurse into list + if(graphTypes.isList(o)) { + // add empty list + const list = {'@list': []}; + _addFrameOutput(output, prop, list); + + // add list objects + const src = o['@list']; + for(let n in src) { + o = src[n]; + if(graphTypes.isSubjectReference(o)) { + const subframe = (prop in frame ? + frame[prop][0]['@list'] : _createImplicitFrame(flags)); + // recurse into subject reference + api.frame(state, [o['@id']], subframe, list, '@list'); + } else { + // include other values automatically + _addFrameOutput(list, '@list', util.clone(o)); + } + } + continue; + } + + if(graphTypes.isSubjectReference(o)) { + // recurse into subject reference + const subframe = (prop in frame ? + frame[prop] : _createImplicitFrame(flags)); + api.frame(state, [o['@id']], subframe, output, prop); + } else { + // include other values automatically + _addFrameOutput(output, prop, util.clone(o)); + } + } + } + + // handle defaults + props = Object.keys(frame).sort(); + for(let i = 0; i < props.length; ++i) { + const prop = props[i]; + + // skip keywords + if(isKeyword(prop)) { + continue; + } + + // if omit default is off, then include default values for properties + // that appear in the next frame but are not in the matching subject + const next = frame[prop][0]; + const omitDefaultOn = _getFrameFlag(next, options, 'omitDefault'); + if(!omitDefaultOn && !(prop in output)) { + let preserve = '@null'; + if('@default' in next) { + preserve = util.clone(next['@default']); + } + if(!types.isArray(preserve)) { + preserve = [preserve]; + } + output[prop] = [{'@preserve': preserve}]; + } + } + + // add output to parent + _addFrameOutput(parent, property, output); + + // pop matching subject from circular ref-checking stack + state.subjectStack.pop(); + } +}; + +/** + * Creates an implicit frame when recursing through subject matches. If + * a frame doesn't have an explicit frame for a particular property, then + * a wildcard child frame will be created that uses the same flags that the + * parent frame used. + * + * @param flags the current framing flags. 
+ * + * @return the implicit frame. + */ +function _createImplicitFrame(flags) { + const frame = {}; + for(let key in flags) { + if(flags[key] !== undefined) { + frame['@' + key] = [flags[key]]; + } + } + return [frame]; +} + +/** + * Checks the current subject stack to see if embedding the given subject + * would cause a circular reference. + * + * @param subjectToEmbed the subject to embed. + * @param subjectStack the current stack of subjects. + * + * @return true if a circular reference would be created, false if not. + */ +function _createsCircularReference(subjectToEmbed, subjectStack) { + for(let i = subjectStack.length - 1; i >= 0; --i) { + if(subjectStack[i]['@id'] === subjectToEmbed['@id']) { + return true; + } + } + return false; +} + +/** + * Gets the frame flag value for the given flag name. + * + * @param frame the frame. + * @param options the framing options. + * @param name the flag name. + * + * @return the flag value. + */ +function _getFrameFlag(frame, options, name) { + let flag = '@' + name; + let rval = (flag in frame ? frame[flag][0] : options[name]); + if(name === 'embed') { + // default is "@last" + // backwards-compatibility support for "embed" maps: + // true => "@last" + // false => "@never" + if(rval === true) { + rval = '@last'; + } else if(rval === false) { + rval = '@never'; + } else if(rval !== '@always' && rval !== '@never' && rval !== '@link') { + rval = '@last'; + } + } + return rval; +} + +/** + * Validates a JSON-LD frame, throwing an exception if the frame is invalid. + * + * @param frame the frame to validate. + */ +function _validateFrame(frame) { + if(!types.isArray(frame) || frame.length !== 1 || !types.isObject(frame[0])) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; a JSON-LD frame must be a single object.', + 'jsonld.SyntaxError', {frame: frame}); + } +} + +/** + * Returns a map of all of the subjects that match a parsed frame. + * + * @param state the current framing state. + * @param subjects the set of subjects to filter. + * @param frame the parsed frame. + * @param flags the frame flags. + * + * @return all of the matched subjects. + */ +function _filterSubjects(state, subjects, frame, flags) { + // filter subjects in @id order + const rval = {}; + for(let i = 0; i < subjects.length; ++i) { + const id = subjects[i]; + const subject = state.subjects[id]; + if(_filterSubject(subject, frame, flags)) { + rval[id] = subject; + } + } + return rval; +} + +/** + * Returns true if the given subject matches the given frame. + * + * @param subject the subject to check. + * @param frame the frame to check. + * @param flags the frame flags. + * + * @return true if the subject matches, false if not. 
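+ *
+ * For example, an empty frame object acts as a wildcard and matches any
+ * subject, while a frame containing '@type': ['http://example.org/Book']
+ * matches only subjects whose @type includes that IRI (the IRI here is an
+ * illustrative value).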
+ */ +function _filterSubject(subject, frame, flags) { + // check @type (object value means 'any' type, fall through to ducktyping) + if('@type' in frame && + !(frame['@type'].length === 1 && types.isObject(frame['@type'][0]))) { + const nodeTypes = frame['@type']; + for(let i = 0; i < nodeTypes.length; ++i) { + // any matching @type is a match + if(util.hasValue(subject, '@type', nodeTypes[i])) { + return true; + } + } + return false; + } + + // check ducktype + let wildcard = true; + let matchesSome = false; + for(let key in frame) { + if(isKeyword(key)) { + // skip non-@id and non-@type + if(key !== '@id' && key !== '@type') { + continue; + } + wildcard = false; + + // check @id for a specific @id value + if(key === '@id' && types.isString(frame[key])) { + if(subject[key] !== frame[key]) { + return false; + } + matchesSome = true; + continue; + } + } + + wildcard = false; + + if(key in subject) { + // frame[key] === [] means do not match if property is present + if(types.isArray(frame[key]) && frame[key].length === 0 && + subject[key] !== undefined) { + return false; + } + matchesSome = true; + continue; + } + + // all properties must match to be a duck unless a @default is specified + const hasDefault = ( + types.isArray(frame[key]) && types.isObject(frame[key][0]) && + '@default' in frame[key][0]); + if(flags.requireAll && !hasDefault) { + return false; + } + } + + // return true if wildcard or subject matches some properties + return wildcard || matchesSome; +} + +/** + * Removes an existing embed. + * + * @param state the current framing state. + * @param id the @id of the embed to remove. + */ +function _removeEmbed(state, id) { + // get existing embed + const embeds = state.uniqueEmbeds; + const embed = embeds[id]; + const parent = embed.parent; + const property = embed.property; + + // create reference to replace embed + const subject = {'@id': id}; + + // remove existing embed + if(types.isArray(parent)) { + // replace subject with reference + for(let i = 0; i < parent.length; ++i) { + if(util.compareValues(parent[i], subject)) { + parent[i] = subject; + break; + } + } + } else { + // replace subject with reference + const useArray = types.isArray(parent[property]); + util.removeValue(parent, property, subject, {propertyIsArray: useArray}); + util.addValue(parent, property, subject, {propertyIsArray: useArray}); + } + + // recursively remove dependent dangling embeds + const removeDependents = id => { + // get embed keys as a separate array to enable deleting keys in map + const ids = Object.keys(embeds); + for(let i = 0; i < ids.length; ++i) { + const next = ids[i]; + if(next in embeds && types.isObject(embeds[next].parent) && + embeds[next].parent['@id'] === id) { + delete embeds[next]; + removeDependents(next); + } + } + }; + removeDependents(id); +} + +/** + * Adds framing output to the given parent. + * + * @param parent the parent to add to. + * @param property the parent property. + * @param output the output to add. + */ +function _addFrameOutput(parent, property, output) { + if(types.isObject(parent)) { + util.addValue(parent, property, output, {propertyIsArray: true}); + } else { + parent.push(output); + } +} diff --git a/lib/fromRdf.js b/lib/fromRdf.js new file mode 100644 index 00000000..11c6ab9d --- /dev/null +++ b/lib/fromRdf.js @@ -0,0 +1,328 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. 
+ */ +'use strict'; + +const graphTypes = require('./graphTypes'); +const types = require('./types'); +const util = require('./util'); + +// constants +const { + RDF, + RDF_LIST, + RDF_FIRST, + RDF_REST, + RDF_NIL, + RDF_TYPE, + RDF_PLAIN_LITERAL, + RDF_XML_LITERAL, + RDF_OBJECT, + RDF_LANGSTRING, + + XSD, + XSD_BOOLEAN, + XSD_DOUBLE, + XSD_INTEGER, + XSD_STRING, +} = require('./constants'); + +const api = {}; +module.exports = api; + +/** + * Converts an RDF dataset to JSON-LD. + * + * @param dataset the RDF dataset. + * @param options the RDF serialization options. + * + * @return a Promise that resolves to the JSON-LD output. + */ +api.fromRDF = async ( + dataset, {useRdfType = false, useNativeTypes = false}) => { + const defaultGraph = {}; + const graphMap = {'@default': defaultGraph}; + const referencedOnce = {}; + + for(const quad of dataset) { + // TODO: change 'name' to 'graph' + const name = (quad.graph.termType === 'DefaultGraph') ? + '@default': quad.graph.value; + if(!(name in graphMap)) { + graphMap[name] = {}; + } + if(name !== '@default' && !(name in defaultGraph)) { + defaultGraph[name] = {'@id': name}; + } + + const nodeMap = graphMap[name]; + + // get subject, predicate, object + const s = quad.subject.value; + const p = quad.predicate.value; + const o = quad.object; + + if(!(s in nodeMap)) { + nodeMap[s] = {'@id': s}; + } + const node = nodeMap[s]; + + const objectIsNode = o.termType.endsWith('Node'); + if(objectIsNode && !(o.value in nodeMap)) { + nodeMap[o.value] = {'@id': o.value}; + } + + if(p === RDF_TYPE && !useRdfType && objectIsNode) { + util.addValue(node, '@type', o.value, {propertyIsArray: true}); + continue; + } + + const value = _RDFToObject(o, useNativeTypes); + util.addValue(node, p, value, {propertyIsArray: true}); + + // object may be an RDF list/partial list node but we can't know easily + // until all triples are read + if(objectIsNode) { + if(o.value === RDF_NIL) { + // track rdf:nil uniquely per graph + const object = nodeMap[o.value]; + if(!('usages' in object)) { + object.usages = []; + } + object.usages.push({ + node: node, + property: p, + value: value + }); + } else if(o.value in referencedOnce) { + // object referenced more than once + referencedOnce[o.value] = false; + } else { + // keep track of single reference + referencedOnce[o.value] = { + node: node, + property: p, + value: value + }; + } + } + } + +/* + for(let name in dataset) { + const graph = dataset[name]; + if(!(name in graphMap)) { + graphMap[name] = {}; + } + if(name !== '@default' && !(name in defaultGraph)) { + defaultGraph[name] = {'@id': name}; + } + const nodeMap = graphMap[name]; + for(let ti = 0; ti < graph.length; ++ti) { + const triple = graph[ti]; + + // get subject, predicate, object + const s = triple.subject.value; + const p = triple.predicate.value; + const o = triple.object; + + if(!(s in nodeMap)) { + nodeMap[s] = {'@id': s}; + } + const node = nodeMap[s]; + + const objectIsId = (o.type === 'IRI' || o.type === 'blank node'); + if(objectIsId && !(o.value in nodeMap)) { + nodeMap[o.value] = {'@id': o.value}; + } + + if(p === RDF_TYPE && !useRdfType && objectIsId) { + util.addValue(node, '@type', o.value, {propertyIsArray: true}); + continue; + } + + const value = _RDFToObject(o, useNativeTypes); + util.addValue(node, p, value, {propertyIsArray: true}); + + // object may be an RDF list/partial list node but we can't know easily + // until all triples are read + if(objectIsId) { + if(o.value === RDF_NIL) { + // track rdf:nil uniquely per graph + const object = 
nodeMap[o.value]; + if(!('usages' in object)) { + object.usages = []; + } + object.usages.push({ + node: node, + property: p, + value: value + }); + } else if(o.value in referencedOnce) { + // object referenced more than once + referencedOnce[o.value] = false; + } else { + // keep track of single reference + referencedOnce[o.value] = { + node: node, + property: p, + value: value + }; + } + } + } + }*/ + + // convert linked lists to @list arrays + for(let name in graphMap) { + const graphObject = graphMap[name]; + + // no @lists to be converted, continue + if(!(RDF_NIL in graphObject)) { + continue; + } + + // iterate backwards through each RDF list + const nil = graphObject[RDF_NIL]; + for(let usage of nil.usages) { + let node = usage.node; + let property = usage.property; + let head = usage.value; + const list = []; + const listNodes = []; + + // ensure node is a well-formed list node; it must: + // 1. Be referenced only once. + // 2. Have an array for rdf:first that has 1 item. + // 3. Have an array for rdf:rest that has 1 item. + // 4. Have no keys other than: @id, rdf:first, rdf:rest, and, + // optionally, @type where the value is rdf:List. + let nodeKeyCount = Object.keys(node).length; + while(property === RDF_REST && + types.isObject(referencedOnce[node['@id']]) && + types.isArray(node[RDF_FIRST]) && node[RDF_FIRST].length === 1 && + types.isArray(node[RDF_REST]) && node[RDF_REST].length === 1 && + (nodeKeyCount === 3 || + (nodeKeyCount === 4 && types.isArray(node['@type']) && + node['@type'].length === 1 && node['@type'][0] === RDF_LIST))) { + list.push(node[RDF_FIRST][0]); + listNodes.push(node['@id']); + + // get next node, moving backwards through list + usage = referencedOnce[node['@id']]; + node = usage.node; + property = usage.property; + head = usage.value; + nodeKeyCount = Object.keys(node).length; + + // if node is not a blank node, then list head found + if(!graphTypes.isBlankNode(node)) { + break; + } + } + + // the list is nested in another list + if(property === RDF_FIRST) { + // empty list + if(node['@id'] === RDF_NIL) { + // can't convert rdf:nil to a @list object because it would + // result in a list of lists which isn't supported + continue; + } + + // preserve list head + head = graphObject[head['@id']][RDF_REST][0]; + list.pop(); + listNodes.pop(); + } + + // transform list into @list object + delete head['@id']; + head['@list'] = list.reverse(); + for(const listNode of listNodes) { + delete graphObject[listNode]; + } + } + + delete nil.usages; + } + + const result = []; + const subjects = Object.keys(defaultGraph).sort(); + for(const subject of subjects) { + const node = defaultGraph[subject]; + if(subject in graphMap) { + const graph = node['@graph'] = []; + const graphObject = graphMap[subject]; + const graphSubjects = Object.keys(graphObject).sort(); + for(const graphSubject of graphSubjects) { + const node = graphObject[graphSubject]; + // only add full subjects to top-level + if(!graphTypes.isSubjectReference(node)) { + graph.push(node); + } + } + } + // only add full subjects to top-level + if(!graphTypes.isSubjectReference(node)) { + result.push(node); + } + } + + return result; +}; + +/** + * Converts an RDF triple object to a JSON-LD object. + * + * @param o the RDF triple object to convert. + * @param useNativeTypes true to output native types, false not to. + * + * @return the JSON-LD object. 
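+ *
+ * For example, a literal "5" typed as xsd:integer converts to {'@value': 5}
+ * when useNativeTypes is true, and to
+ * {'@value': '5', '@type': 'http://www.w3.org/2001/XMLSchema#integer'}
+ * when useNativeTypes is false.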
+ */ +function _RDFToObject(o, useNativeTypes) { + // convert NamedNode/BlankNode object to JSON-LD + if(o.termType.endsWith('Node')) { + return {'@id': o.value}; + } + + // convert literal to JSON-LD + const rval = {'@value': o.value}; + + // add language + if(o.language) { + rval['@language'] = o.language; + } else { + let type = o.datatype.value; + if(!type) { + type = XSD_STRING; + } + // use native types for certain xsd types + if(useNativeTypes) { + if(type === XSD_BOOLEAN) { + if(rval['@value'] === 'true') { + rval['@value'] = true; + } else if(rval['@value'] === 'false') { + rval['@value'] = false; + } + } else if(types.isNumeric(rval['@value'])) { + if(type === XSD_INTEGER) { + const i = parseInt(rval['@value'], 10); + if(i.toFixed(0) === rval['@value']) { + rval['@value'] = i; + } + } else if(type === XSD_DOUBLE) { + rval['@value'] = parseFloat(rval['@value']); + } + } + // do not add native type + if(![XSD_BOOLEAN, XSD_INTEGER, XSD_DOUBLE, XSD_STRING].includes(type)) { + rval['@type'] = type; + } + } else if(type !== XSD_STRING) { + rval['@type'] = type; + } + } + + return rval; +} diff --git a/lib/graphTypes.js b/lib/graphTypes.js new file mode 100644 index 00000000..9f4dcae2 --- /dev/null +++ b/lib/graphTypes.js @@ -0,0 +1,108 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +const types = require('./types'); + +const api = {}; +module.exports = api; + +/** + * Returns true if the given value is a subject with properties. + * + * @param v the value to check. + * + * @return true if the value is a subject with properties, false if not. + */ +api.isSubject = v => { + // Note: A value is a subject if all of these hold true: + // 1. It is an Object. + // 2. It is not a @value, @set, or @list. + // 3. It has more than 1 key OR any existing key is not @id. + if(types.isObject(v) && + !(('@value' in v) || ('@set' in v) || ('@list' in v))) { + const keyCount = Object.keys(v).length; + return (keyCount > 1 || !('@id' in v)); + } + return false; +}; + +/** + * Returns true if the given value is a subject reference. + * + * @param v the value to check. + * + * @return true if the value is a subject reference, false if not. + */ +api.isSubjectReference = v => + // Note: A value is a subject reference if all of these hold true: + // 1. It is an Object. + // 2. It has a single key: @id. + (types.isObject(v) && Object.keys(v).length === 1 && ('@id' in v)); + +/** + * Returns true if the given value is a @value. + * + * @param v the value to check. + * + * @return true if the value is a @value, false if not. + */ +api.isValue = v => + // Note: A value is a @value if all of these hold true: + // 1. It is an Object. + // 2. It has the @value property. + types.isObject(v) && ('@value' in v); + +/** + * Returns true if the given value is a @list. + * + * @param v the value to check. + * + * @return true if the value is a @list, false if not. + */ +api.isList = v => + // Note: A value is a @list if all of these hold true: + // 1. It is an Object. + // 2. It has the @list property. + types.isObject(v) && ('@list' in v); + +/** + * Returns true if the given value is a simple @graph. + * + * @return true if the value is a simple @graph, false if not. + */ +api.isSimpleGraph = v => { + // Note: A value is a simple graph if all of these hold true: + // 1. It is an object. + // 2. It has an `@graph` key. + // 3. It has only 1 key or 2 keys where one of them is `@index`. 
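+  //    e.g. {'@graph': [...]} or {'@graph': [...], '@index': 'ndx'}, where
+  //    'ndx' is an illustrative index value.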
+ if(!types.isObject(v)) { + return false; + } + const keyLength = Object.keys(v).length; + return ('@graph' in v && + (keyLength === 1 || (keyLength === 2 && '@index' in v))); +}; + +/** + * Returns true if the given value is a blank node. + * + * @param v the value to check. + * + * @return true if the value is a blank node, false if not. + */ +api.isBlankNode = v => { + // Note: A value is a blank node if all of these hold true: + // 1. It is an Object. + // 2. If it has an @id key its value begins with '_:'. + // 3. It has no keys OR is not a @value, @set, or @list. + if(types.isObject(v)) { + if('@id' in v) { + return (v['@id'].indexOf('_:') === 0); + } + return (Object.keys(v).length === 0 || + !(('@value' in v) || ('@set' in v) || ('@list' in v))); + } + return false; +}; diff --git a/lib/index.js b/lib/index.js new file mode 100644 index 00000000..64605023 --- /dev/null +++ b/lib/index.js @@ -0,0 +1,12 @@ +/** + * jsonld.js library. + * + * @author Dave Longley + * + * Copyright 2010-2017 Digital Bazaar, Inc. + */ +if(require('semver').gte(process.version, '8.0.0')) { + module.exports = require('./jsonld'); +} else { + module.exports = require('../dist/node6/lib/jsonld'); +} diff --git a/lib/jsonld.js b/lib/jsonld.js new file mode 100644 index 00000000..26b1515f --- /dev/null +++ b/lib/jsonld.js @@ -0,0 +1,1072 @@ +/** + * A JavaScript implementation of the JSON-LD API. + * + * @author Dave Longley + * + * @license BSD 3-Clause License + * Copyright (c) 2011-2017 Digital Bazaar, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * Neither the name of the Digital Bazaar, Inc. nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS + * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED + * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A + * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ +(function() { + +const canonize = require('rdf-canonize'); +const util = require('./util'); +const IdentifierIssuer = util.IdentifierIssuer; +const JsonLdError = require('./JsonLdError'); +const NQuads = require('./NQuads'); +const Rdfa = require('./Rdfa'); + +const {expand: _expand} = require('./expand'); +const {flatten: _flatten} = require('./flatten'); +const {frameMerged: _frameMerged} = require('./frame'); +const {fromRDF: _fromRDF} = require('./fromRdf'); +const {toRDF: _toRDF} = require('./toRdf'); + +const { + isArray: _isArray, + isObject: _isObject, + isString: _isString +} = require('./types'); + +const { + isSubjectReference: _isSubjectReference, +} = require('./graphTypes'); + +const { + getInitialContext: _getInitialContext, + process: _processContext, + getAllContexts: _getAllContexts +} = require('./context'); + +const { + compact: _compact, + compactIri: _compactIri, + removePreserve: _removePreserve +} = require('./compact'); + +const { + createNodeMap: _createNodeMap, + createMergedNodeMap: _createMergedNodeMap, + mergeNodeMaps: _mergeNodeMaps +} = require('./nodeMap'); + +// determine if in-browser or using node.js +const _nodejs = ( + typeof process !== 'undefined' && process.versions && process.versions.node); +const _browser = !_nodejs && + (typeof window !== 'undefined' || typeof self !== 'undefined'); + +// attaches jsonld API to the given object +const wrapper = function(jsonld) { + +/* Core API */ + +/** + * Performs JSON-LD compaction. + * + * @param input the JSON-LD input to compact. + * @param ctx the context to compact with. + * @param [options] options to use: + * [base] the base IRI to use. + * [compactArrays] true to compact arrays to single values when + * appropriate, false not to (default: true). + * [graph] true to always output a top-level graph (default: false). + * [expandContext] a context to expand with. + * [skipExpansion] true to assume the input is expanded and skip + * expansion, false not to, defaults to false. + * [documentLoader(url, callback(err, remoteDoc))] the document loader. + * [expansionMap(info)] a function that can be used to custom map + * unmappable values (or to throw an error when they are detected); + * if this function returns `undefined` then the default behavior + * will be used. + * [framing] true if compaction is occuring during a framing operation. + * [compactionMap(info)] a function that can be used to custom map + * unmappable values (or to throw an error when they are detected); + * if this function returns `undefined` then the default behavior + * will be used. + * @param [callback(err, compacted)] called once the operation completes. + * + * @return a Promise that resolves to the compacted output. + */ +jsonld.compact = util.callbackify(async function(input, ctx, options) { + if(arguments.length < 2) { + throw new TypeError('Could not compact, too few arguments.'); + } + + if(ctx === null) { + throw new JsonLdError( + 'The compaction context must not be null.', + 'jsonld.CompactError', {code: 'invalid local context'}); + } + + // nothing to compact + if(input === null) { + return null; + } + + // set default options + options = _setDefaults(options, { + base: _isString(input) ? 
input : '', + compactArrays: true, + graph: false, + skipExpansion: false, + link: false + }); + if(options.link) { + // force skip expansion when linking, "link" is not part of the public + // API, it should only be called from framing + options.skipExpansion = true; + } + + // expand input + let expanded; + if(options.skipExpansion) { + expanded = input; + } else { + expanded = await jsonld.expand(input, options); + } + + // process context + const activeCtx = await jsonld.processContext( + _getInitialContext(options), ctx, options); + + // do compaction + let compacted = _compact({ + activeCtx, + element: expanded, + options, + compactionMap: options.compactionMap + }); + + // perform clean up + if(options.compactArrays && !options.graph && _isArray(compacted)) { + if(compacted.length === 1) { + // simplify to a single item + compacted = compacted[0]; + } else if(compacted.length === 0) { + // simplify to an empty object + compacted = {}; + } + } else if(options.graph && _isObject(compacted)) { + // always use array if graph option is on + compacted = [compacted]; + } + + // follow @context key + if(_isObject(ctx) && '@context' in ctx) { + ctx = ctx['@context']; + } + + // build output context + ctx = util.clone(ctx); + if(!_isArray(ctx)) { + ctx = [ctx]; + } + // remove empty contexts + const tmp = ctx; + ctx = []; + for(let i = 0; i < tmp.length; ++i) { + if(!_isObject(tmp[i]) || Object.keys(tmp[i]).length > 0) { + ctx.push(tmp[i]); + } + } + + // remove array if only one context + const hasContext = (ctx.length > 0); + if(ctx.length === 1) { + ctx = ctx[0]; + } + + // add context and/or @graph + if(_isArray(compacted)) { + // use '@graph' keyword + const graphAlias = _compactIri({activeCtx, iri: '@graph', relativeTo: {vocab: true}}); + const graph = compacted; + compacted = {}; + if(hasContext) { + compacted['@context'] = ctx; + } + compacted[graphAlias] = graph; + } else if(_isObject(compacted) && hasContext) { + // reorder keys so @context is first + const graph = compacted; + compacted = {'@context': ctx}; + for(let key in graph) { + compacted[key] = graph[key]; + } + } + + if(options.framing) { + // get graph alias + const graph = _compactIri({activeCtx, iri: '@graph', relativeTo: {vocab: true}}); + // remove @preserve from results + options.link = {}; + compacted[graph] = _removePreserve(activeCtx, compacted[graph], options); + } + + return compacted; +}); + +/** + * Performs JSON-LD expansion. + * + * @param input the JSON-LD input to expand. + * @param [options] the options to use: + * [base] the base IRI to use. + * [expandContext] a context to expand with. + * [keepFreeFloatingNodes] true to keep free-floating nodes, + * false not to, defaults to false. + * [documentLoader(url, callback(err, remoteDoc))] the document loader. + * [expansionMap(info)] a function that can be used to custom map + * unmappable values (or to throw an error when they are detected); + * if this function returns `undefined` then the default behavior + * will be used. + * @param [callback(err, expanded)] called once the operation completes. + * + * @return a Promise that resolves to the expanded output. 
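+ *
+ * Usage sketch (promise style; the inline context and data are
+ * illustrative):
+ *
+ *   const expanded = await jsonld.expand(
+ *     {'@context': {name: 'http://schema.org/name'}, name: 'Jane Doe'});
+ *   // roughly: [{'http://schema.org/name': [{'@value': 'Jane Doe'}]}]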
+ */
+jsonld.expand = util.callbackify(async function(input, options) {
+  if(arguments.length < 1) {
+    throw new TypeError('Could not expand, too few arguments.');
+  }
+
+  // set default options
+  options = _setDefaults(options, {
+    keepFreeFloatingNodes: false
+  });
+  if(options.expansionMap === false) {
+    options.expansionMap = undefined;
+  }
+
+  // build set of objects that may have @contexts to resolve
+  const toResolve = {};
+
+  // build set of contexts to process prior to expansion
+  const contextsToProcess = [];
+
+  // if an `expandContext` has been given ensure it gets resolved
+  if('expandContext' in options) {
+    const expandContext = util.clone(options.expandContext);
+    if(_isObject(expandContext) && '@context' in expandContext) {
+      toResolve.expandContext = expandContext;
+    } else {
+      toResolve.expandContext = {'@context': expandContext};
+    }
+    contextsToProcess.push(toResolve.expandContext);
+  }
+
+  // if input is a string, attempt to dereference remote document
+  let defaultBase;
+  if(!_isString(input)) {
+    // input is not a URL, do not need to retrieve it first
+    toResolve.input = util.clone(input);
+  } else {
+    // load remote doc
+    const remoteDoc = await jsonld.get(input, options);
+    defaultBase = remoteDoc.documentUrl;
+    toResolve.input = remoteDoc.document;
+    if(remoteDoc.contextUrl) {
+      // context included in HTTP link header and must be resolved
+      toResolve.remoteContext = {'@context': remoteDoc.contextUrl};
+      contextsToProcess.push(toResolve.remoteContext);
+    }
+  }
+
+  // set default base
+  if(!('base' in options)) {
+    options.base = defaultBase || '';
+  }
+
+  // get all contexts in `toResolve`
+  await _getAllContexts(toResolve, options);
+
+  // process any additional contexts
+  let activeCtx = _getInitialContext(options);
+  contextsToProcess.forEach(localCtx => {
+    activeCtx = _processContext({activeCtx, localCtx, options});
+  });
+
+  // expand resolved input
+  let expanded = _expand({
+    activeCtx,
+    element: toResolve.input,
+    options,
+    expansionMap: options.expansionMap
+  });
+
+  // optimize away @graph with no other properties
+  if(_isObject(expanded) && ('@graph' in expanded) &&
+    Object.keys(expanded).length === 1) {
+    expanded = expanded['@graph'];
+  } else if(expanded === null) {
+    expanded = [];
+  }
+
+  // normalize to an array
+  if(!_isArray(expanded)) {
+    expanded = [expanded];
+  }
+
+  return expanded;
+});
+
+/**
+ * Performs JSON-LD flattening.
+ *
+ * @param input the JSON-LD to flatten.
+ * @param ctx the context to use to compact the flattened output, or null.
+ * @param [options] the options to use:
+ *          [base] the base IRI to use.
+ *          [expandContext] a context to expand with.
+ *          [documentLoader(url, callback(err, remoteDoc))] the document loader.
+ * @param [callback(err, flattened)] called once the operation completes.
+ *
+ * @return a Promise that resolves to the flattened output.
+ */
+jsonld.flatten = util.callbackify(async function(input, ctx, options) {
+  if(arguments.length < 1) {
+    throw new TypeError('Could not flatten, too few arguments.');
+  }
+
+  if(typeof ctx === 'function') {
+    ctx = null;
+  } else {
+    ctx = ctx || null;
+  }
+
+  // set default options
+  options = _setDefaults(options, {
+    base: _isString(input) ?
input : '' + }); + + // expand input + const expanded = await jsonld.expand(input, options); + + // do flattening + const flattened = _flatten(expanded); + + if(ctx === null) { + // no compaction required + return flattened; + } + + // compact result (force @graph option to true, skip expansion) + options.graph = true; + options.skipExpansion = true; + const compacted = await jsonld.compact(flattened, ctx, options); + + return compacted; +}); + +/** + * Performs JSON-LD framing. + * + * @param input the JSON-LD input to frame. + * @param frame the JSON-LD frame to use. + * @param [options] the framing options. + * [base] the base IRI to use. + * [expandContext] a context to expand with. + * [embed] default @embed flag: '@last', '@always', '@never', '@link' + * (default: '@last'). + * [explicit] default @explicit flag (default: false). + * [requireAll] default @requireAll flag (default: true). + * [omitDefault] default @omitDefault flag (default: false). + * [documentLoader(url, callback(err, remoteDoc))] the document loader. + * @param [callback(err, framed)] called once the operation completes. + * + * @return a Promise that resolves to the framed output. + */ +jsonld.frame = util.callbackify(async function(input, frame, options) { + if(arguments.length < 2) { + throw new TypeError('Could not frame, too few arguments.'); + } + + // set default options + options = _setDefaults(options, { + base: _isString(input) ? input : '', + embed: '@last', + explicit: false, + requireAll: true, + omitDefault: false + }); + + // if frame is a string, attempt to dereference remote document + if(_isString(frame)) { + // load remote doc + const remoteDoc = await jsonld.get(frame, options); + frame = remoteDoc.document; + + if(remoteDoc.contextUrl) { + // inject link header @context into frame + let ctx = frame['@context']; + if(!ctx) { + ctx = remoteDoc.contextUrl; + } else if(_isArray(ctx)) { + ctx.push(remoteDoc.contextUrl); + } else { + ctx = [ctx, remoteDoc.contextUrl]; + } + frame['@context'] = ctx; + } + } + + let frameContext = frame ? frame['@context'] || {} : {}; + + // expand input + const expanded = await jsonld.expand(input, options); + + // expand frame + const opts = util.clone(options); + opts.isFrame = true; + opts.keepFreeFloatingNodes = true; + const expandedFrame = await jsonld.expand(frame, opts); + + // do merged framing + const framed = _frameMerged(expanded, expandedFrame, opts); + + // compact result (force @graph option to true, skip expansion, + // check for linked embeds) + opts.graph = true; + opts.skipExpansion = true; + opts.link = {}; + opts.framing = true; + const compacted = await jsonld.compact(framed, frameContext, opts); + + return compacted; +}); + +/** + * **Experimental** + * + * Links a JSON-LD document's nodes in memory. + * + * @param input the JSON-LD document to link. + * @param [ctx] the JSON-LD context to apply. + * @param [options] the options to use: + * [base] the base IRI to use. + * [expandContext] a context to expand with. + * [documentLoader(url, callback(err, remoteDoc))] the document loader. + * @param [callback(err, linked)] called once the operation completes. + * + * @return a Promise that resolves to the linked output. 
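+ *
+ * Usage sketch, for some document `doc` and context `ctx`:
+ *
+ *   const linked = await jsonld.link(doc, ctx);
+ *
+ * As the implementation below shows, this is equivalent to framing `doc`
+ * with the wildcard frame {'@context': ctx, '@embed': '@link'}.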
+ */ +jsonld.link = util.callbackify(async function(input, ctx, options) { + // API matches running frame with a wildcard frame and embed: '@link' + // get arguments + const frame = {}; + if(ctx) { + frame['@context'] = ctx; + } + frame['@embed'] = '@link'; + return jsonld.frame(input, frame, options); +}); + +/** + * Performs RDF dataset normalization on the given input. The input is JSON-LD + * unless the 'inputFormat' option is used. The output is an RDF dataset + * unless the 'format' option is used. + * + * @param input the input to normalize as JSON-LD or as a format specified by + * the 'inputFormat' option. + * @param [options] the options to use: + * [algorithm] the normalization algorithm to use, `URDNA2015` or + * `URGNA2012` (default: `URGNA2012`). + * [base] the base IRI to use. + * [expandContext] a context to expand with. + * [inputFormat] the format if input is not JSON-LD: + * 'application/nquads' for N-Quads. + * [format] the format if output is a string: + * 'application/nquads' for N-Quads. + * [documentLoader(url, callback(err, remoteDoc))] the document loader. + * @param [callback(err, normalized)] called once the operation completes. + * + * @return a Promise that resolves to the normalized output. + */ +jsonld.normalize = jsonld.canonize = util.callbackify(async function( + input, options) { + if(arguments.length < 1) { + throw new TypeError('Could not canonize, too few arguments.'); + } + + // set default options + options = _setDefaults(options, { + base: _isString(input) ? input : '', + algorithm: 'URDNA2015' + }); + if('inputFormat' in options) { + if(options.inputFormat !== 'application/nquads') { + throw new JsonLdError( + 'Unknown canonicalization input format.', + 'jsonld.CanonizeError'); + } + // TODO: `await` for async parsers + const parsedInput = NQuads.parse(input); + + // do canonicalization + return canonize.canonize(parsedInput, options); + } + + // convert to RDF dataset then do normalization + const opts = util.clone(options); + delete opts.format; + opts.produceGeneralizedRdf = false; + const dataset = await jsonld.toRDF(input, opts); + + // do canonicalization + return canonize.canonize(dataset, options); +}); + +/** + * Converts an RDF dataset to JSON-LD. + * + * @param dataset a serialized string of RDF in a format specified by the + * format option or an RDF dataset to convert. + * @param [options] the options to use: + * [format] the format if dataset param must first be parsed: + * 'application/nquads' for N-Quads (default). + * [rdfParser] a custom RDF-parser to use to parse the dataset. + * [useRdfType] true to use rdf:type, false to use @type + * (default: false). + * [useNativeTypes] true to convert XSD types into native types + * (boolean, integer, double), false not to (default: false). + * @param [callback(err, output)] called once the operation completes. + * + * @return a Promise that resolves to the JSON-LD document. + */ +jsonld.fromRDF = util.callbackify(async function(dataset, options) { + if(arguments.length < 1) { + throw new TypeError('Could not convert from RDF, too few arguments.'); + } + + // set default options + options = _setDefaults(options, { + format: _isString(dataset) ? 
'application/nquads' : undefined + }); + + let {format, rdfParser} = options; + + // handle special format + if(format) { + // check supported formats + rdfParser = rdfParser || _rdfParsers[format]; + if(!rdfParser) { + throw new JsonLdError( + 'Unknown input format.', + 'jsonld.UnknownFormat', {format}); + } + } else { + // no-op parser, assume dataset already parsed + rdfParser = () => dataset; + } + + // TODO: call `normalizeAsyncFn` on parser fn + + // rdfParser can be callback, promise-based, or synchronous + let parsedDataset; + if(rdfParser.length > 1) { + // convert callback-based rdf parser to promise-based + parsedDataset = new Promise((resolve, reject) => { + rdfParser(dataset, (err, dataset) => { + if(err) { + reject(err); + } else { + resolve(dataset); + } + }); + }); + } else { + parsedDataset = Promise.resolve(rdfParser(dataset)); + } + + parsedDataset = await parsedDataset; + + // back-compat with old parsers that produced legacy dataset format + if(!Array.isArray(parsedDataset)) { + parsedDataset = NQuads.legacyDatasetToQuads(dataset); + } + + return _fromRDF(parsedDataset, options); +}); + +/** + * Outputs the RDF dataset found in the given JSON-LD object. + * + * @param input the JSON-LD input. + * @param [options] the options to use: + * [base] the base IRI to use. + * [expandContext] a context to expand with. + * [format] the format to use to output a string: + * 'application/nquads' for N-Quads. + * [produceGeneralizedRdf] true to output generalized RDF, false + * to produce only standard RDF (default: false). + * [documentLoader(url, callback(err, remoteDoc))] the document loader. + * @param [callback(err, dataset)] called once the operation completes. + * + * @return a Promise that resolves to the RDF dataset. + */ +jsonld.toRDF = util.callbackify(async function(input, options) { + if(arguments.length < 1) { + throw new TypeError('Could not convert to RDF, too few arguments.'); + } + + // set default options + options = _setDefaults(options, { + base: _isString(input) ? input : '' + }); + + // TODO: support toRDF custom map? + + // expand input + const expanded = await jsonld.expand(input, options); + + // output RDF dataset + const dataset = _toRDF(expanded, options); + if(options.format) { + if(options.format === 'application/nquads') { + return await NQuads.serialize(dataset); + } + throw new JsonLdError( + 'Unknown output format.', + 'jsonld.UnknownFormat', {format: options.format}); + } + + return dataset; +}); + +/** + * **Experimental** + * + * Recursively flattens the nodes in the given JSON-LD input into a merged + * map of node ID => node. All graphs will be merged into the default graph. + * + * @param input the JSON-LD input. + * @param [options] the options to use: + * [base] the base IRI to use. + * [expandContext] a context to expand with. + * [issuer] a jsonld.IdentifierIssuer to use to label blank nodes. + * [documentLoader(url, callback(err, remoteDoc))] the document loader. + * @param [callback(err, nodeMap)] called once the operation completes. + * + * @return a Promise that resolves to the merged node map. + */ +jsonld.createNodeMap = util.callbackify(async function(input, options) { + if(arguments.length < 1) { + throw new TypeError('Could not create node map, too few arguments.'); + } + + // set default options + options = _setDefaults(options, { + base: _isString(input) ? 
      input : ''
+  });
+
+  // expand input
+  const expanded = await jsonld.expand(input, options);
+
+  return _createMergedNodeMap(expanded, options);
+});
+
+/**
+ * **Experimental**
+ *
+ * Merges two or more JSON-LD documents into a single flattened document.
+ *
+ * @param docs the JSON-LD documents to merge together.
+ * @param ctx the context to use to compact the merged result, or null.
+ * @param [options] the options to use:
+ *          [base] the base IRI to use.
+ *          [expandContext] a context to expand with.
+ *          [issuer] a jsonld.IdentifierIssuer to use to label blank nodes.
+ *          [mergeNodes] true to merge properties for nodes with the same ID,
+ *            false to ignore new properties for nodes with the same ID once
+ *            the ID has been defined; note that this may not prevent merging
+ *            new properties where a node is in the `object` position
+ *            (default: true).
+ *          [documentLoader(url, callback(err, remoteDoc))] the document loader.
+ * @param [callback(err, merged)] called once the operation completes.
+ *
+ * @return a Promise that resolves to the merged output.
+ */
+jsonld.merge = util.callbackify(async function(docs, ctx, options) {
+  if(arguments.length < 1) {
+    throw new TypeError('Could not merge, too few arguments.');
+  }
+  if(!_isArray(docs)) {
+    throw new TypeError('Could not merge, "docs" must be an array.');
+  }
+
+  if(typeof ctx === 'function') {
+    ctx = null;
+  } else {
+    ctx = ctx || null;
+  }
+
+  // set default options
+  options = _setDefaults(options, {});
+
+  // expand all documents
+  const expanded = await Promise.all(docs.map(doc => {
+    const opts = Object.assign({}, options);
+    return jsonld.expand(doc, opts);
+  }));
+
+  let mergeNodes = true;
+  if('mergeNodes' in options) {
+    mergeNodes = options.mergeNodes;
+  }
+
+  const issuer = options.issuer || new IdentifierIssuer('_:b');
+  const graphs = {'@default': {}};
+
+  for(let i = 0; i < expanded.length; ++i) {
+    // uniquely relabel blank nodes
+    const doc = util.relabelBlankNodes(expanded[i], {
+      issuer: new IdentifierIssuer('_:b' + i + '-')
+    });
+
+    // add nodes to the shared node map graphs if merging nodes, to a
+    // separate graph set if not
+    const _graphs = (mergeNodes || i === 0) ? graphs : {'@default': {}};
+    _createNodeMap(doc, _graphs, '@default', issuer);
+
+    if(_graphs !== graphs) {
+      // merge document graphs but don't merge existing nodes
+      for(let graphName in _graphs) {
+        const _nodeMap = _graphs[graphName];
+        if(!(graphName in graphs)) {
+          graphs[graphName] = _nodeMap;
+          continue;
+        }
+        const nodeMap = graphs[graphName];
+        for(let key in _nodeMap) {
+          if(!(key in nodeMap)) {
+            nodeMap[key] = _nodeMap[key];
+          }
+        }
+      }
+    }
+  }
+
+  // add all non-default graphs to default graph
+  const defaultGraph = _mergeNodeMaps(graphs);
+
+  // produce flattened output
+  const flattened = [];
+  const keys = Object.keys(defaultGraph).sort();
+  for(let ki = 0; ki < keys.length; ++ki) {
+    const node = defaultGraph[keys[ki]];
+    // only add full subjects to top-level
+    if(!_isSubjectReference(node)) {
+      flattened.push(node);
+    }
+  }
+
+  if(ctx === null) {
+    return flattened;
+  }
+
+  // compact result (force @graph option to true, skip expansion)
+  options.graph = true;
+  options.skipExpansion = true;
+  const compacted = jsonld.compact(flattened, ctx, options);
+
+  return compacted;
+});
+
+/**
+ * The default document loader for external documents. If the environment
+ * is node.js, a callback-continuation-style document loader is used; otherwise,
+ * a promises-style document loader is used.
+ *
+ * @param url the URL to load.
+ * @param callback(err, remoteDoc) called once the operation completes, + * if using a non-promises API. + * + * @return a promise, if using a promises API. + */ +Object.defineProperty(jsonld, 'documentLoader', { + get: () => jsonld._documentLoader, + set: v => jsonld._documentLoader = util.normalizeDocumentLoader(v) +}); +// default document loader not implemented +jsonld.documentLoader = async url => { + throw new JsonLdError( + 'Could not retrieve a JSON-LD document from the URL. URL ' + + 'dereferencing not implemented.', 'jsonld.LoadDocumentError', + {code: 'loading document failed', url: url}); +}; + +/** + * Deprecated default document loader. Do not use or override. + */ +jsonld.loadDocument = util.callbackify(async function() { + return jsonld.documentLoader.apply(null, arguments); +}); + +/** + * Gets a remote JSON-LD document using the default document loader or + * one given in the passed options. + * + * @param url the URL to fetch. + * @param [options] the options to use: + * [documentLoader] the document loader to use. + * @param [callback(err, remoteDoc)] called once the operation completes. + * + * @return a Promise that resolves to the retrieved remote document. + */ +jsonld.get = util.callbackify(async function(url, options) { + let load; + if(typeof options.documentLoader === 'function') { + load = util.normalizeDocumentLoader(options.documentLoader); + } else { + load = jsonld.documentLoader; + } + + const remoteDoc = await load(url); + + // TODO: can this be moved into `normalizeDocumentLoader`? + try { + if(!remoteDoc.document) { + throw new JsonLdError( + 'No remote document found at the given URL.', + 'jsonld.NullRemoteDocument'); + } + if(_isString(remoteDoc.document)) { + remoteDoc.document = JSON.parse(remoteDoc.document); + } + } catch(e) { + throw new JsonLdError( + 'Could not retrieve a JSON-LD document from the URL.', + 'jsonld.LoadDocumentError', { + code: 'loading document failed', + cause: e, + remoteDoc: remoteDoc + }); + } + + return remoteDoc; +}); + +/** + * Processes a local context, resolving any URLs as necessary, and returns a + * new active context in its callback. + * + * @param activeCtx the current active context. + * @param localCtx the local context to process. + * @param [options] the options to use: + * [documentLoader(url, callback(err, remoteDoc))] the document loader. + * @param [callback(err, activeCtx)] called once the operation completes. + * + * @return a Promise that resolves to the new active context. + */ +jsonld.processContext = util.callbackify(async function( + activeCtx, localCtx, options) { + // set default options + options = _setDefaults(options, { + base: '' + }); + + // return initial context early for null context + if(localCtx === null) { + return _getInitialContext(options); + } + + // get URLs in localCtx + localCtx = util.clone(localCtx); + if(!(_isObject(localCtx) && '@context' in localCtx)) { + localCtx = {'@context': localCtx}; + } + let ctx = await _getAllContexts(localCtx, options); + + return _processContext({activeCtx, localCtx: ctx, options}); +}); + +// backwards compatibility +jsonld.getContextValue = require('./context').getContextValue; + +/** + * Document loaders. + */ +jsonld.documentLoaders = {}; +jsonld.documentLoaders.node = require('./documentLoaders/node'); +jsonld.documentLoaders.xhr = require('./documentLoaders/xhr'); + +/** + * Assigns the default document loader for external document URLs to a built-in + * default. Supported types currently include: 'xhr' and 'node'. 
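+ *
+ * For example, jsonld.useDocumentLoader('node') installs the bundled
+ * Node.js loader (this also happens automatically at load time when
+ * running under Node.js, as set up below).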
+ * + * @param type the type to set. + * @param [params] the parameters required to use the document loader. + */ +jsonld.useDocumentLoader = function(type) { + if(!(type in jsonld.documentLoaders)) { + throw new JsonLdError( + 'Unknown document loader type: "' + type + '"', + 'jsonld.UnknownDocumentLoader', + {type: type}); + } + + // set document loader + jsonld.documentLoader = util.normalizeDocumentLoader( + jsonld.documentLoaders[type].apply( + jsonld, Array.prototype.slice.call(arguments, 1))); +}; + +/** Registered RDF dataset parsers hashed by content-type. */ +const _rdfParsers = {}; + +/** + * Registers an RDF dataset parser by content-type, for use with + * jsonld.fromRDF. An RDF dataset parser will always be given two parameters, + * a string of input and a callback. An RDF dataset parser can be synchronous + * or asynchronous. + * + * If the parser function returns undefined or null then it will be assumed to + * be asynchronous w/a continuation-passing style and the callback parameter + * given to the parser MUST be invoked. + * + * If it returns a Promise, then it will be assumed to be asynchronous, but the + * callback parameter MUST NOT be invoked. It should instead be ignored. + * + * If it returns an RDF dataset, it will be assumed to be synchronous and the + * callback parameter MUST NOT be invoked. It should instead be ignored. + * + * @param contentType the content-type for the parser. + * @param parser(input, callback(err, dataset)) the parser function (takes a + * string as a parameter and either returns null/undefined and uses + * the given callback, returns a Promise, or returns an RDF dataset). + */ +jsonld.registerRDFParser = function(contentType, parser) { + _rdfParsers[contentType] = parser; +}; + +/** + * Unregisters an RDF dataset parser by content-type. + * + * @param contentType the content-type for the parser. 
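+ *
+ * For example, a parser added via
+ *   jsonld.registerRDFParser('application/trig', myTrigParser);
+ * (a hypothetical parser) can later be removed with
+ *   jsonld.unregisterRDFParser('application/trig');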
+ */ +jsonld.unregisterRDFParser = function(contentType) { + delete _rdfParsers[contentType]; +}; + +// register the N-Quads RDF parser +jsonld.registerRDFParser('application/nquads', NQuads.parse); + +// register the RDFa API RDF parser +jsonld.registerRDFParser('rdfa-api', Rdfa.parse); + +/* URL API */ +jsonld.url = require('./url'); + +/* Utility API */ +jsonld.util = util; +// backwards compatibility +Object.assign(jsonld, util); + +// reexpose API as jsonld.promises for backwards compatability +jsonld.promises = jsonld; + +// backwards compatibility +jsonld.RequestQueue = require('./RequestQueue'); + +/* WebIDL API */ +jsonld.JsonLdProcessor = require('./JsonLdProcessor')(jsonld); + +// setup browser global JsonLdProcessor +if(_browser && typeof global.JsonLdProcessor === 'undefined') { + Object.defineProperty(global, 'JsonLdProcessor', { + writable: true, + enumerable: false, + configurable: true, + value: jsonld.JsonLdProcessor + }); +} + +// set platform-specific defaults/APIs +if(_nodejs) { + // use node document loader by default + jsonld.useDocumentLoader('node'); +} else if(typeof XMLHttpRequest !== 'undefined') { + // use xhr document loader by default + jsonld.useDocumentLoader('xhr'); +} + +function _setDefaults(options, { + documentLoader = jsonld.documentLoader, + ...defaults +}) { + if(typeof options === 'function') { + options = {}; + } + options = options || {}; + return Object.assign({}, {documentLoader}, defaults, options); +} + +// end of jsonld API `wrapper` factory +return jsonld; +}; + +// external APIs: + +// used to generate a new jsonld API instance +const factory = function() { + return wrapper(function() { + return factory(); + }); +}; + +if(!_nodejs && (typeof define === 'function' && define.amd)) { + // export AMD API + define([], function() { + // now that module is defined, wrap main jsonld API instance + wrapper(factory); + return factory; + }); +} else { + // wrap the main jsonld API instance + wrapper(factory); + + if(typeof require === 'function' && + typeof module !== 'undefined' && module.exports) { + // export CommonJS/nodejs API + module.exports = factory; + } + + if(_browser) { + // export simple browser API + if(typeof jsonld === 'undefined') { + jsonld = jsonldjs = factory; + } else { + jsonldjs = factory; + } + } +} + +return factory; + +})(); diff --git a/lib/nodeMap.js b/lib/nodeMap.js new file mode 100644 index 00000000..ec3570ad --- /dev/null +++ b/lib/nodeMap.js @@ -0,0 +1,242 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +const {isKeyword} = require('./context'); +const graphTypes = require('./graphTypes'); +const types = require('./types'); +const util = require('./util'); +const JsonLdError = require('./JsonLdError'); + +const api = {}; +module.exports = api; + +/** + * Creates a merged JSON-LD node map (node ID => node). + * + * @param input the expanded JSON-LD to create a node map of. + * @param [options] the options to use: + * [issuer] a jsonld.IdentifierIssuer to use to label blank nodes. + * + * @return the node map. 
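+ *
+ * The returned map is keyed by node @id (blank nodes relabeled by the
+ * issuer), e.g. (illustrative IRI):
+ *   {'http://example.com/a': {'@id': 'http://example.com/a', ...}}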
+ */ +api.createMergedNodeMap = (input, options) => { + options = options || {}; + + // produce a map of all subjects and name each bnode + const issuer = options.issuer || new util.IdentifierIssuer('_:b'); + const graphs = {'@default': {}}; + api.createNodeMap(input, graphs, '@default', issuer); + + // add all non-default graphs to default graph + return api.mergeNodeMaps(graphs); +}; + +/** + * Recursively flattens the subjects in the given JSON-LD expanded input + * into a node map. + * + * @param input the JSON-LD expanded input. + * @param graphs a map of graph name to subject map. + * @param graph the name of the current graph. + * @param issuer the blank node identifier issuer. + * @param name the name assigned to the current input if it is a bnode. + * @param list the list to append to, null for none. + */ +api.createNodeMap = (input, graphs, graph, issuer, name, list) => { + // recurse through array + if(types.isArray(input)) { + for(let i = 0; i < input.length; ++i) { + api.createNodeMap(input[i], graphs, graph, issuer, undefined, list); + } + return; + } + + // add non-object to list + if(!types.isObject(input)) { + if(list) { + list.push(input); + } + return; + } + + // add values to list + if(graphTypes.isValue(input)) { + if('@type' in input) { + let type = input['@type']; + // rename @type blank node + if(type.indexOf('_:') === 0) { + input['@type'] = type = issuer.getId(type); + } + } + if(list) { + list.push(input); + } + return; + } + + // Note: At this point, input must be a subject. + + // spec requires @type to be named first, so assign names early + if('@type' in input) { + const types = input['@type']; + for(let i = 0; i < types.length; ++i) { + const type = types[i]; + if(type.indexOf('_:') === 0) { + issuer.getId(type); + } + } + } + + // get name for subject + if(types.isUndefined(name)) { + name = graphTypes.isBlankNode(input) ? + issuer.getId(input['@id']) : input['@id']; + } + + // add subject reference to list + if(list) { + list.push({'@id': name}); + } + + // create new subject or merge into existing one + const subjects = graphs[graph]; + const subject = subjects[name] = subjects[name] || {}; + subject['@id'] = name; + const properties = Object.keys(input).sort(); + for(let pi = 0; pi < properties.length; ++pi) { + let property = properties[pi]; + + // skip @id + if(property === '@id') { + continue; + } + + // handle reverse properties + if(property === '@reverse') { + const referencedNode = {'@id': name}; + const reverseMap = input['@reverse']; + for(let reverseProperty in reverseMap) { + const items = reverseMap[reverseProperty]; + for(let ii = 0; ii < items.length; ++ii) { + const item = items[ii]; + let itemName = item['@id']; + if(graphTypes.isBlankNode(item)) { + itemName = issuer.getId(itemName); + } + api.createNodeMap(item, graphs, graph, issuer, itemName); + util.addValue( + subjects[itemName], reverseProperty, referencedNode, + {propertyIsArray: true, allowDuplicate: false}); + } + } + continue; + } + + // recurse into graph + if(property === '@graph') { + // add graph subjects map entry + if(!(name in graphs)) { + graphs[name] = {}; + } + const g = (graph === '@merged') ? 
graph : name; + api.createNodeMap(input[property], graphs, g, issuer); + continue; + } + + // copy non-@type keywords + if(property !== '@type' && isKeyword(property)) { + if(property === '@index' && property in subject && + (input[property] !== subject[property] || + input[property]['@id'] !== subject[property]['@id'])) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; conflicting @index property detected.', + 'jsonld.SyntaxError', + {code: 'conflicting indexes', subject: subject}); + } + subject[property] = input[property]; + continue; + } + + // iterate over objects + const objects = input[property]; + + // if property is a bnode, assign it a new id + if(property.indexOf('_:') === 0) { + property = issuer.getId(property); + } + + // ensure property is added for empty arrays + if(objects.length === 0) { + util.addValue(subject, property, [], {propertyIsArray: true}); + continue; + } + for(let oi = 0; oi < objects.length; ++oi) { + let o = objects[oi]; + + if(property === '@type') { + // rename @type blank nodes + o = (o.indexOf('_:') === 0) ? issuer.getId(o) : o; + } + + // handle embedded subject or subject reference + if(graphTypes.isSubject(o) || graphTypes.isSubjectReference(o)) { + // relabel blank node @id + const id = graphTypes.isBlankNode(o) ? + issuer.getId(o['@id']) : o['@id']; + + // add reference and recurse + util.addValue( + subject, property, {'@id': id}, + {propertyIsArray: true, allowDuplicate: false}); + api.createNodeMap(o, graphs, graph, issuer, id); + } else if(graphTypes.isList(o)) { + // handle @list + const _list = []; + api.createNodeMap(o['@list'], graphs, graph, issuer, name, _list); + o = {'@list': _list}; + util.addValue( + subject, property, o, + {propertyIsArray: true, allowDuplicate: false}); + } else { + // handle @value + api.createNodeMap(o, graphs, graph, issuer, name); + util.addValue( + subject, property, o, {propertyIsArray: true, allowDuplicate: false}); + } + } + } +}; + +api.mergeNodeMaps = graphs => { + // add all non-default graphs to default graph + const defaultGraph = graphs['@default']; + const graphNames = Object.keys(graphs).sort(); + for(let i = 0; i < graphNames.length; ++i) { + const graphName = graphNames[i]; + if(graphName === '@default') { + continue; + } + const nodeMap = graphs[graphName]; + let subject = defaultGraph[graphName]; + if(!subject) { + defaultGraph[graphName] = subject = { + '@id': graphName, + '@graph': [] + }; + } else if(!('@graph' in subject)) { + subject['@graph'] = []; + } + const graph = subject['@graph']; + const ids = Object.keys(nodeMap).sort(); + for(let ii = 0; ii < ids.length; ++ii) { + const node = nodeMap[ids[ii]]; + // only add full subjects + if(!graphTypes.isSubjectReference(node)) { + graph.push(node); + } + } + } + return defaultGraph; +}; diff --git a/lib/toRdf.js b/lib/toRdf.js new file mode 100644 index 00000000..bb926113 --- /dev/null +++ b/lib/toRdf.js @@ -0,0 +1,257 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. 
+ */ +'use strict'; + +const {createNodeMap} = require('./nodeMap'); +const {isKeyword} = require('./context'); +const graphTypes = require('./graphTypes'); +const types = require('./types'); +const util = require('./util'); + +const { + RDF, + RDF_LIST, + RDF_FIRST, + RDF_REST, + RDF_NIL, + RDF_TYPE, + RDF_PLAIN_LITERAL, + RDF_XML_LITERAL, + RDF_OBJECT, + RDF_LANGSTRING, + + XSD, + XSD_BOOLEAN, + XSD_DOUBLE, + XSD_INTEGER, + XSD_STRING, +} = require('./constants'); + +const { + isAbsolute: _isAbsoluteIri +} = require('./url'); + +const api = {}; +module.exports = api; + +/** + * Outputs an RDF dataset for the expanded JSON-LD input. + * + * @param input the expanded JSON-LD input. + * @param options the RDF serialization options. + * + * @return the RDF dataset. + */ +api.toRDF = (input, options) => { + // create node map for default graph (and any named graphs) + const issuer = new util.IdentifierIssuer('_:b'); + const nodeMap = {'@default': {}}; + createNodeMap(input, nodeMap, '@default', issuer); + + const dataset = []; + const graphNames = Object.keys(nodeMap).sort(); + for(const graphName of graphNames) { + let graphTerm; + if(graphName === '@default') { + graphTerm = {termType: 'DefaultGraph', value: ''}; + } else if(_isAbsoluteIri(graphName)) { + if(graphName.startsWith('_:')) { + graphTerm = {termType: 'BlankNode'}; + } else { + graphTerm = {termType: 'NamedNode'}; + } + graphTerm.value = graphName; + } else { + // skip relative IRIs (not valid RDF) + continue; + } + _graphToRDF(dataset, nodeMap[graphName], graphTerm, issuer, options); + } + + return dataset; +}; + +/** + * Adds RDF quads for a particular graph to the given dataset. + * + * @param dataset the dataset to append RDF quads to. + * @param graph the graph to create RDF quads for. + * @param graphTerm the graph term for each quad. + * @param issuer a IdentifierIssuer for assigning blank node names. + * @param options the RDF serialization options. + * + * @return the array of RDF triples for the given graph. + */ +function _graphToRDF(dataset, graph, graphTerm, issuer, options) { + const ids = Object.keys(graph).sort(); + for(let i = 0; i < ids.length; ++i) { + const id = ids[i]; + const node = graph[id]; + const properties = Object.keys(node).sort(); + for(let property of properties) { + const items = node[property]; + if(property === '@type') { + property = RDF_TYPE; + } else if(isKeyword(property)) { + continue; + } + + for(const item of items) { + // RDF subject + const subject = { + termType: id.startsWith('_:') ? 'BlankNode' : 'NamedNode', + value: id + }; + + // skip relative IRI subjects (not valid RDF) + if(!_isAbsoluteIri(id)) { + continue; + } + + // RDF predicate + const predicate = { + termType: property.startsWith('_:') ? 
'BlankNode' : 'NamedNode', + value: property + }; + + // skip relative IRI predicates (not valid RDF) + if(!_isAbsoluteIri(property)) { + continue; + } + + // skip blank node predicates unless producing generalized RDF + if(predicate.termType === 'BlankNode' && + !options.produceGeneralizedRdf) { + continue; + } + + // convert @list to triples + if(graphTypes.isList(item)) { + _listToRDF( + item['@list'], issuer, subject, predicate, dataset, graphTerm); + } else { + // convert value or node object to triple + const object = _objectToRDF(item); + // skip null objects (they are relative IRIs) + if(object) { + dataset.push({ + subject: subject, + predicate: predicate, + object: object, + graph: graphTerm + }); + } + } + } + } + } +} + +/** + * Converts a @list value into linked list of blank node RDF quads + * (an RDF collection). + * + * @param list the @list value. + * @param issuer a IdentifierIssuer for assigning blank node names. + * @param subject the subject for the head of the list. + * @param predicate the predicate for the head of the list. + * @param dataset the array of quads to append to. + * @param graphTerm the graph term for each quad. + */ +function _listToRDF(list, issuer, subject, predicate, dataset, graphTerm) { + const first = {termType: 'NamedNode', value: RDF_FIRST}; + const rest = {termType: 'NamedNode', value: RDF_REST}; + const nil = {termType: 'NamedNode', value: RDF_NIL}; + + for(const item of list) { + const blankNode = {termType: 'BlankNode', value: issuer.getId()}; + dataset.push({ + subject: subject, + predicate: predicate, + object: blankNode, + graph: graphTerm + }); + + subject = blankNode; + predicate = first; + const object = _objectToRDF(item); + + // skip null objects (they are relative IRIs) + if(object) { + dataset.push({ + subject: subject, + predicate: predicate, + object: object, + graph: graphTerm + }); + } + + predicate = rest; + } + + dataset.push({ + subject: subject, + predicate: predicate, + object: nil, + graph: graphTerm + }); +} + +/** + * Converts a JSON-LD value object to an RDF literal or a JSON-LD string or + * node object to an RDF resource. + * + * @param item the JSON-LD value or node object. + * + * @return the RDF literal or RDF resource. + */ +function _objectToRDF(item) { + const object = {}; + + // convert value object to RDF + if(graphTypes.isValue(item)) { + object.termType = 'Literal'; + object.value = undefined; + object.datatype = { + termType: 'NamedNode' + }; + let value = item['@value']; + const datatype = item['@type'] || null; + + // convert to XSD datatypes as appropriate + if(types.isBoolean(value)) { + object.value = value.toString(); + object.datatype.value = datatype || XSD_BOOLEAN; + } else if(types.isDouble(value) || datatype === XSD_DOUBLE) { + if(!types.isDouble(value)) { + value = parseFloat(value); + } + // canonical double representation + object.value = value.toExponential(15).replace(/(\d)0*e\+?/, '$1E'); + object.datatype.value = datatype || XSD_DOUBLE; + } else if(types.isNumber(value)) { + object.value = value.toFixed(0); + object.datatype.value = datatype || XSD_INTEGER; + } else if('@language' in item) { + object.value = value; + object.datatype.value = datatype || RDF_LANGSTRING; + object.language = item['@language']; + } else { + object.value = value; + object.datatype.value = datatype || XSD_STRING; + } + } else { + // convert string/node object to RDF + const id = types.isObject(item) ? item['@id'] : item; + object.termType = id.startsWith('_:') ? 
'BlankNode' : 'NamedNode'; + object.value = id; + } + + // skip relative IRIs, not valid RDF + if(object.termType === 'NamedNode' && !_isAbsoluteIri(object.value)) { + return null; + } + + return object; +} diff --git a/lib/types.js b/lib/types.js new file mode 100644 index 00000000..26034c6a --- /dev/null +++ b/lib/types.js @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +const api = {}; +module.exports = api; + +/** + * Returns true if the given value is an Array. + * + * @param v the value to check. + * + * @return true if the value is an Array, false if not. + */ +api.isArray = Array.isArray; + +/** + * Returns true if the given value is a Boolean. + * + * @param v the value to check. + * + * @return true if the value is a Boolean, false if not. + */ +api.isBoolean = v => (typeof v === 'boolean' || + Object.prototype.toString.call(v) === '[object Boolean]'); + +/** + * Returns true if the given value is a double. + * + * @param v the value to check. + * + * @return true if the value is a double, false if not. + */ +api.isDouble = v => api.isNumber(v) && String(v).indexOf('.') !== -1; + +/** + * Returns true if the given value is an empty Object. + * + * @param v the value to check. + * + * @return true if the value is an empty Object, false if not. + */ +api.isEmptyObject = v => api.isObject(v) && Object.keys(v).length === 0; + +/** + * Returns true if the given value is a Number. + * + * @param v the value to check. + * + * @return true if the value is a Number, false if not. + */ +api.isNumber = v => (typeof v === 'number' || + Object.prototype.toString.call(v) === '[object Number]'); + +/** + * Returns true if the given value is numeric. + * + * @param v the value to check. + * + * @return true if the value is numeric, false if not. + */ +api.isNumeric = v => !isNaN(parseFloat(v)) && isFinite(v); + +/** + * Returns true if the given value is an Object. + * + * @param v the value to check. + * + * @return true if the value is an Object, false if not. + */ +api.isObject = v => Object.prototype.toString.call(v) === '[object Object]'; + +/** + * Returns true if the given value is a String. + * + * @param v the value to check. + * + * @return true if the value is a String, false if not. + */ +api.isString = v => (typeof v === 'string' || + Object.prototype.toString.call(v) === '[object String]'); + +/** + * Returns true if the given value is undefined. + * + * @param v the value to check. + * + * @return true if the value is undefined, false if not. + */ +api.isUndefined = v => typeof v === 'undefined'; diff --git a/lib/url.js b/lib/url.js new file mode 100644 index 00000000..9d8dab45 --- /dev/null +++ b/lib/url.js @@ -0,0 +1,294 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. 
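For readers skimming the new `lib/toRDF.js`, here is a rough sketch of the dataset shape it produces when no output `format` is requested. The document and IRIs are invented for illustration, and the quad layout is inferred from the code in this diff (RDF/JS-style `termType`/`value` terms, canonical `xsd:double` lexical forms); treat it as a sketch rather than documented output.

```js
const jsonld = require('jsonld');

// hypothetical input used only for illustration
const doc = {
  '@id': 'https://example.com/node',
  'https://example.com/prop': 1.1
};

jsonld.toRDF(doc).then(dataset => {
  // expect roughly one quad shaped like the objects built by
  // _graphToRDF()/_objectToRDF() above:
  // {
  //   subject:   {termType: 'NamedNode', value: 'https://example.com/node'},
  //   predicate: {termType: 'NamedNode', value: 'https://example.com/prop'},
  //   object:    {termType: 'Literal', value: '1.1E0', datatype: {
  //                termType: 'NamedNode',
  //                value: 'http://www.w3.org/2001/XMLSchema#double'}},
  //   graph:     {termType: 'DefaultGraph', value: ''}
  // }
  console.log(JSON.stringify(dataset, null, 2));
});
```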
+ */ +'use strict'; + +const types = require('./types'); + +const api = {}; +module.exports = api; + +// define URL parser +// parseUri 1.2.2 +// (c) Steven Levithan +// MIT License +// with local jsonld.js modifications +api.parsers = { + simple: { + // RFC 3986 basic parts + keys: ['href','scheme','authority','path','query','fragment'], + regex: /^(?:([^:\/?#]+):)?(?:\/\/([^\/?#]*))?([^?#]*)(?:\?([^#]*))?(?:#(.*))?/ + }, + full: { + keys: ['href','protocol','scheme','authority','auth','user','password','hostname','port','path','directory','file','query','fragment'], + regex: /^(([^:\/?#]+):)?(?:\/\/((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?))?(?:(((?:[^?#\/]*\/)*)([^?#]*))(?:\?([^#]*))?(?:#(.*))?)/ + } +}; +api.parse = (str, parser) => { + const parsed = {}; + const o = api.parsers[parser || 'full']; + const m = o.regex.exec(str); + let i = o.keys.length; + while(i--) { + parsed[o.keys[i]] = (m[i] === undefined) ? null : m[i]; + } + + // remove default ports in found in URLs + if((parsed.scheme === 'https' && parsed.port === '443') || + (parsed.scheme === 'http' && parsed.port === '80')) { + parsed.href = parsed.href.replace(':' + parsed.port, ''); + parsed.authority = parsed.authority.replace(':' + parsed.port, ''); + parsed.port = null; + } + + parsed.normalizedPath = api.removeDotSegments(parsed.path); + return parsed; +}; + +/** + * Prepends a base IRI to the given relative IRI. + * + * @param base the base IRI. + * @param iri the relative IRI. + * + * @return the absolute IRI. + */ +api.prependBase = (base, iri) => { + // skip IRI processing + if(base === null) { + return iri; + } + // already an absolute IRI + if(iri.indexOf(':') !== -1) { + return iri; + } + + // parse base if it is a string + if(types.isString(base)) { + base = api.parse(base || ''); + } + + // parse given IRI + const rel = api.parse(iri); + + // per RFC3986 5.2.2 + const transform = { + protocol: base.protocol || '' + }; + + if(rel.authority !== null) { + transform.authority = rel.authority; + transform.path = rel.path; + transform.query = rel.query; + } else { + transform.authority = base.authority; + + if(rel.path === '') { + transform.path = base.path; + if(rel.query !== null) { + transform.query = rel.query; + } else { + transform.query = base.query; + } + } else { + if(rel.path.indexOf('/') === 0) { + // IRI represents an absolute path + transform.path = rel.path; + } else { + // merge paths + var path = base.path; + + // append relative path to the end of the last directory from base + path = path.substr(0, path.lastIndexOf('/') + 1); + if(path.length > 0 && path.substr(-1) !== '/') { + path += '/'; + } + path += rel.path; + + transform.path = path; + } + transform.query = rel.query; + } + } + + if(rel.path !== '') { + // remove slashes and dots in path + transform.path = api.removeDotSegments(transform.path); + } + + // construct URL + let rval = transform.protocol; + if(transform.authority !== null) { + rval += '//' + transform.authority; + } + rval += transform.path; + if(transform.query !== null) { + rval += '?' + transform.query; + } + if(rel.fragment !== null) { + rval += '#' + rel.fragment; + } + + // handle empty base + if(rval === '') { + rval = './'; + } + + return rval; +}; + +/** + * Removes a base IRI from the given absolute IRI. + * + * @param base the base IRI. + * @param iri the absolute IRI. + * + * @return the relative IRI if relative to base, otherwise the absolute IRI. 
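The RFC 3986 merge logic in `prependBase` is easier to follow with concrete resolutions. A small sketch, assuming the module is also exposed as `jsonld.url` (the url tests later in this diff call `jsonld.url.isAbsolute`, so the same export is assumed here); `require('jsonld/lib/url')` should work as well:

```js
const url = require('jsonld').url; // assumed export, see tests/misc.js below

// the relative path is merged onto the base path up to its last '/'
console.log(url.prependBase('http://example.com/a/b', 'c/d'));
// -> 'http://example.com/a/c/d'

// dot segments are collapsed via removeDotSegments (RFC 3986 5.2.4)
console.log(url.prependBase('http://example.com/a/b/', '../c'));
// -> 'http://example.com/a/c'

// anything containing ':' is treated as already absolute and passed through
console.log(url.prependBase('http://example.com/', 'urn:example:x'));
// -> 'urn:example:x'
```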
+ */ +api.removeBase = (base, iri) => { + // skip IRI processing + if(base === null) { + return iri; + } + + if(types.isString(base)) { + base = api.parse(base || ''); + } + + // establish base root + let root = ''; + if(base.href !== '') { + root += (base.protocol || '') + '//' + (base.authority || ''); + } else if(iri.indexOf('//')) { + // support network-path reference with empty base + root += '//'; + } + + // IRI not relative to base + if(iri.indexOf(root) !== 0) { + return iri; + } + + // remove root from IRI and parse remainder + const rel = api.parse(iri.substr(root.length)); + + // remove path segments that match (do not remove last segment unless there + // is a hash or query) + const baseSegments = base.normalizedPath.split('/'); + const iriSegments = rel.normalizedPath.split('/'); + const last = (rel.fragment || rel.query) ? 0 : 1; + while(baseSegments.length > 0 && iriSegments.length > last) { + if(baseSegments[0] !== iriSegments[0]) { + break; + } + baseSegments.shift(); + iriSegments.shift(); + } + + // use '../' for each non-matching base segment + let rval = ''; + if(baseSegments.length > 0) { + // don't count the last segment (if it ends with '/' last path doesn't + // count and if it doesn't end with '/' it isn't a path) + baseSegments.pop(); + for(let i = 0; i < baseSegments.length; ++i) { + rval += '../'; + } + } + + // prepend remaining segments + rval += iriSegments.join('/'); + + // add query and hash + if(rel.query !== null) { + rval += '?' + rel.query; + } + if(rel.fragment !== null) { + rval += '#' + rel.fragment; + } + + // handle empty base + if(rval === '') { + rval = './'; + } + + return rval; +}; + +/** + * Removes dot segments from a URL path. + * + * @param path the path to remove dot segments from. + */ +api.removeDotSegments = path => { + // RFC 3986 5.2.4 (reworked) + + // empty path shortcut + if(path.length === 0) { + return ''; + } + + const input = path.split('/'); + const output = []; + + while(input.length > 0) { + const next = input.shift(); + const done = input.length === 0; + + if(next === '.') { + if(done) { + // ensure output has trailing / + output.push(''); + } + continue; + } + + if(next === '..') { + output.pop(); + if(done) { + // ensure output has trailing / + output.push(''); + } + continue; + } + + output.push(next); + } + + // ensure output has leading / + if(output.length > 0 && output[0] !== '') { + output.unshift(''); + } + if(output.length === 1 && output[0] === '') { + return '/'; + } + + return output.join('/'); +}; + +// TODO: time better isAbsolute/isRelative checks using full regexes: +// http://jmrware.com/articles/2009/uri_regexp/URI_regex.html + +// regex to check for absolute IRI (starting scheme and ':') or blank node IRI +const isAbsoluteRegex = /^([A-Za-z][A-Za-z0-9+-.]*|_):/; + +/** + * Returns true if the given value is an absolute IRI or blank node IRI, false + * if not. + * Note: This weak check only checks for a correct starting scheme. + * + * @param v the value to check. + * + * @return true if the value is an absolute IRI, false if not. + */ +api.isAbsolute = v => types.isString(v) && isAbsoluteRegex.test(v); + +/** + * Returns true if the given value is a relative IRI, false if not. + * Note: this is a weak check. + * + * @param v the value to check. + * + * @return true if the value is a relative IRI, false if not. 
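`removeBase` is the inverse operation (used when compacting IRIs against `@base`), and `removeDotSegments` is the RFC 3986 section 5.2.4 path normalizer both functions rely on. A minimal sketch of their behavior, under the same assumed `jsonld.url` export as above:

```js
const url = require('jsonld').url; // assumed export

// '.' segments are dropped, '..' pops the previous segment
console.log(url.removeDotSegments('/a/b/../c/./d'));
// -> '/a/c/d'

// an IRI under the base becomes a relative reference...
console.log(url.removeBase('http://example.com/a/', 'http://example.com/a/b#c'));
// -> 'b#c'

// ...while an IRI outside the base is returned unchanged
console.log(url.removeBase('http://example.com/a/', 'https://other.example/x'));
// -> 'https://other.example/x'
```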
+ */ +api.isRelative = v => types.isString(v); diff --git a/lib/util.js b/lib/util.js new file mode 100644 index 00000000..970551b8 --- /dev/null +++ b/lib/util.js @@ -0,0 +1,482 @@ +/* + * Copyright (c) 2017 Digital Bazaar, Inc. All rights reserved. + */ +'use strict'; + +const graphTypes = require('./graphTypes'); +const types = require('./types'); +// TODO: move `IdentifierIssuer` to its own package +const IdentifierIssuer = require('rdf-canonize').IdentifierIssuer; +const JsonLdError = require('./JsonLdError'); + +// constants +const REGEX_LINK_HEADERS = /(?:<[^>]*?>|"[^"]*?"|[^,])+/g; +const REGEX_LINK_HEADER = /\s*<([^>]*?)>\s*(?:;\s*(.*))?/; +const REGEX_LINK_HEADER_PARAMS = + /(.*?)=(?:(?:"([^"]*?)")|([^"]*?))\s*(?:(?:;\s*)|$)/g; + +const DEFAULTS = { + headers: { + accept: 'application/ld+json, application/json' + } +}; + +const api = {}; +module.exports = api; +api.IdentifierIssuer = IdentifierIssuer; + +// define setImmediate and nextTick +//// nextTick implementation with browser-compatible fallback //// +// from https://github.com/caolan/async/blob/master/lib/async.js + +// capture the global reference to guard against fakeTimer mocks +const _setImmediate = typeof setImmediate === 'function' && setImmediate; + +const _delay = _setImmediate ? + // not a direct alias (for IE10 compatibility) + fn => _setImmediate(fn) : + fn => setTimeout(fn, 0); + +if(typeof process === 'object' && typeof process.nextTick === 'function') { + api.nextTick = process.nextTick; +} else { + api.nextTick = _delay; +} +api.setImmediate = _setImmediate ? _delay : api.nextTick; + +/** + * Clones an object, array, or string/number. If a typed JavaScript object + * is given, such as a Date, it will be converted to a string. + * + * @param value the value to clone. + * + * @return the cloned value. + */ +api.clone = function(value) { + if(value && typeof value === 'object') { + let rval; + if(types.isArray(value)) { + rval = []; + for(let i = 0; i < value.length; ++i) { + rval[i] = api.clone(value[i]); + } + } else if(types.isObject(value)) { + rval = {}; + for(let key in value) { + rval[key] = api.clone(value[key]); + } + } else { + rval = value.toString(); + } + return rval; + } + return value; +}; + +/** + * Builds an HTTP headers object for making a JSON-LD request from custom + * headers and asserts the `accept` header isn't overridden. + * + * @param headers an object of headers with keys as header names and values + * as header values. + * + * @return an object of headers with a valid `accept` header. + */ +api.buildHeaders = (headers = {}) => { + const hasAccept = Object.keys(headers).some( + h => h.toLowerCase() === 'accept'); + + if(hasAccept) { + throw new RangeError( + 'Accept header may not be specified; only "' + + DEFAULTS.headers.accept + '" is supported.'); + } + + return Object.assign({'Accept': DEFAULTS.headers.accept}, headers); +}; + +/** + * Parses a link header. The results will be key'd by the value of "rel". + * + * Link: ; + * rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json" + * + * Parses as: { + * 'http://www.w3.org/ns/json-ld#context': { + * target: http://json-ld.org/contexts/person.jsonld, + * type: 'application/ld+json' + * } + * } + * + * If there is more than one "rel" with the same IRI, then entries in the + * resulting map for that "rel" will be arrays. + * + * @param header the link header to parse. 
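Two behaviors of the utilities above are worth calling out: `clone` deep-copies plain arrays and objects but stringifies typed objects such as `Date`, and `buildHeaders` rejects any attempt to override the `Accept` header. A sketch that loads `lib/util.js` directly — an assumption, since the module is internal rather than documented public API:

```js
const util = require('jsonld/lib/util'); // direct internal require, assumed stable

const original = {when: new Date(0), tags: ['a', 'b']};
const copy = util.clone(original);
console.log(typeof copy.when);            // 'string' -- Date converted via toString()
console.log(copy.tags !== original.tags); // true -- arrays are copied, not shared

console.log(util.buildHeaders({'X-Custom': '1'}));
// -> {Accept: 'application/ld+json, application/json', 'X-Custom': '1'}

try {
  util.buildHeaders({Accept: 'text/html'});
} catch(e) {
  console.log(e instanceof RangeError);   // true -- overriding Accept is rejected
}
```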
+ */ +api.parseLinkHeader = header => { + const rval = {}; + // split on unbracketed/unquoted commas + const entries = header.match(REGEX_LINK_HEADERS); + for(let i = 0; i < entries.length; ++i) { + let match = entries[i].match(REGEX_LINK_HEADER); + if(!match) { + continue; + } + const result = {target: match[1]}; + const params = match[2]; + while((match = REGEX_LINK_HEADER_PARAMS.exec(params))) { + result[match[1]] = (match[2] === undefined) ? match[3] : match[2]; + } + const rel = result['rel'] || ''; + if(Array.isArray(rval[rel])) { + rval[rel].push(result); + } else if(rel in rval) { + rval[rel] = [rval[rel], result]; + } else { + rval[rel] = result; + } + } + return rval; +}; + +/** + * Throws an exception if the given value is not a valid @type value. + * + * @param v the value to check. + */ +api.validateTypeValue = v => { + // can be a string or an empty object + if(types.isString(v) || types.isEmptyObject(v)) { + return; + } + + // must be an array + let isValid = false; + if(types.isArray(v)) { + // must contain only strings + isValid = true; + for(let i = 0; i < v.length; ++i) { + if(!(types.isString(v[i]))) { + isValid = false; + break; + } + } + } + + if(!isValid) { + throw new JsonLdError( + 'Invalid JSON-LD syntax; "@type" value must a string, an array of ' + + 'strings, or an empty object.', 'jsonld.SyntaxError', + {code: 'invalid type value', value: v}); + } +}; + +/** + * Returns true if the given subject has the given property. + * + * @param subject the subject to check. + * @param property the property to look for. + * + * @return true if the subject has the given property, false if not. + */ +api.hasProperty = (subject, property) => { + if(property in subject) { + const value = subject[property]; + return (!types.isArray(value) || value.length > 0); + } + return false; +}; + +/** + * Determines if the given value is a property of the given subject. + * + * @param subject the subject to check. + * @param property the property to check. + * @param value the value to check. + * + * @return true if the value exists, false if not. + */ +api.hasValue = (subject, property, value) => { + if(api.hasProperty(subject, property)) { + let val = subject[property]; + const isList = graphTypes.isList(val); + if(types.isArray(val) || isList) { + if(isList) { + val = val['@list']; + } + for(let i = 0; i < val.length; ++i) { + if(api.compareValues(value, val[i])) { + return true; + } + } + } else if(!types.isArray(value)) { + // avoid matching the set of values with an array value parameter + return api.compareValues(value, val); + } + } + return false; +}; + +/** + * Adds a value to a subject. If the value is an array, all values in the + * array will be added. + * + * @param subject the subject to add the value to. + * @param property the property that relates the value to the subject. + * @param value the value to add. + * @param [options] the options to use: + * [propertyIsArray] true if the property is always an array, false + * if not (default: false). + * [allowDuplicate] true to allow duplicates, false not to (uses a + * simple shallow comparison of subject ID or value) (default: true). 
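The doc comment above appears to have lost its example URL (the `<...>` link target was stripped); the sketch below restores the intent. `parseLinkHeader` keys each entry by its `rel` value, which is how the document loaders later in this diff locate an `http://www.w3.org/ns/json-ld#context` link. It is assumed here to be reachable as `jsonld.parseLinkHeader`, the same way `createDocumentLoader` in `tests/test-common.js` calls it:

```js
const jsonld = require('jsonld');

const header =
  '<http://json-ld.org/contexts/person.jsonld>; ' +
  'rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"';

const parsed = jsonld.parseLinkHeader(header);
console.log(parsed['http://www.w3.org/ns/json-ld#context']);
// -> {
//   target: 'http://json-ld.org/contexts/person.jsonld',
//   rel: 'http://www.w3.org/ns/json-ld#context',
//   type: 'application/ld+json'
// }
```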
+ */ +api.addValue = (subject, property, value, options) => { + options = options || {}; + if(!('propertyIsArray' in options)) { + options.propertyIsArray = false; + } + if(!('allowDuplicate' in options)) { + options.allowDuplicate = true; + } + + if(types.isArray(value)) { + if(value.length === 0 && options.propertyIsArray && + !(property in subject)) { + subject[property] = []; + } + for(let i = 0; i < value.length; ++i) { + api.addValue(subject, property, value[i], options); + } + } else if(property in subject) { + // check if subject already has value if duplicates not allowed + const hasValue = (!options.allowDuplicate && + api.hasValue(subject, property, value)); + + // make property an array if value not present or always an array + if(!types.isArray(subject[property]) && + (!hasValue || options.propertyIsArray)) { + subject[property] = [subject[property]]; + } + + // add new value + if(!hasValue) { + subject[property].push(value); + } + } else { + // add new value as set or single value + subject[property] = options.propertyIsArray ? [value] : value; + } +}; + +/** + * Gets all of the values for a subject's property as an array. + * + * @param subject the subject. + * @param property the property. + * + * @return all of the values for a subject's property as an array. + */ +api.getValues = (subject, property) => [].concat(subject[property] || []); + +/** + * Removes a property from a subject. + * + * @param subject the subject. + * @param property the property. + */ +api.removeProperty = (subject, property) => { + delete subject[property]; +}; + +/** + * Removes a value from a subject. + * + * @param subject the subject. + * @param property the property that relates the value to the subject. + * @param value the value to remove. + * @param [options] the options to use: + * [propertyIsArray] true if the property is always an array, false + * if not (default: false). + */ +api.removeValue = (subject, property, value, options) => { + options = options || {}; + if(!('propertyIsArray' in options)) { + options.propertyIsArray = false; + } + + // filter out value + const values = api.getValues(subject, property).filter( + e => !api.compareValues(e, value)); + + if(values.length === 0) { + api.removeProperty(subject, property); + } else if(values.length === 1 && !options.propertyIsArray) { + subject[property] = values[0]; + } else { + subject[property] = values; + } +}; + +/** + * Relabels all blank nodes in the given JSON-LD input. + * + * @param input the JSON-LD input. + * @param [options] the options to use: + * [issuer] an IdentifierIssuer to use to label blank nodes. + */ +api.relabelBlankNodes = (input, options) => { + options = options || {}; + const issuer = options.issuer || new IdentifierIssuer('_:b'); + return _labelBlankNodes(issuer, input); +}; + +/** + * Compares two JSON-LD values for equality. Two JSON-LD values will be + * considered equal if: + * + * 1. They are both primitives of the same type and value. + * 2. They are both @values with the same @value, @type, @language, + * and @index, OR + * 3. They both have @ids they are the same. + * + * @param v1 the first value. + * @param v2 the second value. + * + * @return true if v1 and v2 are considered equal, false if not. + */ +api.compareValues = (v1, v2) => { + // 1. equal primitives + if(v1 === v2) { + return true; + } + + // 2. 
equal @values + if(graphTypes.isValue(v1) && graphTypes.isValue(v2) && + v1['@value'] === v2['@value'] && + v1['@type'] === v2['@type'] && + v1['@language'] === v2['@language'] && + v1['@index'] === v2['@index']) { + return true; + } + + // 3. equal @ids + if(types.isObject(v1) && ('@id' in v1) && types.isObject(v2) && ('@id' in v2)) { + return v1['@id'] === v2['@id']; + } + + return false; +}; + +/** + * Compares two strings first based on length and then lexicographically. + * + * @param a the first string. + * @param b the second string. + * + * @return -1 if a < b, 1 if a > b, 0 if a == b. + */ +api.compareShortestLeast = (a, b) => { + if(a.length < b.length) { + return -1; + } + if(b.length < a.length) { + return 1; + } + if(a === b) { + return 0; + } + return (a < b) ? -1 : 1; +}; + +api.normalizeDocumentLoader = fn => { + if(fn.length < 2) { + return api.callbackify(fn); + } + + return async function(url) { + let callback = arguments[1]; + return new Promise((resolve, reject) => { + try { + fn(url, (err, remoteDoc) => { + if(typeof callback === 'function') { + return _invokeCallback(callback, err, remoteDoc); + } else if(err) { + reject(err); + } else { + resolve(remoteDoc); + } + }); + } catch(e) { + if(typeof callback === 'function') { + return _invokeCallback(callback, e); + } + reject(e); + } + }); + }; +}; + +api.callbackify = fn => { + return async function(...args) { + const callback = args[args.length - 1]; + if(typeof callback === 'function') { + args.pop(); + } + + let result; + try { + result = await fn.apply(null, args); + } catch(e) { + if(typeof callback === 'function') { + return _invokeCallback(callback, e); + } + throw e; + } + + if(typeof callback === 'function') { + return _invokeCallback(callback, null, result); + } + + return result; + }; +}; + +function _invokeCallback(callback, err, result) { + // execute on next tick to prevent "unhandled rejected promise" + // and simulate what would have happened in a promiseless API + api.nextTick(() => callback(err, result)); +} + +/** + * Labels the blank nodes in the given value using the given IdentifierIssuer. + * + * @param issuer the IdentifierIssuer to use. + * @param element the element with blank nodes to rename. + * + * @return the element. 
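To make the subject-manipulation helpers more concrete, here is a short sketch of `addValue`, `getValues`, and `compareShortestLeast` (the comparator used when ranking candidate terms: shortest first, then lexicographically). As before, loading `lib/util.js` directly is an assumption rather than documented public API:

```js
const util = require('jsonld/lib/util'); // assumed direct require

const subject = {'@id': 'ex:1'};
util.addValue(subject, 'ex:knows', 'ex:2');
util.addValue(subject, 'ex:knows', 'ex:3'); // a second value promotes the property to an array
util.addValue(subject, 'ex:knows', 'ex:3'); // duplicates are allowed by default
util.addValue(subject, 'ex:name', 'Jane', {propertyIsArray: true});

console.log(subject['ex:knows']);                   // ['ex:2', 'ex:3', 'ex:3']
console.log(subject['ex:name']);                    // ['Jane']
console.log(util.getValues(subject, 'ex:missing')); // []

// shortest-least ordering: length first, then lexicographic
console.log(['foaf:name', 'name', 'n'].sort(util.compareShortestLeast));
// -> ['n', 'name', 'foaf:name']
```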
+ */ +function _labelBlankNodes(issuer, element) { + if(types.isArray(element)) { + for(let i = 0; i < element.length; ++i) { + element[i] = _labelBlankNodes(issuer, element[i]); + } + } else if(graphTypes.isList(element)) { + element['@list'] = _labelBlankNodes(issuer, element['@list']); + } else if(types.isObject(element)) { + // relabel blank node + if(graphTypes.isBlankNode(element)) { + element['@id'] = issuer.getId(element['@id']); + } + + // recursively apply to all keys + const keys = Object.keys(element).sort(); + for(let ki = 0; ki < keys.length; ++ki) { + const key = keys[ki]; + if(key !== '@id') { + element[key] = _labelBlankNodes(issuer, element[key]); + } + } + } + + return element; +} diff --git a/package.json b/package.json index 59d5b716..66bd09d1 100644 --- a/package.json +++ b/package.json @@ -1,49 +1,81 @@ { "name": "jsonld", - "version": "0.4.13-0", + "version": "0.5.17-0", "description": "A JSON-LD Processor and API implementation in JavaScript.", - "homepage": "http://github.com/digitalbazaar/jsonld.js", + "homepage": "https://github.com/digitalbazaar/jsonld.js", "author": { "name": "Digital Bazaar, Inc.", "email": "support@digitalbazaar.com", - "url": "http://digitalbazaar.com/" + "url": "https://digitalbazaar.com/" }, "contributors": [ - { - "name": "Dave Longley", - "email": "dlongley@digitalbazaar.com" - } + "Dave Longley ", + "David I. Lehn " ], "repository": { "type": "git", - "url": "http://github.com/digitalbazaar/jsonld.js" + "url": "https://github.com/digitalbazaar/jsonld.js" }, "bugs": { "url": "https://github.com/digitalbazaar/jsonld.js/issues", "email": "support@digitalbazaar.com" }, "license": "BSD-3-Clause", - "main": "js/jsonld.js", + "main": "lib/index.js", + "files": [ + "browser/*.js", + "dist/*.js", + "dist/*.js.map", + "dist/node6/**/*.js", + "lib/*.js", + "lib/**/*.js" + ], "dependencies": { - "es6-promise": "^2.0.0", - "pkginfo": "~0.4.0", - "request": "^2.81.0", + "rdf-canonize": "^0.2.1", + "request": "^2.83.0", + "semver": "^5.5.0", "xmldom": "0.1.19" }, "devDependencies": { - "chai": "^3.5.0", - "commander": "^2.8.0", + "babel-cli": "^6.26.0", + "babel-loader": "^7.1.2", + "babel-plugin-transform-object-rest-spread": "^6.26.0", + "babel-preset-env": "^1.6.1", + "babel-preset-node6-es6": "^11.2.5", + "browserify": "^15.2.0", + "chai": "^4.1.2", + "commander": "^2.13.0", + "core-js": "^2.5.3", "cors": "^2.7.1", - "express": "^4.13.3", + "express": "^4.16.2", + "fs-extra": "^5.0.0", "istanbul": "^0.4.3", + "join-path-js": "0.0.0", "jscs": "^3.0.0", "jshint": "^2.9.1", - "mocha": "^3.3.0", - "mocha-phantomjs": "~3.5.6", - "phantomjs": "~1.9.18" + "karma": "^2.0.0", + "karma-babel-preprocessor": "^7.0.0", + "karma-browserify": "^5.1.3", + "karma-chrome-launcher": "^2.2.0", + "karma-edge-launcher": "^0.4.2", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-mocha": "^1.3.0", + "karma-mocha-reporter": "^2.2.5", + "karma-phantomjs-launcher": "^1.0.4", + "karma-safari-launcher": "^1.0.0", + "karma-server-side": "^1.7.0", + "karma-sourcemap-loader": "^0.3.7", + "karma-tap-reporter": "0.0.6", + "karma-webpack": "^2.0.5", + "mocha": "^5.0.0", + "mocha-lcov-reporter": "^1.3.0", + "regenerator-runtime": "^0.11.1", + "webpack": "^3.7.1", + "webpack-merge": "^4.1.1" }, "engines": { - "node": "*" + "node": ">=6" }, "keywords": [ "JSON", @@ -54,20 +86,26 @@ "jsonld" ], "scripts": { - "test-local": "make test-local", - "test-node": "make test-node", - "test-browser": "make test-browser", - "test": "make 
test", - "coverage": "make test-coverage", - "coverage-report": "make test-coverage-report", - "jscs": "jscs js/jsonld.js tests/*.js", - "jshint": "jshint js/jsonld.js tests/*.js" + "prepublish": "npm run build", + "build": "npm run build-webpack && npm run build-node6", + "build-webpack": "webpack", + "build-node6": "babel --no-babelrc --out-dir dist/node6 --presets=node6-es6 lib/*.js lib/*/*.js", + "fetch-test-suites": "npm run fetch-json-ld-org-test-suite && npm run fetch-normalization-test-suite", + "fetch-json-ld-org-test-suite": "if [ ! -e test-suites/json-ld.org ]; then git clone --depth 1 https://github.com/json-ld/json-ld.org.git test-suites/json-ld.org; fi", + "fetch-normalization-test-suite": "if [ ! -e test-suites/normalization ]; then git clone --depth 1 https://github.com/json-ld/normalization.git test-suites/normalization; fi", + "test": "NODE_ENV=test mocha --delay -t 30000 -A -R ${REPORTER:-spec} tests/test.js", + "test-karma": "karma start", + "coverage": "istanbul cover ./node_modules/.bin/_mocha -- --delay -t 30000 -u exports -R ${REPORTER:-spec} tests/test.js", + "coverage-lcov": "istanbul cover ./node_modules/.bin/_mocha --report lcovonly -- --delay -t 30000 -u exports -R ${REPORTER:-spec} tests/test.js", + "coverage-report": "istanbul report", + "jscs": "jscs lib/*.js tests/*.js", + "jshint": "jshint lib/*.js tests/*.js" }, "browser": { + "lib/index.js": "./lib/jsonld.js", "crypto": "./browser/ignore.js", "http": "./browser/ignore.js", "jsonld-request": "./browser/ignore.js", - "pkginfo": "./browser/ignore.js", "request": "./browser/ignore.js", "url": "./browser/ignore.js", "util": "./browser/ignore.js", diff --git a/tests/earl-report.js b/tests/earl-report.js new file mode 100644 index 00000000..b61bdd94 --- /dev/null +++ b/tests/earl-report.js @@ -0,0 +1,97 @@ +/** + * EARL Report + * + * @author Dave Longley + * + * Copyright (c) 2011-2017 Digital Bazaar, Inc. All rights reserved. + */ + +/** + * Create an EARL Reporter. + * + * @param options {Object} reporter options + * id: {String} report id + */ +function EarlReport(options) { + var today = new Date(); + today = today.getFullYear() + '-' + + (today.getMonth() < 9 ? + '0' + (today.getMonth() + 1) : today.getMonth() + 1) + '-' + + (today.getDate() < 10 ? 
'0' + today.getDate() : today.getDate()); + this.id = options.id; + this._report = { + '@context': { + 'doap': 'http://usefulinc.com/ns/doap#', + 'foaf': 'http://xmlns.com/foaf/0.1/', + 'dc': 'http://purl.org/dc/terms/', + 'earl': 'http://www.w3.org/ns/earl#', + 'xsd': 'http://www.w3.org/2001/XMLSchema#', + 'doap:homepage': {'@type': '@id'}, + 'doap:license': {'@type': '@id'}, + 'dc:creator': {'@type': '@id'}, + 'foaf:homepage': {'@type': '@id'}, + 'subjectOf': {'@reverse': 'earl:subject'}, + 'earl:assertedBy': {'@type': '@id'}, + 'earl:mode': {'@type': '@id'}, + 'earl:test': {'@type': '@id'}, + 'earl:outcome': {'@type': '@id'}, + 'dc:date': {'@type': 'xsd:date'} + }, + '@id': 'https://github.com/digitalbazaar/jsonld.js', + '@type': [ + 'doap:Project', + 'earl:TestSubject', + 'earl:Software' + ], + 'doap:name': 'jsonld.js', + 'dc:title': 'jsonld.js', + 'doap:homepage': 'https://github.com/digitalbazaar/jsonld.js', + 'doap:license': + 'https://github.com/digitalbazaar/jsonld.js/blob/master/LICENSE', + 'doap:description': 'A JSON-LD processor for JavaScript', + 'doap:programming-language': 'JavaScript', + 'dc:creator': 'https://github.com/dlongley', + 'doap:developer': { + '@id': 'https://github.com/dlongley', + '@type': [ + 'foaf:Person', + 'earl:Assertor' + ], + 'foaf:name': 'Dave Longley', + 'foaf:homepage': 'https://github.com/dlongley' + }, + 'dc:date': { + '@value': today, + '@type': 'xsd:date' + }, + 'subjectOf': [] + }; + this._report['@id'] += '#' + this.id; + this._report['doap:name'] += ' ' + this.id; + this._report['dc:title'] += ' ' + this.id; +} + +EarlReport.prototype.addAssertion = function(test, pass) { + this._report.subjectOf.push({ + '@type': 'earl:Assertion', + 'earl:assertedBy': this._report['doap:developer']['@id'], + 'earl:mode': 'earl:automatic', + 'earl:test': test['@id'], + 'earl:result': { + '@type': 'earl:TestResult', + 'dc:date': new Date().toISOString(), + 'earl:outcome': pass ? 'earl:passed' : 'earl:failed' + } + }); + return this; +}; + +EarlReport.prototype.report = function() { + return this._report; +}; + +EarlReport.prototype.reportJson = function() { + return JSON.stringify(this._report, null, 2); +}; + +module.exports = EarlReport; diff --git a/tests/graph-container.js b/tests/graph-container.js new file mode 100644 index 00000000..4e147d18 --- /dev/null +++ b/tests/graph-container.js @@ -0,0 +1,139 @@ +/** + * Temporary graph-container tests. 
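The reporter above is driven by the Node and Karma runners later in this diff. A minimal usage sketch — the test `@id` and report id are invented for illustration:

```js
const EarlReport = require('./tests/earl-report'); // path within this repository

const earl = new EarlReport({id: 'node'}); // id is appended to the report's @id/name

// record a pass/fail assertion for one manifest test, keyed by its @id
earl.addAssertion(
  {'@id': 'https://json-ld.org/test-suite/tests/expand-manifest.jsonld#t0001'},
  true);

// reportJson() is what the runners write to the file named by the EARL env var
console.log(earl.reportJson());
```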
+ */ +const jsonld = require('..'); +const assert = require('assert'); + +describe('@graph container', () => { + it('should expand @graph container', done => { + const doc = { + '@context': { + '@version': 1.1, + 'input': {'@id': 'foo:input', '@container': '@graph'}, + 'value': 'foo:value' + }, + input: { + value: 'x' + } + }; + jsonld.expand(doc, (err, expanded) => { + assert.ifError(err); + assert.deepEqual(expanded, [{ + "foo:input": [{ + "@graph": [{ + "foo:value": [{ + "@value": "x" + }] + }] + }] + }]); + done(); + }); + }); + + it('should expand ["@graph", "@set"] container', done => { + const doc = { + '@context': { + '@version': 1.1, + 'input': {'@id': 'foo:input', '@container': ['@graph', '@set']}, + 'value': 'foo:value' + }, + input: [{ + value: 'x' + }] + }; + jsonld.expand(doc, (err, expanded) => { + assert.ifError(err); + assert.deepEqual(expanded, [{ + "foo:input": [{ + "@graph": [{ + "foo:value": [{ + "@value": "x" + }] + }] + }] + }]); + done(); + }); + }); + + it('should expand and then compact @graph container', done => { + const doc = { + '@context': { + '@version': 1.1, + 'input': {'@id': 'foo:input', '@container': '@graph'}, + 'value': 'foo:value' + }, + input: { + value: 'x' + } + }; + jsonld.expand(doc, (err, expanded) => { + assert.ifError(err); + + jsonld.compact(expanded, doc['@context'], (err, compacted) => { + assert.ifError(err); + assert.deepEqual(compacted, { + "@context": { + "@version": 1.1, + "input": { + "@id": "foo:input", + "@container": "@graph" + }, + "value": "foo:value" + }, + "input": { + "value": "x" + } + }); + done(); + }); + }); + }); + + it('should expand and then compact @graph container into a @set', done => { + const doc = { + '@context': { + '@version': 1.1, + 'input': {'@id': 'foo:input', '@container': '@graph'}, + 'value': 'foo:value' + }, + input: { + value: 'x' + } + }; + const newContext = { + '@context': { + '@version': 1.1, + 'input': {'@id': 'foo:input', '@container': ['@graph', '@set']}, + 'value': 'foo:value' + } + }; + jsonld.expand(doc, (err, expanded) => { + assert.ifError(err); + + jsonld.compact(expanded, newContext, (err, compacted) => { + assert.ifError(err); + assert.deepEqual(compacted, { + "@context": { + "@version": 1.1, + "input": { + "@id": "foo:input", + "@container": [ + "@graph", + "@set" + ] + }, + "value": "foo:value" + }, + "input": [ + { + "value": "x" + } + ] + }); + done(); + }); + }); + }); +}); diff --git a/tests/misc.js b/tests/misc.js new file mode 100644 index 00000000..59d3ebc3 --- /dev/null +++ b/tests/misc.js @@ -0,0 +1,210 @@ +/** + * Misc tests. 
+ */ +const jsonld = require('..'); +const assert = require('assert'); + +// TODO: need more tests for jsonld.link and jsonld.merge + +describe('link tests', () => { + const doc = { + '@id': 'ex:1', + 'a:foo': { + '@id': 'ex:1' + } + }; + + it('should create a circular link', done => { + jsonld.link(doc, {}, (err, output) => { + assert.ifError(err); + output = output['@graph'][0]; + assert.equal(output, output['a:foo']); + done(); + }); + }); +}); + +describe('merge tests', () => { + const docA = {'@id': 'ex:1', 'a:foo': [{'@value': 1}]}; + const docB = {'@id': 'ex:1', 'b:foo': [{'@value': 2}]}; + const merged = [Object.assign({}, docA, docB)]; + + it('should merge nodes from two different documents', done => { + jsonld.merge([docA, docB], (err, output) => { + assert.ifError(err); + assert.deepEqual(output, merged); + done(); + }); + }); +}); + +describe('other toRDF tests', () => { + const emptyRdf = []; + + it('should process with options and callback', done => { + jsonld.toRDF({}, {}, (err, output) => { + assert.ifError(err); + assert.deepEqual(output, emptyRdf); + done(); + }); + }); + + it('should process with no options and callback', done => { + jsonld.toRDF({}, (err, output) => { + assert.ifError(err); + assert.deepEqual(output, emptyRdf); + done(); + }); + }); + + it('should process with options and promise', done => { + const p = jsonld.toRDF({}, {}); + assert(p instanceof Promise); + p.catch(e => { + assert.fail(); + }).then(output => { + assert.deepEqual(output, emptyRdf); + done(); + }); + }); + + it('should process with no options and promise', done => { + const p = jsonld.toRDF({}); + assert(p instanceof Promise); + p.catch(e => { + assert.fail(); + }).then(output => { + assert.deepEqual(output, emptyRdf); + done(); + }); + }); + + it('should fail with no args and callback', done => { + jsonld.toRDF((err, output) => { + assert(err); + done(); + }); + }); + + it('should fail with no args and promise', done => { + const p = jsonld.toRDF(); + assert(p instanceof Promise); + p.then(output => { + assert.fail(); + }).catch(e => { + assert(e); + done(); + }) + }); + + it('should fail for bad format and callback', done => { + jsonld.toRDF({}, {format: 'bogus'}, (err, output) => { + assert(err); + assert.equal(err.name, 'jsonld.UnknownFormat'); + done(); + }); + }); + + it('should fail for bad format and promise', done => { + const p = jsonld.toRDF({}, {format: 'bogus'}); + assert(p instanceof Promise); + p.then(() => { + assert.fail(); + }).catch(e => { + assert(e); + assert.equal(e.name, 'jsonld.UnknownFormat'); + done(); + }); + }); + + it('should handle nquads format', done => { + const doc = { + '@id': 'https://example.com/', + 'https://example.com/test': 'test' + }; + jsonld.toRDF(doc, {format: 'application/nquads'}, (err, output) => { + assert.ifError(err); + assert.equal( + output, + ' "test" .\n'); + done(); + }); + }); +}); + +describe('loading multiple levels of contexts', () => { + const documentLoader = url => { + if(url === 'https://example.com/context1') { + return { + document: { + '@context': { + 'ex': 'https://example.com/#' + } + }, + contextUrl: null, + documentUrl: url + } + } + if(url === 'https://example.com/context2') { + return { + document: { + '@context': { + 'ex': 'https://example.com/#' + } + }, + contextUrl: null, + documentUrl: url + } + } + }; + const doc = { + '@context': 'https://example.com/context1', + 'ex:foo': { + '@context': 'https://example.com/context2', + 'ex:bar': 'test' + } + }; + const expected = [{ + 'https://example.com/#foo': [{ + 
'https://example.com/#bar': [{ + '@value': 'test' + }] + }] + }]; + + it('should handle loading multiple levels of contexts (promise)', () => { + return jsonld.expand(doc, {documentLoader}).then(output => { + assert.deepEqual(output, expected); + }); + }); + + it('should handle loading multiple levels of contexts (callback)', done => { + jsonld.expand(doc, {documentLoader}, (err, output) => { + assert.ifError(err); + assert.deepEqual(output, expected); + done(); + }); + }); +}); + +describe('url tests', () => { + it('should detect absolute IRIs', done => { + // absolute IRIs + assert(jsonld.url.isAbsolute('a:')); + assert(jsonld.url.isAbsolute('a:b')); + assert(jsonld.url.isAbsolute('a:b:c')); + // blank nodes + assert(jsonld.url.isAbsolute('_:')); + assert(jsonld.url.isAbsolute('_:a')); + assert(jsonld.url.isAbsolute('_:a:b')); + + // not absolute or blank node + assert(!jsonld.url.isAbsolute(':')); + assert(!jsonld.url.isAbsolute('a')); + assert(!jsonld.url.isAbsolute('/:')); + assert(!jsonld.url.isAbsolute('/a:')); + assert(!jsonld.url.isAbsolute('/a:b')); + assert(!jsonld.url.isAbsolute('_')); + done(); + }); +}); diff --git a/test/node-document-loader-tests.js b/tests/node-document-loader-tests.js similarity index 98% rename from test/node-document-loader-tests.js rename to tests/node-document-loader-tests.js index fb522012..8868a6c5 100644 --- a/test/node-document-loader-tests.js +++ b/tests/node-document-loader-tests.js @@ -3,7 +3,7 @@ * * @author goofballLogic */ -var jsonld = require('../js/jsonld'); +var jsonld = require('..'); var assert = require('assert'); describe('For the node.js document loader', function() { @@ -85,7 +85,7 @@ describe('For the node.js document loader', function() { 'Accept': 'video/mp4' }; - it('constructing the document loader should fail', function() { + it('constructing the document loader should fail', function(done) { var expectedMessage = 'Accept header may not be specified as an option; only "application/ld+json, application/json" is supported.'; assert.throws( jsonld.useDocumentLoader.bind(jsonld, documentLoaderType, options), @@ -94,6 +94,7 @@ describe('For the node.js document loader', function() { assert.equal(err.message, expectedMessage); return true; }); + done(); }); }); diff --git a/tests/test-common.js b/tests/test-common.js new file mode 100644 index 00000000..a27c67c6 --- /dev/null +++ b/tests/test-common.js @@ -0,0 +1,754 @@ +/** + * Common test runner for JSON-LD. + * + * @author Dave Longley + * @author David I. Lehn + * + * Copyright (c) 2011-2017 Digital Bazaar, Inc. All rights reserved. 
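The renamed document-loader test above guards `jsonld.useDocumentLoader` against an overridden `Accept` header. For context, a hedged sketch of the calls involved — the `'node'` loader type and the shape of `options` beyond `headers` are assumptions based on these tests, not a specification of the loader API:

```js
const jsonld = require('jsonld');

// custom request headers are allowed...
jsonld.useDocumentLoader('node', {headers: {'X-Trace': 'abc'}});

// ...but overriding Accept throws synchronously, as the test asserts
try {
  jsonld.useDocumentLoader('node', {headers: {Accept: 'video/mp4'}});
} catch(e) {
  console.log(e.message);
  // 'Accept header may not be specified as an option; only
  //  "application/ld+json, application/json" is supported.'
}
```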
+ */ +const EarlReport = require('./earl-report'); +const join = require('join-path-js'); + +module.exports = function(options) { + +'use strict'; + +const assert = options.assert; +const jsonld = options.jsonld; + +const manifest = options.manifest || { + '@context': 'https://json-ld.org/test-suite/context.jsonld', + '@id': '', + '@type': 'mf:Manifest', + description: 'Top level jsonld.js manifest', + name: 'jsonld.js', + sequence: options.entries || [], + filename: '/' +}; + +const TEST_TYPES = { + 'jld:CompactTest': { + skip: { + specVersion: ['json-ld-1.1'] + }, + fn: 'compact', + params: [ + readTestUrl('input'), + readTestJson('context'), + createTestOptions() + ], + compare: compareExpectedJson + }, + 'jld:ExpandTest': { + skip: { + regex: [/#t[cmn]/, /#t008[0-7]/] + }, + fn: 'expand', + params: [ + readTestUrl('input'), + createTestOptions() + ], + compare: compareExpectedJson + }, + 'jld:FlattenTest': { + skip: { + specVersion: ['json-ld-1.1'] + }, + fn: 'flatten', + params: [ + readTestUrl('input'), + readTestJson('context'), + createTestOptions() + ], + compare: compareExpectedJson + }, + 'jld:FrameTest': { + skip: { + specVersion: ['json-ld-1.1'] + }, + fn: 'frame', + params: [ + readTestUrl('input'), + readTestJson('frame'), + createTestOptions() + ], + compare: compareExpectedJson + }, + 'jld:FromRDFTest': { + fn: 'fromRDF', + params: [ + readTestNQuads('input'), + createTestOptions({format: 'application/nquads'}) + ], + compare: compareExpectedJson + }, + 'jld:NormalizeTest': { + fn: 'normalize', + params: [ + readTestUrl('input'), + createTestOptions({format: 'application/nquads'}) + ], + compare: compareExpectedNQuads + }, + 'jld:ToRDFTest': { + skip: {}, + fn: 'toRDF', + params: [ + readTestUrl('input'), + createTestOptions({format: 'application/nquads'}) + ], + compare: compareExpectedNQuads + }, + 'rdfn:Urgna2012EvalTest': { + fn: 'normalize', + params: [ + readTestNQuads('action'), + createTestOptions({ + algorithm: 'URGNA2012', + inputFormat: 'application/nquads', + format: 'application/nquads' + }) + ], + compare: compareExpectedNQuads + }, + 'rdfn:Urdna2015EvalTest': { + fn: 'normalize', + params: [ + readTestNQuads('action'), + createTestOptions({ + algorithm: 'URDNA2015', + inputFormat: 'application/nquads', + format: 'application/nquads' + }) + ], + compare: compareExpectedNQuads + } +}; + +const SKIP_TESTS = []; + +// create earl report +if(options.earl && options.earl.filename) { + options.earl.report = new EarlReport({id: options.earl.id}); +} + +return new Promise((resolve, reject) => { + +// async generated tests +// _tests => [{suite}, ...] +// suite => { +// title: ..., +// tests: [test, ...], +// suites: [suite, ...] 
+// } +const _tests = []; + +return addManifest(manifest, _tests) + .then(() => { + _testsToMocha(_tests); + }).then(() => { + if(options.earl.report) { + describe('Writing EARL report to: ' + options.earl.filename, function() { + it('should print the earl report', function() { + return options.writeFile( + options.earl.filename, options.earl.report.reportJson()); + }); + }); + } + }).then(() => resolve()); + +// build mocha tests from local test structure +function _testsToMocha(tests) { + tests.forEach(suite => { + if(suite.skip) { + describe.skip(suite.title); + return; + } + describe(suite.title, () => { + suite.tests.forEach(test => { + if(test.skip) { + it.skip(test.title); + return; + } + it(test.title, test.f); + }); + _testsToMocha(suite.suites); + }); + suite.imports.forEach(f => { + options.import(f) + }); + }); +}; + +}); + +/** + * Adds the tests for all entries in the given manifest. + * + * @param manifest {Object} the manifest. + * @param parent {Object} the parent test structure + * @return {Promise} + */ +function addManifest(manifest, parent) { + return new Promise((resolve, reject) => { + // create test structure + const suite = { + title: manifest.name || manifest.label, + tests: [], + suites: [], + imports: [] + }; + parent.push(suite); + + // get entries and sequence (alias for entries) + var entries = [].concat( + getJsonLdValues(manifest, 'entries'), + getJsonLdValues(manifest, 'sequence') + ); + + var includes = getJsonLdValues(manifest, 'include'); + // add includes to sequence as jsonld files + for(var i = 0; i < includes.length; ++i) { + entries.push(includes[i] + '.jsonld'); + } + + // resolve all entry promises and process + Promise.all(entries).then(entries => { + let p = Promise.resolve(); + entries.forEach((entry) => { + if(typeof entry === 'string' && entry.endsWith('js')) { + // process later as a plain JavaScript file + suite.imports.push(entry); + return; + } else if(typeof entry === 'function') { + // process as a function that returns a promise + p = p.then(() => { + return entry(options); + }).then(childSuite => { + if(suite) { + suite.suites.push(childSuite); + } + }); + return; + } + p = p.then(() => { + return readManifestEntry(manifest, entry); + }).then(entry => { + if(isJsonLdType(entry, '__SKIP__')) { + // special local skip logic + suite.tests.push(entry); + } else if(isJsonLdType(entry, 'mf:Manifest')) { + // entry is another manifest + return addManifest(entry, suite.suites); + } else { + // assume entry is a test + return addTest(manifest, entry, suite.tests); + } + }); + }); + return p; + }).then(() => { + resolve() + }).catch(err => { + console.error(err); + reject(err) + }); + }); +} + +/** + * Adds a test. + * + * @param manifest {Object} the manifest. + * @param parent {Object} the test. + * @param tests {Array} the list of tests to add to. 
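The runner builds the plain `_tests` structure first and only materializes mocha suites afterwards, so asynchronously loaded manifests can resolve before `describe`/`it` run. A distilled, purely illustrative literal showing the shape `_testsToMocha` consumes (all names are invented):

```js
// illustrative only -- the fields mirror those read by _testsToMocha() above
const exampleSuite = {
  title: 'Expansion',            // manifest.name || manifest.label
  skip: false,                   // true => describe.skip(title)
  tests: [{
    title: '#t0001 drop free-floating nodes (promise)',
    skip: false,                 // true => it.skip(title)
    f: done => done()            // function built by makeFn() in addTest()
  }],
  suites: [],                    // nested manifests recurse here
  imports: []                    // plain .js entries handed to options.import()
};
console.log(exampleSuite.title);
```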
+ * @return {Promise} + */ +function addTest(manifest, test, tests) { + // expand @id and input base + var test_id = test['@id'] || test['id']; + //var number = test_id.substr(2); + test['@id'] = manifest.baseIri + basename(manifest.filename) + test_id; + test.base = manifest.baseIri + test.input; + test.manifest = manifest; + var description = test_id + ' ' + (test.purpose || test.name); + + tests.push({ + title: description + ' (promise)', + f: makeFn({useCallbacks: false}) + }); + tests.push({ + title: description + ' (callback)', + f: makeFn({useCallbacks: true}) + }); + + function makeFn({useCallbacks}) { + return function(done) { + var self = this; + self.timeout(5000); + var testInfo = TEST_TYPES[getJsonLdTestType(test)]; + + // skip unknown and explicitly skipped test types + var testTypes = Object.keys(TEST_TYPES); + if(!isJsonLdType(test, testTypes) || isJsonLdType(test, SKIP_TESTS)) { + var type = [].concat( + getJsonLdValues(test, '@type'), + getJsonLdValues(test, 'type') + ); + //console.log('Skipping test "' + test.name + '" of type: ' + type); + self.skip(); + } + + if(testInfo.skip && testInfo.skip.type) { + //console.log('Skipping test "' + test.name + '" of type: ' + type); + self.skip(); + } + + if(testInfo.skip && testInfo.skip.regex) { + testInfo.skip.regex.forEach(function(re) { + if(re.test(description)) { + //console.log('Skipping test "' + test.name + '" of description: ' + description); + self.skip(); + } + }); + } + + var testOptions = getJsonLdValues(test, 'option'); + + testOptions.forEach(function(opt) { + var processingModes = getJsonLdValues(opt, 'processingMode'); + processingModes.forEach(function(pm) { + var skipModes = []; + if(testInfo.skip && testInfo.skip.processingMode) { + skipModes = testInfo.skip.processingMode; + } + if(skipModes.indexOf(pm) !== -1) { + //console.log('Skipping test "' + test.name + '" of processing mode: ' + pm); + self.skip(); + } + }); + }); + + testOptions.forEach(function(opt) { + var specVersions = getJsonLdValues(opt, 'specVersion'); + specVersions.forEach(function(sv) { + var skipVersions = []; + if(testInfo.skip && testInfo.skip.specVersion) { + skipVersions = testInfo.skip.specVersion; + } + if(skipVersions.indexOf(sv) !== -1) { + //console.log('Skipping test "' + test.name + '" for spec version: ' + sv); + self.skip(); + } + }); + }); + + var fn = testInfo.fn; + var params = testInfo.params; + params = params.map(function(param) {return param(test);}); + var callback = function(err, result) { + Promise.resolve().then(() => { + if(isNegativeTest(test)) { + return compareExpectedError(test, err); + } else { + // default is to assume positive and skip isPositiveTest(test) check + if(err) { + throw err; + } + return testInfo.compare(test, result); + } + }).then(() => { + if(options.earl.report) { + options.earl.report.addAssertion(test, true); + } + done(); + }).catch(err => { + if(options.bailOnError) { + if(err.name !== 'AssertionError') { + console.error('\nError: ', JSON.stringify(err, null, 2)); + } + options.exit(); + } + if(options.earl.report) { + options.earl.report.addAssertion(test, false); + } + console.error('Error: ', JSON.stringify(err, null, 2)); + done(err); + }); + }; + + // add nodejs style callback + if(useCallbacks) { + params.push(callback); + } + + // resolve test data run + Promise.all(params).then(values => { + // get appropriate API and run test + var api = useCallbacks ? 
jsonld : jsonld.promises; + var promise = api[fn].apply(api, values); + + // promise style + if(!useCallbacks) { + return promise.then(callback.bind(null, null), callback); + } + }).catch(err => { + console.error(err); + throw err; + }); + }; + } +} + +function isPositiveTest(test) { + return isJsonLdType(test, 'jld:PositiveEvaluationTest'); +} + +function isNegativeTest(test) { + return isJsonLdType(test, 'jld:NegativeEvaluationTest'); +} + +function getJsonLdTestType(test) { + var types = Object.keys(TEST_TYPES); + for(var i = 0; i < types.length; ++i) { + if(isJsonLdType(test, types[i])) { + return types[i]; + } + } + return null; +} + +function readManifestEntry(manifest, entry) { + let p = Promise.resolve(); + let _entry = entry; + if(typeof entry === 'string') { + let _filename; + p = p.then(() => { + if(entry.endsWith('json') || entry.endsWith('jsonld')) { + // load as file + return entry; + } + // load as dir with manifest.jsonld + return joinPath(entry, 'manifest.jsonld'); + }).then(entry => { + const dir = dirname(manifest.filename); + return joinPath(dir, entry); + }).then(filename => { + _filename = filename; + return readJson(filename); + }).then(entry => { + _entry = entry; + _entry.filename = _filename; + return _entry; + }).catch(err => { + if(err.code === 'ENOENT') { + //console.log('File does not exist, skipping: ' + _filename); + // return a "skip" entry + _entry = { + type: '__SKIP__', + title: 'Not found, skipping: ' + _filename, + filename: _filename, + skip: true + }; + return; + } + throw err; + }); + } + return p.then(() => { + _entry.dirname = dirname(_entry.filename || manifest.filename); + return _entry; + }); +} + +function readTestUrl(property) { + return function(test) { + if(!test[property]) { + return null; + } + return test.manifest.baseIri + test[property]; + }; +} + +function readTestJson(property) { + return function(test) { + if(!test[property]) { + return null; + } + return joinPath(test.dirname, test[property]) + .then(readJson); + }; +} + +function readTestNQuads(property) { + return function(test) { + if(!test[property]) { + return null; + } + return joinPath(test.dirname, test[property]) + .then(readFile); + }; +} + +function createTestOptions(opts) { + return function(test) { + var options = { + documentLoader: createDocumentLoader(test) + }; + var httpOptions = ['contentType', 'httpLink', 'httpStatus', 'redirectTo']; + var testOptions = test.option || {}; + for(var key in testOptions) { + if(httpOptions.indexOf(key) === -1) { + options[key] = testOptions[key]; + } + } + if(opts) { + // extend options + for(var key in opts) { + options[key] = opts[key]; + } + } + let p = Promise.resolve(); + for(var key in options) { + if(key === 'expandContext') { + p = p.then(() => { + return joinPath(test.dirname, options[key]); + }).then(filename => { + return readJson(filename); + }).then(json => { + options[key] = json; + }); + } + } + + return p.then(() => options); + }; +} + +// find the expected output property or throw error +function _getExpectProperty(test) { + if('expect' in test) { + return 'expect'; + } else if('result' in test) { + return 'result'; + } else { + throw Error('No expected output property found'); + } +} + +function compareExpectedJson(test, result) { + let _expect; + return readTestJson(_getExpectProperty(test))(test).then(expect => { + _expect = expect; + assert.deepEqual(result, expect); + }).catch(err => { + if(options.bailOnError) { + console.log('\nTEST FAILED\n'); + console.log('EXPECTED: ' + JSON.stringify(_expect, null, 2)); 
+ console.log('ACTUAL: ' + JSON.stringify(result, null, 2)); + } + throw err; + }); +} + +function compareExpectedNQuads(test, result) { + let _expect; + return readTestNQuads(_getExpectProperty(test))(test).then(expect => { + _expect = expect; + assert.equal(result, expect); + }).catch(err => { + if(options.bailOnError) { + console.log('\nTEST FAILED\n'); + console.log('EXPECTED:\n' + _expect); + console.log('ACTUAL:\n' + result); + } + throw err; + }); +} + +function compareExpectedError(test, err) { + let expect; + let result; + return Promise.resolve().then(() => { + expect = test[_getExpectProperty(test)]; + result = getJsonLdErrorCode(err); + assert.ok(err); + assert.equal(result, expect); + }).catch(err => { + if(options.bailOnError) { + console.log('\nTEST FAILED\n'); + console.log('EXPECTED: ' + expect); + console.log('ACTUAL: ' + result); + } + throw err; + }); +} + +function isJsonLdType(node, type) { + var nodeType = [].concat( + getJsonLdValues(node, '@type'), + getJsonLdValues(node, 'type') + ); + type = Array.isArray(type) ? type : [type]; + for(var i = 0; i < type.length; ++i) { + if(nodeType.indexOf(type[i]) !== -1) { + return true; + } + } + return false; +} + +function getJsonLdValues(node, property) { + var rval = []; + if(property in node) { + rval = node[property]; + if(!Array.isArray(rval)) { + rval = [rval]; + } + } + return rval; +} + +function getJsonLdErrorCode(err) { + if(!err) { + return null; + } + if(err.details) { + if(err.details.code) { + return err.details.code; + } + if(err.details.cause) { + return getJsonLdErrorCode(err.details.cause); + } + } + return err.name; +} + +function readJson(filename) { + return readFile(filename).then((data) => { + return JSON.parse(data); + }); +} + +function readFile(filename) { + return options.readFile(filename); +} + +function joinPath() { + return Promise.resolve( + join.apply(null, Array.prototype.slice.call(arguments))); +} + +function dirname(filename) { + if(options.nodejs) { + return options.nodejs.path.dirname(filename); + } + var idx = filename.lastIndexOf('/'); + if(idx === -1) { + return filename; + } + return filename.substr(0, idx); +} + +function basename(filename) { + if(options.nodejs) { + return options.nodejs.path.basename(filename); + } + var idx = filename.lastIndexOf('/'); + if(idx === -1) { + return filename; + } + return filename.substr(idx + 1); +} + +/** + * Creates a test remote document loader. + * + * @param test the test to use the document loader for. + * + * @return the document loader. 
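Negative tests compare the manifest's expected error code against whatever `getJsonLdErrorCode` can extract: `details.code` first, then a nested `details.cause`, falling back to `err.name`. A tiny illustration with a fabricated error object shaped like a wrapped `JsonLdError`:

```js
// fabricated error for illustration -- a top-level error wrapping a cause
const err = {
  name: 'jsonld.ExpandError',
  details: {
    cause: {
      name: 'jsonld.SyntaxError',
      details: {code: 'invalid type value'}
    }
  }
};

// getJsonLdErrorCode(err) above returns 'invalid type value', which is what
// compareExpectedError() asserts against the test's `expect`/`result` property.
console.log(err.details.cause.details.code); // 'invalid type value'
```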
+ */ +function createDocumentLoader(test) { + const _httpTestSuiteBase = 'http://json-ld.org/test-suite'; + const _httpsTestSuiteBase = 'https://json-ld.org/test-suite'; + var localLoader = function(url, callback) { + // always load remote-doc tests remotely in node + if(options.nodejs && test.manifest.name === 'Remote document') { + return jsonld.loadDocument(url, callback); + } + + // FIXME: this check only works for main test suite and will not work if: + // - running other tests and main test suite not installed + // - use other absolute URIs but want to load local files + var isTestSuite = + url.startsWith(_httpTestSuiteBase) || + url.startsWith(_httpsTestSuiteBase); + // TODO: improve this check + var isRelative = url.indexOf(':') === -1; + if(isTestSuite || isRelative) { + // attempt to load official test-suite files or relative URLs locally + loadLocally(url).then(callback.bind(null, null), callback); + // don't return the promise + return; + } + + // load remotely + return jsonld.loadDocument(url, callback); + }; + + return localLoader; + + function loadLocally(url) { + var doc = {contextUrl: null, documentUrl: url, document: null}; + var options = test.option; + if(options && url === test.base) { + if('redirectTo' in options && parseInt(options.httpStatus, 10) >= 300) { + doc.documentUrl = test.manifest.baseIri + options.redirectTo; + } else if('httpLink' in options) { + var contentType = options.contentType || null; + if(!contentType && url.indexOf('.jsonld', url.length - 7) !== -1) { + contentType = 'application/ld+json'; + } + var linkHeader = options.httpLink; + if(Array.isArray(linkHeader)) { + linkHeader = linkHeader.join(','); + } + linkHeader = jsonld.parseLinkHeader( + linkHeader)['http://www.w3.org/ns/json-ld#context']; + if(linkHeader && contentType !== 'application/ld+json') { + if(Array.isArray(linkHeader)) { + throw {name: 'multiple context link headers'}; + } + doc.contextUrl = linkHeader.target; + } + } + } + + var p = Promise.resolve(); + if(doc.documentUrl.indexOf(':') === -1) { + p = p.then(() => { + return joinPath(test.manifest.dirname, doc.documentUrl); + }).then(filename => { + doc.documentUrl = 'file://' + filename; + return filename; + }); + } else { + p = p.then(() => { + return joinPath( + test.manifest.dirname, + doc.documentUrl.substr(test.manifest.baseIri.length)); + }).then(fn => { + return fn; + }); + } + + return p.then(readJson).then(json => { + doc.document = json; + return doc; + }).catch(err => { + throw {name: 'loading document failed', url: url}; + }); + } +} + +}; diff --git a/tests/test-karma.js b/tests/test-karma.js new file mode 100644 index 00000000..77afd59d --- /dev/null +++ b/tests/test-karma.js @@ -0,0 +1,113 @@ +/** + * Karma test runner for jsonld.js. + * + * Use environment vars to control, set via karma.conf.js/webpack: + * + * Set dirs, manifests, or js to run: + * JSONLD_TESTS="r1 r2 ..." + * Output an EARL report: + * EARL=filename + * Bail with tests fail: + * BAIL=true + * + * @author Dave Longley + * @author David I. Lehn + * + * Copyright (c) 2011-2017 Digital Bazaar, Inc. All rights reserved. 
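The loader above is test-suite specific, but the pattern — answer locally when possible, otherwise fall back to `jsonld.loadDocument` — is the general way to override document loading. A generic, hedged sketch; the in-memory cache and URLs are invented, and callback-style loaders are assumed to be accepted because `normalizeDocumentLoader` in `lib/util.js` handles two-argument functions:

```js
const jsonld = require('jsonld');

// invented in-memory cache of contexts, for illustration only
const LOCAL = {
  'https://example.com/context.jsonld': {'@context': {ex: 'https://example.com/#'}}
};

const documentLoader = (url, callback) => {
  if(url in LOCAL) {
    return callback(
      null, {contextUrl: null, documentUrl: url, document: LOCAL[url]});
  }
  // anything unknown falls through to the default remote loader
  return jsonld.loadDocument(url, callback);
};

jsonld.expand(
  {'@context': 'https://example.com/context.jsonld', 'ex:name': 'test'},
  {documentLoader},
  (err, expanded) => console.log(err || expanded));
```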
+ */ +// FIXME: hack to ensure delay is set first +mocha.setup({delay: true, ui: 'bdd'}); + +// test suite compatibility +require('core-js/fn/string/ends-with'); +require('core-js/fn/string/starts-with'); + +// jsonld compatibility +require('core-js/fn/array/includes'); +require('core-js/fn/object/assign'); +require('core-js/fn/promise'); +require('core-js/fn/symbol'); +require('regenerator-runtime/runtime'); + +const assert = require('chai').assert; +const common = require('./test-common'); +const jsonld = require('..'); +const server = require('karma-server-side'); +const webidl = require('./test-webidl'); +const join = require('join-path-js'); + +const entries = []; + +if(process.env.JSONLD_TESTS) { + entries.push(...process.env.JSONLD_TESTS.split(' ')); +} else { + const _top = process.env.TEST_ROOT_DIR; + // TODO: support just adding certain entries in EARL mode? + + // json-ld.org main test suite + // FIXME: add path detection + entries.push(join(_top, 'test-suites/json-ld.org/test-suite')); + entries.push(join(_top, '../json-ld.org/test-suite')); + + // json-ld.org normalization test suite + // FIXME: add path detection + entries.push(join(_top, 'test-suites/normalization/tests')); + entries.push(join(_top, '../normalization/tests')); + + // other tests + entries.push(join(_top, 'tests/new-embed-api')); + + // WebIDL tests + entries.push(webidl) +} + +const options = { + nodejs: false, + assert: assert, + jsonld: jsonld, + exit: code => { + // FIXME: karma phantomjs does not expose this API + if(window.phantom && window.phantom.exit) { + return phantom.exit(); + } + console.error('exit not implemented'); + throw new Error('exit not implemented'); + }, + earl: { + id: 'browser', + filename: process.env.EARL + }, + bailOnError: process.env.BAIL === 'true', + entries: entries, + readFile: filename => { + return server.run(filename, function(filename) { + var fs = serverRequire('fs-extra'); + return fs.readFile(filename, 'utf8').then(data => { + return data; + }); + }); + }, + writeFile: (filename, data) => { + return server.run(filename, data, function(filename, data) { + var fs = serverRequire('fs-extra'); + return fs.outputFile(filename, data); + }); + }, + import: f => { console.error('import not implemented'); } +}; + +// wait for setup of all tests then run mocha +common(options).then(() => { + run(); +}).then(() => { + // FIXME: karma phantomjs does not expose this API + if(window.phantom && window.phantom.exit) { + phantom.exit(0); + } +}).catch(err => { + console.error(err); + // FIXME: karma phantomjs does not expose this API + if(window.phantom && window.phantom.exit) { + phantom.exit(0); + } +}); diff --git a/tests/test-webidl.js b/tests/test-webidl.js new file mode 100644 index 00000000..5f229601 --- /dev/null +++ b/tests/test-webidl.js @@ -0,0 +1,95 @@ +/** + * Web IDL test runner for JSON-LD. + * + * @author Dave Longley + * + * Copyright (c) 2011-2017 Digital Bazaar, Inc. All rights reserved. 
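+ *
+ * Note: this module exports a function that takes the shared test options
+ * (notably options.readFile) and returns a Promise for a mocha-style suite
+ * object ({title, tests, suites, imports}) built from W3C testharness.js
+ * results, so the runner can treat it like any other manifest entry.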
+ */ +const assert = require('chai').assert; +const jsonld = require('..'); + +require('./webidl/testharness.js'); +require('./webidl/WebIDLParser.js'); +require('./webidl/idlharness.js'); + +module.exports = options => { + +'use strict'; + +return new Promise((resolve, reject) => { + // add mocha suite + const suite = { + title: 'WebIDL', + tests: [], + suites: [], + imports: [] + }; + + //add_start_callback(() => {}); + //add_test_state_callback((test) => {}); + add_result_callback(function(test) { + var _test = { + title: test.name, + f: null + }; + suite.tests.push(_test); + + _test.f = function(done) { + var msg = test.message || ''; + /* + // HACK: PhantomJS can't set prototype to non-writable? + if(msg.indexOf( + 'JsonLdProcessor.prototype is writable expected false') !== -1) { + test.status = 0; + } + // HACK: PhantomJS can't set window property to non-enumerable? + if(msg.indexOf( + '"JsonLdProcessor" is enumerable expected false') !== -1) { + test.status = 0; + } + */ + // HACK: PhantomJS issues + if(msg.indexOf( + 'JsonLdProcessor.length should be configurable expected true') !== -1) { + this.skip(); + } + if(msg.indexOf( + 'JsonLdProcessor.name should be configurable expected true') !== -1) { + this.skip(); + } + //earl.addAssertion({'@id': ?}, test.status === 0); + assert.equal(test.status, 0, test.message); + done(); + }; + }); + add_completion_callback(function(tests, status) { + resolve(suite); + }); + + // FIXME: should this be in main lib? is there a better way? + // ensure that stringification tests are passed + var toString = Object.prototype.toString; + Object.prototype.toString = function() { + // FIXME: is proto output needed? + if(this === window.JsonLdProcessor.prototype) { + return '[object JsonLdProcessorPrototype]'; + } else if(this && this.constructor === window.JsonLdProcessor) { + return '[object JsonLdProcessor]'; + } + return toString.apply(this, arguments); + }; + + options.readFile('./tests/webidl/JsonLdProcessor.idl').then(idl => { + setup({explicit_done: true}); + var idl_array = new IdlArray(); + idl_array.add_idls(idl); + idl_array.add_objects({JsonLdProcessor: ['new JsonLdProcessor()']}); + idl_array.test(); + done(); + }).catch(err => { + console.error('WebIDL Error', err); + reject(err); + }); +}); + +}; diff --git a/tests/test.js b/tests/test.js index b371cc6c..4aad1bc4 100644 --- a/tests/test.js +++ b/tests/test.js @@ -1,832 +1,88 @@ /** - * Test runner for JSON-LD. + * Node.js test runner for jsonld.js. + * + * Use environment vars to control: + * + * Set dirs, manifests, or js to run: + * JSONLD_TESTS="r1 r2 ..." + * Output an EARL report: + * EARL=filename + * Bail with tests fail: + * BAIL=true * * @author Dave Longley + * @author David I. Lehn * - * Copyright (c) 2011-2013 Digital Bazaar, Inc. All rights reserved. + * Copyright (c) 2011-2017 Digital Bazaar, Inc. All rights reserved. */ -(function() { - -'use strict'; +const assert = require('chai').assert; +const common = require('./test-common'); +const fs = require('fs-extra'); +const jsonld = require('..'); +const path = require('path'); -// detect node.js (vs. 
phantomJS) -var _nodejs = (typeof process !== 'undefined' && - process.versions && process.versions.node); +const entries = []; -if(_nodejs) { - var _jsdir = getEnv().JSDIR || 'js'; - var fs = require('fs'); - var path = require('path'); - var jsonld = require('../' + _jsdir + '/jsonld')(); - var assert = require('assert'); - var program = require('commander'); - program - .option('--earl [filename]', 'Output an earl report') - .option('--bail', 'Bail when a test fails') - .parse(process.argv); +if(process.env.JSONLD_TESTS) { + entries.push(...process.env.JSONLD_TESTS.split(' ')); } else { - // Function.bind polyfill for phantomjs from: - // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/bind#Compatibility - (function() { - if (!Function.prototype.bind) { - Function.prototype.bind = function(oThis) { - if (typeof this !== 'function') { - // closest thing possible to the ECMAScript 5 - // internal IsCallable function - throw new TypeError('Function.prototype.bind - what is trying to be bound is not callable'); - } - - var aArgs = Array.prototype.slice.call(arguments, 1), - fToBind = this, - fNOP = function() {}, - fBound = function() { - return fToBind.apply(this instanceof fNOP - ? this - : oThis, - aArgs.concat(Array.prototype.slice.call(arguments))); - }; - - if (this.prototype) { - // native functions don't have a prototype - fNOP.prototype = this.prototype; - } - fBound.prototype = new fNOP(); + const _top = path.resolve(__dirname, '..'); - return fBound; - }; - } - })(); + // json-ld.org main test suite + const orgPath = path.resolve(_top, 'test-suites/json-ld.org/test-suite'); + if(fs.existsSync(orgPath)) { + entries.push(orgPath); + } else { + // default to sibling dir + entries.push(path.resolve(_top, '../json-ld.org/test-suite')); + } - var fs = require('fs'); - var system = require('system'); - require('./setImmediate'); - var _jsdir = getEnv().JSDIR || 'js'; - require('../' + _jsdir + '/jsonld'); - jsonld = jsonldjs; - window.Promise = require('es6-promise').Promise; - var assert = require('chai').assert; - require('mocha/mocha'); - require('mocha-phantomjs/lib/mocha-phantomjs/core_extensions'); - var program = {}; - for(var i = 0; i < system.args.length; ++i) { - var arg = system.args[i]; - if(arg.indexOf('--') === 0) { - var argname = arg.substr(2); - switch(argname) { - case 'earl': - program[argname] = system.args[i + 1]; - ++i; - break; - default: - program[argname] = true; - } - } + // json-ld.org normalization test suite + const normPath = path.resolve(_top, 'test-suites/normalization/tests'); + if(fs.existsSync(normPath)) { + entries.push(normPath); + } else { + // default up to sibling dir + entries.push(path.resolve(_top, '../normalization/tests')); } - mocha.setup({ - reporter: 'spec', - ui: 'bdd' - }); + // other tests + entries.push(path.resolve(_top, 'tests/misc.js')); + entries.push(path.resolve(_top, 'tests/graph-container.js')); + entries.push(path.resolve(_top, 'tests/new-embed-api')); + // TODO: avoid network traffic and re-enable + //entries.push(path.resolve(_top, 'tests/node-document-loader-tests.js')); } -var JSONLD_TEST_SUITE = '../json-ld.org/test-suite'; -var ROOT_MANIFEST_DIR = resolvePath( - getEnv().JSONLD_TEST_SUITE || JSONLD_TEST_SUITE); - -var TEST_TYPES = { - 'jld:CompactTest': { - skip: { - regex: [/#t0073/, /#t0074/], - specVersion: ['json-ld-1.1'] - }, - fn: 'compact', - params: [ - readTestUrl('input'), - readTestJson('context'), - createTestOptions() - ], - compare: compareExpectedJson +const 
options = { + nodejs: { + path: path }, - 'jld:ExpandTest': { - skip: { - specVersion: ['json-ld-1.1'] - }, - fn: 'expand', - params: [ - readTestUrl('input'), - createTestOptions() - ], - compare: compareExpectedJson + assert: assert, + jsonld: jsonld, + exit: code => process.exit(code), + earl: { + id: 'node.js', + filename: process.env.EARL }, - 'jld:FlattenTest': { - skip: { - specVersion: ['json-ld-1.1'] - }, - fn: 'flatten', - params: [ - readTestUrl('input'), - readTestJson('context'), - createTestOptions() - ], - compare: compareExpectedJson + bailOnError: process.env.BAIL === 'true', + entries: entries, + readFile: filename => { + return fs.readFile(filename, 'utf8'); }, - 'jld:FrameTest': { - skip: { - specVersion: ['json-ld-1.1'] - }, - fn: 'frame', - params: [ - readTestUrl('input'), - readTestJson('frame'), - createTestOptions() - ], - compare: compareExpectedJson + writeFile: (filename, data) => { + return fs.outputFile(filename, data); }, - 'jld:FromRDFTest': { - fn: 'fromRDF', - params: [ - readTestNQuads('input'), - createTestOptions({format: 'application/nquads'}) - ], - compare: compareExpectedJson - }, - 'jld:NormalizeTest': { - fn: 'normalize', - params: [ - readTestUrl('input'), - createTestOptions({format: 'application/nquads'}) - ], - compare: compareExpectedNQuads - }, - 'jld:ToRDFTest': { - skip: { - regex: [/RFC3986/] - }, - fn: 'toRDF', - params: [ - readTestUrl('input'), - createTestOptions({format: 'application/nquads'}) - ], - compare: compareExpectedNQuads - }, - 'rdfn:Urgna2012EvalTest': { - fn: 'normalize', - params: [ - readTestNQuads('action'), - createTestOptions({ - algorithm: 'URGNA2012', - inputFormat: 'application/nquads', - format: 'application/nquads' - }) - ], - compare: compareExpectedNQuads - }, - 'rdfn:Urdna2015EvalTest': { - fn: 'normalize', - params: [ - readTestNQuads('action'), - createTestOptions({ - algorithm: 'URDNA2015', - inputFormat: 'application/nquads', - format: 'application/nquads' - }) - ], - compare: compareExpectedNQuads - } + import: f => require(f) }; -var SKIP_TESTS = []; - -// create earl report -var earl = new EarlReport(); - -// run tests -describe('JSON-LD', function() { - if(!program['webidl-only']) { - var filename = joinPath(ROOT_MANIFEST_DIR, 'manifest.jsonld'); - var rootManifest = readJson(filename); - rootManifest.filename = filename; - addManifest(rootManifest); - } - - // run Web IDL tests - // FIXME: hack to prevent Web IDL tests from running when running - // local manifest tests that aren't part of the main JSON-LD test suite; - // testing arch needs to be reworked to better support local tests and - // separate them from official ones and what goes into EARL report, etc. - if(!_nodejs && ROOT_MANIFEST_DIR.indexOf('json-ld.org/test-suite') !== -1) { - require('./webidl/testharness.js'); - require('./webidl/WebIDLParser.js'); - require('./webidl/idlharness.js'); - - describe('Web IDL', function() { - add_result_callback(function(test) { - it(test.name, function(done) { - // HACK: phantomJS can't set prototype to non-writable? - var msg = test.message || ''; - if(msg.indexOf( - 'JsonLdProcessor.prototype is writable expected false') !== -1) { - test.status = 0; - } - // HACK: phantomJS can't set window property to non-enumerable? 
- if(msg.indexOf( - '"JsonLdProcessor" is enumerable expected false') !== -1) { - test.status = 0; - } - //earl.addAssertion({'@id': ?}, test.status === 0); - assert.equal(test.status, 0, test.message); - done(); - }); - }); - //add_completion_callback(function(tests, status) {}); - - // ensure that stringification tests are passed - var toString = Object.prototype.toString; - Object.prototype.toString = function() { - if(this === window.JsonLdProcessor.prototype) { - return '[object JsonLdProcessorPrototype]'; - } else if(this && this.constructor === window.JsonLdProcessor) { - return '[object JsonLdProcessor]'; - } - return toString.apply(this, arguments); - }; - - window.processor = new JsonLdProcessor(); - - var idl_array = new IdlArray(); - idl_array.add_idls(readFile('./tests/webidl/JsonLdProcessor.idl')); - idl_array.add_objects({JsonLdProcessor: ['window.processor']}); - idl_array.test(); - }); - } - - if(program.earl) { - var filename = resolvePath(program.earl); - describe('Writing EARL report to: ' + filename, function() { - it('should print the earl report', function(done) { - earl.write(filename); - done(); - }); - }); - } +// wait for setup of all tests then run mocha +common(options).then(() => { + run(); +}).catch(err => { + console.error(err); }); -if(!_nodejs) { - mocha.run(function() { - phantom.exit(); - }); -} - -/** - * Adds the tests for all entries in the given manifest. - * - * @param manifest the manifest. - */ -function addManifest(manifest) { - describe(manifest.name || manifest.label, function() { - // get entries and sequence (alias for entries) - var entries = [].concat( - getJsonLdValues(manifest, 'entries'), - getJsonLdValues(manifest, 'sequence') - ); - - var includes = getJsonLdValues(manifest, 'include'); - // add includes to sequence as jsonld files - for(var i = 0; i < includes.length; ++i) { - entries.push(includes[i] + '.jsonld'); - } - - // process entries - for(var i = 0; i < entries.length; ++i) { - var entry = readManifestEntry(manifest, entries[i]); - - if(isJsonLdType(entry, 'mf:Manifest')) { - // entry is another manifest - addManifest(entry); - } else { - // assume entry is a test - addTest(manifest, entry); - } - } - }); -} - -function addTest(manifest, test) { - // expand @id and input base - var test_id = test['@id'] || test['id']; - //var number = test_id.substr(2); - test['@id'] = manifest.baseIri + basename(manifest.filename) + test_id; - test.base = manifest.baseIri + test.input; - test.manifest = manifest; - var description = test_id + ' ' + (test.purpose || test.name); - - // get appropriate API and run test - var api = _nodejs ? 
jsonld : jsonld.promises; - it(description, function(done) { - var self = this; - this.timeout(5000); - var testInfo = TEST_TYPES[getJsonLdTestType(test)]; - - // skip unknown and explicitly skipped test types - var testTypes = Object.keys(TEST_TYPES); - if(!isJsonLdType(test, testTypes) || isJsonLdType(test, SKIP_TESTS)) { - var type = [].concat( - getJsonLdValues(test, '@type'), - getJsonLdValues(test, 'type') - ); - //console.log('Skipping test "' + test.name + '" of type: ' + type); - self.skip(); - } - - if(testInfo.skip && testInfo.skip.type) { - //console.log('Skipping test "' + test.name + '" of type: ' + type); - self.skip(); - } - - if(testInfo.skip && testInfo.skip.regex) { - testInfo.skip.regex.forEach(function(re) { - if(re.test(description)) { - //console.log('Skipping test "' + test.name + '" of description: ' + description); - self.skip(); - } - }); - } - - var options = getJsonLdValues(test, 'option'); - - options.forEach(function(opt) { - var processingModes = getJsonLdValues(opt, 'processingMode'); - processingModes.forEach(function(pm) { - var skipModes = []; - if(testInfo.skip && testInfo.skip.processingMode) { - skipModes = testInfo.skip.processingMode; - } - if(skipModes.indexOf(pm) !== -1) { - //console.log('Skipping test "' + test.name + '" of processing mode: ' + pm); - self.skip(); - } - }); - }); - - options.forEach(function(opt) { - var specVersions = getJsonLdValues(opt, 'specVersion'); - specVersions.forEach(function(sv) { - var skipVersions = []; - if(testInfo.skip && testInfo.skip.specVersion) { - skipVersions = testInfo.skip.specVersion; - } - if(skipVersions.indexOf(sv) !== -1) { - //console.log('Skipping test "' + test.name + '" for spec version: ' + sv); - self.skip(); - } - }); - }); - - var fn = testInfo.fn; - var params = testInfo.params; - params = params.map(function(param) {return param(test);}); - var callback = function(err, result) { - try { - if(isNegativeTest(test)) { - compareExpectedError(test, err); - // default is to assume positive and skip isPositiveTest(test) check - } else { - if(err) { - throw err; - } - testInfo.compare(test, result); - } - earl.addAssertion(test, true); - return done(); - } catch(ex) { - if(program.bail) { - if(ex.name !== 'AssertionError') { - console.log('\nError: ', JSON.stringify(ex, null, 2)); - } - if(_nodejs) { - process.exit(); - } else { - phantom.exit(); - } - } - earl.addAssertion(test, false); - return done(ex); - } - }; - - if(_nodejs) { - params.push(callback); - } - - // promise is undefined for node.js API - var promise = api[fn].apply(api, params); - - if(!_nodejs) { - promise.then(callback.bind(null, null), callback); - } - }); -} - -function isPositiveTest(test) { - return isJsonLdType(test, 'jld:PositiveEvaluationTest'); -} - -function isNegativeTest(test) { - return isJsonLdType(test, 'jld:NegativeEvaluationTest'); -} - -function getJsonLdTestType(test) { - var types = Object.keys(TEST_TYPES); - for(var i = 0; i < types.length; ++i) { - if(isJsonLdType(test, types[i])) { - return types[i]; - } - } - return null; -} - -function readManifestEntry(manifest, entry) { - var dir = dirname(manifest.filename); - if(typeof entry === 'string') { - var filename = joinPath(dir, entry); - entry = readJson(filename); - entry.filename = filename; - } - entry.dirname = dirname(entry.filename || manifest.filename); - return entry; -} - -function readTestUrl(property) { - return function(test) { - if(!test[property]) { - return null; - } - return test.manifest.baseIri + test[property]; - }; -} - -function 
readTestJson(property) { - return function(test) { - if(!test[property]) { - return null; - } - var filename = joinPath(test.dirname, test[property]); - return readJson(filename); - }; -} - -function readTestNQuads(property) { - return function(test) { - if(!test[property]) { - return null; - } - var filename = joinPath(test.dirname, test[property]); - return readFile(filename); - }; -} - -function createTestOptions(opts) { - return function(test) { - var options = { - documentLoader: createDocumentLoader(test) - }; - var httpOptions = ['contentType', 'httpLink', 'httpStatus', 'redirectTo']; - var testOptions = test.option || {}; - for(var key in testOptions) { - if(httpOptions.indexOf(key) === -1) { - options[key] = testOptions[key]; - } - } - if(opts) { - // extend options - for(var key in opts) { - options[key] = opts[key]; - } - } - for(var key in options) { - if(key === 'expandContext') { - var filename = joinPath(test.dirname, options[key]); - options[key] = readJson(filename); - } - } - - return options; - }; -} - -// find the expected output property or throw error -function _getExpectProperty(test) { - if('expect' in test) { - return 'expect'; - } else if('result' in test) { - return 'result'; - } else { - throw Error('No expected output property found'); - } -} - -function compareExpectedJson(test, result) { - try { - var expect = readTestJson(_getExpectProperty(test))(test); - assert.deepEqual(result, expect); - } catch(ex) { - if(program.bail) { - console.log('\nTEST FAILED\n'); - console.log('EXPECTED: ' + JSON.stringify(expect, null, 2)); - console.log('ACTUAL: ' + JSON.stringify(result, null, 2)); - } - throw ex; - } -} - -function compareExpectedNQuads(test, result) { - try { - var expect = readTestNQuads(_getExpectProperty(test))(test); - assert.equal(result, expect); - } catch(ex) { - if(program.bail) { - console.log('\nTEST FAILED\n'); - console.log('EXPECTED:\n' + expect); - console.log('ACTUAL:\n' + result); - } - throw ex; - } -} - -function compareExpectedError(test, err) { - try { - var expect = test[_getExpectProperty(test)]; - var result = getJsonLdErrorCode(err); - assert.ok(err); - assert.equal(result, expect); - } catch(ex) { - if(program.bail) { - console.log('\nTEST FAILED\n'); - console.log('EXPECTED: ' + expect); - console.log('ACTUAL: ' + result); - } - throw ex; - } -} - -function isJsonLdType(node, type) { - var nodeType = [].concat( - getJsonLdValues(node, '@type'), - getJsonLdValues(node, 'type') - ); - type = Array.isArray(type) ? type : [type]; - for(var i = 0; i < type.length; ++i) { - if(nodeType.indexOf(type[i]) !== -1) { - return true; - } - } - return false; -} - -function getJsonLdValues(node, property) { - var rval = []; - if(property in node) { - rval = node[property]; - if(!Array.isArray(rval)) { - rval = [rval]; - } - } - return rval; -} - -function getJsonLdErrorCode(err) { - if(!err) { - return null; - } - if(err.details) { - if(err.details.code) { - return err.details.code; - } - if(err.details.cause) { - return getJsonLdErrorCode(err.details.cause); - } - } - return err.name; -} - -function readJson(filename) { - return JSON.parse(readFile(filename)); -} - -function readFile(filename) { - if(_nodejs) { - return fs.readFileSync(filename, 'utf8'); - } - return fs.read(filename); -} - -function resolvePath(to) { - if(_nodejs) { - return path.resolve(to); - } - return fs.absolute(to); -} - -function joinPath() { - return (_nodejs ? 
path : fs).join.apply( - null, Array.prototype.slice.call(arguments)); -} - -function dirname(filename) { - if(_nodejs) { - return path.dirname(filename); - } - var idx = filename.lastIndexOf(fs.separator); - if(idx === -1) { - return filename; - } - return filename.substr(0, idx); -} - -function basename(filename) { - if(_nodejs) { - return path.basename(filename); - } - var idx = filename.lastIndexOf(fs.separator); - if(idx === -1) { - return filename; - } - return filename.substr(idx + 1); -} - -function getEnv() { - if(_nodejs) { - return process.env; - } - return system.env; -} - -/** - * Creates a test remote document loader. - * - * @param test the test to use the document loader for. - * - * @return the document loader. - */ -function createDocumentLoader(test) { - var base = 'http://json-ld.org/test-suite'; - var loader = jsonld.documentLoader; - var localLoader = function(url, callback) { - // always load remote-doc tests remotely in node - if(_nodejs && test.manifest.name === 'Remote document') { - return loader(url, callback); - } - - var idx = url.indexOf(base); - if(idx === 0 || url.indexOf(':') === -1) { - // attempt to load official test-suite files or relative URLs locally - var rval; - try { - rval = loadLocally(url); - } catch(ex) { - return callback(ex); - } - return callback(null, rval); - } - - // load remotely - return jsonld.loadDocument(url, callback); - }; - - return _nodejs ? localLoader : function(url) { - return jsonld.promisify(localLoader, url); - }; - - function loadLocally(url) { - var doc = {contextUrl: null, documentUrl: url, document: null}; - var options = test.option; - if(options && url === test.base) { - if('redirectTo' in options && parseInt(options.httpStatus, 10) >= 300) { - doc.documentUrl = test.manifest.baseIri + options.redirectTo; - } else if('httpLink' in options) { - var contentType = options.contentType || null; - if(!contentType && url.indexOf('.jsonld', url.length - 7) !== -1) { - contentType = 'application/ld+json'; - } - var linkHeader = options.httpLink; - if(Array.isArray(linkHeader)) { - linkHeader = linkHeader.join(','); - } - linkHeader = jsonld.parseLinkHeader( - linkHeader)['http://www.w3.org/ns/json-ld#context']; - if(linkHeader && contentType !== 'application/ld+json') { - if(Array.isArray(linkHeader)) { - throw {name: 'multiple context link headers'}; - } - doc.contextUrl = linkHeader.target; - } - } - } - - var filename; - if(doc.documentUrl.indexOf(':') === -1) { - filename = joinPath(ROOT_MANIFEST_DIR, doc.documentUrl); - doc.documentUrl = 'file://' + filename; - } else { - filename = joinPath( - ROOT_MANIFEST_DIR, doc.documentUrl.substr(base.length)); - } - try { - doc.document = readJson(filename); - } catch(ex) { - throw {name: 'loading document failed'}; - } - return doc; - } -} - -function EarlReport() { - var today = new Date(); - today = today.getFullYear() + '-' + - (today.getMonth() < 9 ? - '0' + (today.getMonth() + 1) : today.getMonth() + 1) + '-' + - (today.getDate() < 10 ? 
'0' + today.getDate() : today.getDate()); - this.report = { - '@context': { - 'doap': 'http://usefulinc.com/ns/doap#', - 'foaf': 'http://xmlns.com/foaf/0.1/', - 'dc': 'http://purl.org/dc/terms/', - 'earl': 'http://www.w3.org/ns/earl#', - 'xsd': 'http://www.w3.org/2001/XMLSchema#', - 'doap:homepage': {'@type': '@id'}, - 'doap:license': {'@type': '@id'}, - 'dc:creator': {'@type': '@id'}, - 'foaf:homepage': {'@type': '@id'}, - 'subjectOf': {'@reverse': 'earl:subject'}, - 'earl:assertedBy': {'@type': '@id'}, - 'earl:mode': {'@type': '@id'}, - 'earl:test': {'@type': '@id'}, - 'earl:outcome': {'@type': '@id'}, - 'dc:date': {'@type': 'xsd:date'} - }, - '@id': 'https://github.com/digitalbazaar/jsonld.js', - '@type': [ - 'doap:Project', - 'earl:TestSubject', - 'earl:Software' - ], - 'doap:name': 'jsonld.js', - 'dc:title': 'jsonld.js', - 'doap:homepage': 'https://github.com/digitalbazaar/jsonld.js', - 'doap:license': - 'https://github.com/digitalbazaar/jsonld.js/blob/master/LICENSE', - 'doap:description': 'A JSON-LD processor for JavaScript', - 'doap:programming-language': 'JavaScript', - 'dc:creator': 'https://github.com/dlongley', - 'doap:developer': { - '@id': 'https://github.com/dlongley', - '@type': [ - 'foaf:Person', - 'earl:Assertor' - ], - 'foaf:name': 'Dave Longley', - 'foaf:homepage': 'https://github.com/dlongley' - }, - 'dc:date': { - '@value': today, - '@type': 'xsd:date' - }, - 'subjectOf': [] - }; - if(_nodejs) { - this.report['@id'] += '#node.js'; - this.report['doap:name'] += ' node.js'; - this.report['dc:title'] += ' node.js'; - } else { - this.report['@id'] += '#browser'; - this.report['doap:name'] += ' browser'; - this.report['dc:title'] += ' browser'; - } -} - -EarlReport.prototype.addAssertion = function(test, pass) { - this.report.subjectOf.push({ - '@type': 'earl:Assertion', - 'earl:assertedBy': this.report['doap:developer']['@id'], - 'earl:mode': 'earl:automatic', - 'earl:test': test['@id'], - 'earl:result': { - '@type': 'earl:TestResult', - 'dc:date': new Date().toISOString(), - 'earl:outcome': pass ? 'earl:passed' : 'earl:failed' - } - }); - return this; -}; - -EarlReport.prototype.write = function(filename) { - var json = JSON.stringify(this.report, null, 2); - if(_nodejs) { - fs.writeFileSync(filename, json); - } else { - fs.write(filename, json, 'w'); - } - return this; -}; - -})(); +process.on('unhandledRejection', (reason, p) => { + console.error('Unhandled Rejection at:', p, 'reason:', reason); +}); diff --git a/tests/webidl/JsonLdProcessor.idl b/tests/webidl/JsonLdProcessor.idl index 6db52b6f..31cac3a8 100644 --- a/tests/webidl/JsonLdProcessor.idl +++ b/tests/webidl/JsonLdProcessor.idl @@ -1,67 +1,79 @@ [Constructor] interface JsonLdProcessor { - Promise compact (any input, JsonLdContext context, optional JsonLdOptions options); - Promise expand (any input, optional JsonLdOptions options); - Promise flatten (any input, optional JsonLdContext? context, optional JsonLdOptions options); + static Promise compact(JsonLdInput input, JsonLdContext context, optional JsonLdOptions? options); + static Promise> expand(JsonLdInput input, optional JsonLdOptions? options); + static Promise flatten(JsonLdInput input, optional JsonLdContext? context, optional JsonLdOptions? 
options); }; -typedef (object or DOMString or (object or DOMString[])) JsonLdContext; +dictionary JsonLdDictionary { +}; + +typedef (JsonLdDictionary or sequence or USVString) JsonLdInput; + +typedef (JsonLdDictionary or USVString or sequence<(JsonLdDictionary or USVString)>) JsonLdContext; dictionary JsonLdOptions { - DOMString base; - boolean compactArrays = true; - LoadDocumentCallback documentLoader = null; - (object? or DOMString) expandContext = null; - DOMString processingMode = "json-ld-1.0"; + USVString? base; + boolean compactArrays = true; + LoadDocumentCallback? documentLoader = null; + (JsonLdDictionary? or USVString) expandContext = null; + boolean produceGeneralizedRdf = true; + USVString? processingMode = null; + boolean compactToRelative = true; }; -callback LoadDocumentCallback = Promise (DOMString url); +callback LoadDocumentCallback = Promise (USVString url); dictionary RemoteDocument { - DOMString contextUrl = null; - DOMString documentUrl; + USVString contextUrl = null; + USVString documentUrl; any document; }; dictionary JsonLdError { JsonLdErrorCode code; - DOMString? message = null; + USVString? message = null; }; enum JsonLdErrorCode { - "loading document failed", - "list of lists", - "invalid @index value", + "colliding keywords", + "compaction to list of lists", "conflicting indexes", + "cyclic IRI mapping", "invalid @id value", - "invalid local context", - "multiple context link headers", - "loading remote context failed", - "invalid remote context", - "recursive context inclusion", + "invalid @index value", + "invalid @nest value", + "invalid @prefix value", + "invalid @reverse value", + "invalid @version value", "invalid base IRI", - "invalid vocab mapping", + "invalid container mapping", "invalid default language", - "keyword redefinition", - "invalid term definition", - "invalid reverse property", "invalid IRI mapping", - "cyclic IRI mapping", "invalid keyword alias", - "invalid type mapping", + "invalid language map value", "invalid language mapping", - "colliding keywords", - "invalid container mapping", - "invalid type value", - "invalid value object", - "invalid value object value", "invalid language-tagged string", "invalid language-tagged value", - "invalid typed value", - "invalid set or list object", - "invalid language map value", - "compaction to list of lists", + "invalid local context", + "invalid remote context", + "invalid reverse property", "invalid reverse property map", - "invalid @reverse value", - "invalid reverse property value" + "invalid reverse property value", + "invalid scoped context", + "invalid set or list object", + "invalid term definition", + "invalid type mapping", + "invalid type value", + "invalid typed value", + "invalid value object", + "invalid value object value", + "invalid vocab mapping", + "keyword redefinition", + "list of lists", + "loading document failed", + "loading remote context failed", + "multiple context link headers", + "processing mode conflict", + "recursive context inclusion" }; diff --git a/tests/webidl/WebIDLParser.js b/tests/webidl/WebIDLParser.js index bc5abaf7..1d6380c8 100644 --- a/tests/webidl/WebIDLParser.js +++ b/tests/webidl/WebIDLParser.js @@ -1,842 +1,1088 @@ +(function() { + var tokenise = function(str) { + var tokens = [], + re = { + "float": /^-?(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][-+]?[0-9]+)?|[0-9]+[Ee][-+]?[0-9]+)/, + "integer": /^-?(0([Xx][0-9A-Fa-f]+|[0-7]*)|[1-9][0-9]*)/, + "identifier": /^[A-Z_a-z][0-9A-Z_a-z-]*/, + "string": /^"[^"]*"/, + "whitespace": /^(?:[\t\n\r ]+|[\t\n\r 
]*((\/\/.*|\/\*(.|\n|\r)*?\*\/)[\t\n\r ]*))+/, + "other": /^[^\t\n\r 0-9A-Z_a-z]/ + }, + types = ["float", "integer", "identifier", "string", "whitespace", "other"]; + while (str.length > 0) { + var matched = false; + for (var i = 0, n = types.length; i < n; i++) { + var type = types[i]; + str = str.replace(re[type], function(tok) { + tokens.push({ type: type, value: tok }); + matched = true; + return ""; + }); + if (matched) break; + } + if (matched) continue; + throw new Error("Token stream not progressing"); + } + return tokens; + }; + function WebIDLParseError(str, line, input, tokens) { + this.message = str; + this.line = line; + this.input = input; + this.tokens = tokens; + }; -(function () { - var tokenise = function (str) { - var tokens = [] - , re = { - "float": /^-?(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][-+]?[0-9]+)?|[0-9]+[Ee][-+]?[0-9]+)/ - , "integer": /^-?(0([Xx][0-9A-Fa-f]+|[0-7]*)|[1-9][0-9]*)/ - , "identifier": /^[A-Z_a-z][0-9A-Z_a-z]*/ - , "string": /^"[^"]*"/ - , "whitespace": /^(?:[\t\n\r ]+|[\t\n\r ]*((\/\/.*|\/\*(.|\n|\r)*?\*\/)[\t\n\r ]*))+/ - , "other": /^[^\t\n\r 0-9A-Z_a-z]/ - } - , types = [] - ; - for (var k in re) types.push(k); - while (str.length > 0) { + WebIDLParseError.prototype.toString = function() { + return this.message + ", line " + this.line + " (tokens: '" + this.input + "')\n" + + JSON.stringify(this.tokens, null, 4); + }; + + var parse = function(tokens, opt) { + var line = 1; + tokens = tokens.slice(); + + var FLOAT = "float", + INT = "integer", + ID = "identifier", + STR = "string", + OTHER = "other"; + + var error = function(str) { + var tok = ""; + var numTokens = 0; + var maxTokens = 5; + while (numTokens < maxTokens && tokens.length > numTokens) { + tok += tokens[numTokens].value; + numTokens++; + } + throw new WebIDLParseError(str, line, tok, tokens.slice(0, 5)); + }; + + var last_token = null; + + var consume = function(type, value) { + if (!tokens.length || tokens[0].type !== type) return; + if (typeof value === "undefined" || tokens[0].value === value) { + last_token = tokens.shift(); + if (type === ID) last_token.value = last_token.value.replace(/^_/, ""); + return last_token; + } + }; + + var ws = function() { + if (!tokens.length) return; + if (tokens[0].type === "whitespace") { + var t = tokens.shift(); + t.value.replace(/\n/g, function(m) { line++; + return m; }); + return t; + } + }; + + var all_ws = function(store, pea) { // pea == post extended attribute, tpea = same for types + var t = { type: "whitespace", value: "" }; + while (true) { + var w = ws(); + if (!w) break; + t.value += w.value; + } + if (t.value.length > 0) { + if (store) { + var w = t.value, + re = { + "ws": /^([\t\n\r ]+)/, + "line-comment": /^\/\/(.*)\n?/m, + "multiline-comment": /^\/\*((?:.|\n|\r)*?)\*\// + }, + wsTypes = []; + for (var k in re) wsTypes.push(k); + while (w.length) { var matched = false; - for (var i = 0, n = types.length; i < n; i++) { - var type = types[i]; - str = str.replace(re[type], function (tok) { - tokens.push({ type: type, value: tok }); - matched = true; - return ""; - }); - if (matched) break; + for (var i = 0, n = wsTypes.length; i < n; i++) { + var type = wsTypes[i]; + w = w.replace(re[type], function(tok, m1) { + store.push({ type: type + (pea ? 
("-" + pea) : ""), value: m1 }); + matched = true; + return ""; + }); + if (matched) break; } if (matched) continue; - throw new Error("Token stream not progressing"); + throw new Error("Surprising white space construct."); // this shouldn't happen + } } - return tokens; + return t; + } + }; + + var integer_type = function() { + var ret = ""; + all_ws(); + if (consume(ID, "unsigned")) ret = "unsigned "; + all_ws(); + if (consume(ID, "short")) return ret + "short"; + if (consume(ID, "long")) { + ret += "long"; + all_ws(); + if (consume(ID, "long")) return ret + " long"; + return ret; + } + if (ret) error("Failed to parse integer type"); }; - var parse = function (tokens) { - var line = 1; - tokens = tokens.slice(); + var float_type = function() { + var ret = ""; + all_ws(); + if (consume(ID, "unrestricted")) ret = "unrestricted "; + all_ws(); + if (consume(ID, "float")) return ret + "float"; + if (consume(ID, "double")) return ret + "double"; + if (ret) error("Failed to parse float type"); + }; - var FLOAT = "float" - , INT = "integer" - , ID = "identifier" - , STR = "string" - , OTHER = "other" - ; + var primitive_type = function() { + var num_type = integer_type() || float_type(); + if (num_type) return num_type; + all_ws(); + if (consume(ID, "boolean")) return "boolean"; + if (consume(ID, "byte")) return "byte"; + if (consume(ID, "octet")) return "octet"; + }; - var WebIDLParseError = function (str, line, input, tokens) { - this.message = str; - this.line = line; - this.input = input; - this.tokens = tokens; - }; - WebIDLParseError.prototype.toString = function () { - return this.message + ", line " + this.line + " (tokens: '" + this.input + "')\n" + - JSON.stringify(this.tokens, null, 4); - }; + var const_value = function() { + if (consume(ID, "true")) return { type: "boolean", value: true }; + if (consume(ID, "false")) return { type: "boolean", value: false }; + if (consume(ID, "null")) return { type: "null" }; + if (consume(ID, "Infinity")) return { type: "Infinity", negative: false }; + if (consume(ID, "NaN")) return { type: "NaN" }; + var ret = consume(FLOAT) || consume(INT); + if (ret) return { type: "number", value: 1 * ret.value }; + var tok = consume(OTHER, "-"); + if (tok) { + if (consume(ID, "Infinity")) return { type: "Infinity", negative: true }; + else tokens.unshift(tok); + } + }; - var error = function (str) { - var tok = "", numTokens = 0, maxTokens = 5; - while (numTokens < maxTokens && tokens.length > numTokens) { - tok += tokens[numTokens].value; - numTokens++; + var type_suffix = function(obj) { + while (true) { + all_ws(); + if (consume(OTHER, "?")) { + if (obj.nullable) error("Can't nullable more than once"); + obj.nullable = true; + } else if (consume(OTHER, "[")) { + all_ws(); + consume(OTHER, "]") || error("Unterminated array type"); + if (!obj.array) { + obj.array = 1; + obj.nullableArray = [obj.nullable]; + } else { + obj.array++; + obj.nullableArray.push(obj.nullable); + } + obj.nullable = false; + } else return; + } + }; + + var single_type = function() { + var prim = primitive_type(), + ret = { sequence: false, generic: null, nullable: false, array: false, union: false }, + name, value; + if (prim) { + ret.idlType = prim; + } else if (name = consume(ID)) { + value = name.value; + all_ws(); + // Generic types + if (consume(OTHER, "<")) { + // backwards compat + if (value === "sequence") { + ret.sequence = true; + } + ret.generic = value; + var types = []; + do { + all_ws(); + types.push(type() || error("Error parsing generic type " + value)); + all_ws(); + } 
+ while (consume(OTHER, ",")); + if (value === "sequence") { + if (types.length !== 1) error("A sequence must have exactly one subtype"); + } else if (value === "record") { + if (types.length !== 2) error("A record must have exactly two subtypes"); + if (!/^(DOMString|USVString|ByteString)$/.test(types[0].idlType)) { + error("Record key must be DOMString, USVString, or ByteString"); } - throw new WebIDLParseError(str, line, tok, tokens.slice(0, 5)); - }; + } + ret.idlType = types.length === 1 ? types[0] : types; + all_ws(); + if (!consume(OTHER, ">")) error("Unterminated generic type " + value); + type_suffix(ret); + return ret; + } else { + ret.idlType = value; + } + } else { + return; + } + type_suffix(ret); + if (ret.nullable && !ret.array && ret.idlType === "any") error("Type any cannot be made nullable"); + return ret; + }; - var last_token = null; + var union_type = function() { + all_ws(); + if (!consume(OTHER, "(")) return; + var ret = { sequence: false, generic: null, nullable: false, array: false, union: true, idlType: [] }; + var fst = type_with_extended_attributes() || error("Union type with no content"); + ret.idlType.push(fst); + while (true) { + all_ws(); + if (!consume(ID, "or")) break; + var typ = type_with_extended_attributes() || error("No type after 'or' in union type"); + ret.idlType.push(typ); + } + if (!consume(OTHER, ")")) error("Unterminated union type"); + type_suffix(ret); + return ret; + }; - var consume = function (type, value) { - if (!tokens.length || tokens[0].type !== type) return; - if (typeof value === "undefined" || tokens[0].value === value) { - last_token = tokens.shift(); - if (type === ID) last_token.value = last_token.value.replace(/^_/, ""); - return last_token; - } - }; + var type = function() { + return single_type() || union_type(); + }; - var ws = function () { - if (!tokens.length) return; - if (tokens[0].type === "whitespace") { - var t = tokens.shift(); - t.value.replace(/\n/g, function (m) { line++; return m; }); - return t; - } - }; + var type_with_extended_attributes = function() { + var extAttrs = extended_attrs(); + var ret = single_type() || union_type(); + if (extAttrs.length && ret) ret.extAttrs = extAttrs; + return ret; + }; - var all_ws = function () { - var t = { type: "whitespace", value: "" }; - while (true) { - var w = ws(); - if (!w) break; - t.value += w.value; - } - if (t.value.length > 0) return t; - }; + var argument = function(store) { + var ret = { optional: false, variadic: false }; + ret.extAttrs = extended_attrs(store); + all_ws(store, "pea"); + var opt_token = consume(ID, "optional"); + if (opt_token) { + ret.optional = true; + all_ws(); + } + ret.idlType = type_with_extended_attributes(); + if (!ret.idlType) { + if (opt_token) tokens.unshift(opt_token); + return; + } + var type_token = last_token; + if (!ret.optional) { + all_ws(); + if (tokens.length >= 3 && + tokens[0].type === "other" && tokens[0].value === "." && + tokens[1].type === "other" && tokens[1].value === "." && + tokens[2].type === "other" && tokens[2].value === "." 
+ ) { + tokens.shift(); + tokens.shift(); + tokens.shift(); + ret.variadic = true; + } + } + all_ws(); + var name = consume(ID); + if (!name) { + if (opt_token) tokens.unshift(opt_token); + tokens.unshift(type_token); + return; + } + ret.name = name.value; + if (ret.optional) { + all_ws(); + var dflt = default_(); + if (typeof dflt !== "undefined") { + ret["default"] = dflt; + } + } + return ret; + }; - var integer_type = function () { - var ret = ""; - all_ws(); - if (consume(ID, "unsigned")) ret = "unsigned "; - all_ws(); - if (consume(ID, "short")) return ret + "short"; - if (consume(ID, "long")) { - ret += "long"; - all_ws(); - if (consume(ID, "long")) return ret + " long"; - return ret; - } - if (ret) error("Failed to parse integer type"); - }; + var argument_list = function(store) { + var ret = [], + arg = argument(store ? ret : null); + if (!arg) return; + ret.push(arg); + while (true) { + all_ws(store ? ret : null); + if (!consume(OTHER, ",")) return ret; + var nxt = argument(store ? ret : null) || error("Trailing comma in arguments list"); + ret.push(nxt); + } + }; - var float_type = function () { - var ret = ""; - all_ws(); - if (consume(ID, "unrestricted")) ret = "unrestricted "; - all_ws(); - if (consume(ID, "float")) return ret + "float"; - if (consume(ID, "double")) return ret + "double"; - if (ret) error("Failed to parse float type"); - }; + var simple_extended_attr = function(store) { + all_ws(); + var name = consume(ID); + if (!name) return; + var ret = { + name: name.value, + "arguments": null + }; + all_ws(); + var eq = consume(OTHER, "="); + if (eq) { + var rhs; + all_ws(); + if (rhs = consume(ID)) { + ret.rhs = rhs; + } else if (rhs = consume(FLOAT)) { + ret.rhs = rhs; + } else if (rhs = consume(INT)) { + ret.rhs = rhs; + } else if (rhs = consume(STR)) { + ret.rhs = rhs; + } else if (consume(OTHER, "(")) { + // [Exposed=(Window,Worker)] + rhs = []; + var id = consume(ID); + if (id) { + rhs = [id.value]; + } + identifiers(rhs); + consume(OTHER, ")") || error("Unexpected token in extended attribute argument list or type pair"); + ret.rhs = { + type: "identifier-list", + value: rhs + }; + } + if (!ret.rhs) return error("No right hand side to extended attribute assignment"); + } + all_ws(); + if (consume(OTHER, "(")) { + var args, pair; + // [Constructor(DOMString str)] + if (args = argument_list(store)) { + ret["arguments"] = args; + } + // [Constructor()] + else { + ret["arguments"] = []; + } + all_ws(); + consume(OTHER, ")") || error("Unexpected token in extended attribute argument list"); + } + return ret; + }; - var primitive_type = function () { - var num_type = integer_type() || float_type(); - if (num_type) return num_type; - all_ws(); - if (consume(ID, "boolean")) return "boolean"; - if (consume(ID, "byte")) return "byte"; - if (consume(ID, "octet")) return "octet"; - }; + // Note: we parse something simpler than the official syntax. 
It's all that ever + // seems to be used + var extended_attrs = function(store) { + var eas = []; + all_ws(store); + if (!consume(OTHER, "[")) return eas; + eas[0] = simple_extended_attr(store) || error("Extended attribute with not content"); + all_ws(); + while (consume(OTHER, ",")) { + if (eas.length) { + eas.push(simple_extended_attr(store)); + } else { + eas.push(simple_extended_attr(store) || error("Trailing comma in extended attribute")); + } + } + consume(OTHER, "]") || error("No end of extended attribute"); + return eas; + }; - var const_value = function () { - if (consume(ID, "true")) return { type: "boolean", value: true }; - if (consume(ID, "false")) return { type: "boolean", value: false }; - if (consume(ID, "null")) return { type: "null" }; - if (consume(ID, "Infinity")) return { type: "Infinity", negative: false }; - if (consume(ID, "NaN")) return { type: "NaN" }; - var ret = consume(FLOAT) || consume(INT); - if (ret) return { type: "number", value: 1 * ret.value }; - var tok = consume(OTHER, "-"); - if (tok) { - if (consume(ID, "Infinity")) return { type: "Infinity", negative: true }; - else tokens.unshift(tok); - } - }; + var default_ = function() { + all_ws(); + if (consume(OTHER, "=")) { + all_ws(); + var def = const_value(); + if (def) { + return def; + } else if (consume(OTHER, "[")) { + if (!consume(OTHER, "]")) error("Default sequence value must be empty"); + return { type: "sequence", value: [] }; + } else { + var str = consume(STR) || error("No value for default"); + str.value = str.value.replace(/^"/, "").replace(/"$/, ""); + return str; + } + } + }; - var type_suffix = function (obj) { - while (true) { - all_ws(); - if (consume(OTHER, "?")) { - if (obj.nullable) error("Can't nullable more than once"); - obj.nullable = true; - } - else if (consume(OTHER, "[")) { - all_ws(); - consume(OTHER, "]") || error("Unterminated array type"); - if (!obj.array) obj.array = 1; - else obj.array++; - } - else return; - } - }; + var const_ = function(store) { + all_ws(store, "pea"); + if (!consume(ID, "const")) return; + var ret = { type: "const", nullable: false }; + all_ws(); + var typ = primitive_type(); + if (!typ) { + typ = consume(ID) || error("No type for const"); + typ = typ.value; + } + ret.idlType = typ; + all_ws(); + if (consume(OTHER, "?")) { + ret.nullable = true; + all_ws(); + } + var name = consume(ID) || error("No name for const"); + ret.name = name.value; + all_ws(); + consume(OTHER, "=") || error("No value assignment for const"); + all_ws(); + var cnt = const_value(); + if (cnt) ret.value = cnt; + else error("No value for const"); + all_ws(); + consume(OTHER, ";") || error("Unterminated const"); + return ret; + }; - var single_type = function () { - var prim = primitive_type() - , ret = { sequence: false, nullable: false, array: false, union: false } - ; - if (prim) { - ret.idlType = prim; - } - else if (consume(ID, "sequence")) { - all_ws(); - if (!consume(OTHER, "<")) { - ret.idlType = "sequence"; - } - else { - ret.sequence = true; - ret.idlType = type() || error("Error parsing sequence type"); - all_ws(); - if (!consume(OTHER, ">")) error("Unterminated sequence"); - all_ws(); - if (consume(OTHER, "?")) ret.nullable = true; - return ret; - } - } - else { - var name = consume(ID); - if (!name) return; - ret.idlType = name.value; - } - type_suffix(ret); - if (ret.nullable && ret.idlType === "any") error("Type any cannot be made nullable"); - return ret; - }; + var inheritance = function() { + all_ws(); + if (consume(OTHER, ":")) { + all_ws(); + var inh = 
consume(ID) || error("No type in inheritance"); + return inh.value; + } + }; - var union_type = function () { - all_ws(); - if (!consume(OTHER, "(")) return; - var ret = { sequence: false, nullable: false, array: false, union: true, idlType: [] }; - var fst = type() || error("Union type with no content"); - ret.idlType.push(fst); - while (true) { - all_ws(); - if (!consume(ID, "or")) break; - var typ = type() || error("No type after 'or' in union type"); - ret.idlType.push(typ); - } - if (!consume(OTHER, ")")) error("Unterminated union type"); - type_suffix(ret); - return ret; - }; + var operation_rest = function(ret, store) { + all_ws(); + if (!ret) ret = {}; + var name = consume(ID); + ret.name = name ? name.value : null; + all_ws(); + consume(OTHER, "(") || error("Invalid operation"); + ret["arguments"] = argument_list(store) || []; + all_ws(); + consume(OTHER, ")") || error("Unterminated operation"); + all_ws(); + consume(OTHER, ";") || error("Unterminated operation"); + return ret; + }; - var type = function () { - return single_type() || union_type(); - }; + var callback = function(store) { + all_ws(store, "pea"); + var ret; + if (!consume(ID, "callback")) return; + all_ws(); + var tok = consume(ID, "interface"); + if (tok) { + tokens.unshift(tok); + ret = interface_(); + ret.type = "callback interface"; + return ret; + } + var name = consume(ID) || error("No name for callback"); + ret = { type: "callback", name: name.value }; + all_ws(); + consume(OTHER, "=") || error("No assignment in callback"); + all_ws(); + ret.idlType = return_type(); + all_ws(); + consume(OTHER, "(") || error("No arguments in callback"); + ret["arguments"] = argument_list(store) || []; + all_ws(); + consume(OTHER, ")") || error("Unterminated callback"); + all_ws(); + consume(OTHER, ";") || error("Unterminated callback"); + return ret; + }; - var argument = function () { - var ret = { optional: false, variadic: false }; - ret.extAttrs = extended_attrs(); - all_ws(); - if (consume(ID, "optional")) { - ret.optional = true; - all_ws(); - } - ret.idlType = type(); - if (!ret.idlType) return; - if (!ret.optional) { - all_ws(); - if (tokens.length >= 3 && - tokens[0].type === "other" && tokens[0].value === "." && - tokens[1].type === "other" && tokens[1].value === "." && - tokens[2].type === "other" && tokens[2].value === "." 
- ) { - tokens.shift(); - tokens.shift(); - tokens.shift(); - ret.variadic = true; - } - } - all_ws(); - var name = consume(ID) || error("No name in argument"); - ret.name = name.value; - if (ret.optional) { - all_ws(); - ret["default"] = default_(); - } - return ret; + var attribute = function(store) { + all_ws(store, "pea"); + var grabbed = [], + ret = { + type: "attribute", + "static": false, + stringifier: false, + inherit: false, + readonly: false }; + if (consume(ID, "static")) { + ret["static"] = true; + grabbed.push(last_token); + } else if (consume(ID, "stringifier")) { + ret.stringifier = true; + grabbed.push(last_token); + } + var w = all_ws(); + if (w) grabbed.push(w); + if (consume(ID, "inherit")) { + if (ret["static"] || ret.stringifier) error("Cannot have a static or stringifier inherit"); + ret.inherit = true; + grabbed.push(last_token); + var w = all_ws(); + if (w) grabbed.push(w); + } + if (consume(ID, "readonly")) { + ret.readonly = true; + grabbed.push(last_token); + var w = all_ws(); + if (w) grabbed.push(w); + } + var rest = attribute_rest(ret); + if (!rest) { + tokens = grabbed.concat(tokens); + } + return rest; + }; - var argument_list = function () { - var arg = argument(), ret = []; - if (!arg) return ret; - ret.push(arg); - while (true) { - all_ws(); - if (!consume(OTHER, ",")) return ret; - all_ws(); - var nxt = argument() || error("Trailing comma in arguments list"); - ret.push(nxt); - } - }; + var attribute_rest = function(ret) { + if (!consume(ID, "attribute")) { + return; + } + all_ws(); + ret.idlType = type_with_extended_attributes() || error("No type in attribute"); + if (ret.idlType.sequence) error("Attributes cannot accept sequence types"); + if (ret.idlType.generic === "record") error("Attributes cannot accept record types"); + all_ws(); + var name = consume(ID) || error("No name in attribute"); + ret.name = name.value; + all_ws(); + consume(OTHER, ";") || error("Unterminated attribute"); + return ret; + }; - var simple_extended_attr = function () { - all_ws(); - var name = consume(ID); - if (!name) return; - var ret = { - name: name.value - , "arguments": null - }; - all_ws(); - var eq = consume(OTHER, "="); - if (eq) { - all_ws(); - ret.rhs = consume(ID); - if (!ret.rhs) return error("No right hand side to extended attribute assignment"); - } - all_ws(); - if (consume(OTHER, "(")) { - ret["arguments"] = argument_list(); - all_ws(); - consume(OTHER, ")") || error("Unclosed argument in extended attribute"); - } - return ret; - }; + var return_type = function() { + var typ = type(); + if (!typ) { + if (consume(ID, "void")) { + return "void"; + } else error("No return type"); + } + return typ; + }; - // Note: we parse something simpler than the official syntax. 
It's all that ever - // seems to be used - var extended_attrs = function () { - var eas = []; - all_ws(); - if (!consume(OTHER, "[")) return eas; - eas[0] = simple_extended_attr() || error("Extended attribute with not content"); - all_ws(); - while (consume(OTHER, ",")) { - all_ws(); - eas.push(simple_extended_attr() || error("Trailing comma in extended attribute")); - all_ws(); - } - consume(OTHER, "]") || error("No end of extended attribute"); - return eas; - }; + var operation = function(store) { + all_ws(store, "pea"); + var ret = { + type: "operation", + getter: false, + setter: false, + creator: false, + deleter: false, + legacycaller: false, + "static": false, + stringifier: false + }; + while (true) { + all_ws(); + if (consume(ID, "getter")) ret.getter = true; + else if (consume(ID, "setter")) ret.setter = true; + else if (consume(ID, "creator")) ret.creator = true; + else if (consume(ID, "deleter")) ret.deleter = true; + else if (consume(ID, "legacycaller")) ret.legacycaller = true; + else break; + } + if (ret.getter || ret.setter || ret.creator || ret.deleter || ret.legacycaller) { + all_ws(); + ret.idlType = return_type(); + operation_rest(ret, store); + return ret; + } + if (consume(ID, "static")) { + ret["static"] = true; + ret.idlType = return_type(); + operation_rest(ret, store); + return ret; + } else if (consume(ID, "stringifier")) { + ret.stringifier = true; - + all_ws(); + if (consume(OTHER, ";")) return ret; + ret.idlType = return_type(); + operation_rest(ret, store); + return ret; + } + ret.idlType = return_type(); + all_ws(); + if (consume(ID, "iterator")) { + all_ws(); + ret.type = "iterator"; + if (consume(ID, "object")) { + ret.iteratorObject = "object"; + } else if (consume(OTHER, "=")) { + all_ws(); + var name = consume(ID) || error("No right hand side in iterator"); + ret.iteratorObject = name.value; + } + all_ws(); + consume(OTHER, ";") || error("Unterminated iterator"); + return ret; + } else { + operation_rest(ret, store); + return ret; + } + }; - var default_ = function () { - all_ws(); - if (consume(OTHER, "=")) { - all_ws(); - var def = const_value(); - if (def) { - return def; - } - else { - var str = consume(STR) || error("No value for default"); - str.value = str.value.replace(/^"/, "").replace(/"$/, ""); - return str; - } - } - }; + var identifiers = function(arr) { + while (true) { + all_ws(); + if (consume(OTHER, ",")) { + all_ws(); + var name = consume(ID) || error("Trailing comma in identifiers list"); + arr.push(name.value); + } else break; + } + }; - var const_ = function () { - all_ws(); - if (!consume(ID, "const")) return; - var ret = { type: "const", nullable: false }; - all_ws(); - var typ = primitive_type(); - if (!typ) { - typ = consume(ID) || error("No type for const"); - typ = typ.value; - } - ret.idlType = typ; - all_ws(); - if (consume(OTHER, "?")) { - ret.nullable = true; - all_ws(); - } - var name = consume(ID) || error("No name for const"); - ret.name = name.value; - all_ws(); - consume(OTHER, "=") || error("No value assignment for const"); - all_ws(); - var cnt = const_value(); - if (cnt) ret.value = cnt; - else error("No value for const"); - all_ws(); - consume(OTHER, ";") || error("Unterminated const"); - return ret; - }; + var serialiser = function(store) { + all_ws(store, "pea"); + if (!consume(ID, "serializer")) return; + var ret = { type: "serializer" }; + all_ws(); + if (consume(OTHER, "=")) { + all_ws(); + if (consume(OTHER, "{")) { + ret.patternMap = true; + all_ws(); + var id = consume(ID); + if (id && id.value === 
"getter") { + ret.names = ["getter"]; + } else if (id && id.value === "inherit") { + ret.names = ["inherit"]; + identifiers(ret.names); + } else if (id) { + ret.names = [id.value]; + identifiers(ret.names); + } else { + ret.names = []; + } + all_ws(); + consume(OTHER, "}") || error("Unterminated serializer pattern map"); + } else if (consume(OTHER, "[")) { + ret.patternList = true; + all_ws(); + var id = consume(ID); + if (id && id.value === "getter") { + ret.names = ["getter"]; + } else if (id) { + ret.names = [id.value]; + identifiers(ret.names); + } else { + ret.names = []; + } + all_ws(); + consume(OTHER, "]") || error("Unterminated serializer pattern list"); + } else { + var name = consume(ID) || error("Invalid serializer"); + ret.name = name.value; + } + all_ws(); + consume(OTHER, ";") || error("Unterminated serializer"); + return ret; + } else if (consume(OTHER, ";")) { + // noop, just parsing + } else { + ret.idlType = return_type(); + all_ws(); + ret.operation = operation_rest(null, store); + } + return ret; + }; - var inheritance = function () { - all_ws(); - if (consume(OTHER, ":")) { - all_ws(); - var inh = consume(ID) || error ("No type in inheritance"); - return inh.value; - } - }; + var iterable_type = function() { + if (consume(ID, "iterable")) return "iterable"; + else if (consume(ID, "legacyiterable")) return "legacyiterable"; + else if (consume(ID, "maplike")) return "maplike"; + else if (consume(ID, "setlike")) return "setlike"; + else return; + }; - var operation_rest = function (ret) { - all_ws(); - if (!ret) ret = {}; - var name = consume(ID); - ret.name = name ? name.value : null; - all_ws(); - consume(OTHER, "(") || error("Invalid operation"); - ret["arguments"] = argument_list(); - all_ws(); - consume(OTHER, ")") || error("Unterminated operation"); - all_ws(); - consume(OTHER, ";") || error("Unterminated operation"); - return ret; - }; + var readonly_iterable_type = function() { + if (consume(ID, "maplike")) return "maplike"; + else if (consume(ID, "setlike")) return "setlike"; + else return; + }; - var callback = function () { - all_ws(); - var ret; - if (!consume(ID, "callback")) return; - all_ws(); - var tok = consume(ID, "interface"); - if (tok) { - tokens.unshift(tok); - ret = interface_(); - ret.type = "callback interface"; - return ret; - } - var name = consume(ID) || error("No name for callback"); - ret = { type: "callback", name: name.value }; - all_ws(); - consume(OTHER, "=") || error("No assignment in callback"); - all_ws(); - ret.idlType = return_type(); - all_ws(); - consume(OTHER, "(") || error("No arguments in callback"); - ret["arguments"] = argument_list(); - all_ws(); - consume(OTHER, ")") || error("Unterminated callback"); - all_ws(); - consume(OTHER, ";") || error("Unterminated callback"); - return ret; - }; + var iterable = function(store) { + all_ws(store, "pea"); + var grabbed = [], + ret = { type: null, idlType: null, readonly: false }; + if (consume(ID, "readonly")) { + ret.readonly = true; + grabbed.push(last_token); + var w = all_ws(); + if (w) grabbed.push(w); + } + var consumeItType = ret.readonly ? 
readonly_iterable_type : iterable_type; - var attribute = function () { - all_ws(); - var grabbed = [] - , ret = { - type: "attribute" - , "static": false - , stringifier: false - , inherit: false - , readonly: false - }; - if (consume(ID, "static")) { - ret["static"] = true; - grabbed.push(last_token); - } - else if (consume(ID, "stringifier")) { - ret.stringifier = true; - grabbed.push(last_token); - } - var w = all_ws(); - if (w) grabbed.push(w); - if (consume(ID, "inherit")) { - if (ret["static"] || ret.stringifier) error("Cannot have a static or stringifier inherit"); - ret.inherit = true; - grabbed.push(last_token); - var w = all_ws(); - if (w) grabbed.push(w); - } - if (consume(ID, "readonly")) { - ret.readonly = true; - grabbed.push(last_token); - var w = all_ws(); - if (w) grabbed.push(w); - } - if (!consume(ID, "attribute")) { - tokens = grabbed.concat(tokens); - return; - } - all_ws(); - ret.idlType = type() || error("No type in attribute"); - if (ret.idlType.sequence) error("Attributes cannot accept sequence types"); + var ittype = consumeItType(); + if (!ittype) { + tokens = grabbed.concat(tokens); + return; + } + + var secondTypeRequired = ittype === "maplike"; + var secondTypeAllowed = secondTypeRequired || ittype === "iterable"; + ret.type = ittype; + if (ret.type !== 'maplike' && ret.type !== 'setlike') + delete ret.readonly; + all_ws(); + if (consume(OTHER, "<")) { + ret.idlType = type_with_extended_attributes() || error("Error parsing " + ittype + " declaration"); + all_ws(); + if (secondTypeAllowed) { + var type2 = null; + if (consume(OTHER, ",")) { all_ws(); - var name = consume(ID) || error("No name in attribute"); - ret.name = name.value; + type2 = type_with_extended_attributes(); all_ws(); - consume(OTHER, ";") || error("Unterminated attribute"); - return ret; - }; + } + if (type2) + ret.idlType = [ret.idlType, type2]; + else if (secondTypeRequired) + error("Missing second type argument in " + ittype + " declaration"); + } + if (!consume(OTHER, ">")) error("Unterminated " + ittype + " declaration"); + all_ws(); + if (!consume(OTHER, ";")) error("Missing semicolon after " + ittype + " declaration"); + } else + error("Error parsing " + ittype + " declaration"); - var return_type = function () { - var typ = type(); - if (!typ) { - if (consume(ID, "void")) { - return "void"; - } - else error("No return type"); - } - return typ; - }; + return ret; + }; - var operation = function () { - all_ws(); - var ret = { - type: "operation" - , getter: false - , setter: false - , creator: false - , deleter: false - , legacycaller: false - , "static": false - , stringifier: false - }; - while (true) { - all_ws(); - if (consume(ID, "getter")) ret.getter = true; - else if (consume(ID, "setter")) ret.setter = true; - else if (consume(ID, "creator")) ret.creator = true; - else if (consume(ID, "deleter")) ret.deleter = true; - else if (consume(ID, "legacycaller")) ret.legacycaller = true; - else break; - } - if (ret.getter || ret.setter || ret.creator || ret.deleter || ret.legacycaller) { - all_ws(); - ret.idlType = return_type(); - operation_rest(ret); - return ret; - } - if (consume(ID, "static")) { - ret["static"] = true; - ret.idlType = return_type(); - operation_rest(ret); - return ret; - } - else if (consume(ID, "stringifier")) { - ret.stringifier = true; - all_ws(); - if (consume(OTHER, ";")) return ret; - ret.idlType = return_type(); - operation_rest(ret); - return ret; - } - ret.idlType = return_type(); - all_ws(); - if (consume(ID, "iterator")) { - all_ws(); - ret.type = 
"iterator"; - if (consume(ID, "object")) { - ret.iteratorObject = "object"; - } - else if (consume(OTHER, "=")) { - all_ws(); - var name = consume(ID) || error("No right hand side in iterator"); - ret.iteratorObject = name.value; - } - all_ws(); - consume(OTHER, ";") || error("Unterminated iterator"); - return ret; - } - else { - operation_rest(ret); - return ret; - } + var interface_ = function(isPartial, store) { + all_ws(isPartial ? null : store, "pea"); + if (!consume(ID, "interface")) return; + all_ws(); + var name = consume(ID) || error("No name for interface"); + var mems = [], + ret = { + type: "interface", + name: name.value, + partial: false, + members: mems }; + if (!isPartial) ret.inheritance = inheritance() || null; + all_ws(); + consume(OTHER, "{") || error("Bodyless interface"); + while (true) { + all_ws(store ? mems : null); + if (consume(OTHER, "}")) { + all_ws(); + consume(OTHER, ";") || error("Missing semicolon after interface"); + return ret; + } + var ea = extended_attrs(store ? mems : null); + all_ws(); + var cnt = const_(store ? mems : null); + if (cnt) { + cnt.extAttrs = ea; + ret.members.push(cnt); + continue; + } + var mem = (opt.allowNestedTypedefs && typedef(store ? mems : null)) || + iterable(store ? mems : null) || + serialiser(store ? mems : null) || + attribute(store ? mems : null) || + operation(store ? mems : null) || + error("Unknown member"); + mem.extAttrs = ea; + ret.members.push(mem); + } + }; - var identifiers = function (arr) { - while (true) { - all_ws(); - if (consume(OTHER, ",")) { - all_ws(); - var name = consume(ID) || error("Trailing comma in identifiers list"); - arr.push(name.value); - } - else break; - } + var namespace = function(isPartial, store) { + all_ws(isPartial ? null : store, "pea"); + if (!consume(ID, "namespace")) return; + all_ws(); + var name = consume(ID) || error("No name for namespace"); + var mems = [], + ret = { + type: "namespace", + name: name.value, + partial: isPartial, + members: mems }; + all_ws(); + consume(OTHER, "{") || error("Bodyless namespace"); + while (true) { + all_ws(store ? mems : null); + if (consume(OTHER, "}")) { + all_ws(); + consume(OTHER, ";") || error("Missing semicolon after namespace"); + return ret; + } + var ea = extended_attrs(store ? mems : null); + all_ws(); + var mem = noninherited_attribute(store ? mems : null) || + nonspecial_operation(store ? 
mems : null) || + error("Unknown member"); + mem.extAttrs = ea; + ret.members.push(mem); + } + } - var serialiser = function () { - all_ws(); - if (!consume(ID, "serializer")) return; - var ret = { type: "serializer" }; - all_ws(); - if (consume(OTHER, "=")) { - all_ws(); - if (consume(OTHER, "{")) { - ret.patternMap = true; - all_ws(); - var id = consume(ID); - if (id && id.value === "getter") { - ret.names = ["getter"]; - } - else if (id && id.value === "inherit") { - ret.names = ["inherit"]; - identifiers(ret.names); - } - else if (id) { - ret.names = [id.value]; - identifiers(ret.names); - } - else { - ret.names = []; - } - all_ws(); - consume(OTHER, "}") || error("Unterminated serializer pattern map"); - } - else if (consume(OTHER, "[")) { - ret.patternList = true; - all_ws(); - var id = consume(ID); - if (id && id.value === "getter") { - ret.names = ["getter"]; - } - else if (id) { - ret.names = [id.value]; - identifiers(ret.names); - } - else { - ret.names = []; - } - all_ws(); - consume(OTHER, "]") || error("Unterminated serializer pattern list"); - } - else { - var name = consume(ID) || error("Invalid serializer"); - ret.name = name.value; - } - all_ws(); - consume(OTHER, ";") || error("Unterminated serializer"); - return ret; - } - else if (consume(OTHER, ";")) { - // noop, just parsing - } - else { - ret.idlType = return_type(); - all_ws(); - ret.operation = operation_rest(); - } - return ret; + var noninherited_attribute = function(store) { + var w = all_ws(store, "pea"), + grabbed = [], + ret = { + type: "attribute", + "static": false, + stringifier: false, + inherit: false, + readonly: false }; + if (w) grabbed.push(w); + if (consume(ID, "readonly")) { + ret.readonly = true; + grabbed.push(last_token); + var w = all_ws(); + if (w) grabbed.push(w); + } + var rest = attribute_rest(ret); + if (!rest) { + tokens = grabbed.concat(tokens); + } + return rest; + } - var interface_ = function (isPartial) { - all_ws(); - if (!consume(ID, "interface")) return; - all_ws(); - var name = consume(ID) || error("No name for interface"); - var ret = { - type: "interface" - , name: name.value - , partial: false - , members: [] - }; - if (!isPartial) ret.inheritance = inheritance() || null; - all_ws(); - consume(OTHER, "{") || error("Bodyless interface"); - while (true) { - all_ws(); - if (consume(OTHER, "}")) { - all_ws(); - consume(OTHER, ";") || error("Missing semicolon after interface"); - return ret; - } - var ea = extended_attrs(); - all_ws(); - var cnt = const_(); - if (cnt) { - cnt.extAttrs = ea; - ret.members.push(cnt); - continue; - } - var mem = serialiser() || attribute() || operation() || error("Unknown member"); - mem.extAttrs = ea; - ret.members.push(mem); - } - }; + var nonspecial_operation = function(store) { + all_ws(store, "pea"); + var ret = { + type: "operation", + getter: false, + setter: false, + creator: false, + deleter: false, + legacycaller: false, + "static": false, + stringifier: false + }; + ret.idlType = return_type(); + return operation_rest(ret, store); + } - var partial = function () { - all_ws(); - if (!consume(ID, "partial")) return; - var thing = dictionary(true) || interface_(true) || error("Partial doesn't apply to anything"); - thing.partial = true; - return thing; - }; + var partial = function(store) { + all_ws(store, "pea"); + if (!consume(ID, "partial")) return; + var thing = dictionary(true, store) || + interface_(true, store) || + namespace(true, store) || + error("Partial doesn't apply to anything"); + thing.partial = true; + return thing; + }; - var 
dictionary = function (isPartial) { - all_ws(); - if (!consume(ID, "dictionary")) return; - all_ws(); - var name = consume(ID) || error("No name for dictionary"); - var ret = { - type: "dictionary" - , name: name.value - , partial: false - , members: [] - }; - if (!isPartial) ret.inheritance = inheritance() || null; - all_ws(); - consume(OTHER, "{") || error("Bodyless dictionary"); - while (true) { - all_ws(); - if (consume(OTHER, "}")) { - all_ws(); - consume(OTHER, ";") || error("Missing semicolon after dictionary"); - return ret; - } - var ea = extended_attrs(); - all_ws(); - var typ = type() || error("No type for dictionary member"); - all_ws(); - var name = consume(ID) || error("No name for dictionary member"); - ret.members.push({ - type: "field" - , name: name.value - , idlType: typ - , extAttrs: ea - , "default": default_() - }); - all_ws(); - consume(OTHER, ";") || error("Unterminated dictionary member"); - } + var dictionary = function(isPartial, store) { + all_ws(isPartial ? null : store, "pea"); + if (!consume(ID, "dictionary")) return; + all_ws(); + var name = consume(ID) || error("No name for dictionary"); + var mems = [], + ret = { + type: "dictionary", + name: name.value, + partial: false, + members: mems }; - - var exception = function () { - all_ws(); - if (!consume(ID, "exception")) return; - all_ws(); - var name = consume(ID) || error("No name for exception"); - var ret = { - type: "exception" - , name: name.value - , members: [] - }; - ret.inheritance = inheritance() || null; - all_ws(); - consume(OTHER, "{") || error("Bodyless exception"); - while (true) { - all_ws(); - if (consume(OTHER, "}")) { - all_ws(); - consume(OTHER, ";") || error("Missing semicolon after exception"); - return ret; - } - var ea = extended_attrs(); - all_ws(); - var cnt = const_(); - if (cnt) { - cnt.extAttrs = ea; - ret.members.push(cnt); - } - else { - var typ = type(); - all_ws(); - var name = consume(ID); - all_ws(); - if (!typ || !name || !consume(OTHER, ";")) error("Unknown member in exception body"); - ret.members.push({ - type: "field" - , name: name.value - , idlType: typ - , extAttrs: ea - }); - } - } + if (!isPartial) ret.inheritance = inheritance() || null; + all_ws(); + consume(OTHER, "{") || error("Bodyless dictionary"); + while (true) { + all_ws(store ? mems : null); + if (consume(OTHER, "}")) { + all_ws(); + consume(OTHER, ";") || error("Missing semicolon after dictionary"); + return ret; + } + var ea = extended_attrs(store ? mems : null); + all_ws(store ? 
mems : null, "pea"); + var required = consume(ID, "required"); + var typ = type_with_extended_attributes() || error("No type for dictionary member"); + all_ws(); + var name = consume(ID) || error("No name for dictionary member"); + var dflt = default_(); + if (required && dflt) error("Required member must not have a default"); + var member = { + type: "field", + name: name.value, + required: !!required, + idlType: typ, + extAttrs: ea }; + if (typeof dflt !== "undefined") { + member["default"] = dflt; + } + ret.members.push(member); + all_ws(); + consume(OTHER, ";") || error("Unterminated dictionary member"); + } + }; - var enum_ = function () { - all_ws(); - if (!consume(ID, "enum")) return; - all_ws(); - var name = consume(ID) || error("No name for enum"); - var ret = { - type: "enum" - , name: name.value - , values: [] - }; - all_ws(); - consume(OTHER, "{") || error("No curly for enum"); - var saw_comma = false; - while (true) { - all_ws(); - if (consume(OTHER, "}")) { - all_ws(); - if (saw_comma) error("Trailing comma in enum"); - consume(OTHER, ";") || error("No semicolon after enum"); - return ret; - } - var val = consume(STR) || error("Unexpected value in enum"); - ret.values.push(val.value.replace(/"/g, "")); - all_ws(); - if (consume(OTHER, ",")) { - all_ws(); - saw_comma = true; - } - else { - saw_comma = false; - } - } + var exception = function(store) { + all_ws(store, "pea"); + if (!consume(ID, "exception")) return; + all_ws(); + var name = consume(ID) || error("No name for exception"); + var mems = [], + ret = { + type: "exception", + name: name.value, + members: mems }; + ret.inheritance = inheritance() || null; + all_ws(); + consume(OTHER, "{") || error("Bodyless exception"); + while (true) { + all_ws(store ? mems : null); + if (consume(OTHER, "}")) { + all_ws(); + consume(OTHER, ";") || error("Missing semicolon after exception"); + return ret; + } + var ea = extended_attrs(store ? mems : null); + all_ws(store ? mems : null, "pea"); + var cnt = const_(); + if (cnt) { + cnt.extAttrs = ea; + ret.members.push(cnt); + } else { + var typ = type(); + all_ws(); + var name = consume(ID); + all_ws(); + if (!typ || !name || !consume(OTHER, ";")) error("Unknown member in exception body"); + ret.members.push({ + type: "field", + name: name.value, + idlType: typ, + extAttrs: ea + }); + } + } + }; - var typedef = function () { - all_ws(); - if (!consume(ID, "typedef")) return; - var ret = { - type: "typedef" - }; - all_ws(); - ret.typeExtAttrs = extended_attrs(); - all_ws(); - ret.idlType = type() || error("No type in typedef"); - all_ws(); - var name = consume(ID) || error("No name in typedef"); - ret.name = name.value; - all_ws(); - consume(OTHER, ";") || error("Unterminated typedef"); - return ret; + var enum_ = function(store) { + all_ws(store, "pea"); + if (!consume(ID, "enum")) return; + all_ws(); + var name = consume(ID) || error("No name for enum"); + var vals = [], + ret = { + type: "enum", + name: name.value, + values: vals }; + all_ws(); + consume(OTHER, "{") || error("No curly for enum"); + var saw_comma = false; + while (true) { + all_ws(store ? vals : null); + if (consume(OTHER, "}")) { + all_ws(); + consume(OTHER, ";") || error("No semicolon after enum"); + return ret; + } + var val = consume(STR) || error("Unexpected value in enum"); + ret.values.push(val.value.replace(/"/g, "")); + all_ws(store ? vals : null); + if (consume(OTHER, ",")) { + if (store) vals.push({ type: "," }); + all_ws(store ? 
vals : null); + saw_comma = true; + } else { + saw_comma = false; + } + } + }; - var implements_ = function () { - all_ws(); - var target = consume(ID); - if (!target) return; - var w = all_ws(); - if (consume(ID, "implements")) { - var ret = { - type: "implements" - , target: target.value - }; - all_ws(); - var imp = consume(ID) || error("Incomplete implements statement"); - ret["implements"] = imp.value; - all_ws(); - consume(OTHER, ";") || error("No terminating ; for implements statement"); - return ret; - } - else { - // rollback - tokens.unshift(w); - tokens.unshift(target); - } - }; + var typedef = function(store) { + all_ws(store, "pea"); + if (!consume(ID, "typedef")) return; + var ret = { + type: "typedef" + }; + all_ws(); + ret.idlType = type_with_extended_attributes() || error("No type in typedef"); + all_ws(); + var name = consume(ID) || error("No name in typedef"); + ret.name = name.value; + all_ws(); + consume(OTHER, ";") || error("Unterminated typedef"); + return ret; + }; - var definition = function () { - return callback() || - interface_() || - partial() || - dictionary() || - exception() || - enum_() || - typedef() || - implements_() - ; + var implements_ = function(store) { + all_ws(store, "pea"); + var target = consume(ID); + if (!target) return; + var w = all_ws(); + if (consume(ID, "implements")) { + var ret = { + type: "implements", + target: target.value }; + all_ws(); + var imp = consume(ID) || error("Incomplete implements statement"); + ret["implements"] = imp.value; + all_ws(); + consume(OTHER, ";") || error("No terminating ; for implements statement"); + return ret; + } else { + // rollback + tokens.unshift(w); + tokens.unshift(target); + } + }; - var definitions = function () { - if (!tokens.length) return []; - var defs = []; - while (true) { - var ea = extended_attrs() - , def = definition(); - if (!def) { - if (ea.length) error("Stray extended attributes"); - break; - } - def.extAttrs = ea; - defs.push(def); - } - return defs; - }; - var res = definitions(); - if (tokens.length) error("Unrecognised tokens"); - return res; + var definition = function(store) { + return callback(store) || + interface_(false, store) || + partial(store) || + dictionary(false, store) || + exception(store) || + enum_(store) || + typedef(store) || + implements_(store) || + namespace(false, store); }; - var obj = { - parse: function (str) { - var tokens = tokenise(str); - return parse(tokens); + var definitions = function(store) { + if (!tokens.length) return []; + var defs = []; + while (true) { + var ea = extended_attrs(store ? defs : null), + def = definition(store ? 
defs : null); + if (!def) { + if (ea.length) error("Stray extended attributes"); + break; } + def.extAttrs = ea; + defs.push(def); + } + return defs; }; - if (typeof module !== "undefined" && module.exports) { - module.exports = obj; - } - // HACK: enable WebIDL2 in phantomJS - if(typeof window !== 'undefined') { - window.WebIDL2 = obj; + var res = definitions(opt.ws); + if (tokens.length) error("Unrecognised tokens"); + return res; + }; + + var obj = { + parse: function(str, opt) { + if (!opt) opt = {}; + var tokens = tokenise(str); + return parse(tokens, opt); } + }; + + if (typeof module !== 'undefined' && typeof module.exports !== 'undefined') { + module.exports = obj; + } else if (typeof define === 'function' && define.amd) { + define([], function() { + return obj; + }); + } else { + (self || window).WebIDL2 = obj; + } + // HACK: force WebIDL2 global + if(typeof window !== 'undefined') { + window.WebIDL2 = obj; + } }()); diff --git a/tests/webidl/fetch-latest b/tests/webidl/fetch-latest new file mode 100755 index 00000000..f4e82d9b --- /dev/null +++ b/tests/webidl/fetch-latest @@ -0,0 +1,9 @@ +#!/bin/sh + +# Fetch latest files from github. +# https://github.com/w3c/web-platform-tests/tree/master/resources +# https://github.com/w3c/web-platform-tests/tree/master/resources/webidl2/lib + +curl -o testharness.js-new https://raw.githubusercontent.com/w3c/web-platform-tests/master/resources/testharness.js +curl -o idlharness.js-new https://raw.githubusercontent.com/w3c/web-platform-tests/master/resources/idlharness.js +curl -o WebIDLParser.js-new https://raw.githubusercontent.com/w3c/web-platform-tests/master/resources/webidl2/lib/webidl2.js diff --git a/tests/webidl/idlharness.js b/tests/webidl/idlharness.js index cc20301a..a3bc373b 100644 --- a/tests/webidl/idlharness.js +++ b/tests/webidl/idlharness.js @@ -8,131 +8,7 @@ policies and contribution forms [3]. [3] http://www.w3.org/2004/10/27-testcases */ -/* - * This file automatically generates browser tests for WebIDL interfaces, using - * the testharness.js framework. To use, first include the following: - * - * - * - * - * - * - * Then you'll need some type of IDLs. Here's some script that can be run on a - * spec written in HTML, which will grab all the elements with class="idl", - * concatenate them, and replace the body so you can copy-paste: - * - var s = ""; - [].forEach.call(document.getElementsByClassName("idl"), function(idl) { - //https://www.w3.org/Bugs/Public/show_bug.cgi?id=14914 - if (!idl.classList.contains("extract")) - { - s += idl.textContent + "\n\n"; - } - }); - document.body.innerHTML = '
';
-     document.body.firstChild.textContent = s;
- *
- * (TODO: write this in Python or something so that it can be done from the
- * command line instead.)
- *
- * Once you have that, put it in your script somehow.  The easiest way is to
- * embed it literally in an HTML file with 
- * 
- *
- * Within each file one may define one or more tests. Each test is atomic
- * in the sense that a single test has a single result (pass/fail/timeout).
- * Within each test one may have a number of asserts. The test fails at the
- * first failing assert, and the remainder of the test is (typically) not run.
- *
- * If the file containing the tests is a HTML file with an element of id "log"
- * this will be populated with a table containing the test results after all
- * the tests have run.
- *
- * NOTE: By default tests must be created before the load event fires. For ways
- *       to create tests after the load event, see "Determining when all tests
- *       are complete", below
- *
- * == Synchronous Tests ==
- *
- * To create a synchronous test use the test() function:
- *
- * test(test_function, name, properties)
- *
- * test_function is a function that contains the code to test. For example a
- * trivial passing test would be:
- *
- * test(function() {assert_true(true)}, "assert_true with true")
- *
- * The function passed in is run in the test() call.
- *
- * properties is an object that overrides default test properties. The
- * recognised properties are:
- *    timeout - the test timeout in ms
- *
- * e.g.
- * test(test_function, "Sample test", {timeout:1000})
- *
- * would run test_function with a timeout of 1s.
- *
- * Additionally, test-specific metadata can be passed in the properties. These
- * are used when the individual test has different metadata from that stored
- * in the .
- * The recognized metadata properties are:
- *
- *    help - The url of the part of the specification being tested
- *
- *    assert - A human readable description of what the test is attempting
- *             to prove
- *
- *    author - Name and contact information for the author of the test in the
- *             format: "Name " or "Name http://contact/url"
- *
- * == Asynchronous Tests ==
- *
- * Testing asynchronous features is somewhat more complex since the result of
- * a test may depend on one or more events or other callbacks. The API provided
- * for testing these features is indended to be rather low-level but hopefully
- * applicable to many situations.
- *
- * To create a test, one starts by getting a Test object using async_test:
- *
- * async_test(name, properties)
- *
- * e.g.
- * var t = async_test("Simple async test")
- *
- * Assertions can be added to the test by calling the step method of the test
- * object with a function containing the test assertions:
- *
- * t.step(function() {assert_true(true)});
- *
- * When all the steps are complete, the done() method must be called:
- *
- * t.done();
- *
- * As a convenience, async_test can also takes a function as first argument.
- * This function is called with the test object as both its `this` object and
- * first argument. The above example can be rewritten as:
- *
- * async_test(function(t) {
- *     object.some_event = function() {
- *         t.step(function (){assert_true(true); t.done();});
- *     };
- * }, "Simple async test");
- *
- * which avoids cluttering the global scope with references to async
- * tests instances.
- *
- * The properties argument is identical to that for test().
- *
- * In many cases it is convenient to run a step in response to an event or a
- * callback. A convenient method of doing this is through the step_func method
- * which returns a function that, when called runs a test step. For example
- *
- * object.some_event = t.step_func(function(e) {assert_true(e.a)});
- *
- * == Making assertions ==
- *
- * Functions for making assertions start assert_
- * The best way to get a list is to look in this file for functions names
- * matching that pattern. The general signature is
- *
- * assert_something(actual, expected, description)
- *
- * although not all assertions precisely match this pattern e.g. assert_true
- * only takes actual and description as arguments.
- *
- * The description parameter is used to present more useful error messages when
- * a test fails
- *
- * NOTE: All asserts must be located in a test() or a step of an async_test().
- *       asserts outside these places won't be detected correctly by the harness
- *       and may cause a file to stop testing.
- *
- * == Setup ==
- *
- * Sometimes tests require non-trivial setup that may fail. For this purpose
- * there is a setup() function, that may be called with one or two arguments.
- * The two argument version is:
- *
- * setup(func, properties)
- *
- * The one argument versions may omit either argument.
- * func is a function to be run synchronously. setup() becomes a no-op once
- * any tests have returned results. Properties are global properties of the test
- * harness. Currently recognised properties are:
- *
- * timeout - The time in ms after which the harness should stop waiting for
- *           tests to complete (this is different to the per-test timeout
- *           because async tests do not start their timer until .step is called)
- *
- * explicit_done - Wait for an explicit call to done() before declaring all
- *                 tests complete (see below)
- *
- * output_document - The document to which results should be logged. By default
- *                   this is the current document but could be an ancestor
- *                   document in some cases e.g. a SVG test loaded in an HTML
- *                   wrapper
- *
- * explicit_timeout - disable file timeout; only stop waiting for results
- *                    when the timeout() function is called (typically for
- *                    use when integrating with some existing test framework
- *                    that has its own timeout mechanism).
- *
- * allow_uncaught_exception - don't treat an uncaught exception as an error;
- *                            needed when e.g. testing the window.onerror
- *                            handler.
- *
- * == Determining when all tests are complete ==
- *
- * By default the test harness will assume there are no more results to come
- * when:
- * 1) There are no Test objects that have been created but not completed
- * 2) The load event on the document has fired
- *
- * This behaviour can be overridden by setting the explicit_done property to
- * true in a call to setup(). If explicit_done is true, the test harness will
- * not assume it is done until the global done() function is called. Once done()
- * is called, the two conditions above apply like normal.
- *
- * == Generating tests ==
- *
- * NOTE: this functionality may be removed
- *
- * There are scenarios in which is is desirable to create a large number of
- * (synchronous) tests that are internally similar but vary in the parameters
- * used. To make this easier, the generate_tests function allows a single
- * function to be called with each set of parameters in a list:
- *
- * generate_tests(test_function, parameter_lists, properties)
- *
- * For example:
- *
- * generate_tests(assert_equals, [
- *     ["Sum one and one", 1+1, 2],
- *     ["Sum one and zero", 1+0, 1]
- *     ])
- *
- * Is equivalent to:
- *
- * test(function() {assert_equals(1+1, 2)}, "Sum one and one")
- * test(function() {assert_equals(1+0, 1)}, "Sum one and zero")
- *
- * Note that the first item in each parameter list corresponds to the name of
- * the test.
- *
- * The properties argument is identical to that for test(). This may be a
- * single object (used for all generated tests) or an array.
- *
- * == Callback API ==
- *
- * The framework provides callbacks corresponding to 3 events:
- *
- * start - happens when the first Test is created
- * result - happens when a test result is recieved
- * complete - happens when all results are recieved
- *
- * The page defining the tests may add callbacks for these events by calling
- * the following methods:
- *
- *   add_start_callback(callback) - callback called with no arguments
- *   add_result_callback(callback) - callback called with a test argument
- *   add_completion_callback(callback) - callback called with an array of tests
- *                                       and an status object
- *
- * tests have the following properties:
- *   status: A status code. This can be compared to the PASS, FAIL, TIMEOUT and
- *           NOTRUN properties on the test object
- *   message: A message indicating the reason for failure. In the future this
- *            will always be a string
- *
- *  The status object gives the overall status of the harness. It has the
- *  following properties:
- *    status: Can be compared to the OK, ERROR and TIMEOUT properties
- *    message: An error message set when the status is ERROR
- *
- * == External API ==
- *
- * In order to collect the results of multiple pages containing tests, the test
- * harness will, when loaded in a nested browsing context, attempt to call
- * certain functions in each ancestor and opener browsing context:
- *
- * start - start_callback
- * result - result_callback
- * complete - completion_callback
- *
- * These are given the same arguments as the corresponding internal callbacks
- * described above.
- *
- * == External API through cross-document messaging ==
- *
- * Where supported, the test harness will also send messages using
- * cross-document messaging to each ancestor and opener browsing context. Since
- * it uses the wildcard keyword (*), cross-origin communication is enabled and
- * script on different origins can collect the results.
- *
- * This API follows similar conventions as those described above only slightly
- * modified to accommodate message event API. Each message is sent by the harness
- * is passed a single vanilla object, available as the `data` property of the
- * event object. These objects are structures as follows:
- *
- * start - { type: "start" }
- * result - { type: "result", test: Test }
- * complete - { type: "complete", tests: [Test, ...], status: TestsStatus }
- *
- * == List of assertions ==
- *
- * assert_true(actual, description)
- *   asserts that /actual/ is strictly true
- *
- * assert_false(actual, description)
- *   asserts that /actual/ is strictly false
- *
- * assert_equals(actual, expected, description)
- *   asserts that /actual/ is the same value as /expected/
- *
- * assert_not_equals(actual, expected, description)
- *   asserts that /actual/ is a different value to /expected/. Yes, this means
- *   that "expected" is a misnomer
- *
- * assert_in_array(actual, expected, description)
- *   asserts that /expected/ is an Array, and /actual/ is equal to one of the
- *   members -- expected.indexOf(actual) != -1
- *
- * assert_array_equals(actual, expected, description)
- *   asserts that /actual/ and /expected/ have the same length and the value of
- *   each indexed property in /actual/ is the strictly equal to the corresponding
- *   property value in /expected/
- *
- * assert_approx_equals(actual, expected, epsilon, description)
- *   asserts that /actual/ is a number within +/- /epsilon/ of /expected/
- *
- * assert_less_than(actual, expected, description)
- *   asserts that /actual/ is a number less than /expected/
- *
- * assert_greater_than(actual, expected, description)
- *   asserts that /actual/ is a number greater than /expected/
- *
- * assert_less_than_equal(actual, expected, description)
- *   asserts that /actual/ is a number less than or equal to /expected/
- *
- * assert_greater_than_equal(actual, expected, description)
- *   asserts that /actual/ is a number greater than or equal to /expected/
- *
- * assert_regexp_match(actual, expected, description)
- *   asserts that /actual/ matches the regexp /expected/
- *
- * assert_class_string(object, class_name, description)
- *   asserts that the class string of /object/ as returned in
- *   Object.prototype.toString is equal to /class_name/.
- *
- * assert_own_property(object, property_name, description)
- *   assert that object has own property property_name
- *
- * assert_inherits(object, property_name, description)
- *   assert that object does not have an own property named property_name
- *   but that property_name is present in the prototype chain for object
- *
- * assert_idl_attribute(object, attribute_name, description)
- *   assert that an object that is an instance of some interface has the
- *   attribute attribute_name following the conditions specified by WebIDL
- *
- * assert_readonly(object, property_name, description)
- *   assert that property property_name on object is readonly
- *
- * assert_throws(code, func, description)
- *   code - the expected exception:
- *     o string: the thrown exception must be a DOMException with the given
- *               name, e.g., "TimeoutError" (for compatibility with existing
- *               tests, a constant is also supported, e.g., "TIMEOUT_ERR")
- *     o object: the thrown exception must have a property called "name" that
- *               matches code.name
- *     o null:   allow any exception (in general, one of the options above
- *               should be used)
- *   func - a function that should throw
- *
- * assert_unreached(description)
- *   asserts if called. Used to ensure that some codepath is *not* taken e.g.
- *   an event does not fire.
- *
- * assert_any(assert_func, actual, expected_array, extra_arg_1, ... extra_arg_N)
- *   asserts that one assert_func(actual, expected_array_N, extra_arg1, ..., extra_arg_N)
- *   is true for some expected_array_N in expected_array. This only works for assert_func
- *   with signature assert_func(actual, expected, args_1, ..., args_N). Note that tests
- *   with multiple allowed pass conditions are bad practice unless the spec specifically
- *   allows multiple behaviours. Test authors should not use this method simply to hide
- *   UA bugs.
- *
- * assert_exists(object, property_name, description)
- *   *** deprecated ***
- *   asserts that object has an own property property_name
- *
- * assert_not_exists(object, property_name, description)
- *   *** deprecated ***
- *   assert that object does not have own property property_name
- */
+/* Documentation: http://web-platform-tests.org/writing-tests/testharness-api.html
+ * (../docs/_writing-tests/testharness-api.md) */
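For orientation, a minimal usage sketch of the API that the documentation block removed above described; test(), async_test(), assert_equals(), assert_true(), step_func() and done() are the documented entry points, while the test names and bodies here are purely illustrative:

// Synchronous test: passes if the body returns without a failing assertion.
test(function() {
  assert_equals(1 + 1, 2, "addition");
}, "Sum one and one");

// Asynchronous test: completion is signalled explicitly via done().
async_test(function(t) {
  setTimeout(t.step_func(function() {
    assert_true(true);
    t.done();
  }), 0);
}, "Simple async test");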
 
 (function ()
 {
     var debug = false;
-    // default timeout is 5 seconds, test can override if needed
+    // default timeout is 10 seconds, test can override if needed
     var settings = {
-      output:true,
-      timeout:5000,
-      test_timeout:2000
+        output:false,
+        harness_timeout:{
+            "normal":10000,
+            "long":60000
+        },
+        test_timeout:null,
+        message_events: ["start", "test_state", "result", "completion"]
     };
 
     var xhtml_ns = "http://www.w3.org/1999/xhtml";
 
-    // script_prefix is used by Output.prototype.show_results() to figure out
-    // where to get testharness.css from.  It's enclosed in an extra closure to
-    // not pollute the library's namespace with variables like "src".
-    var script_prefix = null;
-    (function ()
-    {
-        var scripts = document.getElementsByTagName("script");
-        for (var i = 0; i < scripts.length; i++)
-        {
-            if (scripts[i].src)
-            {
-                var src = scripts[i].src;
+    /*
+     * TestEnvironment is an abstraction for the environment in which the test
+     * harness is used. Each implementation of a test environment has to provide
+     * the following interface:
+     *
+     * interface TestEnvironment {
+     *   // Invoked after the global 'tests' object has been created and it's
+     *   // safe to call add_*_callback() to register event handlers.
+     *   void on_tests_ready();
+     *
+     *   // Invoked after setup() has been called to notify the test environment
+     *   // of changes to the test harness properties.
+     *   void on_new_harness_properties(object properties);
+     *
+     *   // Should return a new unique default test name.
+     *   DOMString next_default_test_name();
+     *
+     *   // Should return the test harness timeout duration in milliseconds.
+     *   float test_timeout();
+     *
+     *   // Should return the global scope object.
+     *   object global_scope();
+     * };
+     */
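A hypothetical sketch of an environment satisfying this interface; everything except the five documented method names is invented for illustration:

function NullTestEnvironment() {
  this.name_counter = 0;
}
NullTestEnvironment.prototype = {
  on_tests_ready: function() {},                       // no output handler to wire up
  on_new_harness_properties: function(properties) {},  // ignore setup() changes
  next_default_test_name: function() { return "Untitled " + this.name_counter++; },
  test_timeout: function() { return null; },           // no harness timeout
  global_scope: function() { return self; }
};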
+
+    /*
+     * A test environment with a DOM. The global object is 'window'. By default
+     * test results are displayed in a table. Any parent windows receive
+     * callbacks or messages via postMessage() when test events occur. See
+     * apisample11.html and apisample12.html.
+     */
+    function WindowTestEnvironment() {
+        this.name_counter = 0;
+        this.window_cache = null;
+        this.output_handler = null;
+        this.all_loaded = false;
+        var this_obj = this;
+        this.message_events = [];
+        this.dispatched_messages = [];
+
+        this.message_functions = {
+            start: [add_start_callback, remove_start_callback,
+                    function (properties) {
+                        this_obj._dispatch("start_callback", [properties],
+                                           {type: "start", properties: properties});
+                    }],
+
+            test_state: [add_test_state_callback, remove_test_state_callback,
+                         function(test) {
+                             this_obj._dispatch("test_state_callback", [test],
+                                                {type: "test_state",
+                                                 test: test.structured_clone()});
+                         }],
+            result: [add_result_callback, remove_result_callback,
+                     function (test) {
+                         this_obj.output_handler.show_status();
+                         this_obj._dispatch("result_callback", [test],
+                                            {type: "result",
+                                             test: test.structured_clone()});
+                     }],
+            completion: [add_completion_callback, remove_completion_callback,
+                         function (tests, harness_status) {
+                             var cloned_tests = map(tests, function(test) {
+                                 return test.structured_clone();
+                             });
+                             this_obj._dispatch("completion_callback", [tests, harness_status],
+                                                {type: "complete",
+                                                 tests: cloned_tests,
+                                                 status: harness_status.structured_clone()});
+                         }]
+        }
+
+        on_event(window, 'load', function() {
+            this_obj.all_loaded = true;
+        });
+
+        on_event(window, 'message', function(event) {
+            if (event.data && event.data.type === "getmessages" && event.source) {
+                // A window can post "getmessages" to receive a duplicate of every
+                // message posted by this environment so far. This allows subscribers
+                // from fetch_tests_from_window to 'catch up' to the current state of
+                // this environment.
+                for (var i = 0; i < this_obj.dispatched_messages.length; ++i)
+                {
+                    event.source.postMessage(this_obj.dispatched_messages[i], "*");
+                }
             }
-            else if (scripts[i].href)
-            {
-                //SVG case
-                var src = scripts[i].href.baseVal;
+        });
+    }
+
+    WindowTestEnvironment.prototype._dispatch = function(selector, callback_args, message_arg) {
+        this.dispatched_messages.push(message_arg);
+        this._forEach_windows(
+                function(w, same_origin) {
+                    if (same_origin) {
+                        try {
+                            var has_selector = selector in w;
+                        } catch(e) {
+                            // If document.domain was set at some point same_origin can be
+                            // wrong and the above will fail.
+                            has_selector = false;
+                        }
+                        if (has_selector) {
+                            try {
+                                w[selector].apply(undefined, callback_args);
+                            } catch (e) {
+                                if (debug) {
+                                    throw e;
+                                }
+                            }
+                        }
+                    }
+                    if (supports_post_message(w) && w !== self) {
+                        w.postMessage(message_arg, "*");
+                    }
+                });
+    };
+
+    WindowTestEnvironment.prototype._forEach_windows = function(callback) {
+        // Iterate over the windows [self ... top, opener]. The callback is
+        // passed two arguments: the window object itself and a boolean
+        // indicating whether or not it is on the same origin as the
+        // current window.
+        var cache = this.window_cache;
+        if (!cache) {
+            cache = [[self, true]];
+            var w = self;
+            var i = 0;
+            var so;
+            while (w != w.parent) {
+                w = w.parent;
+                so = is_same_origin(w);
+                cache.push([w, so]);
+                i++;
             }
-            if (src && src.slice(src.length - "testharness.js".length) === "testharness.js")
-            {
-                script_prefix = src.slice(0, src.length - "testharness.js".length);
-                break;
+            w = window.opener;
+            if (w) {
+                cache.push([w, is_same_origin(w)]);
             }
+            this.window_cache = cache;
         }
-    })();
 
-    /*
-     * API functions
-     */
+        forEach(cache,
+                function(a) {
+                    callback.apply(null, a);
+                });
+    };
 
-    var name_counter = 0;
-    function next_default_name()
-    {
+    WindowTestEnvironment.prototype.on_tests_ready = function() {
+        var output = new Output();
+        this.output_handler = output;
+
+        var this_obj = this;
+
+        add_start_callback(function (properties) {
+            this_obj.output_handler.init(properties);
+        });
+
+        add_test_state_callback(function(test) {
+            this_obj.output_handler.show_status();
+        });
+
+        add_result_callback(function (test) {
+            this_obj.output_handler.show_status();
+        });
+
+        add_completion_callback(function (tests, harness_status) {
+            this_obj.output_handler.show_results(tests, harness_status);
+        });
+        this.setup_messages(settings.message_events);
+    };
+
+    WindowTestEnvironment.prototype.setup_messages = function(new_events) {
+        var this_obj = this;
+        forEach(settings.message_events, function(x) {
+            var current_dispatch = this_obj.message_events.indexOf(x) !== -1;
+            var new_dispatch = new_events.indexOf(x) !== -1;
+            if (!current_dispatch && new_dispatch) {
+                this_obj.message_functions[x][0](this_obj.message_functions[x][2]);
+            } else if (current_dispatch && !new_dispatch) {
+                this_obj.message_functions[x][1](this_obj.message_functions[x][2]);
+            }
+        });
+        this.message_events = new_events;
+    }
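Assuming setup() still forwards harness properties to on_new_harness_properties() as the removed documentation describes, a page could narrow which events are broadcast with something like:

setup({ message_events: ["result", "completion"] });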
+
+    WindowTestEnvironment.prototype.next_default_test_name = function() {
         //Don't use document.title to work around an Opera bug in XHTML documents
         var title = document.getElementsByTagName("title")[0];
         var prefix = (title && title.firstChild && title.firstChild.data) || "Untitled";
-        var suffix = name_counter > 0 ? " " + name_counter : "";
-        name_counter++;
+        var suffix = this.name_counter > 0 ? " " + this.name_counter : "";
+        this.name_counter++;
         return prefix + suffix;
+    };
+
+    WindowTestEnvironment.prototype.on_new_harness_properties = function(properties) {
+        this.output_handler.setup(properties);
+        if (properties.hasOwnProperty("message_events")) {
+            this.setup_messages(properties.message_events);
+        }
+    };
+
+    WindowTestEnvironment.prototype.add_on_loaded_callback = function(callback) {
+        on_event(window, 'load', callback);
+    };
+
+    WindowTestEnvironment.prototype.test_timeout = function() {
+        var metas = document.getElementsByTagName("meta");
+        for (var i = 0; i < metas.length; i++) {
+            if (metas[i].name == "timeout") {
+                if (metas[i].content == "long") {
+                    return settings.harness_timeout.long;
+                }
+                break;
+            }
+        }
+        return settings.harness_timeout.normal;
+    };
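In other words, a test page opts into the longer budget with <meta name="timeout" content="long"> in its markup, which maps to settings.harness_timeout.long (60000 ms); any other value, or no such meta element, falls back to the normal 10000 ms.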
+
+    WindowTestEnvironment.prototype.global_scope = function() {
+        return window;
+    };
+
+    /*
+     * Base TestEnvironment implementation for a generic web worker.
+     *
+     * Workers accumulate test results. One or more clients can connect and
+     * retrieve results from a worker at any time.
+     *
+     * WorkerTestEnvironment supports communicating with a client via a
+     * MessagePort.  The mechanism for determining the appropriate MessagePort
+     * for communicating with a client depends on the type of worker and is
+     * implemented by the various specializations of WorkerTestEnvironment
+     * below.
+     *
+     * A client document using testharness can use fetch_tests_from_worker() to
+     * retrieve results from a worker. See apisample16.html.
+     */
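A sketch of the client side mentioned above; fetch_tests_from_worker() is the documented entry point and the worker filename is hypothetical:

// In a document that includes testharness.js:
fetch_tests_from_worker(new Worker("generic-worker-tests.js"));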
+    function WorkerTestEnvironment() {
+        this.name_counter = 0;
+        this.all_loaded = true;
+        this.message_list = [];
+        this.message_ports = [];
+    }
+
+    WorkerTestEnvironment.prototype._dispatch = function(message) {
+        this.message_list.push(message);
+        for (var i = 0; i < this.message_ports.length; ++i)
+        {
+            this.message_ports[i].postMessage(message);
+        }
+    };
+
+    // The only requirement is that port has a postMessage() method. It doesn't
+    // have to be an instance of a MessagePort, and often isn't.
+    WorkerTestEnvironment.prototype._add_message_port = function(port) {
+        this.message_ports.push(port);
+        for (var i = 0; i < this.message_list.length; ++i)
+        {
+            port.postMessage(this.message_list[i]);
+        }
+    };
+
+    WorkerTestEnvironment.prototype.next_default_test_name = function() {
+        var suffix = this.name_counter > 0 ? " " + this.name_counter : "";
+        this.name_counter++;
+        return "Untitled" + suffix;
+    };
+
+    WorkerTestEnvironment.prototype.on_new_harness_properties = function() {};
+
+    WorkerTestEnvironment.prototype.on_tests_ready = function() {
+        var this_obj = this;
+        add_start_callback(
+                function(properties) {
+                    this_obj._dispatch({
+                        type: "start",
+                        properties: properties,
+                    });
+                });
+        add_test_state_callback(
+                function(test) {
+                    this_obj._dispatch({
+                        type: "test_state",
+                        test: test.structured_clone()
+                    });
+                });
+        add_result_callback(
+                function(test) {
+                    this_obj._dispatch({
+                        type: "result",
+                        test: test.structured_clone()
+                    });
+                });
+        add_completion_callback(
+                function(tests, harness_status) {
+                    this_obj._dispatch({
+                        type: "complete",
+                        tests: map(tests,
+                            function(test) {
+                                return test.structured_clone();
+                            }),
+                        status: harness_status.structured_clone()
+                    });
+                });
+    };
+
+    WorkerTestEnvironment.prototype.add_on_loaded_callback = function() {};
+
+    WorkerTestEnvironment.prototype.test_timeout = function() {
+        // Tests running in a worker don't have a default timeout. I.e. all
+        // worker tests behave as if settings.explicit_timeout is true.
+        return null;
+    };
+
+    WorkerTestEnvironment.prototype.global_scope = function() {
+        return self;
+    };
+
+    /*
+     * Dedicated web workers.
+     * https://html.spec.whatwg.org/multipage/workers.html#dedicatedworkerglobalscope
+     *
+     * This class is used as the test_environment when testharness is running
+     * inside a dedicated worker.
+     */
+    function DedicatedWorkerTestEnvironment() {
+        WorkerTestEnvironment.call(this);
+        // self is an instance of DedicatedWorkerGlobalScope which exposes
+        // a postMessage() method for communicating via the message channel
+        // established when the worker is created.
+        this._add_message_port(self);
+    }
+    DedicatedWorkerTestEnvironment.prototype = Object.create(WorkerTestEnvironment.prototype);
+
+    DedicatedWorkerTestEnvironment.prototype.on_tests_ready = function() {
+        WorkerTestEnvironment.prototype.on_tests_ready.call(this);
+        // In the absence of an onload notification, we require dedicated
+        // workers to explicitly signal when the tests are done.
+        tests.wait_for_finish = true;
+    };
+
+    /*
+     * Shared web workers.
+     * https://html.spec.whatwg.org/multipage/workers.html#sharedworkerglobalscope
+     *
+     * This class is used as the test_environment when testharness is running
+     * inside a shared web worker.
+     */
+    function SharedWorkerTestEnvironment() {
+        WorkerTestEnvironment.call(this);
+        var this_obj = this;
+        // Shared workers receive message ports via the 'onconnect' event for
+        // each connection.
+        self.addEventListener("connect",
+                function(message_event) {
+                    this_obj._add_message_port(message_event.source);
+                }, false);
+    }
+    SharedWorkerTestEnvironment.prototype = Object.create(WorkerTestEnvironment.prototype);
+
+    SharedWorkerTestEnvironment.prototype.on_tests_ready = function() {
+        WorkerTestEnvironment.prototype.on_tests_ready.call(this);
+        // In the absence of an onload notification, we require shared
+        // workers to explicitly signal when the tests are done.
+        tests.wait_for_finish = true;
+    };
+
+    /*
+     * Service workers.
+     * http://www.w3.org/TR/service-workers/
+     *
+     * This class is used as the test_environment when testharness is running
+     * inside a service worker.
+     */
+    function ServiceWorkerTestEnvironment() {
+        WorkerTestEnvironment.call(this);
+        this.all_loaded = false;
+        this.on_loaded_callback = null;
+        var this_obj = this;
+        self.addEventListener("message",
+                function(event) {
+                    if (event.data && event.data.type && event.data.type === "connect") {
+                        if (event.ports && event.ports[0]) {
+                            // If a MessageChannel was passed, then use it to
+                            // send results back to the main window.  This
+                            // allows the tests to work even if the browser
+                            // does not fully support MessageEvent.source in
+                            // ServiceWorkers yet.
+                            this_obj._add_message_port(event.ports[0]);
+                            event.ports[0].start();
+                        } else {
+                            // If there is no MessageChannel, then attempt to
+                            // use the MessageEvent.source to send results
+                            // back to the main window.
+                            this_obj._add_message_port(event.source);
+                        }
+                    }
+                }, false);
+
+        // The oninstall event is received after the service worker script and
+        // all imported scripts have been fetched and executed. It's the
+        // equivalent of an onload event for a document. All tests should have
+        // been added by the time this event is received, thus it's not
+        // necessary to wait until the onactivate event. However, tests for
+        // installed service workers need another event which is equivalent to
+        // the onload event because oninstall is fired only on installation. The
+        // onmessage event is used for that purpose, since tests using
+        // testharness.js are expected to request results from their service
+        // worker via postMessage. If the onmessage event fires in the service
+        // worker's context, the worker's script has already been evaluated.
+        on_event(self, "install", on_all_loaded);
+        on_event(self, "message", on_all_loaded);
+        function on_all_loaded() {
+            if (this_obj.all_loaded)
+                return;
+            this_obj.all_loaded = true;
+            if (this_obj.on_loaded_callback) {
+              this_obj.on_loaded_callback();
+            }
+        }
     }
 
+    ServiceWorkerTestEnvironment.prototype = Object.create(WorkerTestEnvironment.prototype);
+
+    ServiceWorkerTestEnvironment.prototype.add_on_loaded_callback = function(callback) {
+        if (this.all_loaded) {
+            callback();
+        } else {
+            this.on_loaded_callback = callback;
+        }
+    };
+
+    function create_test_environment() {
+        if ('document' in self) {
+            return new WindowTestEnvironment();
+        }
+        if ('DedicatedWorkerGlobalScope' in self &&
+            self instanceof DedicatedWorkerGlobalScope) {
+            return new DedicatedWorkerTestEnvironment();
+        }
+        if ('SharedWorkerGlobalScope' in self &&
+            self instanceof SharedWorkerGlobalScope) {
+            return new SharedWorkerTestEnvironment();
+        }
+        if ('ServiceWorkerGlobalScope' in self &&
+            self instanceof ServiceWorkerGlobalScope) {
+            return new ServiceWorkerTestEnvironment();
+        }
+        if ('WorkerGlobalScope' in self &&
+            self instanceof WorkerGlobalScope) {
+            return new DedicatedWorkerTestEnvironment();
+        }
+
+        throw new Error("Unsupported test environment");
+    }
+
+    var test_environment = create_test_environment();
+
+    function is_shared_worker(worker) {
+        return 'SharedWorker' in self && worker instanceof SharedWorker;
+    }
+
+    function is_service_worker(worker) {
+        // The worker object may be from another execution context,
+        // so do not use instanceof here.
+        return 'ServiceWorker' in self &&
+            Object.prototype.toString.call(worker) == '[object ServiceWorker]';
+    }
+
+    /*
+     * API functions
+     */
+
     function test(func, name, properties)
     {
-        var test_name = name ? name : next_default_name();
+        var test_name = name ? name : test_environment.next_default_test_name();
         properties = properties ? properties : {};
         var test_obj = new Test(test_name, properties);
-        test_obj.step(func);
-        if (test_obj.status === test_obj.NOTRUN) {
+        test_obj.step(func, test_obj, test_obj);
+        if (test_obj.phase === test_obj.phases.STARTED) {
             test_obj.done();
         }
     }
@@ -427,7 +521,7 @@ policies and contribution forms [3].
             name = func;
             func = null;
         }
-        var test_name = name ? name : next_default_name();
+        var test_name = name ? name : test_environment.next_default_test_name();
         properties = properties ? properties : {};
         var test_obj = new Test(test_name, properties);
         if (func) {
@@ -436,6 +530,144 @@ policies and contribution forms [3].
         return test_obj;
     }
 
+    function promise_test(func, name, properties) {
+        var test = async_test(name, properties);
+        // If there is no promise tests queue, make one.
+        if (!tests.promise_tests) {
+            tests.promise_tests = Promise.resolve();
+        }
+        tests.promise_tests = tests.promise_tests.then(function() {
+            var donePromise = new Promise(function(resolve) {
+                test._add_cleanup(resolve);
+            });
+            var promise = test.step(func, test, test);
+            test.step(function() {
+                assert_not_equals(promise, undefined);
+            });
+            Promise.resolve(promise).then(
+                    function() {
+                        test.done();
+                    })
+                .catch(test.step_func(
+                    function(value) {
+                        if (value instanceof AssertionError) {
+                            throw value;
+                        }
+                        assert(false, "promise_test", null,
+                               "Unhandled rejection with value: ${value}", {value:value});
+                    }));
+            return donePromise;
+        });
+    }
+
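+    /*
+     * Illustrative usage sketch, kept in a comment so it is not executed by
+     * the harness itself. It assumes a hypothetical async helper
+     * `load_data()` that returns a Promise. promise_test chains each test
+     * onto tests.promise_tests, so promise tests run sequentially and the
+     * returned promise must settle before the test completes:
+     *
+     *   promise_test(function(t) {
+     *       return load_data().then(function(data) {
+     *           assert_equals(data.length, 3);
+     *       });
+     *   }, "load_data resolves with three items");
+     */
+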
+    function promise_rejects(test, expected, promise, description) {
+        return promise.then(test.unreached_func("Should have rejected: " + description)).catch(function(e) {
+            assert_throws(expected, function() { throw e }, description);
+        });
+    }
+
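+    /*
+     * Illustrative usage sketch (comment only). It assumes a hypothetical
+     * helper `parse_json("]")` that returns a Promise which rejects with a
+     * TypeError; promise_rejects asserts that the promise rejects with the
+     * expected error type:
+     *
+     *   promise_test(function(t) {
+     *       return promise_rejects(t, new TypeError(), parse_json("]"),
+     *                              "invalid JSON should reject");
+     *   }, "parse_json rejects on bad input");
+     */
+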
+    /**
+     * This constructor helper allows DOM events to be handled using Promises,
+     * which can make it a lot easier to test a very specific series of events,
+     * including ensuring that unexpected events are not fired at any point.
+     */
+    function EventWatcher(test, watchedNode, eventTypes)
+    {
+        if (typeof eventTypes == 'string') {
+            eventTypes = [eventTypes];
+        }
+
+        var waitingFor = null;
+
+        // This is null unless we are recording all events, in which case it
+        // will be an Array object.
+        var recordedEvents = null;
+
+        var eventHandler = test.step_func(function(evt) {
+            assert_true(!!waitingFor,
+                        'Not expecting event, but got ' + evt.type + ' event');
+            assert_equals(evt.type, waitingFor.types[0],
+                          'Expected ' + waitingFor.types[0] + ' event, but got ' +
+                          evt.type + ' event instead');
+
+            if (Array.isArray(recordedEvents)) {
+                recordedEvents.push(evt);
+            }
+
+            if (waitingFor.types.length > 1) {
+                // Pop first event from array
+                waitingFor.types.shift();
+                return;
+            }
+            // We need to null out waitingFor before calling the resolve function
+            // since the Promise's resolve handlers may call wait_for() which will
+            // need to set waitingFor.
+            var resolveFunc = waitingFor.resolve;
+            waitingFor = null;
+            // Likewise, we should reset the state of recordedEvents.
+            var result = recordedEvents || evt;
+            recordedEvents = null;
+            resolveFunc(result);
+        });
+
+        for (var i = 0; i < eventTypes.length; i++) {
+            watchedNode.addEventListener(eventTypes[i], eventHandler, false);
+        }
+
+        /**
+         * Returns a Promise that will resolve after the specified event or
+         * series of events has occurred.
+         *
+         * @param options An optional options object. If the 'record' property
+         *                on this object has the value 'all', when the Promise
+         *                returned by this function is resolved, *all* Event
+         *                objects that were waited for will be returned as an
+         *                array.
+         *
+         * For example,
+         *
+         * ```js
+         * const watcher = new EventWatcher(t, div, [ 'animationstart',
+         *                                            'animationiteration',
+         *                                            'animationend' ]);
+         * return watcher.wait_for([ 'animationstart', 'animationend' ],
+         *                         { record: 'all' }).then(evts => {
+         *   assert_equals(evts[0].elapsedTime, 0.0);
+         *   assert_equals(evts[1].elapsedTime, 2.0);
+         * });
+         * ```
+         */
+        this.wait_for = function(types, options) {
+            if (waitingFor) {
+                return Promise.reject('Already waiting for an event or events');
+            }
+            if (typeof types == 'string') {
+                types = [types];
+            }
+            if (options && options.record && options.record === 'all') {
+                recordedEvents = [];
+            }
+            return new Promise(function(resolve, reject) {
+                waitingFor = {
+                    types: types,
+                    resolve: resolve,
+                    reject: reject
+                };
+            });
+        };
+
+        function stop_watching() {
+            for (var i = 0; i < eventTypes.length; i++) {
+                watchedNode.removeEventListener(eventTypes[i], eventHandler, false);
+            }
+        }
+
+        test._add_cleanup(stop_watching);
+
+        return this;
+    }
+    expose(EventWatcher, 'EventWatcher');
+
     function setup(func_or_properties, maybe_properties)
     {
         var func = null;
@@ -443,16 +675,22 @@ policies and contribution forms [3].
         if (arguments.length === 2) {
             func = func_or_properties;
             properties = maybe_properties;
-        } else if (func_or_properties instanceof Function){
+        } else if (func_or_properties instanceof Function) {
             func = func_or_properties;
         } else {
             properties = func_or_properties;
         }
         tests.setup(func, properties);
-        output.setup(properties);
+        test_environment.on_new_harness_properties(properties);
     }
 
     function done() {
+        if (tests.tests.length === 0) {
+            tests.set_file_is_test();
+        }
+        if (tests.file_is_test) {
+            tests.tests[0].done();
+        }
         tests.end_wait();
     }
 
@@ -471,15 +709,26 @@ policies and contribution forms [3].
 
     function on_event(object, event, callback)
     {
-      object.addEventListener(event, callback, false);
+        object.addEventListener(event, callback, false);
+    }
+
+    function step_timeout(f, t) {
+        var outer_this = this;
+        var args = Array.prototype.slice.call(arguments, 2);
+        return setTimeout(function() {
+            f.apply(outer_this, args);
+        }, t * tests.timeout_multiplier);
     }
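+
+    /*
+     * Illustrative usage sketch (comment only): step_timeout scales the
+     * delay by tests.timeout_multiplier, so it is preferable to a bare
+     * setTimeout when waiting inside tests. For example, with an async_test:
+     *
+     *   var t = async_test("value settles after a delay");
+     *   step_timeout(t.step_func_done(function() {
+     *       assert_true(true, "delayed step ran");
+     *   }), 100);
+     */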
 
     expose(test, 'test');
     expose(async_test, 'async_test');
+    expose(promise_test, 'promise_test');
+    expose(promise_rejects, 'promise_rejects');
     expose(generate_tests, 'generate_tests');
     expose(setup, 'setup');
     expose(done, 'done');
     expose(on_event, 'on_event');
+    expose(step_timeout, 'step_timeout');
 
     /*
      * Return a string truncated to the given length, with ... added at the end
@@ -493,69 +742,100 @@ policies and contribution forms [3].
         return s;
     }
 
+    /*
+     * Return true if object is probably a Node object.
+     */
+    function is_node(object)
+    {
+        // I use duck-typing instead of instanceof, because
+        // instanceof doesn't work if the node is from another window (like an
+        // iframe's contentWindow):
+        // http://www.w3.org/Bugs/Public/show_bug.cgi?id=12295
+        try {
+            var has_node_properties = ("nodeType" in object &&
+                                       "nodeName" in object &&
+                                       "nodeValue" in object &&
+                                       "childNodes" in object);
+        } catch (e) {
+            // We're probably cross-origin, which means we aren't a node
+            return false;
+        }
+
+        if (has_node_properties) {
+            try {
+                object.nodeType;
+            } catch (e) {
+                // The object is probably Node.prototype or another prototype
+                // object that inherits from it, and not a Node instance.
+                return false;
+            }
+            return true;
+        }
+        return false;
+    }
+
+    var replacements = {
+        "0": "0",
+        "1": "x01",
+        "2": "x02",
+        "3": "x03",
+        "4": "x04",
+        "5": "x05",
+        "6": "x06",
+        "7": "x07",
+        "8": "b",
+        "9": "t",
+        "10": "n",
+        "11": "v",
+        "12": "f",
+        "13": "r",
+        "14": "x0e",
+        "15": "x0f",
+        "16": "x10",
+        "17": "x11",
+        "18": "x12",
+        "19": "x13",
+        "20": "x14",
+        "21": "x15",
+        "22": "x16",
+        "23": "x17",
+        "24": "x18",
+        "25": "x19",
+        "26": "x1a",
+        "27": "x1b",
+        "28": "x1c",
+        "29": "x1d",
+        "30": "x1e",
+        "31": "x1f",
+        "0xfffd": "ufffd",
+        "0xfffe": "ufffe",
+        "0xffff": "uffff",
+    };
+
     /*
      * Convert a value to a nice, human-readable string
      */
     function format_value(val, seen)
     {
-	if (!seen) {
-	    seen = [];
+        if (!seen) {
+            seen = [];
         }
-        if (typeof val === "object" && val !== null)
-        {
-            if (seen.indexOf(val) >= 0)
-            {
+        if (typeof val === "object" && val !== null) {
+            if (seen.indexOf(val) >= 0) {
                 return "[...]";
             }
-	    seen.push(val);
+            seen.push(val);
         }
-        if (Array.isArray(val))
-        {
-            return "[" + val.map(function(x) {return format_value(x, seen)}).join(", ") + "]";
+        if (Array.isArray(val)) {
+            return "[" + val.map(function(x) {return format_value(x, seen);}).join(", ") + "]";
         }
 
-        switch (typeof val)
-        {
+        switch (typeof val) {
         case "string":
             val = val.replace("\\", "\\\\");
-            for (var i = 0; i < 32; i++)
-            {
-                var replace = "\\";
-                switch (i) {
-                case 0: replace += "0"; break;
-                case 1: replace += "x01"; break;
-                case 2: replace += "x02"; break;
-                case 3: replace += "x03"; break;
-                case 4: replace += "x04"; break;
-                case 5: replace += "x05"; break;
-                case 6: replace += "x06"; break;
-                case 7: replace += "x07"; break;
-                case 8: replace += "b"; break;
-                case 9: replace += "t"; break;
-                case 10: replace += "n"; break;
-                case 11: replace += "v"; break;
-                case 12: replace += "f"; break;
-                case 13: replace += "r"; break;
-                case 14: replace += "x0e"; break;
-                case 15: replace += "x0f"; break;
-                case 16: replace += "x10"; break;
-                case 17: replace += "x11"; break;
-                case 18: replace += "x12"; break;
-                case 19: replace += "x13"; break;
-                case 20: replace += "x14"; break;
-                case 21: replace += "x15"; break;
-                case 22: replace += "x16"; break;
-                case 23: replace += "x17"; break;
-                case 24: replace += "x18"; break;
-                case 25: replace += "x19"; break;
-                case 26: replace += "x1a"; break;
-                case 27: replace += "x1b"; break;
-                case 28: replace += "x1c"; break;
-                case 29: replace += "x1d"; break;
-                case 30: replace += "x1e"; break;
-                case 31: replace += "x1f"; break;
-                }
-                val = val.replace(RegExp(String.fromCharCode(i), "g"), replace);
+            for (var p in replacements) {
+                var replace = "\\" + replacements[p];
+                val = val.replace(RegExp(String.fromCharCode(p), "g"), replace);
             }
             return '"' + val.replace(/"/g, '\\"') + '"';
         case "boolean":
@@ -564,36 +844,25 @@ policies and contribution forms [3].
         case "number":
             // In JavaScript, -0 === 0 and String(-0) == "0", so we have to
             // special-case.
-            if (val === -0 && 1/val === -Infinity)
-            {
+            if (val === -0 && 1/val === -Infinity) {
                 return "-0";
             }
             return String(val);
         case "object":
-            if (val === null)
-            {
+            if (val === null) {
                 return "null";
             }
 
             // Special-case Node objects, since those come up a lot in my tests.  I
-            // ignore namespaces.  I use duck-typing instead of instanceof, because
-            // instanceof doesn't work if the node is from another window (like an
-            // iframe's contentWindow):
-            // http://www.w3.org/Bugs/Public/show_bug.cgi?id=12295
-            if ("nodeType" in val
-            && "nodeName" in val
-            && "nodeValue" in val
-            && "childNodes" in val)
-            {
-                switch (val.nodeType)
-                {
+            // ignore namespaces.
+            if (is_node(val)) {
+                switch (val.nodeType) {
                 case Node.ELEMENT_NODE:
-                    var ret = "<" + val.tagName.toLowerCase();
-                    for (var i = 0; i < val.attributes.length; i++)
-                    {
+                    var ret = "<" + val.localName;
+                    for (var i = 0; i < val.attributes.length; i++) {
                         ret += " " + val.attributes[i].name + '="' + val.attributes[i].value + '"';
                     }
-                    ret += ">" + val.innerHTML + "";
+                    ret += ">" + val.innerHTML + "";
                     return "Element node " + truncate(ret, 60);
                 case Node.TEXT_NODE:
                     return 'Text node "' + truncate(val.data, 60) + '"';
@@ -612,9 +881,14 @@ policies and contribution forms [3].
                 }
             }
 
-            // Fall through to default
+        /* falls through */
         default:
-            return typeof val + ' "' + truncate(String(val), 60) + '"';
+            try {
+                return typeof val + ' "' + truncate(String(val), 1000) + '"';
+            } catch(e) {
+                return ("[stringifying object threw " + String(e) +
+                        " with type " + String(typeof e) + "]");
+            }
         }
     }
     expose(format_value, "format_value");
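+
+    /*
+     * Illustrative examples (comment only) of how format_value renders
+     * values in assertion messages:
+     *
+     *   format_value("a\nb");   // the string "a\nb", i.e. backslash-n
+     *                           // spelled out and wrapped in double quotes
+     *   format_value(-0);       // "-0"
+     *   format_value([1, "x"]); // [1, "x"]
+     */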
@@ -627,31 +901,26 @@ policies and contribution forms [3].
     {
         assert(actual === true, "assert_true", description,
                                 "expected true got ${actual}", {actual:actual});
-    };
+    }
     expose(assert_true, "assert_true");
 
     function assert_false(actual, description)
     {
         assert(actual === false, "assert_false", description,
                                  "expected false got ${actual}", {actual:actual});
-    };
+    }
     expose(assert_false, "assert_false");
 
     function same_value(x, y) {
-        if (y !== y)
-        {
+        if (y !== y) {
             //NaN case
             return x !== x;
         }
-        else if (x === 0 && y === 0) {
+        if (x === 0 && y === 0) {
             //Distinguish +0 and -0
             return 1/x === 1/y;
         }
-        else
-        {
-            //typical case
-            return x === y;
-        }
+        return x === y;
     }
 
     function assert_equals(actual, expected, description)
@@ -660,8 +929,7 @@ policies and contribution forms [3].
           * Test if two primitives are equal or two objects
           * are the same object
           */
-        if (typeof actual != typeof expected)
-        {
+        if (typeof actual != typeof expected) {
             assert(false, "assert_equals", description,
                           "expected (" + typeof expected + ") ${expected} but got (" + typeof actual + ") ${actual}",
                           {expected:expected, actual:actual});
@@ -670,7 +938,7 @@ policies and contribution forms [3].
         assert(same_value(actual, expected), "assert_equals", description,
                                              "expected ${expected} but got ${actual}",
                                              {expected:expected, actual:actual});
-    };
+    }
     expose(assert_equals, "assert_equals");
 
     function assert_not_equals(actual, expected, description)
@@ -682,7 +950,7 @@ policies and contribution forms [3].
         assert(!same_value(actual, expected), "assert_not_equals", description,
                                               "got disallowed value ${actual}",
                                               {actual:actual});
-    };
+    }
     expose(assert_not_equals, "assert_not_equals");
 
     function assert_in_array(actual, expected, description)
@@ -701,27 +969,21 @@ policies and contribution forms [3].
              stack.push(actual);
 
              var p;
-             for (p in actual)
-             {
+             for (p in actual) {
                  assert(expected.hasOwnProperty(p), "assert_object_equals", description,
                                                     "unexpected property ${p}", {p:p});
 
-                 if (typeof actual[p] === "object" && actual[p] !== null)
-                 {
-                     if (stack.indexOf(actual[p]) === -1)
-                     {
+                 if (typeof actual[p] === "object" && actual[p] !== null) {
+                     if (stack.indexOf(actual[p]) === -1) {
                          check_equal(actual[p], expected[p], stack);
                      }
-                 }
-                 else
-                 {
+                 } else {
                      assert(same_value(actual[p], expected[p]), "assert_object_equals", description,
                                                        "property ${p} expected ${expected} got ${actual}",
                                                        {p:p, expected:expected, actual:actual});
                  }
              }
-             for (p in expected)
-             {
+             for (p in expected) {
                  assert(actual.hasOwnProperty(p),
                         "assert_object_equals", description,
                         "expected property ${p} missing", {p:p});
@@ -729,7 +991,7 @@ policies and contribution forms [3].
              stack.pop();
          }
          check_equal(actual, expected, []);
-    };
+    }
     expose(assert_object_equals, "assert_object_equals");
 
     function assert_array_equals(actual, expected, description)
@@ -739,11 +1001,10 @@ policies and contribution forms [3].
                "lengths differ, expected ${expected} got ${actual}",
                {expected:expected.length, actual:actual.length});
 
-        for (var i=0; i < actual.length; i++)
-        {
+        for (var i = 0; i < actual.length; i++) {
             assert(actual.hasOwnProperty(i) === expected.hasOwnProperty(i),
                    "assert_array_equals", description,
-                   "property ${i}, property expected to be $expected but was $actual",
+                   "property ${i}, property expected to be ${expected} but was ${actual}",
                    {i:i, expected:expected.hasOwnProperty(i) ? "present" : "missing",
                    actual:actual.hasOwnProperty(i) ? "present" : "missing"});
             assert(same_value(expected[i], actual[i]),
@@ -754,6 +1015,34 @@ policies and contribution forms [3].
     }
     expose(assert_array_equals, "assert_array_equals");
 
+    function assert_array_approx_equals(actual, expected, epsilon, description)
+    {
+        /*
+         * Test if two primitive arrays are equal within +/- epsilon
+         */
+        assert(actual.length === expected.length,
+               "assert_array_approx_equals", description,
+               "lengths differ, expected ${expected} got ${actual}",
+               {expected:expected.length, actual:actual.length});
+
+        for (var i = 0; i < actual.length; i++) {
+            assert(actual.hasOwnProperty(i) === expected.hasOwnProperty(i),
+                   "assert_array_approx_equals", description,
+                   "property ${i}, property expected to be ${expected} but was ${actual}",
+                   {i:i, expected:expected.hasOwnProperty(i) ? "present" : "missing",
+                   actual:actual.hasOwnProperty(i) ? "present" : "missing"});
+            assert(typeof actual[i] === "number",
+                   "assert_array_approx_equals", description,
+                   "property ${i}, expected a number but got a ${type_actual}",
+                   {i:i, type_actual:typeof actual[i]});
+            assert(Math.abs(actual[i] - expected[i]) <= epsilon,
+                   "assert_array_approx_equals", description,
+                   "property ${i}, expected ${expected} +/- ${epsilon}, expected ${expected} but got ${actual}",
+                   {i:i, expected:expected[i], actual:actual[i]});
+        }
+    }
+    expose(assert_array_approx_equals, "assert_array_approx_equals");
+
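+    /*
+     * Illustrative usage sketch (comment only): every element of the actual
+     * array must be a number within +/- epsilon of the corresponding
+     * expected element:
+     *
+     *   test(function() {
+     *       assert_array_approx_equals([0.1001, 0.2], [0.1, 0.2], 0.001,
+     *                                  "values within tolerance");
+     *   }, "assert_array_approx_equals example");
+     */
+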
     function assert_approx_equals(actual, expected, epsilon, description)
     {
         /*
@@ -768,7 +1057,7 @@ policies and contribution forms [3].
                "assert_approx_equals", description,
                "expected ${expected} +/- ${epsilon} but got ${actual}",
                {expected:expected, actual:actual, epsilon:epsilon});
-    };
+    }
     expose(assert_approx_equals, "assert_approx_equals");
 
     function assert_less_than(actual, expected, description)
@@ -785,7 +1074,7 @@ policies and contribution forms [3].
                "assert_less_than", description,
                "expected a number less than ${expected} but got ${actual}",
                {expected:expected, actual:actual});
-    };
+    }
     expose(assert_less_than, "assert_less_than");
 
     function assert_greater_than(actual, expected, description)
@@ -802,9 +1091,27 @@ policies and contribution forms [3].
                "assert_greater_than", description,
                "expected a number greater than ${expected} but got ${actual}",
                {expected:expected, actual:actual});
-    };
+    }
     expose(assert_greater_than, "assert_greater_than");
 
+    function assert_between_exclusive(actual, lower, upper, description)
+    {
+        /*
+         * Test if a primitive number is between two others
+         */
+        assert(typeof actual === "number",
+               "assert_between_exclusive", description,
+               "expected a number but got a ${type_actual}",
+               {type_actual:typeof actual});
+
+        assert(actual > lower && actual < upper,
+               "assert_between_exclusive", description,
+               "expected a number greater than ${lower} " +
+               "and less than ${upper} but got ${actual}",
+               {lower:lower, upper:upper, actual:actual});
+    }
+    expose(assert_between_exclusive, "assert_between_exclusive");
+
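+    /*
+     * Illustrative usage sketch (comment only): the bounds are exclusive, so
+     * the value must be strictly greater than lower and strictly less than
+     * upper:
+     *
+     *   assert_between_exclusive(0.5, 0, 1, "ratio strictly inside (0, 1)");
+     */
+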
     function assert_less_than_equal(actual, expected, description)
     {
         /*
@@ -816,10 +1123,10 @@ policies and contribution forms [3].
                {type_actual:typeof actual});
 
         assert(actual <= expected,
-               "assert_less_than", description,
+               "assert_less_than_equal", description,
                "expected a number less than or equal to ${expected} but got ${actual}",
                {expected:expected, actual:actual});
-    };
+    }
     expose(assert_less_than_equal, "assert_less_than_equal");
 
     function assert_greater_than_equal(actual, expected, description)
@@ -836,9 +1143,27 @@ policies and contribution forms [3].
                "assert_greater_than_equal", description,
                "expected a number greater than or equal to ${expected} but got ${actual}",
                {expected:expected, actual:actual});
-    };
+    }
     expose(assert_greater_than_equal, "assert_greater_than_equal");
 
+    function assert_between_inclusive(actual, lower, upper, description)
+    {
+        /*
+         * Test if a primitive number is between two others or equal to either of them
+         */
+        assert(typeof actual === "number",
+               "assert_between_inclusive", description,
+               "expected a number but got a ${type_actual}",
+               {type_actual:typeof actual});
+
+        assert(actual >= lower && actual <= upper,
+               "assert_between_inclusive", description,
+               "expected a number greater than or equal to ${lower} " +
+               "and less than or equal to ${upper} but got ${actual}",
+               {lower:lower, upper:upper, actual:actual});
+    }
+    expose(assert_between_inclusive, "assert_between_inclusive");
+
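+    /*
+     * Illustrative usage sketch (comment only): unlike the exclusive
+     * variant, values equal to either bound pass:
+     *
+     *   assert_between_inclusive(1, 0, 1, "progress may reach 1.0");
+     */
+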
     function assert_regexp_match(actual, expected, description) {
         /*
          * Test if a string (actual) matches a regexp (expected)
@@ -873,13 +1198,13 @@ policies and contribution forms [3].
         assert(!object.hasOwnProperty(property_name),
                "assert_not_exists", description,
                "unexpected property ${p} found", {p:property_name});
-    };
+    }
     expose(assert_not_exists, "assert_not_exists");
 
     function _assert_inherits(name) {
         return function (object, property_name, description)
         {
-            assert(typeof object === "object",
+            assert(typeof object === "object" || typeof object === "function",
                    name, description,
                    "provided value is not an object");
 
@@ -912,33 +1237,26 @@ policies and contribution forms [3].
                     "assert_readonly", description,
                     "changing property ${p} succeeded",
                     {p:property_name});
-         }
-         finally
-         {
+         } finally {
              object[property_name] = initial_value;
          }
-    };
+    }
     expose(assert_readonly, "assert_readonly");
 
     function assert_throws(code, func, description)
     {
-        try
-        {
+        try {
             func.call(this);
             assert(false, "assert_throws", description,
                    "${func} did not throw", {func:func});
-        }
-        catch(e)
-        {
+        } catch (e) {
             if (e instanceof AssertionError) {
-                throw(e);
+                throw e;
             }
-            if (code === null)
-            {
-                return;
+            if (code === null) {
+                throw new AssertionError('Test bug: need to pass exception to assert_throws()');
             }
-            if (typeof code === "object")
-            {
+            if (typeof code === "object") {
                 assert(typeof e == "object" && "name" in e && e.name == code.name,
                        "assert_throws", description,
                        "${func} threw ${actual} (${actual_name}) expected ${expected} (${expected_name})",
@@ -956,6 +1274,7 @@ policies and contribution forms [3].
                 NO_MODIFICATION_ALLOWED_ERR: 'NoModificationAllowedError',
                 NOT_FOUND_ERR: 'NotFoundError',
                 NOT_SUPPORTED_ERR: 'NotSupportedError',
+                INUSE_ATTRIBUTE_ERR: 'InUseAttributeError',
                 INVALID_STATE_ERR: 'InvalidStateError',
                 SYNTAX_ERR: 'SyntaxError',
                 INVALID_MODIFICATION_ERR: 'InvalidModificationError',
@@ -982,6 +1301,7 @@ policies and contribution forms [3].
                 NoModificationAllowedError: 7,
                 NotFoundError: 8,
                 NotSupportedError: 9,
+                InUseAttributeError: 10,
                 InvalidStateError: 11,
                 SyntaxError: 12,
                 InvalidModificationError: 13,
@@ -997,24 +1317,29 @@ policies and contribution forms [3].
                 InvalidNodeTypeError: 24,
                 DataCloneError: 25,
 
+                EncodingError: 0,
+                NotReadableError: 0,
                 UnknownError: 0,
                 ConstraintError: 0,
                 DataError: 0,
                 TransactionInactiveError: 0,
                 ReadOnlyError: 0,
-                VersionError: 0
+                VersionError: 0,
+                OperationError: 0,
+                NotAllowedError: 0
             };
 
-            if (!(name in name_code_map))
-            {
+            if (!(name in name_code_map)) {
                 throw new AssertionError('Test bug: unrecognized DOMException code "' + code + '" passed to assert_throws()');
             }
 
             var required_props = { code: name_code_map[name] };
 
-            if (required_props.code === 0
-            || ("name" in e && e.name !== e.name.toUpperCase() && e.name !== "DOMException"))
-            {
+            if (required_props.code === 0 ||
+               (typeof e == "object" &&
+                "name" in e &&
+                e.name !== e.name.toUpperCase() &&
+                e.name !== "DOMException")) {
                 // New style exception: also test the name property.
                 required_props.name = name;
             }
@@ -1029,8 +1354,7 @@ policies and contribution forms [3].
                    "${func} threw ${e} with type ${type}, not an object",
                    {func:func, e:e, type:typeof e});
 
-            for (var prop in required_props)
-            {
+            for (var prop in required_props) {
                 assert(typeof e == "object" && prop in e && e[prop] == required_props[prop],
                        "assert_throws", description,
                        "${func} threw ${e} that is not a DOMException " + code + ": property ${prop} is equal to ${actual}, expected ${expected}",
@@ -1048,16 +1372,16 @@ policies and contribution forms [3].
 
     function assert_any(assert_func, actual, expected_array)
     {
-        var args = [].slice.call(arguments, 3)
-        var errors = []
+        var args = [].slice.call(arguments, 3);
+        var errors = [];
         var passed = false;
         forEach(expected_array,
                 function(expected)
                 {
                     try {
-                        assert_func.apply(this, [actual, expected].concat(args))
+                        assert_func.apply(this, [actual, expected].concat(args));
                         passed = true;
-                    } catch(e) {
+                    } catch (e) {
                         errors.push(e.message);
                     }
                 });
@@ -1069,19 +1393,34 @@ policies and contribution forms [3].
 
     function Test(name, properties)
     {
+        if (tests.file_is_test && tests.tests.length) {
+            throw new Error("Tried to create a test with file_is_test");
+        }
         this.name = name;
+
+        this.phase = tests.phase === tests.phases.ABORTED ?
+            this.phases.COMPLETE : this.phases.INITIAL;
+
         this.status = this.NOTRUN;
         this.timeout_id = null;
-        this.is_done = false;
+        this.index = null;
 
         this.properties = properties;
-        this.timeout_length = properties.timeout ? properties.timeout : settings.test_timeout;
+        var timeout = properties.timeout ? properties.timeout : settings.test_timeout;
+        if (timeout !== null) {
+            this.timeout_length = timeout * tests.timeout_multiplier;
+        } else {
+            this.timeout_length = null;
+        }
 
         this.message = null;
+        this.stack = null;
 
-        var this_obj = this;
         this.steps = [];
 
+        this.cleanup_callbacks = [];
+        this._user_defined_cleanup_count = 0;
+
         tests.push(this);
     }
 
@@ -1094,30 +1433,43 @@ policies and contribution forms [3].
 
     Test.prototype = merge({}, Test.statuses);
 
+    Test.prototype.phases = {
+        INITIAL:0,
+        STARTED:1,
+        HAS_RESULT:2,
+        COMPLETE:3
+    };
+
     Test.prototype.structured_clone = function()
     {
-        if(!this._structured_clone)
-        {
+        if (!this._structured_clone) {
             var msg = this.message;
             msg = msg ? String(msg) : msg;
             this._structured_clone = merge({
                 name:String(this.name),
-                status:this.status,
-                message:msg
+                properties:merge({}, this.properties),
+                phases:merge({}, this.phases)
             }, Test.statuses);
         }
+        this._structured_clone.status = this.status;
+        this._structured_clone.message = this.message;
+        this._structured_clone.stack = this.stack;
+        this._structured_clone.index = this.index;
+        this._structured_clone.phase = this.phase;
         return this._structured_clone;
     };
 
     Test.prototype.step = function(func, this_obj)
     {
-        //In case the test has already failed
-        if (this.status !== this.NOTRUN)
-        {
-          return;
+        if (this.phase > this.phases.STARTED) {
+            return;
         }
+        this.phase = this.phases.STARTED;
+        //If we don't get a result before the harness times out, that will be a test timeout
+        this.set_status(this.TIMEOUT, "Test timed out");
 
         tests.started = true;
+        tests.notify_test_state(this);
 
         if (this.timeout_id === null) {
             this.set_timeout();
@@ -1125,36 +1477,22 @@ policies and contribution forms [3].
 
         this.steps.push(func);
 
-        if (arguments.length === 1)
-        {
+        if (arguments.length === 1) {
             this_obj = this;
         }
 
-        try
-        {
+        try {
             return func.apply(this_obj, Array.prototype.slice.call(arguments, 2));
-        }
-        catch(e)
-        {
-            //This can happen if something called synchronously invoked another
-            //step
-            if (this.status !== this.NOTRUN)
-            {
+        } catch (e) {
+            if (this.phase >= this.phases.HAS_RESULT) {
                 return;
             }
-            this.status = this.FAIL;
-            this.message = (typeof e === "object" && e !== null) ? e.message : e;
-            if (typeof e.stack != "undefined" && typeof e.message == "string") {
-                //Try to make it more informative for some exceptions, at least
-                //in Gecko and WebKit.  This results in a stack dump instead of
-                //just errors like "Cannot read property 'parentNode' of null"
-                //or "root is null".  Makes it a lot longer, of course.
-                this.message += "(stack: " + e.stack + ")";
-            }
+            var message = String((typeof e === "object" && e !== null) ? e.message : e);
+            var stack = e.stack ? e.stack : null;
+
+            this.set_status(this.FAIL, message, stack);
+            this.phase = this.phases.HAS_RESULT;
             this.done();
-            if (debug && e.constructor !== AssertionError) {
-                throw e;
-            }
         }
     };
 
@@ -1162,14 +1500,13 @@ policies and contribution forms [3].
     {
         var test_this = this;
 
-        if (arguments.length === 1)
-        {
+        if (arguments.length === 1) {
             this_obj = test_this;
         }
 
         return function()
         {
-            test_this.step.apply(test_this, [func, this_obj].concat(
+            return test_this.step.apply(test_this, [func, this_obj].concat(
                 Array.prototype.slice.call(arguments)));
         };
     };
@@ -1178,50 +1515,272 @@ policies and contribution forms [3].
     {
         var test_this = this;
 
-        if (arguments.length === 1)
-        {
+        if (arguments.length === 1) {
             this_obj = test_this;
         }
 
         return function()
         {
-            test_this.step.apply(test_this, [func, this_obj].concat(
-                Array.prototype.slice.call(arguments)));
+            if (func) {
+                test_this.step.apply(test_this, [func, this_obj].concat(
+                    Array.prototype.slice.call(arguments)));
+            }
             test_this.done();
         };
     };
 
+    Test.prototype.unreached_func = function(description)
+    {
+        return this.step_func(function() {
+            assert_unreached(description);
+        });
+    };
+
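+    /*
+     * Illustrative usage sketch (comment only): unreached_func returns a
+     * step function that fails the test if it is ever called, which suits
+     * "this event must not fire" checks. Here `target` is a hypothetical
+     * EventTarget from the test document:
+     *
+     *   async_test(function(t) {
+     *       target.addEventListener("error", t.unreached_func("error event"));
+     *       target.addEventListener("load", t.step_func_done());
+     *   }, "load fires without an error");
+     */
+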
+    Test.prototype.step_timeout = function(f, timeout) {
+        var test_this = this;
+        var args = Array.prototype.slice.call(arguments, 2);
+        return setTimeout(this.step_func(function() {
+            return f.apply(test_this, args);
+        }), timeout * tests.timeout_multiplier);
+    };
+
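+    /*
+     * Illustrative usage sketch (comment only): unlike the global
+     * step_timeout, this method wraps the callback in the test's own
+     * step_func, so assertions inside it are attributed to this test:
+     *
+     *   async_test(function(t) {
+     *       t.step_timeout(function() {
+     *           assert_true(true, "ran after a scaled 50ms delay");
+     *           t.done();
+     *       }, 50);
+     *   }, "Test.step_timeout example");
+     */
+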
+    /*
+     * Private method for registering cleanup functions. `testharness.js`
+     * internals should use this method instead of the public `add_cleanup`
+     * method in order to hide implementation details from the harness status
+     * message in the case of errors.
+     */
+    Test.prototype._add_cleanup = function(callback) {
+        this.cleanup_callbacks.push(callback);
+    };
+
+    /*
+     * Schedule a function to be run after the test result is known, regardless
+     * of passing or failing state. The behavior of this function will not
+     * influence the result of the test, but if an exception is thrown, the
+     * test harness will report an error.
+     */
+    Test.prototype.add_cleanup = function(callback) {
+        this._user_defined_cleanup_count += 1;
+        this._add_cleanup(callback);
+    };
+
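+    /*
+     * Illustrative usage sketch (comment only), assuming a hypothetical
+     * global `state` object mutated by the test: cleanup callbacks run once
+     * the result is known, whether the test passed or failed:
+     *
+     *   test(function(t) {
+     *       state.flag = true;
+     *       t.add_cleanup(function() { state.flag = false; });
+     *       assert_true(state.flag);
+     *   }, "add_cleanup example");
+     */
+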
+    Test.prototype.force_timeout = function() {
+        this.set_status(this.TIMEOUT);
+        this.phase = this.phases.HAS_RESULT;
+    };
+
     Test.prototype.set_timeout = function()
     {
-        var this_obj = this;
-        this.timeout_id = setTimeout(function()
-                                     {
-                                         this_obj.timeout();
-                                     }, this.timeout_length);
+        if (this.timeout_length !== null) {
+            var this_obj = this;
+            this.timeout_id = setTimeout(function()
+                                         {
+                                             this_obj.timeout();
+                                         }, this.timeout_length);
+        }
+    };
+
+    Test.prototype.set_status = function(status, message, stack)
+    {
+        this.status = status;
+        this.message = message;
+        this.stack = stack ? stack : null;
     };
 
     Test.prototype.timeout = function()
     {
-        this.status = this.TIMEOUT;
         this.timeout_id = null;
-        this.message = "Test timed out";
+        this.set_status(this.TIMEOUT, "Test timed out");
+        this.phase = this.phases.HAS_RESULT;
         this.done();
     };
 
     Test.prototype.done = function()
     {
-        if (this.is_done) {
+        if (this.phase == this.phases.COMPLETE) {
             return;
         }
-        clearTimeout(this.timeout_id);
-        if (this.status === this.NOTRUN)
-        {
-            this.status = this.PASS;
+
+        if (this.phase <= this.phases.STARTED) {
+            this.set_status(this.PASS, null);
         }
-        this.is_done = true;
+
+        this.phase = this.phases.COMPLETE;
+
+        clearTimeout(this.timeout_id);
         tests.result(this);
+        this.cleanup();
+    };
+
+    /*
+     * Invoke all specified cleanup functions. If one or more produce an error,
+     * the context is in an unpredictable state, so all further testing should
+     * be cancelled.
+     */
+    Test.prototype.cleanup = function() {
+        var error_count = 0;
+        var total;
+
+        forEach(this.cleanup_callbacks,
+                function(cleanup_callback) {
+                    try {
+                        cleanup_callback();
+                    } catch (e) {
+                        // Set test phase immediately so that tests declared
+                        // within subsequent cleanup functions are not run.
+                        tests.phase = tests.phases.ABORTED;
+                        error_count += 1;
+                    }
+                });
+
+        if (error_count > 0) {
+            total = this._user_defined_cleanup_count;
+            tests.status.status = tests.status.ERROR;
+            tests.status.message = "Test named '" + this.name +
+                "' specified " + total + " 'cleanup' function" +
+                (total > 1 ? "s" : "") + ", and " + error_count + " failed.";
+            tests.status.stack = null;
+        }
     };
 
+    /*
+     * A RemoteTest object mirrors a Test object on a remote worker. The
+     * associated RemoteWorker updates the RemoteTest object in response to
+     * received events. In turn, the RemoteTest object replicates these events
+     * on the local document. This allows listeners (test result reporting,
+     * etc.) to transparently handle local and remote events.
+     */
+    function RemoteTest(clone) {
+        var this_obj = this;
+        Object.keys(clone).forEach(
+                function(key) {
+                    this_obj[key] = clone[key];
+                });
+        this.index = null;
+        this.phase = this.phases.INITIAL;
+        this.update_state_from(clone);
+        tests.push(this);
+    }
+
+    RemoteTest.prototype.structured_clone = function() {
+        var clone = {};
+        Object.keys(this).forEach(
+                (function(key) {
+                    var value = this[key];
+
+                    if (typeof value === "object" && value !== null) {
+                        clone[key] = merge({}, value);
+                    } else {
+                        clone[key] = value;
+                    }
+                }).bind(this));
+        clone.phases = merge({}, this.phases);
+        return clone;
+    };
+
+    RemoteTest.prototype.cleanup = function() {};
+    RemoteTest.prototype.phases = Test.prototype.phases;
+    RemoteTest.prototype.update_state_from = function(clone) {
+        this.status = clone.status;
+        this.message = clone.message;
+        this.stack = clone.stack;
+        if (this.phase === this.phases.INITIAL) {
+            this.phase = this.phases.STARTED;
+        }
+    };
+    RemoteTest.prototype.done = function() {
+        this.phase = this.phases.COMPLETE;
+    };
+
+    /*
+     * A RemoteContext listens for test events from a remote test context, such
+     * as another window or a worker. These events are then used to construct
+     * and maintain RemoteTest objects that mirror the tests running in the
+     * remote context.
+     *
+     * An optional third parameter can be used as a predicate to filter incoming
+     * MessageEvents.
+     */
+    function RemoteContext(remote, message_target, message_filter) {
+        this.running = true;
+        this.tests = new Array();
+
+        var this_obj = this;
+        remote.onerror = function(error) { this_obj.remote_error(error); };
+
+        // Keeping a reference to the remote object and the message handler until
+        // remote_done() is seen prevents the remote object and its message channel
+        // from going away before all the messages are dispatched.
+        this.remote = remote;
+        this.message_target = message_target;
+        this.message_handler = function(message) {
+            var passesFilter = !message_filter || message_filter(message);
+            if (this_obj.running && message.data && passesFilter &&
+                (message.data.type in this_obj.message_handlers)) {
+                this_obj.message_handlers[message.data.type].call(this_obj, message.data);
+            }
+        };
+
+        this.message_target.addEventListener("message", this.message_handler);
+    }
+
+    RemoteContext.prototype.remote_error = function(error) {
+        var message = error.message || String(error);
+        var filename = (error.filename ? " " + error.filename: "");
+        // FIXME: Display remote error states separately from main document
+        // error state.
+        this.remote_done({
+            status: {
+                status: tests.status.ERROR,
+                message: "Error in remote" + filename + ": " + message,
+                stack: error.stack
+            }
+        });
+
+        if (error.preventDefault) {
+            error.preventDefault();
+        }
+    };
+
+    RemoteContext.prototype.test_state = function(data) {
+        var remote_test = this.tests[data.test.index];
+        if (!remote_test) {
+            remote_test = new RemoteTest(data.test);
+            this.tests[data.test.index] = remote_test;
+        }
+        remote_test.update_state_from(data.test);
+        tests.notify_test_state(remote_test);
+    };
+
+    RemoteContext.prototype.test_done = function(data) {
+        var remote_test = this.tests[data.test.index];
+        remote_test.update_state_from(data.test);
+        remote_test.done();
+        tests.result(remote_test);
+    };
+
+    RemoteContext.prototype.remote_done = function(data) {
+        if (tests.status.status === null &&
+            data.status.status !== data.status.OK) {
+            tests.status.status = data.status.status;
+            tests.status.message = data.status.message;
+            tests.status.stack = data.status.stack;
+        }
+        this.message_target.removeEventListener("message", this.message_handler);
+        this.running = false;
+        this.remote = null;
+        this.message_target = null;
+        if (tests.all_done()) {
+            tests.complete();
+        }
+    };
+
+    RemoteContext.prototype.message_handlers = {
+        test_state: RemoteContext.prototype.test_state,
+        result: RemoteContext.prototype.test_done,
+        complete: RemoteContext.prototype.remote_done
+    };
 
     /*
      * Harness
@@ -1231,6 +1790,7 @@ policies and contribution forms [3].
     {
         this.status = null;
         this.message = null;
+        this.stack = null;
     }
 
     TestsStatus.statuses = {
@@ -1243,13 +1803,13 @@ policies and contribution forms [3].
 
     TestsStatus.prototype.structured_clone = function()
     {
-        if(!this._structured_clone)
-        {
+        if (!this._structured_clone) {
             var msg = this.message;
             msg = msg ? String(msg) : msg;
             this._structured_clone = merge({
                 status:this.status,
-                message:msg
+                message:msg,
+                stack:this.stack
             }, TestsStatus.statuses);
         }
         return this._structured_clone;
@@ -1265,98 +1825,101 @@ policies and contribution forms [3].
             SETUP:1,
             HAVE_TESTS:2,
             HAVE_RESULTS:3,
-            COMPLETE:4
+            COMPLETE:4,
+            ABORTED:5
         };
         this.phase = this.phases.INITIAL;
 
         this.properties = {};
 
-        //All tests can't be done until the load event fires
-        this.all_loaded = false;
         this.wait_for_finish = false;
         this.processing_callbacks = false;
 
         this.allow_uncaught_exception = false;
 
-        this.timeout_length = settings.timeout;
+        this.file_is_test = false;
+
+        this.timeout_multiplier = 1;
+        this.timeout_length = test_environment.test_timeout();
         this.timeout_id = null;
 
         this.start_callbacks = [];
+        this.test_state_callbacks = [];
         this.test_done_callbacks = [];
         this.all_done_callbacks = [];
 
+        this.pending_remotes = [];
+
         this.status = new TestsStatus();
 
         var this_obj = this;
 
-        on_event(window, "load",
-                 function()
-                 {
-                     this_obj.all_loaded = true;
-                     if (this_obj.all_done())
-                     {
-                         this_obj.complete();
-                     }
-                 });
+        test_environment.add_on_loaded_callback(function() {
+            if (this_obj.all_done()) {
+                this_obj.complete();
+            }
+        });
 
         this.set_timeout();
     }
 
     Tests.prototype.setup = function(func, properties)
     {
-        if (this.phase >= this.phases.HAVE_RESULTS)
-        {
+        if (this.phase >= this.phases.HAVE_RESULTS) {
             return;
         }
-        if (this.phase < this.phases.SETUP)
-        {
+
+        if (this.phase < this.phases.SETUP) {
             this.phase = this.phases.SETUP;
         }
 
         this.properties = properties;
 
-        for (var p in properties)
-        {
-            if (properties.hasOwnProperty(p))
-            {
-                var value = properties[p]
-                if (p == "timeout")
-                {
-                    this.timeout_length = value;
-                }
-                else if (p == "allow_uncaught_exception") {
+        for (var p in properties) {
+            if (properties.hasOwnProperty(p)) {
+                var value = properties[p];
+                if (p == "allow_uncaught_exception") {
                     this.allow_uncaught_exception = value;
-                }
-                else if (p == "explicit_done" && value)
-                {
+                } else if (p == "explicit_done" && value) {
                     this.wait_for_finish = true;
-                }
-                else if (p == "explicit_timeout" && value) {
+                } else if (p == "explicit_timeout" && value) {
                     this.timeout_length = null;
+                    if (this.timeout_id)
+                    {
+                        clearTimeout(this.timeout_id);
+                    }
+                } else if (p == "timeout_multiplier") {
+                    this.timeout_multiplier = value;
                 }
             }
         }
 
-        if (func)
-        {
-            try
-            {
+        if (func) {
+            try {
                 func();
-            } catch(e)
-            {
+            } catch (e) {
                 this.status.status = this.status.ERROR;
-                this.status.message = e;
-            };
+                this.status.message = String(e);
+                this.status.stack = e.stack ? e.stack : null;
+            }
         }
         this.set_timeout();
     };
 
-    Tests.prototype.set_timeout = function()
-    {
+    Tests.prototype.set_file_is_test = function() {
+        if (this.tests.length > 0) {
+            throw new Error("Tried to set file as test after creating a test");
+        }
+        this.wait_for_finish = true;
+        this.file_is_test = true;
+        // Create the test, which will add it to the list of tests
+        async_test();
+    };
+
+    Tests.prototype.set_timeout = function() {
         var this_obj = this;
         clearTimeout(this.timeout_id);
-        if (this.timeout_length !== null)
-        {
+        if (this.timeout_length !== null) {
             this.timeout_id = setTimeout(function() {
                                              this_obj.timeout();
                                          }, this.timeout_length);
@@ -1364,7 +1927,9 @@ policies and contribution forms [3].
     };
 
     Tests.prototype.timeout = function() {
-        this.status.status = this.status.TIMEOUT;
+        if (this.status.status === null) {
+            this.status.status = this.status.TIMEOUT;
+        }
         this.complete();
     };
 
@@ -1382,12 +1947,24 @@ policies and contribution forms [3].
             this.start();
         }
         this.num_pending++;
-        this.tests.push(test);
+        test.index = this.tests.push(test);
+        this.notify_test_state(test);
+    };
+
+    Tests.prototype.notify_test_state = function(test) {
+        var this_obj = this;
+        forEach(this.test_state_callbacks,
+                function(callback) {
+                    callback(test, this_obj);
+                });
     };
 
     Tests.prototype.all_done = function() {
-        return (this.all_loaded && this.num_pending === 0 &&
-                !this.wait_for_finish && !this.processing_callbacks);
+        return this.phase === this.phases.ABORTED ||
+            (this.tests.length > 0 && test_environment.all_loaded &&
+                this.num_pending === 0 && !this.wait_for_finish &&
+                !this.processing_callbacks &&
+                !this.pending_remotes.some(function(w) { return w.running; }));
     };
 
     Tests.prototype.start = function() {
@@ -1402,37 +1979,11 @@ policies and contribution forms [3].
                  {
                      callback(this_obj.properties);
                  });
-        forEach_windows(
-                function(w, is_same_origin)
-                {
-                    if(is_same_origin && w.start_callback)
-                    {
-                        try
-                        {
-                            w.start_callback(this_obj.properties);
-                        }
-                        catch(e)
-                        {
-                            if (debug)
-                            {
-                                throw(e);
-                            }
-                        }
-                    }
-                    if (supports_post_message(w) && w !== self)
-                    {
-                        w.postMessage({
-                            type: "start",
-                            properties: this_obj.properties
-                        }, "*");
-                    }
-                });
     };
 
     Tests.prototype.result = function(test)
     {
-        if (this.phase > this.phases.HAVE_RESULTS)
-        {
+        if (this.phase > this.phases.HAVE_RESULTS) {
             return;
         }
         this.phase = this.phases.HAVE_RESULTS;
@@ -1448,34 +1999,8 @@ policies and contribution forms [3].
                 {
                     callback(test, this_obj);
                 });
-
-        forEach_windows(
-                function(w, is_same_origin)
-                {
-                    if(is_same_origin && w.result_callback)
-                    {
-                        try
-                        {
-                            w.result_callback(test);
-                        }
-                        catch(e)
-                        {
-                            if(debug) {
-                                throw e;
-                            }
-                        }
-                    }
-                    if (supports_post_message(w) && w !== self)
-                    {
-                        w.postMessage({
-                            type: "result",
-                            test: test.structured_clone()
-                        }, "*");
-                    }
-                });
         this.processing_callbacks = false;
-        if (this_obj.all_done())
-        {
+        if (this_obj.all_done()) {
             this_obj.complete();
         }
     };
@@ -1489,27 +2014,56 @@ policies and contribution forms [3].
         this.tests.forEach(
             function(x)
             {
-                if(x.status === x.NOTRUN)
-                {
+                if (x.phase < x.phases.COMPLETE) {
                     this_obj.notify_result(x);
+                    x.cleanup();
+                    x.phase = x.phases.COMPLETE;
                 }
             }
         );
         this.notify_complete();
     };
 
-    Tests.prototype.notify_complete = function()
-    {
-        clearTimeout(this.timeout_id);
+    /*
+     * Determine if any tests share the same `name` property. Return an array
+     * containing the names of any such duplicates.
+     */
+    Tests.prototype.find_duplicates = function() {
+        var names = Object.create(null);
+        var duplicates = [];
+
+        forEach (this.tests,
+                 function(test)
+                 {
+                     if (test.name in names && duplicates.indexOf(test.name) === -1) {
+                        duplicates.push(test.name);
+                     }
+                     names[test.name] = true;
+                 });
+
+        return duplicates;
+    };
+
+    Tests.prototype.notify_complete = function() {
         var this_obj = this;
-        var tests = map(this_obj.tests,
-                        function(test)
-                        {
-                            return test.structured_clone();
-                        });
-        if (this.status.status === null)
-        {
-            this.status.status = this.status.OK;
+        var duplicates;
+
+        if (this.status.status === null) {
+            duplicates = this.find_duplicates();
+
+            // Test names are presumed to be unique within test files--this
+            // allows consumers to use them for identification purposes.
+            // Duplicated names violate this expectation and should therefore
+            // be reported as an error.
+            if (duplicates.length) {
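+                // Produces a message like: 1 duplicate test name: "foo"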
+                this.status.status = this.status.ERROR;
+                this.status.message =
+                   duplicates.length + ' duplicate test name' +
+                   (duplicates.length > 1 ? 's' : '') + ': "' +
+                   duplicates.join('", "') + '"';
+            } else {
+                this.status.status = this.status.OK;
+            }
         }
 
         forEach (this.all_done_callbacks,
@@ -1517,49 +2071,89 @@ policies and contribution forms [3].
                  {
                      callback(this_obj.tests, this_obj.status);
                  });
+    };
 
-        forEach_windows(
-                function(w, is_same_origin)
-                {
-                    if(is_same_origin && w.completion_callback)
-                    {
-                        try
-                        {
-                            w.completion_callback(this_obj.tests, this_obj.status);
-                        }
-                        catch(e)
-                        {
-                            if (debug)
-                            {
-                                throw e;
-                            }
-                        }
-                    }
-                    if (supports_post_message(w) && w !== self)
-                    {
-                        w.postMessage({
-                            type: "complete",
-                            tests: tests,
-                            status: this_obj.status.structured_clone()
-                        }, "*");
-                    }
-                });
+    /*
+     * Constructs a RemoteContext that tracks tests from a specific worker.
+     */
+    Tests.prototype.create_remote_worker = function(worker) {
+        var message_port;
+
+        if (is_service_worker(worker)) {
+            // Microsoft Edge's implementation of ServiceWorker doesn't support MessagePort yet.
+            // Feature detection isn't a straightforward option here; it's only possible in the
+            // worker's script context.
+            var isMicrosoftEdgeBrowser = navigator.userAgent.includes("Edge");
+            if (window.MessageChannel && !isMicrosoftEdgeBrowser) {
+                // The ServiceWorker's implicit MessagePort is currently not
+                // reliably accessible from the ServiceWorkerGlobalScope due to
+                // Blink setting MessageEvent.source to null for messages sent
+                // via ServiceWorker.postMessage(). Until that's resolved,
+                // create an explicit MessageChannel and pass one end to the
+                // worker.
+                var message_channel = new MessageChannel();
+                message_port = message_channel.port1;
+                message_port.start();
+                worker.postMessage({type: "connect"}, [message_channel.port2]);
+            } else {
+                // If MessageChannel is not available, then try the
+                // ServiceWorker.postMessage() approach using MessageEvent.source
+                // on the other end.
+                message_port = navigator.serviceWorker;
+                worker.postMessage({type: "connect"});
+            }
+        } else if (is_shared_worker(worker)) {
+            message_port = worker.port;
+            message_port.start();
+        } else {
+            message_port = worker;
+        }
+
+        return new RemoteContext(worker, message_port);
     };
 
-    var tests = new Tests();
+    /*
+     * Constructs a RemoteContext that tracks tests from a specific window.
+     */
+    Tests.prototype.create_remote_window = function(remote) {
+        remote.postMessage({type: "getmessages"}, "*");
+        return new RemoteContext(
+            remote,
+            window,
+            function(msg) {
+                return msg.source === remote;
+            }
+        );
+    };
 
-    window.onerror = function(msg) {
-        if (!tests.allow_uncaught_exception)
-        {
-            tests.status.status = tests.status.ERROR;
-            tests.status.message = msg;
-            tests.complete();
+    Tests.prototype.fetch_tests_from_worker = function(worker) {
+        if (this.phase >= this.phases.COMPLETE) {
+            return;
+        }
+
+        this.pending_remotes.push(this.create_remote_worker(worker));
+    };
+
+    function fetch_tests_from_worker(port) {
+        tests.fetch_tests_from_worker(port);
+    }
+    expose(fetch_tests_from_worker, 'fetch_tests_from_worker');
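+    // Illustrative usage (not part of the harness): a page that embeds
+    // worker-based tests could call, e.g.,
+    //   fetch_tests_from_worker(new Worker("worker-tests.js"));
+    // where "worker-tests.js" is a hypothetical script that itself loads
+    // testharness.js and defines tests.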
+
+    Tests.prototype.fetch_tests_from_window = function(remote) {
+        if (this.phase >= this.phases.COMPLETE) {
+            return;
         }
+
+        this.pending_remotes.push(this.create_remote_window(remote));
+    };
+
+    function fetch_tests_from_window(window) {
+        tests.fetch_tests_from_window(window);
     }
+    expose(fetch_tests_from_window, 'fetch_tests_from_window');
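+    // Illustrative usage (assumes an <iframe id="f"> whose document runs its
+    // own testharness.js tests):
+    //   fetch_tests_from_window(document.getElementById("f").contentWindow);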
 
     function timeout() {
-        if (tests.timeout_length === null)
-        {
+        if (tests.timeout_length === null) {
             tests.timeout();
         }
     }
@@ -1569,30 +2163,55 @@ policies and contribution forms [3].
         tests.start_callbacks.push(callback);
     }
 
-    function add_result_callback(callback)
-    {
+    function add_test_state_callback(callback) {
+        tests.test_state_callbacks.push(callback);
+    }
+
+    function add_result_callback(callback) {
         tests.test_done_callbacks.push(callback);
     }
 
-    function add_completion_callback(callback)
-    {
-       tests.all_done_callbacks.push(callback);
+    function add_completion_callback(callback) {
+        tests.all_done_callbacks.push(callback);
     }
 
     expose(add_start_callback, 'add_start_callback');
+    expose(add_test_state_callback, 'add_test_state_callback');
     expose(add_result_callback, 'add_result_callback');
     expose(add_completion_callback, 'add_completion_callback');
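+
+    // Illustrative usage of the hooks above, e.g. for a custom reporter
+    // (the callback bodies here are examples, not part of the harness):
+    //   add_result_callback(function(test) {
+    //       console.log(test.name + ": " + test.status);
+    //   });
+    //   add_completion_callback(function(all_tests, harness_status) {
+    //       console.log(all_tests.length + " tests done, harness status " +
+    //                   harness_status.status);
+    //   });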
 
+    function remove(array, item) {
+        var index = array.indexOf(item);
+        if (index > -1) {
+            array.splice(index, 1);
+        }
+    }
+
+    function remove_start_callback(callback) {
+        remove(tests.start_callbacks, callback);
+    }
+
+    function remove_test_state_callback(callback) {
+        remove(tests.test_state_callbacks, callback);
+    }
+
+    function remove_result_callback(callback) {
+        remove(tests.test_done_callbacks, callback);
+    }
+
+    function remove_completion_callback(callback) {
+        remove(tests.all_done_callbacks, callback);
+    }
+
     /*
      * Output listener
     */
 
     function Output() {
-      this.output_document = document;
-      this.output_node = null;
-      this.done_count = 0;
-      this.enabled = settings.output;
-      this.phase = this.INITIAL;
+        this.output_document = document;
+        this.output_node = null;
+        this.enabled = settings.output;
+        this.phase = this.INITIAL;
     }
 
     Output.prototype.INITIAL = 0;
@@ -1611,8 +2230,7 @@ policies and contribution forms [3].
                                         properties.output : settings.output);
     };
 
-    Output.prototype.init = function(properties)
-    {
+    Output.prototype.init = function(properties) {
         if (this.phase >= this.STARTED) {
             return;
         }
@@ -1624,63 +2242,57 @@ policies and contribution forms [3].
         this.phase = this.STARTED;
     };
 
-    Output.prototype.resolve_log = function()
-    {
+    Output.prototype.resolve_log = function() {
         var output_document;
-        if (typeof this.output_document === "function")
-        {
+        if (typeof this.output_document === "function") {
             output_document = this.output_document.apply(undefined);
-        } else
-        {
+        } else {
             output_document = this.output_document;
         }
-        if (!output_document)
-        {
+        if (!output_document) {
             return;
         }
         var node = output_document.getElementById("log");
-        if (node)
-        {
-            this.output_document = output_document;
-            this.output_node = node;
+        if (!node) {
+            if (!document.body || document.readyState == "loading") {
+                return;
+            }
+            node = output_document.createElement("div");
+            node.id = "log";
+            output_document.body.appendChild(node);
         }
+        this.output_document = output_document;
+        this.output_node = node;
     };
 
-    Output.prototype.show_status = function(test)
-    {
-        if (this.phase < this.STARTED)
-        {
+    Output.prototype.show_status = function() {
+        if (this.phase < this.STARTED) {
             this.init();
         }
-        if (!this.enabled)
-        {
+        if (!this.enabled) {
             return;
         }
-        if (this.phase < this.HAVE_RESULTS)
-        {
+        if (this.phase < this.HAVE_RESULTS) {
             this.resolve_log();
             this.phase = this.HAVE_RESULTS;
         }
-        this.done_count++;
-        if (this.output_node)
-        {
-            if (this.done_count < 100
-            || (this.done_count < 1000 && this.done_count % 100 == 0)
-            || this.done_count % 1000 == 0) {
-                this.output_node.textContent = "Running, "
-                    + this.done_count + " complete, "
-                    + tests.num_pending + " remain";
+        var done_count = tests.tests.length - tests.num_pending;
+        if (this.output_node) {
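+            // Throttle DOM updates: show every result early on, then only
+            // every 100th (under 1000) or every 1000th, to keep large suites fast.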
+            if (done_count < 100 ||
+                (done_count < 1000 && done_count % 100 === 0) ||
+                done_count % 1000 === 0) {
+                this.output_node.textContent = "Running, " +
+                    done_count + " complete, " +
+                    tests.num_pending + " remain";
             }
         }
     };
 
-    Output.prototype.show_results = function (tests, harness_status)
-    {
+    Output.prototype.show_results = function (tests, harness_status) {
         if (this.phase >= this.COMPLETE) {
             return;
         }
-        if (!this.enabled)
-        {
+        if (!this.enabled) {
             return;
         }
         if (!this.output_node) {
@@ -1689,21 +2301,20 @@ policies and contribution forms [3].
         this.phase = this.COMPLETE;
 
         var log = this.output_node;
-        if (!log)
-        {
+        if (!log) {
             return;
         }
         var output_document = this.output_document;
 
-        while (log.lastChild)
-        {
+        while (log.lastChild) {
             log.removeChild(log.lastChild);
         }
 
-        if (script_prefix != null) {
+        var harness_url = get_harness_url();
+        if (harness_url !== undefined) {
             var stylesheet = output_document.createElementNS(xhtml_ns, "link");
             stylesheet.setAttribute("rel", "stylesheet");
-            stylesheet.setAttribute("href", script_prefix + "testharness.css");
+            stylesheet.setAttribute("href", harness_url + "testharness.css");
             var heads = output_document.getElementsByTagName("head");
             if (heads.length) {
                 heads[0].appendChild(stylesheet);
@@ -1722,10 +2333,10 @@ policies and contribution forms [3].
         status_text[Test.prototype.NOTRUN] = "Not Run";
 
         var status_number = {};
-        forEach(tests, function(test) {
+        forEach(tests,
+                function(test) {
                     var status = status_text[test.status];
-                    if (status_number.hasOwnProperty(status))
-                    {
+                    if (status_number.hasOwnProperty(status)) {
                         status_number[status] += 1;
                     } else {
                         status_number[status] = 1;
@@ -1739,38 +2350,31 @@ policies and contribution forms [3].
 
         var summary_template = ["section", {"id":"summary"},
                                 ["h2", {}, "Summary"],
-                                function(vars)
+                                function()
                                 {
-                                    if (harness_status.status === harness_status.OK)
-                                    {
-                                        return null;
-                                    }
-                                    else
-                                    {
-                                        var status = status_text_harness[harness_status.status];
-                                        var rv = [["p", {"class":status_class(status)}]];
-
-                                        if (harness_status.status === harness_status.ERROR)
-                                        {
-                                            rv[0].push("Harness encountered an error:");
-                                            rv.push(["pre", {}, harness_status.message]);
-                                        }
-                                        else if (harness_status.status === harness_status.TIMEOUT)
-                                        {
-                                            rv[0].push("Harness timed out.");
-                                        }
-                                        else
-                                        {
-                                            rv[0].push("Harness got an unexpected status.");
-                                        }
 
-                                        return rv;
+                                    var status = status_text_harness[harness_status.status];
+                                    var rv = [["section", {},
+                                               ["p", {},
+                                                "Harness status: ",
+                                                ["span", {"class":status_class(status)},
+                                                 status
+                                                ],
+                                               ]
+                                              ]];
+
+                                    if (harness_status.status === harness_status.ERROR) {
+                                        rv[0].push(["pre", {}, harness_status.message]);
+                                        if (harness_status.stack) {
+                                            rv[0].push(["pre", {}, harness_status.stack]);
+                                        }
                                     }
+                                    return rv;
                                 },
                                 ["p", {}, "Found ${num_tests} tests"],
-                                function(vars) {
+                                function() {
                                     var rv = [["div", {}]];
-                                    var i=0;
+                                    var i = 0;
                                     while (status_text.hasOwnProperty(i)) {
                                         if (status_number.hasOwnProperty(status_text[i])) {
                                             var status = status_text[i];
@@ -1782,7 +2386,8 @@ policies and contribution forms [3].
                                         i++;
                                     }
                                     return rv;
-                                }];
+                                },
+                               ];
 
         log.appendChild(render(summary_template, {num_tests:tests.length}, output_document));
 
@@ -1792,8 +2397,7 @@ policies and contribution forms [3].
                     on_event(element, "click",
                              function(e)
                              {
-                                 if (output_document.getElementById("results") === null)
-                                 {
+                                 if (output_document.getElementById("results") === null) {
                                      e.preventDefault();
                                      return;
                                  }
@@ -1846,22 +2450,25 @@ policies and contribution forms [3].
 
         log.appendChild(document.createElementNS(xhtml_ns, "section"));
         var assertions = has_assertions();
-        var html = "<h2>Details</h2><table id='results' " + (assertions ? "class='assertions'" : "" ) + ">"
-            + "<thead><tr><th>Result</th><th>Test Name</th>"
-            + (assertions ? "<th>Assertion</th>" : "")
-            + "<th>Message</th></tr></thead>"
-            + "<tbody>";
+        var html = "<h2>Details</h2><table id='results' " + (assertions ? "class='assertions'" : "" ) + ">" +
+            "<thead><tr><th>Result</th><th>Test Name</th>" +
+            (assertions ? "<th>Assertion</th>" : "") +
+            "<th>Message</th></tr></thead>" +
+            "<tbody>";
         for (var i = 0; i < tests.length; i++) {
-            html += '<tr class="'
-                + escape_html(status_text[tests[i].status])
-                + '"><td>'
-                + escape_html(status_text[tests[i].status])
-                + "</td><td>"
-                + escape_html(tests[i].name)
-                + "</td><td>"
-                + (assertions ? escape_html(get_assertion(tests[i])) + "</td><td>" : "")
-                + escape_html(tests[i].message ? tests[i].message : " ")
-                + "</td></tr>";
+            html += '<tr class="' +
+                escape_html(status_text[tests[i].status]) +
+                '"><td>' +
+                escape_html(status_text[tests[i].status]) +
+                "</td><td>" +
+                escape_html(tests[i].name) +
+                "</td><td>" +
+                (assertions ? escape_html(get_assertion(tests[i])) + "</td><td>" : "") +
+                escape_html(tests[i].message ? tests[i].message : " ") +
+                (tests[i].stack ? "<pre>" +
+                 escape_html(tests[i].stack) +
+                 "</pre>": "") +
+                "</td></tr>";
         }
         html += "</tbody></table>";
         try {
@@ -1874,11 +2481,6 @@ policies and contribution forms [3].
         }
     };
 
-    var output = new Output();
-    add_start_callback(function (properties) {output.init(properties);});
-    add_result_callback(function (test) {output.show_status(tests);});
-    add_completion_callback(function (tests, harness_status) {output.show_results(tests, harness_status);});
-
     /*
      * Template code
      *
@@ -1921,154 +2523,124 @@ policies and contribution forms [3].
     {
         if (typeof template === "function") {
             var replacement = template(substitutions);
-            if (replacement)
-            {
-                var rv = substitute(replacement, substitutions);
-                return rv;
-            }
-            else
-            {
+            if (!replacement) {
                 return null;
             }
+
+            return substitute(replacement, substitutions);
         }
-        else if (is_single_node(template))
-        {
+
+        if (is_single_node(template)) {
             return substitute_single(template, substitutions);
         }
-        else
-        {
-            return filter(map(template, function(x) {
-                              return substitute(x, substitutions);
-                          }), function(x) {return x !== null;});
-        }
+
+        return filter(map(template, function(x) {
+            return substitute(x, substitutions);
+        }), function(x) {return x !== null;});
     }
 
     function substitute_single(template, substitutions)
     {
-        var substitution_re = /\${([^ }]*)}/g;
+        var substitution_re = /\$\{([^ }]*)\}/g;
 
         function do_substitution(input) {
             var components = input.split(substitution_re);
             var rv = [];
-            for (var i=0; i<components.length; i+=2)
+            if (i >= lines.length) {
+                return stack;
+            }
+
+            return lines.slice(i).join("\n");
     }
 
     function make_message(function_name, description, error, substitutions)
@@ -2106,10 +2731,8 @@ policies and contribution forms [3].
 
     function filter(array, callable, thisObj) {
         var rv = [];
-        for (var i=0; i<array.length; i++)
+    /** Returns the 'src' of the <script> tag in the page to include the file 'testharness.js'. */
+    function get_script_url()
+    {
+        if (!('document' in self)) {
+            return undefined;
+        }
+
+        var scripts = document.getElementsByTagName("script");
+        for (var i = 0; i < scripts.length; i++) {
+            var src;
+            if (scripts[i].src) {
+                src = scripts[i].src;
+            } else if (scripts[i].href) {
+                //SVG case
+                src = scripts[i].href.baseVal;
+            }
+
+            var matches = src && src.match(/^(.*\/|)testharness\.js$/);
+            if (matches) {
+                return src;
+            }
+        }
+        return undefined;
+    }
+
+    /** Returns the URL path at which the files for testharness.js are assumed to reside (e.g., '/resources/').
+        The path is derived from inspecting the 'src' of the