Initial commit
node_modules/parse-srcset/.jscs.json (83 lines added, generated, vendored, new file)
@@ -0,0 +1,83 @@
{
	"requireCurlyBraces": [
		"if",
		"else",
		"for",
		"while",
		"do",
		"try",
		"catch"
	],
	"requireOperatorBeforeLineBreak": true,
	"requireParenthesesAroundIIFE": true,
	"requireMultipleVarDecl": "onevar",
	"requireCommaBeforeLineBreak": true,
	"requireCamelCaseOrUpperCaseIdentifiers": true,
	"requireDotNotation": true,
	"requireSpacesInForStatement": true,
	"maximumLineLength": {
		"value": 100,
		"tabSize": 4,
		"allowUrlComments": true,
		"allowRegex": true
	},
	"validateQuoteMarks": { "mark": "\"", "escape": true },

	"disallowMixedSpacesAndTabs": "smart",
	"disallowTrailingWhitespace": true,
	"disallowMultipleLineStrings": true,
	"disallowTrailingComma": true,

	"requireSpaceBeforeBlockStatements": true,
	"requireSpacesInFunctionExpression": {
		"beforeOpeningCurlyBrace": true
	},
	"requireSpaceAfterKeywords": [
		"if",
		"else",
		"for",
		"while",
		"do",
		"switch",
		"return",
		"try",
		"catch"
	],
	"requireSpacesInsideObjectBrackets": "all",
	"requireSpacesInsideArrayBrackets": "all",
	"requireSpacesInConditionalExpression": true,
	"requireSpaceAfterBinaryOperators": true,
	"requireLineFeedAtFileEnd": true,
	"requireSpaceBeforeBinaryOperators": [
		"=", "+=", "-=", "*=", "/=", "%=", "<<=", ">>=", ">>>=",
		"&=", "|=", "^=", "+=",

		"+", "-", "*", "/", "%", "<<", ">>", ">>>", "&",
		"|", "^", "&&", "||", "===", "==", ">=",
		"<=", "<", ">", "!=", "!=="
	],
	"requireSpacesInAnonymousFunctionExpression": {
		"beforeOpeningCurlyBrace": true
	},
	"requireSpacesInNamedFunctionExpression": {
		"beforeOpeningCurlyBrace": true
	},
	"validateLineBreaks": "LF",

	"disallowKeywords": [ "with" ],
	"disallowKeywordsOnNewLine": [ "else" ],
	"disallowSpacesInFunctionExpression": {
		"beforeOpeningRoundBrace": true
	},
	"disallowSpacesInNamedFunctionExpression": {
		"beforeOpeningRoundBrace": true
	},
	"disallowSpacesInAnonymousFunctionExpression": {
		"beforeOpeningRoundBrace": true
	},
	"disallowSpaceAfterObjectKeys": true,
	"disallowSpaceAfterPrefixUnaryOperators": true,
	"disallowSpaceBeforePostfixUnaryOperators": true,
	"disallowSpaceBeforeBinaryOperators": [ ",", ":" ],
	"disallowMultipleLineBreaks": true
}
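For readers unfamiliar with JSCS, here is a short illustrative snippet of code that satisfies the main rules above (required curly braces, one var declaration per function, double quotes, space after keywords). The function and property names are invented for the example and are not part of the package:

```js
// Style illustration only; names are made up.
function describeCandidate(candidate) {
	var label, size;

	if (candidate.w) {
		size = candidate.w + "w";
	} else {
		size = (candidate.d || 1) + "x";
	}

	label = candidate.url + " " + size;
	return label;
}
```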
node_modules/parse-srcset/.jshintrc (15 lines added, generated, vendored, new file)
@@ -0,0 +1,15 @@
{
	"boss": true,
	"curly": true,
	"eqeqeq": true,
	"eqnull": true,
	"expr": true,
	"immed": true,
	"noarg": true,
	"onevar": true,
	"quotmark": "double",
	"smarttabs": true,
	"trailing": true,
	"undef": true,
	"unused": true
}
node_modules/parse-srcset/.npmignore (8 lines added, generated, vendored, new file)
@@ -0,0 +1,8 @@
*/.DS_Store
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes

node_modules
node_modules/parse-srcset/LICENSE (22 lines added, generated, vendored, new file)
@@ -0,0 +1,22 @@
The MIT License (MIT)

Copyright (c) 2014 Alex Bell

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
node_modules/parse-srcset/README.md (19 lines added, generated, vendored, new file)
@@ -0,0 +1,19 @@
# parse-srcset

A javascript parser for the [HTML5 srcset](http://www.w3.org/TR/html-srcset/) attribute, based on the [WHATWG reference algorithm](https://html.spec.whatwg.org/multipage/embedded-content.html#parse-a-srcset-attribute). It has an extensive test suite based on the [W3C srcset conformance checker](http://w3c-test.org/html/semantics/embedded-content/the-img-element/srcset/parse-a-srcset-attribute.html). It conforms to the jQuery JSCS style rules.

Tests are written using Intern-geezer for compatibility.

To run the tests in console:

```
$ npm test
```

Or in a browser, just open the html file at:

```
node_modules/intern-geezer/client.html?config=tests/intern
```

I’m on twitter [@tweetywheaty](https://twitter.com/tweetywheaty).
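As a quick orientation before the source below, a minimal usage sketch; the image URLs are invented, and the returned array shape follows the `@returns` note in src/parse-srcset.js:

```js
// Node-style usage sketch (the module also registers itself via AMD or as a browser global).
var parseSrcset = require("parse-srcset");

var candidates = parseSrcset("small.jpg 480w, large.jpg 2x");
// candidates -> [ { url: "small.jpg", w: 480 }, { url: "large.jpg", d: 2 } ]
```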
node_modules/parse-srcset/package.json (25 lines added, generated, vendored, new file)
@@ -0,0 +1,25 @@
{
  "name": "parse-srcset",
  "version": "1.0.2",
  "description": "A spec-conformant JavaScript parser for the HTML5 srcset attribute",
  "main": "src/parse-srcset.js",
  "directories": {
    "test": "tests"
  },
  "scripts": {
    "test": "intern-client config=tests/intern"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/albell/parse-srcset.git"
  },
  "author": "Alex Bell <alex@bellandwhistle.net>",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/albell/parse-srcset/issues"
  },
  "homepage": "https://github.com/albell/parse-srcset#readme",
  "devDependencies": {
    "intern-geezer": "^2.2.3"
  }
}
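Two practical consequences of this manifest, sketched under the assumption of a normal npm install; the require specifier and the command expansion follow the "main" and "scripts" fields above:

```js
// "main" points at src/parse-srcset.js, so a bare require returns the parser function.
var parseSrcset = require("parse-srcset");
console.log(typeof parseSrcset); // "function"

// "npm test" expands to the script declared above, which runs the Intern unit suites in Node:
//   intern-client config=tests/intern
```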
node_modules/parse-srcset/src/parse-srcset.js (330 lines added, generated, vendored, new file)
@@ -0,0 +1,330 @@
/**
 * Srcset Parser
 *
 * By Alex Bell | MIT License
 *
 * JS Parser for the string value that appears in markup <img srcset="here">
 *
 * @returns Array [{url: _, d: _, w: _, h:_}, ...]
 *
 * Based super duper closely on the reference algorithm at:
 * https://html.spec.whatwg.org/multipage/embedded-content.html#parse-a-srcset-attribute
 *
 * Most comments are copied in directly from the spec
 * (except for comments in parens).
 */

(function (root, factory) {
	if (typeof define === 'function' && define.amd) {
		// AMD. Register as an anonymous module.
		define([], factory);
	} else if (typeof module === 'object' && module.exports) {
		// Node. Does not work with strict CommonJS, but
		// only CommonJS-like environments that support module.exports,
		// like Node.
		module.exports = factory();
	} else {
		// Browser globals (root is window)
		root.parseSrcset = factory();
	}
}(this, function () {

	// 1. Let input be the value passed to this algorithm.
	return function (input) {

		// UTILITY FUNCTIONS

		// Manual is faster than RegEx
		// http://bjorn.tipling.com/state-and-regular-expressions-in-javascript
		// http://jsperf.com/whitespace-character/5
		function isSpace(c) {
			return (c === "\u0020" || // space
				c === "\u0009" ||     // horizontal tab
				c === "\u000A" ||     // new line
				c === "\u000C" ||     // form feed
				c === "\u000D");      // carriage return
		}

		function collectCharacters(regEx) {
			var chars,
				match = regEx.exec(input.substring(pos));
			if (match) {
				chars = match[ 0 ];
				pos += chars.length;
				return chars;
			}
		}

		var inputLength = input.length,

			// (Don't use \s, to avoid matching non-breaking space)
			regexLeadingSpaces = /^[ \t\n\r\u000c]+/,
			regexLeadingCommasOrSpaces = /^[, \t\n\r\u000c]+/,
			regexLeadingNotSpaces = /^[^ \t\n\r\u000c]+/,
			regexTrailingCommas = /[,]+$/,
			regexNonNegativeInteger = /^\d+$/,

			// ( Positive or negative or unsigned integers or decimals, without or without exponents.
			// Must include at least one digit.
			// According to spec tests any decimal point must be followed by a digit.
			// No leading plus sign is allowed.)
			// https://html.spec.whatwg.org/multipage/infrastructure.html#valid-floating-point-number
			regexFloatingPoint = /^-?(?:[0-9]+|[0-9]*\.[0-9]+)(?:[eE][+-]?[0-9]+)?$/,

			url,
			descriptors,
			currentDescriptor,
			state,
			c,

			// 2. Let position be a pointer into input, initially pointing at the start
			//    of the string.
			pos = 0,

			// 3. Let candidates be an initially empty source set.
			candidates = [];

		// 4. Splitting loop: Collect a sequence of characters that are space
		//    characters or U+002C COMMA characters. If any U+002C COMMA characters
		//    were collected, that is a parse error.
		while (true) {
			collectCharacters(regexLeadingCommasOrSpaces);

			// 5. If position is past the end of input, return candidates and abort these steps.
			if (pos >= inputLength) {
				return candidates; // (we're done, this is the sole return path)
			}

			// 6. Collect a sequence of characters that are not space characters,
			//    and let that be url.
			url = collectCharacters(regexLeadingNotSpaces);

			// 7. Let descriptors be a new empty list.
			descriptors = [];

			// 8. If url ends with a U+002C COMMA character (,), follow these substeps:
			//    (1). Remove all trailing U+002C COMMA characters from url. If this removed
			//    more than one character, that is a parse error.
			if (url.slice(-1) === ",") {
				url = url.replace(regexTrailingCommas, "");
				// (Jump ahead to step 9 to skip tokenization and just push the candidate).
				parseDescriptors();

			// Otherwise, follow these substeps:
			} else {
				tokenize();
			} // (close else of step 8)

		// 16. Return to the step labeled splitting loop.
		} // (Close of big while loop.)

		/**
		 * Tokenizes descriptor properties prior to parsing
		 * Returns undefined.
		 */
		function tokenize() {

			// 8.1. Descriptor tokeniser: Skip whitespace
			collectCharacters(regexLeadingSpaces);

			// 8.2. Let current descriptor be the empty string.
			currentDescriptor = "";

			// 8.3. Let state be in descriptor.
			state = "in descriptor";

			while (true) {

				// 8.4. Let c be the character at position.
				c = input.charAt(pos);

				// Do the following depending on the value of state.
				// For the purpose of this step, "EOF" is a special character representing
				// that position is past the end of input.

				// In descriptor
				if (state === "in descriptor") {
					// Do the following, depending on the value of c:

					// Space character
					// If current descriptor is not empty, append current descriptor to
					// descriptors and let current descriptor be the empty string.
					// Set state to after descriptor.
					if (isSpace(c)) {
						if (currentDescriptor) {
							descriptors.push(currentDescriptor);
							currentDescriptor = "";
							state = "after descriptor";
						}

					// U+002C COMMA (,)
					// Advance position to the next character in input. If current descriptor
					// is not empty, append current descriptor to descriptors. Jump to the step
					// labeled descriptor parser.
					} else if (c === ",") {
						pos += 1;
						if (currentDescriptor) {
							descriptors.push(currentDescriptor);
						}
						parseDescriptors();
						return;

					// U+0028 LEFT PARENTHESIS (()
					// Append c to current descriptor. Set state to in parens.
					} else if (c === "\u0028") {
						currentDescriptor = currentDescriptor + c;
						state = "in parens";

					// EOF
					// If current descriptor is not empty, append current descriptor to
					// descriptors. Jump to the step labeled descriptor parser.
					} else if (c === "") {
						if (currentDescriptor) {
							descriptors.push(currentDescriptor);
						}
						parseDescriptors();
						return;

					// Anything else
					// Append c to current descriptor.
					} else {
						currentDescriptor = currentDescriptor + c;
					}
					// (end "in descriptor"

				// In parens
				} else if (state === "in parens") {

					// U+0029 RIGHT PARENTHESIS ())
					// Append c to current descriptor. Set state to in descriptor.
					if (c === ")") {
						currentDescriptor = currentDescriptor + c;
						state = "in descriptor";

					// EOF
					// Append current descriptor to descriptors. Jump to the step labeled
					// descriptor parser.
					} else if (c === "") {
						descriptors.push(currentDescriptor);
						parseDescriptors();
						return;

					// Anything else
					// Append c to current descriptor.
					} else {
						currentDescriptor = currentDescriptor + c;
					}

				// After descriptor
				} else if (state === "after descriptor") {

					// Do the following, depending on the value of c:
					// Space character: Stay in this state.
					if (isSpace(c)) {

					// EOF: Jump to the step labeled descriptor parser.
					} else if (c === "") {
						parseDescriptors();
						return;

					// Anything else
					// Set state to in descriptor. Set position to the previous character in input.
					} else {
						state = "in descriptor";
						pos -= 1;

					}
				}

				// Advance position to the next character in input.
				pos += 1;

			// Repeat this step.
			} // (close while true loop)
		}

		/**
		 * Adds descriptor properties to a candidate, pushes to the candidates array
		 * @return undefined
		 */
		// Declared outside of the while loop so that it's only created once.
		function parseDescriptors() {

			// 9. Descriptor parser: Let error be no.
			var pError = false,

				// 10. Let width be absent.
				// 11. Let density be absent.
				// 12. Let future-compat-h be absent. (We're implementing it now as h)
				w, d, h, i,
				candidate = {},
				desc, lastChar, value, intVal, floatVal;

			// 13. For each descriptor in descriptors, run the appropriate set of steps
			//     from the following list:
			for (i = 0 ; i < descriptors.length; i++) {
				desc = descriptors[ i ];

				lastChar = desc[ desc.length - 1 ];
				value = desc.substring(0, desc.length - 1);
				intVal = parseInt(value, 10);
				floatVal = parseFloat(value);

				// If the descriptor consists of a valid non-negative integer followed by
				// a U+0077 LATIN SMALL LETTER W character
				if (regexNonNegativeInteger.test(value) && (lastChar === "w")) {

					// If width and density are not both absent, then let error be yes.
					if (w || d) {pError = true;}

					// Apply the rules for parsing non-negative integers to the descriptor.
					// If the result is zero, let error be yes.
					// Otherwise, let width be the result.
					if (intVal === 0) {pError = true;} else {w = intVal;}

				// If the descriptor consists of a valid floating-point number followed by
				// a U+0078 LATIN SMALL LETTER X character
				} else if (regexFloatingPoint.test(value) && (lastChar === "x")) {

					// If width, density and future-compat-h are not all absent, then let error
					// be yes.
					if (w || d || h) {pError = true;}

					// Apply the rules for parsing floating-point number values to the descriptor.
					// If the result is less than zero, let error be yes. Otherwise, let density
					// be the result.
					if (floatVal < 0) {pError = true;} else {d = floatVal;}

				// If the descriptor consists of a valid non-negative integer followed by
				// a U+0068 LATIN SMALL LETTER H character
				} else if (regexNonNegativeInteger.test(value) && (lastChar === "h")) {

					// If height and density are not both absent, then let error be yes.
					if (h || d) {pError = true;}

					// Apply the rules for parsing non-negative integers to the descriptor.
					// If the result is zero, let error be yes. Otherwise, let future-compat-h
					// be the result.
					if (intVal === 0) {pError = true;} else {h = intVal;}

				// Anything else, Let error be yes.
				} else {pError = true;}
			} // (close step 13 for loop)

			// 15. If error is still no, then append a new image source to candidates whose
			//     URL is url, associated with a width width if not absent and a pixel
			//     density density if not absent. Otherwise, there is a parse error.
			if (!pError) {
				candidate.url = url;
				if (w) { candidate.w = w;}
				if (d) { candidate.d = d;}
				if (h) { candidate.h = h;}
				candidates.push(candidate);
			} else if (console && console.log) {
				console.log("Invalid srcset descriptor found in '" +
					input + "' at '" + desc + "'.");
			}
		} // (close parseDescriptors fn)

	}
}));
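To make the return shape concrete, here is a small illustrative example of what the splitting loop and descriptor parser above produce; the input strings and the require path are invented, and the behavior follows the code as written, including the console warning path for invalid descriptors:

```js
// Illustrative only: exercises the parser defined above from Node.
var parseSrcset = require("./parse-srcset.js");

// Width, density, and future-compat height descriptors each land on their own key.
parseSrcset("banner-480.jpg 480w, banner-960.jpg 960w, banner-hd.jpg 2x");
// -> [ { url: "banner-480.jpg", w: 480 },
//      { url: "banner-960.jpg", w: 960 },
//      { url: "banner-hd.jpg", d: 2 } ]

// A conflicting descriptor pair drops the candidate and logs a parse-error message.
parseSrcset("banner.jpg 480w 2x");
// -> [] (plus "Invalid srcset descriptor found in ..." on the console)
```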
node_modules/parse-srcset/tests/he.js (329 lines added, generated, vendored, new executable file; file diff suppressed because one or more lines are too long)
node_modules/parse-srcset/tests/intern.js (74 lines added, generated, vendored, new file)
@@ -0,0 +1,74 @@
// Learn more about configuring this file at
// <https://github.com/theintern/intern/wiki/Configuring-Intern>.
// These default settings work OK for most people. The options that *must* be
// changed below are the packages, suites, excludeInstrumentation, and (if you
// want functional tests) functionalSuites.

define({
	// The port on which the instrumenting proxy will listen
	proxyPort: 9000,

	// A fully qualified URL to the Intern proxy
	proxyUrl: 'http://localhost:9000/',

	// Default desired capabilities for all environments. Individual capabilities
	// can be overridden by any of the specified browser environments in the
	// `environments` array below as well. See
	// https://code.google.com/p/selenium/wiki/DesiredCapabilities
	// for standard Selenium capabilities and
	// https://saucelabs.com/docs/additional-config#desired-capabilities
	// for Sauce Labs capabilities.
	// Note that the `build` capability will be filled in with the current commit
	// ID from the Travis CI environment
	// automatically
	capabilities: {
		'selenium-version': '2.41.0'
	},

	// Browsers to run integration testing against. Note that version numbers must
	// be strings if used with Sauce OnDemand. Options that will be permutated are
	// browserName, version, platform, and platformVersion; any other capabilities
	// options specified for an environment will be copied as-is.
	environments: [
		{ browserName: 'internet explorer', version: '11', platform: 'Windows 8.1' },
		{ browserName: 'internet explorer', version: '10', platform: 'Windows 8' },
		{ browserName: 'internet explorer', version: '9', platform: 'Windows 7' },
		{ browserName: 'firefox', version: '28', platform: [ 'OS X 10.9', 'Windows 7', 'Linux' ] },
		{ browserName: 'chrome', version: '34', platform: [ 'OS X 10.9', 'Windows 7', 'Linux' ] },
		{ browserName: 'safari', version: '6', platform: 'OS X 10.8' },
		{ browserName: 'safari', version: '7', platform: 'OS X 10.9' }
	],

	// Maximum number of simultaneous integration tests that should be executed on
	// the remote WebDriver service.
	maxConcurrency: 3,

	// Name of the tunnel class to use for WebDriver tests
	tunnel: 'SauceLabsTunnel',

	// The desired AMD loader to use when running unit tests (client.html/client.js).
	// Omit to use the default Dojo loader.
	useLoader: {
		'host-node': 'dojo/dojo',
		'host-browser': 'node_modules/dojo/dojo.js'
	},

	// Configuration options for the module loader; any AMD configuration options
	// supported by the specified AMD loader can be used here.
	loader: {
		// Packages that should be registered with the loader in each testing environment
		packages: [
			{ name: 'app', location: 'src/' },
			{ name: 'tests', location: 'tests/' }
		]
	},

	// Non-functional test suite(s) to run in each browser
	suites: [ 'tests/unit/ps' ],

	// Functional test suite(s) to run in each browser once non-functional tests are completed
	functionalSuites: [ /* 'myPackage/tests/functional' */ ],

	// A regular expression matching URLs to files that should not be included in code coverage analysis
	excludeInstrumentation: /^(?:tests|node_modules)\//
});
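The `suites` entry above points at the AMD module tests/unit/ps, which is listed next. As a sketch of what an additional suite registered the same way would look like in Intern's tdd interface (the file name tests/unit/example.js and its single test are hypothetical, modeled on the structure of ps.js):

```js
// Hypothetical tests/unit/example.js, written against intern-geezer's tdd interface.
define(function (require) {
	var tdd = require('intern!tdd');
	var assert = require('intern/chai!assert');
	var parseSrcset = require('../../src/parse-srcset');

	tdd.suite('example', function () {
		tdd.test('parses a plain url with a density descriptor', function () {
			var parsed = parseSrcset('data:,a 1x');
			assert.strictEqual(parsed[0].url, 'data:,a');
			assert.strictEqual(parsed[0].d, 1);
		});
	});
});
```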
node_modules/parse-srcset/tests/unit/ps.js (345 lines added, generated, vendored, new file)
@@ -0,0 +1,345 @@
define(function (require) {
	var tdd = require('intern!tdd');

	// Not really Chai but a Chai-compatible "assert" library for old IE.
	var assert = require('intern/chai!assert');

	// parse-srcset is an AMD module.
	var parseSrcset = require('../../src/parse-srcset');

	var he = require('tests/he');

	tdd.suite('Parse Srcset', function() {

		// Adapted from the W3C srcset conformance checker at:
		// http://w3c-test.org/html/semantics/embedded-content/the-img-element/srcset/parse-a-srcset-attribute.html
		var w3Ctests = [
			{
				groupName: "Splitting Loop",
				testArray: [
{srcset: '', expect: '', desc: 'empty string'},
|
||||
{srcset: ',' , expect: '', desc: 'single comma'},
|
||||
{srcset: ',,,', expect: '', desc: 'three commas'},
|
||||
{srcset: '		data:,a		1x		', expect: 'data:,a', desc: 'tabs'},
|
||||
{srcset: '

data:,a

1x

', expect: 'data:,a', desc: 'line feeds'},
|
||||
{srcset: 'data:,a1x', expect: 'data:,a1x', desc: 'line tab'},
|
||||
{srcset: 'data:,a1x', expect: 'data:,a', desc: 'form feed U+000C'},
|
||||
{srcset: '

data:,a

1x

', expect: 'data:,a', desc: 'carriage return U+000D'},
|
||||
{srcset: 'data:,a1x', expect: 'data:,a1x', desc: 'shift out U+000E'},
|
||||
{srcset: 'data:,a1x', expect: 'data:,a1x', desc: 'shift in U+000F' },
|
||||
{srcset: 'data:,a1x', expect: 'data:,a1x', desc: 'data link escape U+0010' },
|
||||
{srcset: 'data:,a', expect: 'data:,a', desc:'plain url'},
|
||||
{srcset: 'data:,a ', expect: 'data:,a', desc:'trailing space'},
|
||||
{srcset: 'data:,a ,', expect: 'data:,a', desc:'trailing space and comma'},
|
||||
{srcset: 'data:,a,', expect: 'data:,a', desc:'trailing comma'},
|
||||
{srcset: 'data:,a, ', expect: 'data:,a', desc:'trailing comma and space'},
|
||||
{srcset: 'data:,a,,,', expect: 'data:,a', desc:'trailing three commas'},
|
||||
{srcset: 'data:,a,, , ', expect: 'data:,a', desc:'trailing two commas space comma space'},
|
||||
{srcset: ' data:,a', expect: 'data:,a', desc:'leading space'},
|
||||
{srcset: ',,,data:,a', expect: 'data:,a', desc:'leading three commas'},
|
||||
{srcset: ' , ,,data:,a', expect: 'data:,a', desc:'leading space comma space comma comma'},
|
||||
{srcset: ' data:,a', expect: ' data:,a', desc:'leading non-breaking space'},
|
||||
{srcset: 'data:,a ', expect: 'data:,a ', desc:'trailing non-breaking space'}
|
||||
]
|
||||
},
|
||||
|
||||
{
|
||||
groupName: "Descriptor Tokenizer",
|
||||
testArray: [
|
||||
{srcset: 'data:,a 1x', expect: 'data:,a', desc: 'plain url with descriptor'},
|
||||
{srcset: 'data:,a 1x ', expect: 'data:,a', desc: 'trailing space'},
|
||||
{srcset: 'data:,a 1x,', expect: 'data:,a', desc: 'trailing comma'},
|
||||
{srcset: 'data:,a ( , data:,b 1x, ), data:,c', expect: 'data:,c', desc: 'irregular parens 1'},
|
||||
{srcset: 'data:,a ((( , data:,b 1x, ), data:,c', expect: 'data:,c', desc: 'irregular parens 2'},
|
||||
{srcset: 'data:,a [ , data:,b 1x, ], data:,c', expect: 'data:,b', desc: 'brackets'},
|
||||
{srcset: 'data:,a { , data:,b 1x, }, data:,c', expect: 'data:,b', desc: 'braces'},
|
||||
{srcset: 'data:,a " , data:,b 1x, ", data:,c', expect: 'data:,b', desc: 'double quotes'},
|
||||
{srcset: 'data:,a \\,data:;\\,b, data:,c', expect: 'data:;\\,b', desc: 'backslashes'},
|
||||
{srcset: 'data:,a, data:,b (', expect: 'data:,a', desc: 'trailing unclosed paren'},
|
||||
{srcset: 'data:,a, data:,b ( ', expect: 'data:,a', desc: 'unclosed paren space'},
|
||||
{srcset: 'data:,a, data:,b (,', expect: 'data:,a', desc: 'unclosed paren comma'},
|
||||
{srcset: 'data:,a, data:,b (x', expect: 'data:,a', desc: 'unclosed paren x'},
|
||||
{srcset: 'data:,a, data:,b ()', expect: 'data:,a', desc: 'parens, no descriptor'},
|
||||
{srcset: 'data:,a (, data:,b', expect: '', desc: 'unclosed paren'},
|
||||
{srcset: 'data:,a /*, data:,b, data:,c */', expect: 'data:,b', desc: 'block comments'},
|
||||
{srcset: 'data:,a //, data:,b', expect: 'data:,b', desc: 'double slash like a comment'}
|
||||
]
|
||||
},
|
||||
|
||||
{ groupName: "Descriptor Parser",
|
||||
testArray : [
|
||||
{srcset: 'data:,a foo', expect: '', desc: 'trailing foo'},
|
||||
{srcset: 'data:,a foo foo', expect: '', desc: 'trailing foo foo'},
|
||||
{srcset: 'data:,a foo 1x', expect: '', desc: 'trailing foo 1x'},
|
||||
{srcset: 'data:,a foo 1x foo', expect: '', desc: 'trailing 1x foo'},
|
||||
{srcset: 'data:,a foo 1w', expect: '', desc: 'trailing foo 1w'},
|
||||
{srcset: 'data:,a foo 1w foo', expect: '', desc: 'trailing foo 1w foo'},
|
||||
{srcset: 'data:,a 1x 1x', expect: '', desc: 'two density descriptors'},
|
||||
{srcset: 'data:,a 1w 1w', expect: '', desc: 'two width descriptors'},
|
||||
{srcset: 'data:,a 1h 1h', expect: '', desc: 'two height descriptors'},
|
||||
{srcset: 'data:,a 1w 1x', expect: '', desc: 'width then density'},
|
||||
{srcset: 'data:,a 1x 1w', expect: '', desc: 'density then width'},
|
||||
{srcset: 'data:,a 1w 1h', expect: 'data:,a', desc: 'width then height'},
|
||||
{srcset: 'data:,a 1h 1w', expect: 'data:,a', desc: 'height then width'},
|
||||
{srcset: 'data:,a 1h 1x', expect: '', desc: 'height then density'},
|
||||
{srcset: 'data:,a 1h 1w 1x', expect: '', desc: 'height then width then density'},
|
||||
{srcset: 'data:,a 1x 1w 1h', expect: '', desc: 'density then width then height'},
|
||||
{srcset: 'data:,a 1h foo', expect: '', desc: 'trailing 1h foo'},
|
||||
{srcset: 'data:,a foo 1h', expect: '', desc: 'trailing foo 1h'},
|
||||
{srcset: 'data:,a 0w', expect: '', desc: 'zero width'},
|
||||
{srcset: 'data:,a -1w', expect: '', desc: 'negative width'},
|
||||
{srcset: 'data:,a 1w -1w', expect: '', desc: 'positive width, negative width'},
|
||||
{srcset: 'data:,a 1.0w', expect: '', desc: 'floating point width'},
|
||||
{srcset: 'data:,a 1w 1.0w', expect: '', desc: 'integer width, floating point width'},
|
||||
{srcset: 'data:,a 1e0w', expect: '', desc: 'exponent width'},
|
||||
{srcset: 'data:,a 1w 1e0w', expect: '', desc: 'integer width, exponent width'},
|
||||
{srcset: 'data:,a 1www', expect: '', desc: '1www'},
|
||||
{srcset: 'data:,a 1w 1www', expect: '', desc: '1w 1www'},
|
||||
{srcset: 'data:,a 1w +1w', expect: '', desc: '1w +1w'},
|
||||
{srcset: 'data:,a 1W', expect: '', desc: 'capital W descriptor'},
|
||||
{srcset: 'data:,a 1w 1W', expect: '', desc: 'lowercase w, capital W descriptors'},
|
||||
{srcset: 'data:,a Infinityw', expect: '', desc: 'Infinityw'},
|
||||
{srcset: 'data:,a 1w Infinityw', expect: '', desc: '1w Infinityw'},
|
||||
{srcset: 'data:,a NaNw', expect: '', desc: 'Nanw'},
|
||||
{srcset: 'data:,a 1w NaNw', expect: '', desc: '1w Nanw'},
|
||||
{srcset: 'data:,a 0x1w', expect: '', desc: 'ox1w'},
|
||||
{srcset: 'data:,a 1w', expect: '', desc: 'trailing U+0001'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+00A0'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+1680'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+2000'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+2001'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+2002'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+2003'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+2004'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+2005'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+2006'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+2007'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+2008'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+2009'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+200A'},
|
||||
{srcset: 'data:,a 1‌w', expect: '', desc: 'trailing U+200C'},
|
||||
{srcset: 'data:,a 1‍w', expect: '', desc: 'trailing U+200D'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+202F'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+205F'},
|
||||
{srcset: 'data:,a 1 w', expect: '', desc: 'trailing U+3000'},
|
||||
{srcset: 'data:,a 1w', expect: '', desc: 'trailing U+FEFF'},
|
||||
{srcset: 'data:,a 1w' , expect: '', desc: 'leading U+0001'},
|
||||
// {srcset: 'data:,a 1w' , expect: '', desc: 'leading U+00A0 width'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+1680'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+2000'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+2001'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+2002'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+2003'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+2004'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+2005'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+2006'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+2007'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+2008'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+2009'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+200A'},
|
||||
{srcset: 'data:,a ‌1w', expect: '', desc: 'leading U+200C'},
|
||||
{srcset: 'data:,a ‍1w', expect: '', desc: 'leading U+200D'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+202F'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+205F'},
|
||||
{srcset: 'data:,a  1w', expect: '', desc: 'leading U+3000'},
|
||||
{srcset: 'data:,a 1w', expect: '', desc: 'leading U+FEFF'},
|
||||
{srcset: 'data:,a 0x', expect: 'data:,a', desc: 'zero density'},
|
||||
{srcset: 'data:,a -0x' , expect: 'data:,a', desc: 'negative zero density'},
|
||||
{srcset: 'data:,a 1x -0x', expect: '', desc: '1x -0x'},
|
||||
{srcset: 'data:,a -1x', expect: '', desc: '-1x'},
|
||||
{srcset: 'data:,a 1x -1x', expect: '', desc: '1x -1x'},
|
||||
{srcset: 'data:,a 1e0x', expect: 'data:,a', desc: '1e0x'},
|
||||
{srcset: 'data:,a 1E0x', expect: 'data:,a', desc: '1E0x'},
|
||||
{srcset: 'data:,a 1e-1x', expect: 'data:,a', desc: '1e-1x'},
|
||||
{srcset: 'data:,a 1.5e1x', expect: 'data:,a', desc: '1.5e1x'},
|
||||
{srcset: 'data:,a -x', expect: '', desc: 'negative density with no digits'},
|
||||
{srcset: 'data:,a .x', expect: '', desc: 'decimal density with no digits'},
|
||||
{srcset: 'data:,a -.x', expect: '', desc: '-.x'},
|
||||
{srcset: 'data:,a 1.x', expect: '', desc: '1.x'},
|
||||
{srcset: 'data:,a .5x', expect: 'data:,a', desc: 'floating point density descriptor'},
|
||||
{srcset: 'data:,a .5e1x', expect: 'data:,a', desc: '.5e1x'},
|
||||
{srcset: 'data:,a 1x 1.5e1x', expect: '', desc: '1x 1.5e1x'},
|
||||
{srcset: 'data:,a 1x 1e1.5x', expect: '', desc: '1x 1e1.5x'},
|
||||
{srcset: 'data:,a 1.0x', expect: 'data:,a', desc: '1.0x'},
|
||||
{srcset: 'data:,a 1x 1.0x', expect: '', desc: '1x 1.0x'},
|
||||
{srcset: 'data:,a +1x', expect: '', desc: 'no plus sign allowed on floating point number'},
|
||||
{srcset: 'data:,a 1X', expect: '', desc: 'Capital X descriptor'},
|
||||
{srcset: 'data:,a Infinityx', expect: '', desc: 'Infinityx'},
|
||||
{srcset: 'data:,a NaNx', expect: '', desc: 'NaNx'},
|
||||
{srcset: 'data:,a 0x1x', expect: '', desc: '0X1x'},
|
||||
{srcset: 'data:,a 0X1x', expect: '', desc: '1x'},
|
||||
{srcset: 'data:,a 1x', expect: '', desc: 'trailing U+0001'},
|
||||
{srcset: 'data:,a 1 x' , expect: '', desc: 'trailing U+00A0 density'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+1680'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+2000'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+2001'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+2002'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+2003'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+2004'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+2005'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+2006'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+2007'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+2008'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+2009'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+200A'},
|
||||
{srcset: 'data:,a 1‌x', expect: '', desc: 'trailing U+200C'},
|
||||
{srcset: 'data:,a 1‍x', expect: '', desc: 'trailing U+200D'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+202F'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+205F'},
|
||||
{srcset: 'data:,a 1 x', expect: '', desc: 'trailing U+3000'},
|
||||
{srcset: 'data:,a 1x', expect: '', desc: 'trailing U+FEFF'},
|
||||
{srcset: 'data:,a 1x' , expect: '', desc: 'leading U+0001'},
|
||||
{srcset: 'data:,a 1x' , expect: '', desc: 'leading U+00A0 density'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+1680'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+2000'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+2001'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+2002'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+2003'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+2004'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+2005'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+2006'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+2007'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+2008'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+2009'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+200A'},
|
||||
{srcset: 'data:,a ‌1x', expect: '', desc: 'leading U+200C'},
|
||||
{srcset: 'data:,a ‍1x', expect: '', desc: 'leading U+200D'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+202F'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+205F'},
|
||||
{srcset: 'data:,a  1x', expect: '', desc: 'leading U+3000'},
|
||||
{srcset: 'data:,a 1x', expect: '', desc: 'leading U+FEFF'},
|
||||
{srcset: 'data:,a 1w 0h', expect: '', desc: '1w 0h'},
|
||||
{srcset: 'data:,a 1w -1h', expect: '', desc: '1w -1h'},
|
||||
{srcset: 'data:,a 1w 1.0h', expect: '', desc: '1w 1.0h'},
|
||||
{srcset: 'data:,a 1w 1e0h', expect: '', desc: '1w 1e0h'},
|
||||
{srcset: 'data:,a 1w 1hhh', expect: '', desc: '1w 1hhh'},
|
||||
{srcset: 'data:,a 1w 1H', expect: '', desc: '1w 1H'},
|
||||
{srcset: 'data:,a 1w Infinityh', expect: '', desc: '1w Infinityh'},
|
||||
{srcset: 'data:,a 1w NaNh', expect: '', desc: '1w NaNh'},
|
||||
{srcset: 'data:,a 0x1h', expect: '', desc: '0x1h'},
|
||||
{srcset: 'data:,a 0X1h', expect: '', desc: '0X1h'},
|
||||
{srcset: 'data:,a 1w 1h', expect: '', desc: 'trailing U+0001'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+00A0'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+1680'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+2000'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+2001'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+2002'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+2003'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+2004'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+2005'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+2006'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+2007'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+2008'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+2009'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+200A'},
|
||||
{srcset: 'data:,a 1w 1‌h', expect: '', desc: 'trailing U+200C'},
|
||||
{srcset: 'data:,a 1w 1‍h', expect: '', desc: 'trailing U+200D'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+202F'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+205F'},
|
||||
{srcset: 'data:,a 1w 1 h', expect: '', desc: 'trailing U+3000'},
|
||||
{srcset: 'data:,a 1w 1h', expect: '', desc: 'trailing U+FEFF'},
|
||||
{srcset: 'data:,a 1w 1h', expect: '', desc: 'leading U+0001'},
|
||||
{srcset: 'data:,a 1w 1h', expect: '', desc: 'leading U+00A0'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+1680'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+2000'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+2001'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+2002'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+2003'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+2004'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+2005'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+2006'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+2007'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+2008'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+2009'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+200A'},
|
||||
{srcset: 'data:,a 1w ‌1h', expect: '', desc: 'leading U+200C'},
|
||||
{srcset: 'data:,a 1w ‍1h', expect: '', desc: 'leading U+200D'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+202F'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+205F'},
|
||||
{srcset: 'data:,a 1w  1h', expect: '', desc: 'leading U+3000'},
|
||||
{srcset: 'data:,a 1w 1h', expect: '', desc: 'leading U+FEFF'}
				]
			}
		];

		// HTML Entities are much easier to troubleshoot in console.
		he.encode.options.useNamedReferences = true;

		function runTest(test) {
			var origAttr = test.srcset;
			var attrDecoded = he.decode(origAttr);
			var parsed = parseSrcset(attrDecoded);

			var firstCandidate = parsed[0];

			var url = "";
			var encodedUrl = "";

			if (firstCandidate) {
				url = firstCandidate.url;
			}

			// Must re-encode url prior to comparison with expected string.
			if (url) {
				encodedUrl = he.encode(url);
			}

			console.log("");
			console.log(test.desc);
			console.log("origAttr: '" + origAttr + "'");
			console.log("attrDecoded: '" + attrDecoded + "'");
			console.log("parsed: ", parsed);
			console.log("url: '" + url + "'");
			console.log("encodedUrl: '" + encodedUrl + "'");

			tdd.test( test.desc , function() {
				assert.strictEqual(encodedUrl, test.expect, "passed" );
			});
		}

		function runTestGroup(testGroup) {
			var j;
			var testArray = testGroup.testArray;

			// Group Tests
			tdd.suite(testGroup.groupName, function() {

				for (j = 0; j < testArray.length; j++) {
					runTest(testArray[j]);
				}
			});
		}

		var i;
		var w3CtestsLength = w3Ctests.length;

		for (i = 0; i < w3CtestsLength; i++) {
			runTestGroup(w3Ctests[i]);
		}

		// tdd.test('First Test', function () {
		// 	var parsed = parseSrcset('data:,a 1x');
		// 	var url = parsed[0].url;
		//
		// 	console.log("parsed: ", parsed);
		// 	console.log("url: ", url);
		//
		// 	assert.strictEqual(parsed, parsed, 'should be');
		//
		// 	// assert.strictEqual(url, 'data:,a', 'should be');
		// });

		// assert.strictEqual(parseSrcset('data:,a 1x')[0], 'data:,a', 'plain url with descriptor');

		// tdd.test('Second Test', function () {
		// 	assert.strictEqual(5, 5, '5 is itself, right?');
		// });

	});
});
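The runTest flow above decodes HTML entities in the stored srcset strings before parsing and re-encodes the parsed URL before comparing it with the expected string. A small sketch of that round trip, assuming the vendored tests/he.js exposes the standard he.decode/he.encode API and that the require paths shown are adjusted to wherever the sketch is run from:

```js
// Sketch of the decode -> parse -> encode round trip used by runTest above.
var he = require('./he');                        // vendored entity codec (assumed path)
var parseSrcset = require('../src/parse-srcset');

var origAttr = 'data:,a&#x9;&#x9;1x&#x9;';       // tabs stored as HTML entities
var attrDecoded = he.decode(origAttr);           // 'data:,a\t\t1x\t'
var parsed = parseSrcset(attrDecoded);           // [ { url: 'data:,a', d: 1 } ]
var encodedUrl = he.encode(parsed[0].url);       // 'data:,a' (nothing needs escaping here)
```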