Initial commit

Patrick Marsceill
2017-03-09 13:16:08 -05:00
commit b7b0d0d7bf
4147 changed files with 401224 additions and 0 deletions

node_modules/css-tokenize/.npmignore (generated, vendored, normal file, 28 additions)

@@ -0,0 +1,28 @@
# Logs
logs
*.log

# Runtime data
pids
*.pid
*.seed

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage

# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release

# Dependency directory
# Commenting this out is preferred by some people, see
# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git-
node_modules

# Users Environment Variables
.lock-wscript

node_modules/css-tokenize/.travis.yml (generated, vendored, normal file, 4 additions)

@@ -0,0 +1,4 @@
language: node_js
node_js:
  - "0.11"
  - "0.10"

node_modules/css-tokenize/LICENSE (generated, vendored, normal file, 22 additions)

@@ -0,0 +1,22 @@
The MIT License (MIT)

Copyright (c) 2014 Anand Thakker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

node_modules/css-tokenize/README.md (generated, vendored, normal file, 100 additions)

@@ -0,0 +1,100 @@
css-tokenize
============
Coarsely tokenize a stream of CSS, largely modeled after
[substack/html-tokenize](https://github.com/substack/html-tokenize).
```javascript
var tokenize = require('css-tokenize'),
    through = require('through2');

process.stdin
    .pipe(tokenize())
    .pipe(through.obj(function(token, enc, next) {
        token[1] = token[1].toString(); // it's a buffer slice
        console.log('TOKEN', token);
        next();
    }))
```
Input:

```css

div {
  background: red;
}

.cls {
  color: green;
}

#id {
  font-size: 10px;
}

/* comment */

@media screen and (min-width: 1000px) {
  a {
    text-decoration: underline;
  }
}

a:hover {
  font-weight: bold; 
}

section 


{
  margin: 0;
  /* comment within a rule */
  padding: 5px;
}


body > * {
  
}
```
Output:

```
TOKEN [ 'root', '\n' ]
TOKEN [ 'rule_start', 'div {' ]
TOKEN [ 'rule', '\n  background: red;\n' ]
TOKEN [ 'rule_end', '}' ]
TOKEN [ 'root', '\n\n' ]
TOKEN [ 'rule_start', '.cls {' ]
TOKEN [ 'rule', '\n  color: green;\n' ]
TOKEN [ 'rule_end', '}' ]
TOKEN [ 'root', '\n\n' ]
TOKEN [ 'rule_start', '#id {' ]
TOKEN [ 'rule', '\n  font-size: 10px;\n' ]
TOKEN [ 'rule_end', '}' ]
TOKEN [ 'comment', '\n\n/* comment */' ]
TOKEN [ 'space', '\n\n' ]
TOKEN [ 'atrule_start', '@media screen and (min-width: 1000px) {' ]
TOKEN [ 'atrule', '\n  ' ]
TOKEN [ 'rule_start', 'a {' ]
TOKEN [ 'rule', '\n    text-decoration: underline;\n  ' ]
TOKEN [ 'rule_end', '}' ]
TOKEN [ 'atrule', '\n' ]
TOKEN [ 'atrule_end', '}' ]
TOKEN [ 'root', '\n\n' ]
TOKEN [ 'rule_start', 'a:hover {' ]
TOKEN [ 'rule', '\n  font-weight: bold; \n' ]
TOKEN [ 'rule_end', '}' ]
TOKEN [ 'root', '\n\n' ]
TOKEN [ 'rule_start', 'section \n\n\n{' ]
TOKEN [ 'rule', '\n  margin: 0;\n  ' ]
TOKEN [ 'comment', '/* comment within a rule */' ]
TOKEN [ 'rule', '\n  padding: 5px;\n' ]
TOKEN [ 'rule_end', '}' ]
TOKEN [ 'root', '\n\n\n' ]
TOKEN [ 'rule_start', 'body > * {' ]
TOKEN [ 'rule', '\n  \n' ]
TOKEN [ 'rule_end', '}' ]
TOKEN [ 'root', '\n' ]
```
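
Each token is a two-element array: a type string and a buffer slice of the input. A downstream transform can filter on the type tag; for instance, this small sketch (not part of the package, just an illustration using the same `through2` API as the example above) collects every selector by watching for `rule_start` tokens:

```javascript
var tokenize = require('css-tokenize'),
    through = require('through2');

var selectors = [];
process.stdin
    .pipe(tokenize())
    .pipe(through.obj(function(token, enc, next) {
        if (token[0] === 'rule_start') {
            // e.g. 'a:hover {' becomes 'a:hover'; the trimming rule is ours, not the library's.
            selectors.push(token[1].toString().replace(/\s*\{$/, '').trim());
        }
        next();
    }, function(done) {
        // flush callback: runs once the token stream ends.
        console.log(selectors);
        done();
    }));
```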

node_modules/css-tokenize/example.js (generated, vendored, normal file, 13 additions)

@@ -0,0 +1,13 @@
var tokenize = require('./'),
    through = require('through2'),
    fs = require('fs');

var input = process.argv[2] ? fs.createReadStream(process.argv[2]) : process.stdin;

input.pipe(tokenize())
    .pipe(through.obj(function(token, enc, next) {
        token[1] = token[1].toString(); // it's a buffer slice
        console.log('TOKEN', token);
        next();
    }))

node_modules/css-tokenize/index.js (generated, vendored, normal file, 172 additions)

@@ -0,0 +1,172 @@
/**
 * The approach here owes a LOT to https://github.com/substack/html-tokenize.
 */

var Transform = require('readable-stream').Transform;
var inherits = require('inherits');

inherits(Tokenize, Transform);

module.exports = Tokenize;

var codes = {
  fslash: '/'.charCodeAt(0),
  bslash: '\\'.charCodeAt(0),
  lparen: '('.charCodeAt(0),
  rparen: ')'.charCodeAt(0),
  lbrace: '{'.charCodeAt(0),
  rbrace: '}'.charCodeAt(0),
  lbrack: '['.charCodeAt(0),
  rbrack: ']'.charCodeAt(0),
  squote: '\''.charCodeAt(0),
  dquote: '"'.charCodeAt(0),
  at: '@'.charCodeAt(0),
  semi: ';'.charCodeAt(0),
  asterisk: '*'.charCodeAt(0)
}

var sequences = {
  comment_start: '/*',
  comment_end: '*/'
}

function charCodes(s) { return s.split('').map(function(c){return c.charCodeAt(0)}) }

var sets = {
  space: charCodes(' \t\n\r\f'),
  open_brackets: [codes.lparen, codes.lbrace, codes.lbrack],
  newline: charCodes('\n\r\f')
}

function Tokenize() {
  if (!(this instanceof Tokenize)) return new Tokenize;
  Transform.call(this);
  this._readableState.objectMode = true;
  this.state = [['root']];   // stack of [stateName, stateData] frames
  this._input = null;        // buffer the input for read-ahead
  this._position = 0;        // current position in this._input
  this.buffers = [];         // collect data for current token
}

Tokenize.prototype._transform = function(buf, enc, next) {
  var input = this._input = this._input ? Buffer.concat([ this._input, buf ]) : buf;
  for(var i = this._position; i < input.length; i++) {
    var c = input[i];
    var state = this.state[this.state.length - 1][0],
        stateData = this.state[this.state.length - 1][1],
        end = null;
    // console.log(i, c, this.state);

    /* comments */
    if(i === input.length - 1
       && (('comment' === state && c === codes.asterisk)
           || c === codes.fslash)) {
      // need more data: bail out; the unconsumed tail is saved by the
      // slice after the loop. (Slicing here as well would discard data
      // whenever this._position was non-zero.)
      break;
    }
    else if('comment' !== state
            && c === codes.fslash && input[i+1] === codes.asterisk) {
      if('root' !== state) end = [].concat(state);
      i--; // backup to save the '/*' for the comment token.
      this.state.push(['comment'])
    }
    else if('comment' === state
            && c === codes.asterisk && input[i+1] === codes.fslash) {
      i++;
      end = this.state.pop();
    }
    /* strings */
    else if('string' === state
            && c === stateData) {
      this.state.pop();
    }
    // parenthesized so the state check guards both quote kinds; without the
    // parens a double quote would open a nested string inside a string.
    else if('string' !== state
            && (codes.squote === c || codes.dquote === c)) {
      this.state.push(['string', c]);
    }
    /* brackets */
    // else if(codes.lparen === c) {
    //   this.state.push(['brackets', codes.rparen]);
    // }
    // else if(codes.lbrack === c) {
    //   this.state.push(['brackets', codes.rbrack]);
    // }
    // else if('brackets' === state
    //         && c === stateData) {
    //   this.state.pop();
    // }
    /* rules */
    else if('rule_start' === state
            && c === codes.lbrace) {
      end = this.state.pop();
      this.state.push(['rule']);
    }
    else if('atrule_start' === state
            && c === codes.lbrace) {
      end = this.state.pop();
      this.state.push(['atrule']);
    }
    else if(('rule' === state || 'atrule' === state)
            && c === codes.rbrace) {
      end = this.state.pop();
      i--; // backup to save the ending curly brace for the rule_end token.
      this.state.push([ state + '_end' ]);
    }
    else if(('rule_end' === state || 'atrule_end' === state)
            && c === codes.rbrace) {
      end = this.state.pop();
    }
    else if('root' === state
            && c === codes.at) {
      end = ['space'];
      i--;
      this.state.push(['atrule_start'])
    }
    // rules can start from the root or nested within at-rules.
    else if(sets.space.indexOf(c) < 0)
    {
      if('root' === state) {
        end = ['root'];
        i--;
        this.state.push(['rule_start'])
      }
      else if('atrule' === state) {
        end = ['atrule'];
        i--;
        this.state.push(['rule_start']);
      }
    }

    if(end && i >= this._position) {
      var out;
      this.push(out = [end[0], input.slice(this._position, i+1)]);
      this._position = i+1;
      end = null;
    }
  }

  if(this._position < this._input.length) {
    this._input = this._input.slice(this._position);
    this._position = 0;
  }
  else {
    this._input = null;
    this._position = 0;
  }
  next();
}

Tokenize.prototype._flush = function (next) {
  if(this._input)
    this.push([this.state.pop()[0], this._input.slice(this._position)]);
  if(this.state.length !== 0) {
    console.warn("[css-tokenize] unfinished business", this.state);
  }
  this.push(null);
  next();
};
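
Because `_transform` stashes any unconsumed tail in `this._input`, chunk boundaries should be invisible to consumers, even when a `/*` or `*/` is split across writes. A quick sketch (not part of the package; assumes a Node version with `Buffer.from` and is run from the package root) that feeds the tokenizer one byte at a time:

```javascript
var tokenize = require('./'),
    through = require('through2');

var tok = tokenize();
tok.pipe(through.obj(function(token, enc, next) {
    console.log(token[0], JSON.stringify(token[1].toString()));
    next();
}));

// Writing one byte at a time must yield the same tokens as a single write:
// rule_start "a {", rule "\n  color: red;\n", rule_end "}",
// comment "\n/* done */", root "\n".
var css = Buffer.from('a {\n  color: red;\n}\n/* done */\n');
for (var i = 0; i < css.length; i++) tok.write(css.slice(i, i + 1));
tok.end();
```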

node_modules/css-tokenize/package.json (generated, vendored, normal file, 86 additions)

@@ -0,0 +1,86 @@
{
  "_args": [
    [
      "css-tokenize@^1.0.1",
      "/Users/pmarsceill/_projects/just-the-docs/node_modules/css-rule-stream"
    ]
  ],
  "_from": "css-tokenize@>=1.0.1 <2.0.0",
  "_id": "css-tokenize@1.0.1",
  "_inCache": true,
  "_installable": true,
  "_location": "/css-tokenize",
  "_nodeVersion": "0.11.14",
  "_npmUser": {
    "email": "vestibule@anandthakker.net",
    "name": "anandthakker"
  },
  "_npmVersion": "2.1.8",
  "_phantomChildren": {},
  "_requested": {
    "name": "css-tokenize",
    "raw": "css-tokenize@^1.0.1",
    "rawSpec": "^1.0.1",
    "scope": null,
    "spec": ">=1.0.1 <2.0.0",
    "type": "range"
  },
  "_requiredBy": [
    "/css-rule-stream"
  ],
  "_resolved": "https://registry.npmjs.org/css-tokenize/-/css-tokenize-1.0.1.tgz",
  "_shasum": "4625cb1eda21c143858b7f81d6803c1d26fc14be",
  "_shrinkwrap": null,
  "_spec": "css-tokenize@^1.0.1",
  "_where": "/Users/pmarsceill/_projects/just-the-docs/node_modules/css-rule-stream",
  "author": {
    "email": "vestibule@anandthakker.net",
    "name": "Anand Thakker",
    "url": "http://anandthakker.net"
  },
  "bugs": {
    "url": "https://github.com/anandthakker/css-tokenize/issues"
  },
  "dependencies": {
    "inherits": "^2.0.1",
    "readable-stream": "^1.0.33"
  },
  "description": "Transform stream that tokenizes CSS",
  "devDependencies": {
    "tape": "^3.0.3",
    "through2": "^0.6.3"
  },
  "directories": {},
  "dist": {
    "shasum": "4625cb1eda21c143858b7f81d6803c1d26fc14be",
    "tarball": "https://registry.npmjs.org/css-tokenize/-/css-tokenize-1.0.1.tgz"
  },
  "gitHead": "d4acf6d2010cbbb61f69817f53b249471468d7a9",
  "homepage": "https://github.com/anandthakker/css-tokenize",
  "keywords": [
    "css",
    "parse",
    "stream",
    "streaming",
    "tokenize"
  ],
  "license": "MIT",
  "main": "index.js",
  "maintainers": [
    {
      "name": "anandthakker",
      "email": "vestibule@anandthakker.net"
    }
  ],
  "name": "css-tokenize",
  "optionalDependencies": {},
  "readme": "ERROR: No README data found!",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/anandthakker/css-tokenize.git"
  },
  "scripts": {
    "test": "tape test/*.js"
  },
  "version": "1.0.1"
}

node_modules/css-tokenize/test/base.js (generated, vendored, normal file, 100 additions)

@@ -0,0 +1,100 @@
var fs = require('fs'),
    test = require('tape'),
    through = require('through2'),
    tokenize = require('../')

test('basic', function(t) {
  var tok = tokenize();
  var input = "html { font-size: 10px; }"
  var expected = [
    ['rule_start', 'html {'],
    ['rule', ' font-size: 10px; '],
    ['rule_end', '}']
  ]

  t.plan(expected.length);
  tok.pipe(through.obj(function(token, enc, next) {
    token[1] = token[1].toString();
    if (expected.length > 0) t.same(token, expected.shift())
    next();
  }))
  tok.end(input);
})

test('at-rule nesting', function(t) {
  var tok = tokenize();
  var expected = [
    [ 'atrule_start', '@media screen and (min-width: 1000px) {' ],
    [ 'atrule', '\n  ' ],
    [ 'rule_start', 'a {' ],
    [ 'rule', '\n    text-decoration: underline;\n  ' ],
    [ 'rule_end', '}' ],
    [ 'atrule', '\n' ],
    [ 'atrule_end', '}' ],
    [ 'root', '\n' ]
  ]

  t.plan(expected.length);
  tok.pipe(through.obj(function(token, enc, next) {
    token[1] = token[1].toString();
    if (expected.length > 0) t.same(token, expected.shift())
    next();
  }))
  fs.createReadStream(__dirname + '/nested.css').pipe(tok);
})

test('gauntlet', function(t) {
  var tok = tokenize();
  var expected = [
    ['root', '\n'],
    ['rule_start', 'div {'],
    ['rule', '\n  background: red;\n'],
    ['rule_end', '}'],
    ['root', '\n\n'],
    ['rule_start', '.cls {'],
    ['rule', '\n  color: green;\n'],
    ['rule_end', '}'],
    ['root', '\n\n'],
    ['rule_start', '#id {'],
    ['rule', '\n  font-size: 10px;\n'],
    ['rule_end', '}'],
    ['comment', '\n\n/* comment */'],
    ['space', '\n\n'],
    ['atrule_start', '@media screen and (min-width: 1000px) {'],
    ['atrule', '\n  '],
    ['rule_start', 'a {'],
    ['rule', '\n    text-decoration: underline;\n  '],
    ['rule_end', '}'],
    ['atrule', '\n'],
    ['atrule_end', '}'],
    ['root', '\n\n'],
    ['rule_start', 'a:hover {'],
    ['rule', '\n  font-weight: bold; \n'],
    ['rule_end', '}'],
    ['root', '\n\n'],
    ['rule_start', 'section \n\n\n{'],
    ['rule', '\n  margin: 0;\n  '],
    ['comment', '/* comment within a rule */'],
    ['rule', '\n  padding: 5px;\n'],
    ['rule_end', '}'],
    ['root', '\n\n\n'],
    ['rule_start', 'body > * {'],
    ['rule', '\n  \n'],
    ['rule_end', '}'],
    ['root', '\n']
  ]

  t.plan(expected.length);
  tok.pipe(through.obj(function(token, enc, next) {
    token[1] = token[1].toString();
    if (expected.length > 0) t.same(token, expected.shift())
    next();
  }));
  fs.createReadStream(__dirname + '/gauntlet.css').pipe(tok);
})
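
The string states in index.js keep braces inside quoted values from ending a rule early. A sketch of one more tape test for that edge case (an illustration, not part of the shipped suite) could look like:

```javascript
test('braces inside quoted strings', function(t) {
  var tok = tokenize();
  var expected = [
    ['rule_start', 'a {'],
    // the '}' inside the quoted URL must stay part of the rule body
    ['rule', ' background: url("x}y"); '],
    ['rule_end', '}']
  ]

  t.plan(expected.length);
  tok.pipe(through.obj(function(token, enc, next) {
    token[1] = token[1].toString();
    if (expected.length > 0) t.same(token, expected.shift())
    next();
  }))
  tok.end('a { background: url("x}y"); }');
})
```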

node_modules/css-tokenize/test/brackets.css (generated, vendored, normal file, 7 additions)

@@ -0,0 +1,7 @@
.hello {
  background: url(http://blahlbahlbah.com);
}

.explore-mail-tease{padding-top:20px;overflow:hidden;background:#202021 url(/assets/modules/home/octicons-bg-f51579234572c7286b1ead7a6408f2d43c7a0520dbb09f9d2b8b6b59024a01a0.png) center repeat;border-bottom:1px solid #ddd}.explore-mail-tease h3{color:#fff;text-align:center}.explore-mail-tease img{margin-bottom:-5px}

node_modules/css-tokenize/test/gauntlet.css (generated, vendored, normal file, 38 additions)

@@ -0,0 +1,38 @@

div {
  background: red;
}

.cls {
  color: green;
}

#id {
  font-size: 10px;
}

/* comment */

@media screen and (min-width: 1000px) {
  a {
    text-decoration: underline;
  }
}

a:hover {
  font-weight: bold; 
}

section 


{
  margin: 0;
  /* comment within a rule */
  padding: 5px;
}


body > * {
  
}

node_modules/css-tokenize/test/nested.css (generated, vendored, normal file, 5 additions)

@@ -0,0 +1,5 @@
@media screen and (min-width: 1000px) {
  a {
    text-decoration: underline;
  }
}