Initial commit.
app.js (new file, 79 lines)
@@ -0,0 +1,79 @@
var express = require('express');
var path = require('path');
var favicon = require('serve-favicon');
var logger = require('morgan');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var SSE = require('express-sse');
var request = require('request');

var fs = require('fs');
var nconf = require('nconf');

var index = require('./routes/index');
var index2 = require('./routes/index2');
var users = require('./routes/users');
var api = require('./routes/api');

var app = express();

var sse = new SSE();

// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'ejs');

// uncomment after placing your favicon in /public
//app.use(favicon(path.join(__dirname, 'public', 'favicon.ico')));
app.use(logger('dev'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));

app.get('/stream', sse.init);

app.use('/', index);
app.use('/index2', index2);
app.use('/users', users);
app.use('/api', api);

// round-robin index into the configured server list
var i = 0;

// every 5 seconds, poll the next server's /api/stats and push the result to SSE subscribers
setInterval(function() {
  nconf.file({ file: 'config.json' });
  var numberOfServers = nconf.get('configuration').servers.length;
  var message = {};
  message.server = i;
  var ip = nconf.get('configuration').servers[i].ip;
  var port = nconf.get('configuration').servers[i].port;
  request('http://' + ip + ':' + port + '/api/stats', function(err, response, body) {
    message.body = body;
    sse.send(message);
    console.log(message);
  });
  i++;
  if (i >= numberOfServers) {
    i = 0;
  }
}, 5000);

// catch 404 and forward to error handler
app.use(function(req, res, next) {
  var err = new Error('Not Found');
  err.status = 404;
  next(err);
});

// error handler
app.use(function(err, req, res, next) {
  // set locals, only providing error in development
  res.locals.message = err.message;
  res.locals.error = req.app.get('env') === 'development' ? err : {};

  // render the error page
  res.status(err.status || 500);
  res.render('error');
});

module.exports = app;
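app.js exposes the polled stats as a server-sent event stream on `/stream` via `express-sse`. A minimal sketch of a browser-side consumer is shown below; it is illustrative only and not part of this commit, and it assumes `express-sse` delivers `sse.send()` payloads as JSON on the default `message` event.

```js
// Hypothetical client for the /stream endpoint above (illustrative, not part of this commit).
var source = new EventSource('/stream');

source.onmessage = function (event) {
  // express-sse JSON-encodes whatever was passed to sse.send()
  var message = JSON.parse(event.data);
  console.log('stats from server index ' + message.server + ':', message.body);
};
```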
bin/www (new file, 90 lines)
@@ -0,0 +1,90 @@
#!/usr/bin/env node

/**
 * Module dependencies.
 */

var app = require('../app');
var debug = require('debug')('zoneadmin-master:server');
var http = require('http');

/**
 * Get port from environment and store in Express.
 */

var port = normalizePort(process.env.PORT || '3001');
app.set('port', port);

/**
 * Create HTTP server.
 */

var server = http.createServer(app);

/**
 * Listen on provided port, on all network interfaces.
 */

server.listen(port);
server.on('error', onError);
server.on('listening', onListening);

/**
 * Normalize a port into a number, string, or false.
 */

function normalizePort(val) {
  var port = parseInt(val, 10);

  if (isNaN(port)) {
    // named pipe
    return val;
  }

  if (port >= 0) {
    // port number
    return port;
  }

  return false;
}

/**
 * Event listener for HTTP server "error" event.
 */

function onError(error) {
  if (error.syscall !== 'listen') {
    throw error;
  }

  var bind = typeof port === 'string'
    ? 'Pipe ' + port
    : 'Port ' + port;

  // handle specific listen errors with friendly messages
  switch (error.code) {
    case 'EACCES':
      console.error(bind + ' requires elevated privileges');
      process.exit(1);
      break;
    case 'EADDRINUSE':
      console.error(bind + ' is already in use');
      process.exit(1);
      break;
    default:
      throw error;
  }
}

/**
 * Event listener for HTTP server "listening" event.
 */

function onListening() {
  var addr = server.address();
  var bind = typeof addr === 'string'
    ? 'pipe ' + addr
    : 'port ' + addr.port;
  debug('Listening on ' + bind);
}
config.json (new file, 14 lines)
@@ -0,0 +1,14 @@
{
  "configuration": {
    "servers": [
      { "ip": "192.168.5.105", "port": "3300", "name": "HP Proliant Micro", "description": "Main Server" },
      { "ip": "192.168.5.115", "port": "3300", "name": "VirtualBox1", "description": "Virtual Box Server na LenovoX220" },
      { "ip": "192.168.5.144", "port": "3300", "name": "VirtualBox2", "description": "Virtual Box Server na MacMini" },
      { "ip": "192.168.5.107", "port": "3300", "name": "MacBook Pro", "description": "Dev Maschine" },
      { "ip": "192.168.5.107", "port": "3300", "name": "MacBook Pro", "description": "Dev Maschine" },
      { "ip": "192.168.5.107", "port": "3300", "name": "MacBook Pro", "description": "Dev Maschine" },
      { "ip": "192.168.5.107", "port": "3300", "name": "MacBook Pro", "description": "Dev Maschine" },
      { "ip": "127.0.0.1", "port": "3300", "name": "Localhost", "description": "This Laptop" }
    ]
  }
}
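app.js reloads this file on every polling tick and walks the `configuration.servers` array round-robin. A minimal sketch of reading one entry with `nconf`, mirroring the lookups in app.js (the fixed index 0 is illustrative only):

```js
// Illustrative read of config.json with nconf, mirroring the lookups in app.js.
var nconf = require('nconf');
nconf.file({ file: 'config.json' });

var servers = nconf.get('configuration').servers;
var entry = servers[0]; // app.js cycles this index from 0 to servers.length - 1
console.log(entry.name + ' -> http://' + entry.ip + ':' + entry.port + '/api/stats');
```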
docs/install.md (new file, 45 lines)
@@ -0,0 +1,45 @@
How to set up Zoneadmin
=======================

Create ZFS
----------

    zfs create rpool/zones
    zfs set mountpoint=/zones rpool/zones
    zfs set compression=on rpool/zones

(http://docs.oracle.com/cd/E19253-01/819-5461/6n7ht6qu6/index.html)

Create user
-----------

    useradd -m -d /export/home/zoneadmin -s /bin/bash zoneadmin

Add privileges to the user
--------------------------

    usermod -P'Zone Security' zoneadmin
    usermod -P'Zone Management' zoneadmin
    usermod -P'Network Management' zoneadmin
    usermod -P'Network Link Security' zoneadmin

Give user permissions
---------------------
to boot, stop ... any zone you want to manage through Zoneadmin

    zonecfg -z lxzone "add admin;set user=zoneadmin;set auths=login,manage,config;end"

Test it
-------

    pfexec zoneadm -z lxzone boot
    pfexec zoneadm -z lxzone shutdown

More Info
---------
http://www.solaris-training.com/sol11tips/Solaris_11_Zones_P2_properties.pdf

Additional Reading
------------------

- http://www.clausconrad.com/blog/run-a-command-in-all-non-global-solaris-zones
docs/linki.md (new file, 7 lines)
@@ -0,0 +1,7 @@
Links to various documentation
==============================

Might come in handy ...
- http://software.fujitsu.com/jp/manual/manualfiles/m120005/j2s20340/01enz2c/j0340-00-03-02-04.html
- http://solaris.reys.net/shared-ip-and-exclusive-ip-in-solaris-zones/
1
node_modules/.bin/har-validator
generated
vendored
Symbolic link
1
node_modules/.bin/har-validator
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../har-validator/bin/har-validator
|
1
node_modules/.bin/mime
generated
vendored
Symbolic link
1
node_modules/.bin/mime
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../mime/cli.js
|
1
node_modules/.bin/sshpk-conv
generated
vendored
Symbolic link
1
node_modules/.bin/sshpk-conv
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../sshpk/bin/sshpk-conv
|
1
node_modules/.bin/sshpk-sign
generated
vendored
Symbolic link
1
node_modules/.bin/sshpk-sign
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../sshpk/bin/sshpk-sign
|
1
node_modules/.bin/sshpk-verify
generated
vendored
Symbolic link
1
node_modules/.bin/sshpk-verify
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../sshpk/bin/sshpk-verify
|
1
node_modules/.bin/uuid
generated
vendored
Symbolic link
1
node_modules/.bin/uuid
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../uuid/bin/uuid
|
1
node_modules/.bin/window-size
generated
vendored
Symbolic link
1
node_modules/.bin/window-size
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
|||||||
|
../window-size/cli.js
|
212
node_modules/accepts/HISTORY.md
generated
vendored
Normal file
212
node_modules/accepts/HISTORY.md
generated
vendored
Normal file
@ -0,0 +1,212 @@
|
|||||||
|
1.3.3 / 2016-05-02
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.11
|
||||||
|
- deps: mime-db@~1.23.0
|
||||||
|
* deps: negotiator@0.6.1
|
||||||
|
- perf: improve `Accept` parsing speed
|
||||||
|
- perf: improve `Accept-Charset` parsing speed
|
||||||
|
- perf: improve `Accept-Encoding` parsing speed
|
||||||
|
- perf: improve `Accept-Language` parsing speed
|
||||||
|
|
||||||
|
1.3.2 / 2016-03-08
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.10
|
||||||
|
- Fix extension of `application/dash+xml`
|
||||||
|
- Update primary extension for `audio/mp4`
|
||||||
|
- deps: mime-db@~1.22.0
|
||||||
|
|
||||||
|
1.3.1 / 2016-01-19
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.9
|
||||||
|
- deps: mime-db@~1.21.0
|
||||||
|
|
||||||
|
1.3.0 / 2015-09-29
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.7
|
||||||
|
- deps: mime-db@~1.19.0
|
||||||
|
* deps: negotiator@0.6.0
|
||||||
|
- Fix including type extensions in parameters in `Accept` parsing
|
||||||
|
- Fix parsing `Accept` parameters with quoted equals
|
||||||
|
- Fix parsing `Accept` parameters with quoted semicolons
|
||||||
|
- Lazy-load modules from main entry point
|
||||||
|
- perf: delay type concatenation until needed
|
||||||
|
- perf: enable strict mode
|
||||||
|
- perf: hoist regular expressions
|
||||||
|
- perf: remove closures getting spec properties
|
||||||
|
- perf: remove a closure from media type parsing
|
||||||
|
- perf: remove property delete from media type parsing
|
||||||
|
|
||||||
|
1.2.13 / 2015-09-06
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.6
|
||||||
|
- deps: mime-db@~1.18.0
|
||||||
|
|
||||||
|
1.2.12 / 2015-07-30
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.4
|
||||||
|
- deps: mime-db@~1.16.0
|
||||||
|
|
||||||
|
1.2.11 / 2015-07-16
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.3
|
||||||
|
- deps: mime-db@~1.15.0
|
||||||
|
|
||||||
|
1.2.10 / 2015-07-01
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.2
|
||||||
|
- deps: mime-db@~1.14.0
|
||||||
|
|
||||||
|
1.2.9 / 2015-06-08
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.1
|
||||||
|
- perf: fix deopt during mapping
|
||||||
|
|
||||||
|
1.2.8 / 2015-06-07
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.1.0
|
||||||
|
- deps: mime-db@~1.13.0
|
||||||
|
* perf: avoid argument reassignment & argument slice
|
||||||
|
* perf: avoid negotiator recursive construction
|
||||||
|
* perf: enable strict mode
|
||||||
|
* perf: remove unnecessary bitwise operator
|
||||||
|
|
||||||
|
1.2.7 / 2015-05-10
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.5.3
|
||||||
|
- Fix media type parameter matching to be case-insensitive
|
||||||
|
|
||||||
|
1.2.6 / 2015-05-07
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.11
|
||||||
|
- deps: mime-db@~1.9.1
|
||||||
|
* deps: negotiator@0.5.2
|
||||||
|
- Fix comparing media types with quoted values
|
||||||
|
- Fix splitting media types with quoted commas
|
||||||
|
|
||||||
|
1.2.5 / 2015-03-13
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.10
|
||||||
|
- deps: mime-db@~1.8.0
|
||||||
|
|
||||||
|
1.2.4 / 2015-02-14
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Support Node.js 0.6
|
||||||
|
* deps: mime-types@~2.0.9
|
||||||
|
- deps: mime-db@~1.7.0
|
||||||
|
* deps: negotiator@0.5.1
|
||||||
|
- Fix preference sorting to be stable for long acceptable lists
|
||||||
|
|
||||||
|
1.2.3 / 2015-01-31
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.8
|
||||||
|
- deps: mime-db@~1.6.0
|
||||||
|
|
||||||
|
1.2.2 / 2014-12-30
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.7
|
||||||
|
- deps: mime-db@~1.5.0
|
||||||
|
|
||||||
|
1.2.1 / 2014-12-30
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.5
|
||||||
|
- deps: mime-db@~1.3.1
|
||||||
|
|
||||||
|
1.2.0 / 2014-12-19
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.5.0
|
||||||
|
- Fix list return order when large accepted list
|
||||||
|
- Fix missing identity encoding when q=0 exists
|
||||||
|
- Remove dynamic building of Negotiator class
|
||||||
|
|
||||||
|
1.1.4 / 2014-12-10
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.4
|
||||||
|
- deps: mime-db@~1.3.0
|
||||||
|
|
||||||
|
1.1.3 / 2014-11-09
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.3
|
||||||
|
- deps: mime-db@~1.2.0
|
||||||
|
|
||||||
|
1.1.2 / 2014-10-14
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.4.9
|
||||||
|
- Fix error when media type has invalid parameter
|
||||||
|
|
||||||
|
1.1.1 / 2014-09-28
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: mime-types@~2.0.2
|
||||||
|
- deps: mime-db@~1.1.0
|
||||||
|
* deps: negotiator@0.4.8
|
||||||
|
- Fix all negotiations to be case-insensitive
|
||||||
|
- Stable sort preferences of same quality according to client order
|
||||||
|
|
||||||
|
1.1.0 / 2014-09-02
|
||||||
|
==================
|
||||||
|
|
||||||
|
* update `mime-types`
|
||||||
|
|
||||||
|
1.0.7 / 2014-07-04
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Fix wrong type returned from `type` when match after unknown extension
|
||||||
|
|
||||||
|
1.0.6 / 2014-06-24
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.4.7
|
||||||
|
|
||||||
|
1.0.5 / 2014-06-20
|
||||||
|
==================
|
||||||
|
|
||||||
|
* fix crash when unknown extension given
|
||||||
|
|
||||||
|
1.0.4 / 2014-06-19
|
||||||
|
==================
|
||||||
|
|
||||||
|
* use `mime-types`
|
||||||
|
|
||||||
|
1.0.3 / 2014-06-11
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: negotiator@0.4.6
|
||||||
|
- Order by specificity when quality is the same
|
||||||
|
|
||||||
|
1.0.2 / 2014-05-29
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Fix interpretation when header not in request
|
||||||
|
* deps: pin negotiator@0.4.5
|
||||||
|
|
||||||
|
1.0.1 / 2014-01-18
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Identity encoding isn't always acceptable
|
||||||
|
* deps: negotiator@~0.4.0
|
||||||
|
|
||||||
|
1.0.0 / 2013-12-27
|
||||||
|
==================
|
||||||
|
|
||||||
|
* Genesis
|
23
node_modules/accepts/LICENSE
generated
vendored
Normal file
23
node_modules/accepts/LICENSE
generated
vendored
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
(The MIT License)
|
||||||
|
|
||||||
|
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com>
|
||||||
|
Copyright (c) 2015 Douglas Christopher Wilson <doug@somethingdoug.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining
|
||||||
|
a copy of this software and associated documentation files (the
|
||||||
|
'Software'), to deal in the Software without restriction, including
|
||||||
|
without limitation the rights to use, copy, modify, merge, publish,
|
||||||
|
distribute, sublicense, and/or sell copies of the Software, and to
|
||||||
|
permit persons to whom the Software is furnished to do so, subject to
|
||||||
|
the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be
|
||||||
|
included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||||
|
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||||
|
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||||
|
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||||
|
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
135
node_modules/accepts/README.md
generated
vendored
Normal file
135
node_modules/accepts/README.md
generated
vendored
Normal file
@ -0,0 +1,135 @@
|
|||||||
|
# accepts
|
||||||
|
|
||||||
|
[![NPM Version][npm-image]][npm-url]
|
||||||
|
[![NPM Downloads][downloads-image]][downloads-url]
|
||||||
|
[![Node.js Version][node-version-image]][node-version-url]
|
||||||
|
[![Build Status][travis-image]][travis-url]
|
||||||
|
[![Test Coverage][coveralls-image]][coveralls-url]
|
||||||
|
|
||||||
|
Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator). Extracted from [koa](https://www.npmjs.com/package/koa) for general use.
|
||||||
|
|
||||||
|
In addition to negotiator, it allows:
|
||||||
|
|
||||||
|
- Allows types as an array or arguments list, ie `(['text/html', 'application/json'])` as well as `('text/html', 'application/json')`.
|
||||||
|
- Allows type shorthands such as `json`.
|
||||||
|
- Returns `false` when no types match
|
||||||
|
- Treats non-existent headers as `*`
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install accepts
|
||||||
|
```
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
```js
|
||||||
|
var accepts = require('accepts')
|
||||||
|
```
|
||||||
|
|
||||||
|
### accepts(req)
|
||||||
|
|
||||||
|
Create a new `Accepts` object for the given `req`.
|
||||||
|
|
||||||
|
#### .charset(charsets)
|
||||||
|
|
||||||
|
Return the first accepted charset. If nothing in `charsets` is accepted,
|
||||||
|
then `false` is returned.
|
||||||
|
|
||||||
|
#### .charsets()
|
||||||
|
|
||||||
|
Return the charsets that the request accepts, in the order of the client's
|
||||||
|
preference (most preferred first).
|
||||||
|
|
||||||
|
#### .encoding(encodings)
|
||||||
|
|
||||||
|
Return the first accepted encoding. If nothing in `encodings` is accepted,
|
||||||
|
then `false` is returned.
|
||||||
|
|
||||||
|
#### .encodings()
|
||||||
|
|
||||||
|
Return the encodings that the request accepts, in the order of the client's
|
||||||
|
preference (most preferred first).
|
||||||
|
|
||||||
|
#### .language(languages)
|
||||||
|
|
||||||
|
Return the first accepted language. If nothing in `languages` is accepted,
|
||||||
|
then `false` is returned.
|
||||||
|
|
||||||
|
#### .languages()
|
||||||
|
|
||||||
|
Return the languages that the request accepts, in the order of the client's
|
||||||
|
preference (most preferred first).
|
||||||
|
|
||||||
|
#### .type(types)
|
||||||
|
|
||||||
|
Return the first accepted type (and it is returned as the same text as what
|
||||||
|
appears in the `types` array). If nothing in `types` is accepted, then `false`
|
||||||
|
is returned.
|
||||||
|
|
||||||
|
The `types` array can contain full MIME types or file extensions. Any value
|
||||||
|
that is not a full MIME types is passed to `require('mime-types').lookup`.
|
||||||
|
|
||||||
|
#### .types()
|
||||||
|
|
||||||
|
Return the types that the request accepts, in the order of the client's
|
||||||
|
preference (most preferred first).
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
### Simple type negotiation
|
||||||
|
|
||||||
|
This simple example shows how to use `accepts` to return a different typed
|
||||||
|
respond body based on what the client wants to accept. The server lists it's
|
||||||
|
preferences in order and will get back the best match between the client and
|
||||||
|
server.
|
||||||
|
|
||||||
|
```js
|
||||||
|
var accepts = require('accepts')
|
||||||
|
var http = require('http')
|
||||||
|
|
||||||
|
function app(req, res) {
|
||||||
|
var accept = accepts(req)
|
||||||
|
|
||||||
|
// the order of this list is significant; should be server preferred order
|
||||||
|
switch(accept.type(['json', 'html'])) {
|
||||||
|
case 'json':
|
||||||
|
res.setHeader('Content-Type', 'application/json')
|
||||||
|
res.write('{"hello":"world!"}')
|
||||||
|
break
|
||||||
|
case 'html':
|
||||||
|
res.setHeader('Content-Type', 'text/html')
|
||||||
|
res.write('<b>hello, world!</b>')
|
||||||
|
break
|
||||||
|
default:
|
||||||
|
// the fallback is text/plain, so no need to specify it above
|
||||||
|
res.setHeader('Content-Type', 'text/plain')
|
||||||
|
res.write('hello, world!')
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
res.end()
|
||||||
|
}
|
||||||
|
|
||||||
|
http.createServer(app).listen(3000)
|
||||||
|
```
|
||||||
|
|
||||||
|
You can test this out with the cURL program:
|
||||||
|
```sh
|
||||||
|
curl -I -H'Accept: text/html' http://localhost:3000/
|
||||||
|
```
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
[MIT](LICENSE)
|
||||||
|
|
||||||
|
[npm-image]: https://img.shields.io/npm/v/accepts.svg
|
||||||
|
[npm-url]: https://npmjs.org/package/accepts
|
||||||
|
[node-version-image]: https://img.shields.io/node/v/accepts.svg
|
||||||
|
[node-version-url]: http://nodejs.org/download/
|
||||||
|
[travis-image]: https://img.shields.io/travis/jshttp/accepts/master.svg
|
||||||
|
[travis-url]: https://travis-ci.org/jshttp/accepts
|
||||||
|
[coveralls-image]: https://img.shields.io/coveralls/jshttp/accepts/master.svg
|
||||||
|
[coveralls-url]: https://coveralls.io/r/jshttp/accepts
|
||||||
|
[downloads-image]: https://img.shields.io/npm/dm/accepts.svg
|
||||||
|
[downloads-url]: https://npmjs.org/package/accepts
|
231
node_modules/accepts/index.js
generated
vendored
Normal file
231
node_modules/accepts/index.js
generated
vendored
Normal file
@ -0,0 +1,231 @@
|
|||||||
|
/*!
|
||||||
|
* accepts
|
||||||
|
* Copyright(c) 2014 Jonathan Ong
|
||||||
|
* Copyright(c) 2015 Douglas Christopher Wilson
|
||||||
|
* MIT Licensed
|
||||||
|
*/
|
||||||
|
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module dependencies.
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
var Negotiator = require('negotiator')
|
||||||
|
var mime = require('mime-types')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module exports.
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
module.exports = Accepts
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new Accepts object for the given req.
|
||||||
|
*
|
||||||
|
* @param {object} req
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
function Accepts(req) {
|
||||||
|
if (!(this instanceof Accepts))
|
||||||
|
return new Accepts(req)
|
||||||
|
|
||||||
|
this.headers = req.headers
|
||||||
|
this.negotiator = new Negotiator(req)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the given `type(s)` is acceptable, returning
|
||||||
|
* the best match when true, otherwise `undefined`, in which
|
||||||
|
* case you should respond with 406 "Not Acceptable".
|
||||||
|
*
|
||||||
|
* The `type` value may be a single mime type string
|
||||||
|
* such as "application/json", the extension name
|
||||||
|
* such as "json" or an array `["json", "html", "text/plain"]`. When a list
|
||||||
|
* or array is given the _best_ match, if any is returned.
|
||||||
|
*
|
||||||
|
* Examples:
|
||||||
|
*
|
||||||
|
* // Accept: text/html
|
||||||
|
* this.types('html');
|
||||||
|
* // => "html"
|
||||||
|
*
|
||||||
|
* // Accept: text/*, application/json
|
||||||
|
* this.types('html');
|
||||||
|
* // => "html"
|
||||||
|
* this.types('text/html');
|
||||||
|
* // => "text/html"
|
||||||
|
* this.types('json', 'text');
|
||||||
|
* // => "json"
|
||||||
|
* this.types('application/json');
|
||||||
|
* // => "application/json"
|
||||||
|
*
|
||||||
|
* // Accept: text/*, application/json
|
||||||
|
* this.types('image/png');
|
||||||
|
* this.types('png');
|
||||||
|
* // => undefined
|
||||||
|
*
|
||||||
|
* // Accept: text/*;q=.5, application/json
|
||||||
|
* this.types(['html', 'json']);
|
||||||
|
* this.types('html', 'json');
|
||||||
|
* // => "json"
|
||||||
|
*
|
||||||
|
* @param {String|Array} types...
|
||||||
|
* @return {String|Array|Boolean}
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Accepts.prototype.type =
|
||||||
|
Accepts.prototype.types = function (types_) {
|
||||||
|
var types = types_
|
||||||
|
|
||||||
|
// support flattened arguments
|
||||||
|
if (types && !Array.isArray(types)) {
|
||||||
|
types = new Array(arguments.length)
|
||||||
|
for (var i = 0; i < types.length; i++) {
|
||||||
|
types[i] = arguments[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// no types, return all requested types
|
||||||
|
if (!types || types.length === 0) {
|
||||||
|
return this.negotiator.mediaTypes()
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.headers.accept) return types[0];
|
||||||
|
var mimes = types.map(extToMime);
|
||||||
|
var accepts = this.negotiator.mediaTypes(mimes.filter(validMime));
|
||||||
|
var first = accepts[0];
|
||||||
|
if (!first) return false;
|
||||||
|
return types[mimes.indexOf(first)];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return accepted encodings or best fit based on `encodings`.
|
||||||
|
*
|
||||||
|
* Given `Accept-Encoding: gzip, deflate`
|
||||||
|
* an array sorted by quality is returned:
|
||||||
|
*
|
||||||
|
* ['gzip', 'deflate']
|
||||||
|
*
|
||||||
|
* @param {String|Array} encodings...
|
||||||
|
* @return {String|Array}
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Accepts.prototype.encoding =
|
||||||
|
Accepts.prototype.encodings = function (encodings_) {
|
||||||
|
var encodings = encodings_
|
||||||
|
|
||||||
|
// support flattened arguments
|
||||||
|
if (encodings && !Array.isArray(encodings)) {
|
||||||
|
encodings = new Array(arguments.length)
|
||||||
|
for (var i = 0; i < encodings.length; i++) {
|
||||||
|
encodings[i] = arguments[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// no encodings, return all requested encodings
|
||||||
|
if (!encodings || encodings.length === 0) {
|
||||||
|
return this.negotiator.encodings()
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.negotiator.encodings(encodings)[0] || false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return accepted charsets or best fit based on `charsets`.
|
||||||
|
*
|
||||||
|
* Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5`
|
||||||
|
* an array sorted by quality is returned:
|
||||||
|
*
|
||||||
|
* ['utf-8', 'utf-7', 'iso-8859-1']
|
||||||
|
*
|
||||||
|
* @param {String|Array} charsets...
|
||||||
|
* @return {String|Array}
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Accepts.prototype.charset =
|
||||||
|
Accepts.prototype.charsets = function (charsets_) {
|
||||||
|
var charsets = charsets_
|
||||||
|
|
||||||
|
// support flattened arguments
|
||||||
|
if (charsets && !Array.isArray(charsets)) {
|
||||||
|
charsets = new Array(arguments.length)
|
||||||
|
for (var i = 0; i < charsets.length; i++) {
|
||||||
|
charsets[i] = arguments[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// no charsets, return all requested charsets
|
||||||
|
if (!charsets || charsets.length === 0) {
|
||||||
|
return this.negotiator.charsets()
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.negotiator.charsets(charsets)[0] || false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return accepted languages or best fit based on `langs`.
|
||||||
|
*
|
||||||
|
* Given `Accept-Language: en;q=0.8, es, pt`
|
||||||
|
* an array sorted by quality is returned:
|
||||||
|
*
|
||||||
|
* ['es', 'pt', 'en']
|
||||||
|
*
|
||||||
|
* @param {String|Array} langs...
|
||||||
|
* @return {Array|String}
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Accepts.prototype.lang =
|
||||||
|
Accepts.prototype.langs =
|
||||||
|
Accepts.prototype.language =
|
||||||
|
Accepts.prototype.languages = function (languages_) {
|
||||||
|
var languages = languages_
|
||||||
|
|
||||||
|
// support flattened arguments
|
||||||
|
if (languages && !Array.isArray(languages)) {
|
||||||
|
languages = new Array(arguments.length)
|
||||||
|
for (var i = 0; i < languages.length; i++) {
|
||||||
|
languages[i] = arguments[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// no languages, return all requested languages
|
||||||
|
if (!languages || languages.length === 0) {
|
||||||
|
return this.negotiator.languages()
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.negotiator.languages(languages)[0] || false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert extnames to mime.
|
||||||
|
*
|
||||||
|
* @param {String} type
|
||||||
|
* @return {String}
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function extToMime(type) {
|
||||||
|
return type.indexOf('/') === -1
|
||||||
|
? mime.lookup(type)
|
||||||
|
: type
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if mime is valid.
|
||||||
|
*
|
||||||
|
* @param {String} type
|
||||||
|
* @return {String}
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function validMime(type) {
|
||||||
|
return typeof type === 'string';
|
||||||
|
}
|
112
node_modules/accepts/package.json
generated
vendored
Normal file
112
node_modules/accepts/package.json
generated
vendored
Normal file
@ -0,0 +1,112 @@
|
|||||||
|
{
|
||||||
|
"_args": [
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"raw": "accepts@~1.3.3",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "accepts",
|
||||||
|
"name": "accepts",
|
||||||
|
"rawSpec": "~1.3.3",
|
||||||
|
"spec": ">=1.3.3 <1.4.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"/tank/data/SERVER/zoneadm-master/node_modules/express"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"_from": "accepts@>=1.3.3 <1.4.0",
|
||||||
|
"_id": "accepts@1.3.3",
|
||||||
|
"_inCache": true,
|
||||||
|
"_location": "/accepts",
|
||||||
|
"_nodeVersion": "4.4.3",
|
||||||
|
"_npmOperationalInternal": {
|
||||||
|
"host": "packages-16-east.internal.npmjs.com",
|
||||||
|
"tmp": "tmp/accepts-1.3.3.tgz_1462251932032_0.7092335098423064"
|
||||||
|
},
|
||||||
|
"_npmUser": {
|
||||||
|
"name": "dougwilson",
|
||||||
|
"email": "doug@somethingdoug.com"
|
||||||
|
},
|
||||||
|
"_npmVersion": "2.15.1",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"raw": "accepts@~1.3.3",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "accepts",
|
||||||
|
"name": "accepts",
|
||||||
|
"rawSpec": "~1.3.3",
|
||||||
|
"spec": ">=1.3.3 <1.4.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/express"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.3.tgz",
|
||||||
|
"_shasum": "c3ca7434938648c3e0d9c1e328dd68b622c284ca",
|
||||||
|
"_shrinkwrap": null,
|
||||||
|
"_spec": "accepts@~1.3.3",
|
||||||
|
"_where": "/tank/data/SERVER/zoneadm-master/node_modules/express",
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/jshttp/accepts/issues"
|
||||||
|
},
|
||||||
|
"contributors": [
|
||||||
|
{
|
||||||
|
"name": "Douglas Christopher Wilson",
|
||||||
|
"email": "doug@somethingdoug.com"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Jonathan Ong",
|
||||||
|
"email": "me@jongleberry.com",
|
||||||
|
"url": "http://jongleberry.com"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"dependencies": {
|
||||||
|
"mime-types": "~2.1.11",
|
||||||
|
"negotiator": "0.6.1"
|
||||||
|
},
|
||||||
|
"description": "Higher-level content negotiation",
|
||||||
|
"devDependencies": {
|
||||||
|
"istanbul": "0.4.3",
|
||||||
|
"mocha": "~1.21.5"
|
||||||
|
},
|
||||||
|
"directories": {},
|
||||||
|
"dist": {
|
||||||
|
"shasum": "c3ca7434938648c3e0d9c1e328dd68b622c284ca",
|
||||||
|
"tarball": "https://registry.npmjs.org/accepts/-/accepts-1.3.3.tgz"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"LICENSE",
|
||||||
|
"HISTORY.md",
|
||||||
|
"index.js"
|
||||||
|
],
|
||||||
|
"gitHead": "3e925b1e65ed7da2798849683d49814680dfa426",
|
||||||
|
"homepage": "https://github.com/jshttp/accepts#readme",
|
||||||
|
"keywords": [
|
||||||
|
"content",
|
||||||
|
"negotiation",
|
||||||
|
"accept",
|
||||||
|
"accepts"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"maintainers": [
|
||||||
|
{
|
||||||
|
"name": "dougwilson",
|
||||||
|
"email": "doug@somethingdoug.com"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "accepts",
|
||||||
|
"optionalDependencies": {},
|
||||||
|
"readme": "ERROR: No README data found!",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/jshttp/accepts.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "mocha --reporter spec --check-leaks --bail test/",
|
||||||
|
"test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/",
|
||||||
|
"test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/"
|
||||||
|
},
|
||||||
|
"version": "1.3.3"
|
||||||
|
}
|
4
node_modules/ansi-regex/index.js
generated
vendored
Normal file
4
node_modules/ansi-regex/index.js
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
'use strict';
|
||||||
|
module.exports = function () {
|
||||||
|
return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nqry=><]/g;
|
||||||
|
};
|
21
node_modules/ansi-regex/license
generated
vendored
Normal file
21
node_modules/ansi-regex/license
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
131
node_modules/ansi-regex/package.json
generated
vendored
Normal file
131
node_modules/ansi-regex/package.json
generated
vendored
Normal file
@ -0,0 +1,131 @@
|
|||||||
|
{
|
||||||
|
"_args": [
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"raw": "ansi-regex@^2.0.0",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "ansi-regex",
|
||||||
|
"name": "ansi-regex",
|
||||||
|
"rawSpec": "^2.0.0",
|
||||||
|
"spec": ">=2.0.0 <3.0.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"/tank/data/SERVER/zoneadm-master/node_modules/strip-ansi"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"_from": "ansi-regex@>=2.0.0 <3.0.0",
|
||||||
|
"_id": "ansi-regex@2.1.1",
|
||||||
|
"_inCache": true,
|
||||||
|
"_location": "/ansi-regex",
|
||||||
|
"_nodeVersion": "0.10.32",
|
||||||
|
"_npmOperationalInternal": {
|
||||||
|
"host": "packages-18-east.internal.npmjs.com",
|
||||||
|
"tmp": "tmp/ansi-regex-2.1.1.tgz_1484363378013_0.4482989883981645"
|
||||||
|
},
|
||||||
|
"_npmUser": {
|
||||||
|
"name": "qix",
|
||||||
|
"email": "i.am.qix@gmail.com"
|
||||||
|
},
|
||||||
|
"_npmVersion": "2.14.2",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"raw": "ansi-regex@^2.0.0",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "ansi-regex",
|
||||||
|
"name": "ansi-regex",
|
||||||
|
"rawSpec": "^2.0.0",
|
||||||
|
"spec": ">=2.0.0 <3.0.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/strip-ansi"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
|
||||||
|
"_shasum": "c3b33ab5ee360d86e0e628f0468ae7ef27d654df",
|
||||||
|
"_shrinkwrap": null,
|
||||||
|
"_spec": "ansi-regex@^2.0.0",
|
||||||
|
"_where": "/tank/data/SERVER/zoneadm-master/node_modules/strip-ansi",
|
||||||
|
"author": {
|
||||||
|
"name": "Sindre Sorhus",
|
||||||
|
"email": "sindresorhus@gmail.com",
|
||||||
|
"url": "sindresorhus.com"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/chalk/ansi-regex/issues"
|
||||||
|
},
|
||||||
|
"dependencies": {},
|
||||||
|
"description": "Regular expression for matching ANSI escape codes",
|
||||||
|
"devDependencies": {
|
||||||
|
"ava": "0.17.0",
|
||||||
|
"xo": "0.16.0"
|
||||||
|
},
|
||||||
|
"directories": {},
|
||||||
|
"dist": {
|
||||||
|
"shasum": "c3b33ab5ee360d86e0e628f0468ae7ef27d654df",
|
||||||
|
"tarball": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"index.js"
|
||||||
|
],
|
||||||
|
"gitHead": "7c908e7b4eb6cd82bfe1295e33fdf6d166c7ed85",
|
||||||
|
"homepage": "https://github.com/chalk/ansi-regex#readme",
|
||||||
|
"keywords": [
|
||||||
|
"ansi",
|
||||||
|
"styles",
|
||||||
|
"color",
|
||||||
|
"colour",
|
||||||
|
"colors",
|
||||||
|
"terminal",
|
||||||
|
"console",
|
||||||
|
"cli",
|
||||||
|
"string",
|
||||||
|
"tty",
|
||||||
|
"escape",
|
||||||
|
"formatting",
|
||||||
|
"rgb",
|
||||||
|
"256",
|
||||||
|
"shell",
|
||||||
|
"xterm",
|
||||||
|
"command-line",
|
||||||
|
"text",
|
||||||
|
"regex",
|
||||||
|
"regexp",
|
||||||
|
"re",
|
||||||
|
"match",
|
||||||
|
"test",
|
||||||
|
"find",
|
||||||
|
"pattern"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"maintainers": [
|
||||||
|
{
|
||||||
|
"name": "qix",
|
||||||
|
"email": "i.am.qix@gmail.com"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "sindresorhus",
|
||||||
|
"email": "sindresorhus@gmail.com"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "ansi-regex",
|
||||||
|
"optionalDependencies": {},
|
||||||
|
"readme": "ERROR: No README data found!",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/chalk/ansi-regex.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "xo && ava --verbose",
|
||||||
|
"view-supported": "node fixtures/view-codes.js"
|
||||||
|
},
|
||||||
|
"version": "2.1.1",
|
||||||
|
"xo": {
|
||||||
|
"rules": {
|
||||||
|
"guard-for-in": 0,
|
||||||
|
"no-loop-func": 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
39
node_modules/ansi-regex/readme.md
generated
vendored
Normal file
39
node_modules/ansi-regex/readme.md
generated
vendored
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
# ansi-regex [](https://travis-ci.org/chalk/ansi-regex)
|
||||||
|
|
||||||
|
> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
|
||||||
|
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```
|
||||||
|
$ npm install --save ansi-regex
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
const ansiRegex = require('ansi-regex');
|
||||||
|
|
||||||
|
ansiRegex().test('\u001b[4mcake\u001b[0m');
|
||||||
|
//=> true
|
||||||
|
|
||||||
|
ansiRegex().test('cake');
|
||||||
|
//=> false
|
||||||
|
|
||||||
|
'\u001b[4mcake\u001b[0m'.match(ansiRegex());
|
||||||
|
//=> ['\u001b[4m', '\u001b[0m']
|
||||||
|
```
|
||||||
|
|
||||||
|
## FAQ
|
||||||
|
|
||||||
|
### Why do you test for codes not in the ECMA 48 standard?
|
||||||
|
|
||||||
|
Some of the codes we run as a test are codes that we acquired finding various lists of non-standard or manufacturer specific codes. If I recall correctly, we test for both standard and non-standard codes, as most of them follow the same or similar format and can be safely matched in strings without the risk of removing actual string content. There are a few non-standard control codes that do not follow the traditional format (i.e. they end in numbers) thus forcing us to exclude them from the test because we cannot reliably match them.
|
||||||
|
|
||||||
|
On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Somewhere else you see a similar 'anarchy' of codes is in the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out.
|
||||||
|
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
65
node_modules/ansi-styles/index.js
generated
vendored
Normal file
65
node_modules/ansi-styles/index.js
generated
vendored
Normal file
@ -0,0 +1,65 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
function assembleStyles () {
|
||||||
|
var styles = {
|
||||||
|
modifiers: {
|
||||||
|
reset: [0, 0],
|
||||||
|
bold: [1, 22], // 21 isn't widely supported and 22 does the same thing
|
||||||
|
dim: [2, 22],
|
||||||
|
italic: [3, 23],
|
||||||
|
underline: [4, 24],
|
||||||
|
inverse: [7, 27],
|
||||||
|
hidden: [8, 28],
|
||||||
|
strikethrough: [9, 29]
|
||||||
|
},
|
||||||
|
colors: {
|
||||||
|
black: [30, 39],
|
||||||
|
red: [31, 39],
|
||||||
|
green: [32, 39],
|
||||||
|
yellow: [33, 39],
|
||||||
|
blue: [34, 39],
|
||||||
|
magenta: [35, 39],
|
||||||
|
cyan: [36, 39],
|
||||||
|
white: [37, 39],
|
||||||
|
gray: [90, 39]
|
||||||
|
},
|
||||||
|
bgColors: {
|
||||||
|
bgBlack: [40, 49],
|
||||||
|
bgRed: [41, 49],
|
||||||
|
bgGreen: [42, 49],
|
||||||
|
bgYellow: [43, 49],
|
||||||
|
bgBlue: [44, 49],
|
||||||
|
bgMagenta: [45, 49],
|
||||||
|
bgCyan: [46, 49],
|
||||||
|
bgWhite: [47, 49]
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// fix humans
|
||||||
|
styles.colors.grey = styles.colors.gray;
|
||||||
|
|
||||||
|
Object.keys(styles).forEach(function (groupName) {
|
||||||
|
var group = styles[groupName];
|
||||||
|
|
||||||
|
Object.keys(group).forEach(function (styleName) {
|
||||||
|
var style = group[styleName];
|
||||||
|
|
||||||
|
styles[styleName] = group[styleName] = {
|
||||||
|
open: '\u001b[' + style[0] + 'm',
|
||||||
|
close: '\u001b[' + style[1] + 'm'
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
Object.defineProperty(styles, groupName, {
|
||||||
|
value: group,
|
||||||
|
enumerable: false
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
return styles;
|
||||||
|
}
|
||||||
|
|
||||||
|
Object.defineProperty(module, 'exports', {
|
||||||
|
enumerable: true,
|
||||||
|
get: assembleStyles
|
||||||
|
});
|
21
node_modules/ansi-styles/license
generated
vendored
Normal file
21
node_modules/ansi-styles/license
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
114
node_modules/ansi-styles/package.json
generated
vendored
Normal file
114
node_modules/ansi-styles/package.json
generated
vendored
Normal file
@ -0,0 +1,114 @@
|
|||||||
|
{
|
||||||
|
"_args": [
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"raw": "ansi-styles@^2.2.1",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "ansi-styles",
|
||||||
|
"name": "ansi-styles",
|
||||||
|
"rawSpec": "^2.2.1",
|
||||||
|
"spec": ">=2.2.1 <3.0.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"/tank/data/SERVER/zoneadm-master/node_modules/chalk"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"_from": "ansi-styles@>=2.2.1 <3.0.0",
|
||||||
|
"_id": "ansi-styles@2.2.1",
|
||||||
|
"_inCache": true,
|
||||||
|
"_location": "/ansi-styles",
|
||||||
|
"_nodeVersion": "4.3.0",
|
||||||
|
"_npmOperationalInternal": {
|
||||||
|
"host": "packages-12-west.internal.npmjs.com",
|
||||||
|
"tmp": "tmp/ansi-styles-2.2.1.tgz_1459197317833_0.9694824463222176"
|
||||||
|
},
|
||||||
|
"_npmUser": {
|
||||||
|
"name": "sindresorhus",
|
||||||
|
"email": "sindresorhus@gmail.com"
|
||||||
|
},
|
||||||
|
"_npmVersion": "3.8.3",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"raw": "ansi-styles@^2.2.1",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "ansi-styles",
|
||||||
|
"name": "ansi-styles",
|
||||||
|
"rawSpec": "^2.2.1",
|
||||||
|
"spec": ">=2.2.1 <3.0.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/chalk"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz",
|
||||||
|
"_shasum": "b432dd3358b634cf75e1e4664368240533c1ddbe",
|
||||||
|
"_shrinkwrap": null,
|
||||||
|
"_spec": "ansi-styles@^2.2.1",
|
||||||
|
"_where": "/tank/data/SERVER/zoneadm-master/node_modules/chalk",
|
||||||
|
"author": {
|
||||||
|
"name": "Sindre Sorhus",
|
||||||
|
"email": "sindresorhus@gmail.com",
|
||||||
|
"url": "sindresorhus.com"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/chalk/ansi-styles/issues"
|
||||||
|
},
|
||||||
|
"dependencies": {},
|
||||||
|
"description": "ANSI escape codes for styling strings in the terminal",
|
||||||
|
"devDependencies": {
|
||||||
|
"mocha": "*"
|
||||||
|
},
|
||||||
|
"directories": {},
|
||||||
|
"dist": {
|
||||||
|
"shasum": "b432dd3358b634cf75e1e4664368240533c1ddbe",
|
||||||
|
"tarball": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.10.0"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"index.js"
|
||||||
|
],
|
||||||
|
"gitHead": "95c59b23be760108b6530ca1c89477c21b258032",
|
||||||
|
"homepage": "https://github.com/chalk/ansi-styles#readme",
|
||||||
|
"keywords": [
|
||||||
|
"ansi",
|
||||||
|
"styles",
|
||||||
|
"color",
|
||||||
|
"colour",
|
||||||
|
"colors",
|
||||||
|
"terminal",
|
||||||
|
"console",
|
||||||
|
"cli",
|
||||||
|
"string",
|
||||||
|
"tty",
|
||||||
|
"escape",
|
||||||
|
"formatting",
|
||||||
|
"rgb",
|
||||||
|
"256",
|
||||||
|
"shell",
|
||||||
|
"xterm",
|
||||||
|
"log",
|
||||||
|
"logging",
|
||||||
|
"command-line",
|
||||||
|
"text"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"maintainers": [
|
||||||
|
{
|
||||||
|
"name": "sindresorhus",
|
||||||
|
"email": "sindresorhus@gmail.com"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "ansi-styles",
|
||||||
|
"optionalDependencies": {},
|
||||||
|
"readme": "ERROR: No README data found!",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/chalk/ansi-styles.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "mocha"
|
||||||
|
},
|
||||||
|
"version": "2.2.1"
|
||||||
|
}
|
86
node_modules/ansi-styles/readme.md
generated
vendored
Normal file
86
node_modules/ansi-styles/readme.md
generated
vendored
Normal file
@ -0,0 +1,86 @@
|
|||||||
|
# ansi-styles [](https://travis-ci.org/chalk/ansi-styles)
|
||||||
|
|
||||||
|
> [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal
|
||||||
|
|
||||||
|
You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings.
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```
|
||||||
|
$ npm install --save ansi-styles
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```js
|
||||||
|
var ansi = require('ansi-styles');
|
||||||
|
|
||||||
|
console.log(ansi.green.open + 'Hello world!' + ansi.green.close);
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
Each style has an `open` and `close` property.
|
||||||
|
|
||||||
|
|
||||||
|
## Styles
|
||||||
|
|
||||||
|
### Modifiers
|
||||||
|
|
||||||
|
- `reset`
|
||||||
|
- `bold`
|
||||||
|
- `dim`
|
||||||
|
- `italic` *(not widely supported)*
|
||||||
|
- `underline`
|
||||||
|
- `inverse`
|
||||||
|
- `hidden`
|
||||||
|
- `strikethrough` *(not widely supported)*
|
||||||
|
|
||||||
|
### Colors
|
||||||
|
|
||||||
|
- `black`
|
||||||
|
- `red`
|
||||||
|
- `green`
|
||||||
|
- `yellow`
|
||||||
|
- `blue`
|
||||||
|
- `magenta`
|
||||||
|
- `cyan`
|
||||||
|
- `white`
|
||||||
|
- `gray`
|
||||||
|
|
||||||
|
### Background colors
|
||||||
|
|
||||||
|
- `bgBlack`
|
||||||
|
- `bgRed`
|
||||||
|
- `bgGreen`
|
||||||
|
- `bgYellow`
|
||||||
|
- `bgBlue`
|
||||||
|
- `bgMagenta`
|
||||||
|
- `bgCyan`
|
||||||
|
- `bgWhite`
|
||||||
|
|
||||||
|
|
||||||
|
## Advanced usage
|
||||||
|
|
||||||
|
By default you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module.
|
||||||
|
|
||||||
|
- `ansi.modifiers`
|
||||||
|
- `ansi.colors`
|
||||||
|
- `ansi.bgColors`
|
||||||
|
|
||||||
|
|
||||||
|
###### Example
|
||||||
|
|
||||||
|
```js
|
||||||
|
console.log(ansi.colors.green.open);
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
21
node_modules/array-flatten/LICENSE
generated
vendored
Normal file
21
node_modules/array-flatten/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
43
node_modules/array-flatten/README.md
generated
vendored
Normal file
43
node_modules/array-flatten/README.md
generated
vendored
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
# Array Flatten
|
||||||
|
|
||||||
|
[![NPM version][npm-image]][npm-url]
|
||||||
|
[![NPM downloads][downloads-image]][downloads-url]
|
||||||
|
[![Build status][travis-image]][travis-url]
|
||||||
|
[![Test coverage][coveralls-image]][coveralls-url]
|
||||||
|
|
||||||
|
> Flatten an array of nested arrays into a single flat array. Accepts an optional depth.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```
|
||||||
|
npm install array-flatten --save
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var flatten = require('array-flatten')
|
||||||
|
|
||||||
|
flatten([1, [2, [3, [4, [5], 6], 7], 8], 9])
|
||||||
|
//=> [1, 2, 3, 4, 5, 6, 7, 8, 9]
|
||||||
|
|
||||||
|
flatten([1, [2, [3, [4, [5], 6], 7], 8], 9], 2)
|
||||||
|
//=> [1, 2, 3, [4, [5], 6], 7, 8, 9]
|
||||||
|
|
||||||
|
(function () {
|
||||||
|
flatten(arguments) //=> [1, 2, 3]
|
||||||
|
})(1, [2, 3])
|
||||||
|
```
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT
|
||||||
|
|
||||||
|
[npm-image]: https://img.shields.io/npm/v/array-flatten.svg?style=flat
|
||||||
|
[npm-url]: https://npmjs.org/package/array-flatten
|
||||||
|
[downloads-image]: https://img.shields.io/npm/dm/array-flatten.svg?style=flat
|
||||||
|
[downloads-url]: https://npmjs.org/package/array-flatten
|
||||||
|
[travis-image]: https://img.shields.io/travis/blakeembrey/array-flatten.svg?style=flat
|
||||||
|
[travis-url]: https://travis-ci.org/blakeembrey/array-flatten
|
||||||
|
[coveralls-image]: https://img.shields.io/coveralls/blakeembrey/array-flatten.svg?style=flat
|
||||||
|
[coveralls-url]: https://coveralls.io/r/blakeembrey/array-flatten?branch=master
|
64
node_modules/array-flatten/array-flatten.js
generated
vendored
Normal file
64
node_modules/array-flatten/array-flatten.js
generated
vendored
Normal file
@ -0,0 +1,64 @@
'use strict'

/**
 * Expose `arrayFlatten`.
 */
module.exports = arrayFlatten

/**
 * Recursive flatten function with depth.
 *
 * @param {Array} array
 * @param {Array} result
 * @param {Number} depth
 * @return {Array}
 */
function flattenWithDepth (array, result, depth) {
  for (var i = 0; i < array.length; i++) {
    var value = array[i]

    if (depth > 0 && Array.isArray(value)) {
      flattenWithDepth(value, result, depth - 1)
    } else {
      result.push(value)
    }
  }

  return result
}

/**
 * Recursive flatten function. Omitting depth is slightly faster.
 *
 * @param {Array} array
 * @param {Array} result
 * @return {Array}
 */
function flattenForever (array, result) {
  for (var i = 0; i < array.length; i++) {
    var value = array[i]

    if (Array.isArray(value)) {
      flattenForever(value, result)
    } else {
      result.push(value)
    }
  }

  return result
}

/**
 * Flatten an array, with the ability to define a depth.
 *
 * @param {Array} array
 * @param {Number} depth
 * @return {Array}
 */
function arrayFlatten (array, depth) {
  if (depth == null) {
    return flattenForever(array, [])
  }

  return flattenWithDepth(array, [], depth)
}
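As a quick check of the two code paths above (a sketch only; the input values are arbitrary), calling the exported function with and without a depth exercises `flattenWithDepth` and `flattenForever` respectively:

```javascript
var flatten = require('array-flatten')

// No depth argument: flattenForever recurses all the way down.
flatten([1, [2, [3]]])      //=> [1, 2, 3]

// Depth 1: flattenWithDepth stops after one level of unwrapping.
flatten([1, [2, [3]]], 1)   //=> [1, 2, [3]]
```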
96
node_modules/array-flatten/package.json
generated
vendored
Normal file
@ -0,0 +1,96 @@
{
  "_args": [
    [
      {
        "raw": "array-flatten@1.1.1",
        "scope": null,
        "escapedName": "array-flatten",
        "name": "array-flatten",
        "rawSpec": "1.1.1",
        "spec": "1.1.1",
        "type": "version"
      },
      "/tank/data/SERVER/zoneadm-master/node_modules/express"
    ]
  ],
  "_from": "array-flatten@1.1.1",
  "_id": "array-flatten@1.1.1",
  "_inCache": true,
  "_location": "/array-flatten",
  "_nodeVersion": "2.3.3",
  "_npmUser": {
    "name": "blakeembrey",
    "email": "hello@blakeembrey.com"
  },
  "_npmVersion": "2.11.3",
  "_phantomChildren": {},
  "_requested": {
    "raw": "array-flatten@1.1.1",
    "scope": null,
    "escapedName": "array-flatten",
    "name": "array-flatten",
    "rawSpec": "1.1.1",
    "spec": "1.1.1",
    "type": "version"
  },
  "_requiredBy": [
    "/express"
  ],
  "_resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
  "_shasum": "9a5f699051b1e7073328f2a008968b64ea2955d2",
  "_shrinkwrap": null,
  "_spec": "array-flatten@1.1.1",
  "_where": "/tank/data/SERVER/zoneadm-master/node_modules/express",
  "author": {
    "name": "Blake Embrey",
    "email": "hello@blakeembrey.com",
    "url": "http://blakeembrey.me"
  },
  "bugs": {
    "url": "https://github.com/blakeembrey/array-flatten/issues"
  },
  "dependencies": {},
  "description": "Flatten an array of nested arrays into a single flat array",
  "devDependencies": {
    "istanbul": "^0.3.13",
    "mocha": "^2.2.4",
    "pre-commit": "^1.0.7",
    "standard": "^3.7.3"
  },
  "directories": {},
  "dist": {
    "shasum": "9a5f699051b1e7073328f2a008968b64ea2955d2",
    "tarball": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz"
  },
  "files": [
    "array-flatten.js",
    "LICENSE"
  ],
  "gitHead": "1963a9189229d408e1e8f585a00c8be9edbd1803",
  "homepage": "https://github.com/blakeembrey/array-flatten",
  "keywords": [
    "array",
    "flatten",
    "arguments",
    "depth"
  ],
  "license": "MIT",
  "main": "array-flatten.js",
  "maintainers": [
    {
      "name": "blakeembrey",
      "email": "hello@blakeembrey.com"
    }
  ],
  "name": "array-flatten",
  "optionalDependencies": {},
  "readme": "ERROR: No README data found!",
  "repository": {
    "type": "git",
    "url": "git://github.com/blakeembrey/array-flatten.git"
  },
  "scripts": {
    "test": "istanbul cover _mocha -- -R spec"
  },
  "version": "1.1.1"
}
2
node_modules/asn1/.npmignore
generated
vendored
Normal file
@ -0,0 +1,2 @@
node_modules
*.log
4
node_modules/asn1/.travis.yml
generated
vendored
Normal file
@ -0,0 +1,4 @@
language: node_js
node_js:
  - 0.8
  - 0.10
19
node_modules/asn1/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
Copyright (c) 2011 Mark Cavage, All rights reserved.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE
50
node_modules/asn1/README.md
generated
vendored
Normal file
@ -0,0 +1,50 @@
node-asn1 is a library for encoding and decoding ASN.1 datatypes in pure JS.
Currently BER encoding is supported; at some point I'll likely have to do DER.

## Usage

Mostly, if you're *actually* needing to read and write ASN.1, you probably don't
need this readme to explain what and why. If you have no idea what ASN.1 is,
see this: ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc

The source is pretty much self-explanatory, and has read/write methods for the
common types out there.

### Decoding

The following reads an ASN.1 sequence with a boolean.

    var Ber = require('asn1').Ber;

    var reader = new Ber.Reader(new Buffer([0x30, 0x03, 0x01, 0x01, 0xff]));

    reader.readSequence();
    console.log('Sequence len: ' + reader.length);
    if (reader.peek() === Ber.Boolean)
      console.log(reader.readBoolean());

### Encoding

The following generates the same payload as above.

    var Ber = require('asn1').Ber;

    var writer = new Ber.Writer();

    writer.startSequence();
    writer.writeBoolean(true);
    writer.endSequence();

    console.log(writer.buffer);

## Installation

    npm install asn1

## License

MIT.

## Bugs

See <https://github.com/mcavage/node-asn1/issues>.
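The two snippets above are the two halves of a round trip. As a quick sanity check (a minimal sketch that only uses the `Ber.Writer`/`Ber.Reader` calls shown in this README), writing a sequence containing a boolean and reading it back recovers the original value:

```javascript
// Sketch: round-trip the README's example payload through asn1's BER writer/reader.
var Ber = require('asn1').Ber;

var writer = new Ber.Writer();
writer.startSequence();
writer.writeBoolean(true);
writer.endSequence();

var reader = new Ber.Reader(writer.buffer);
reader.readSequence();              // returns 0x30, the sequence tag
console.log(reader.readBoolean());  // true
```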
13
node_modules/asn1/lib/ber/errors.js
generated
vendored
Normal file
@ -0,0 +1,13 @@
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.


module.exports = {

  newInvalidAsn1Error: function(msg) {
    var e = new Error();
    e.name = 'InvalidAsn1Error';
    e.message = msg || '';
    return e;
  }

};
27
node_modules/asn1/lib/ber/index.js
generated
vendored
Normal file
@ -0,0 +1,27 @@
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.

var errors = require('./errors');
var types = require('./types');

var Reader = require('./reader');
var Writer = require('./writer');


///--- Exports

module.exports = {

  Reader: Reader,

  Writer: Writer

};

for (var t in types) {
  if (types.hasOwnProperty(t))
    module.exports[t] = types[t];
}
for (var e in errors) {
  if (errors.hasOwnProperty(e))
    module.exports[e] = errors[e];
}
261
node_modules/asn1/lib/ber/reader.js
generated
vendored
Normal file
@ -0,0 +1,261 @@
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.

var assert = require('assert');

var ASN1 = require('./types');
var errors = require('./errors');


///--- Globals

var newInvalidAsn1Error = errors.newInvalidAsn1Error;



///--- API

function Reader(data) {
  if (!data || !Buffer.isBuffer(data))
    throw new TypeError('data must be a node Buffer');

  this._buf = data;
  this._size = data.length;

  // These hold the "current" state
  this._len = 0;
  this._offset = 0;
}

Object.defineProperty(Reader.prototype, 'length', {
  enumerable: true,
  get: function () { return (this._len); }
});

Object.defineProperty(Reader.prototype, 'offset', {
  enumerable: true,
  get: function () { return (this._offset); }
});

Object.defineProperty(Reader.prototype, 'remain', {
  get: function () { return (this._size - this._offset); }
});

Object.defineProperty(Reader.prototype, 'buffer', {
  get: function () { return (this._buf.slice(this._offset)); }
});


/**
 * Reads a single byte and advances offset; you can pass in `true` to make this
 * a "peek" operation (i.e., get the byte, but don't advance the offset).
 *
 * @param {Boolean} peek true means don't move offset.
 * @return {Number} the next byte, null if not enough data.
 */
Reader.prototype.readByte = function(peek) {
  if (this._size - this._offset < 1)
    return null;

  var b = this._buf[this._offset] & 0xff;

  if (!peek)
    this._offset += 1;

  return b;
};


Reader.prototype.peek = function() {
  return this.readByte(true);
};


/**
 * Reads a (potentially) variable length off the BER buffer. This call is
 * not really meant to be called directly, as callers have to manipulate
 * the internal buffer afterwards.
 *
 * As a result of this call, you can call `Reader.length`, until the
 * next thing called that does a readLength.
 *
 * @return {Number} the amount of offset to advance the buffer.
 * @throws {InvalidAsn1Error} on bad ASN.1
 */
Reader.prototype.readLength = function(offset) {
  if (offset === undefined)
    offset = this._offset;

  if (offset >= this._size)
    return null;

  var lenB = this._buf[offset++] & 0xff;
  if (lenB === null)
    return null;

  if ((lenB & 0x80) == 0x80) {
    lenB &= 0x7f;

    if (lenB == 0)
      throw newInvalidAsn1Error('Indefinite length not supported');

    if (lenB > 4)
      throw newInvalidAsn1Error('encoding too long');

    if (this._size - offset < lenB)
      return null;

    this._len = 0;
    for (var i = 0; i < lenB; i++)
      this._len = (this._len << 8) + (this._buf[offset++] & 0xff);

  } else {
    // Wasn't a variable length
    this._len = lenB;
  }

  return offset;
};


/**
 * Parses the next sequence in this BER buffer.
 *
 * To get the length of the sequence, call `Reader.length`.
 *
 * @return {Number} the sequence's tag.
 */
Reader.prototype.readSequence = function(tag) {
  var seq = this.peek();
  if (seq === null)
    return null;
  if (tag !== undefined && tag !== seq)
    throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) +
                              ': got 0x' + seq.toString(16));

  var o = this.readLength(this._offset + 1); // stored in `length`
  if (o === null)
    return null;

  this._offset = o;
  return seq;
};


Reader.prototype.readInt = function() {
  return this._readTag(ASN1.Integer);
};


Reader.prototype.readBoolean = function() {
  return (this._readTag(ASN1.Boolean) === 0 ? false : true);
};


Reader.prototype.readEnumeration = function() {
  return this._readTag(ASN1.Enumeration);
};


Reader.prototype.readString = function(tag, retbuf) {
  if (!tag)
    tag = ASN1.OctetString;

  var b = this.peek();
  if (b === null)
    return null;

  if (b !== tag)
    throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) +
                              ': got 0x' + b.toString(16));

  var o = this.readLength(this._offset + 1); // stored in `length`

  if (o === null)
    return null;

  if (this.length > this._size - o)
    return null;

  this._offset = o;

  if (this.length === 0)
    return retbuf ? new Buffer(0) : '';

  var str = this._buf.slice(this._offset, this._offset + this.length);
  this._offset += this.length;

  return retbuf ? str : str.toString('utf8');
};

Reader.prototype.readOID = function(tag) {
  if (!tag)
    tag = ASN1.OID;

  var b = this.readString(tag, true);
  if (b === null)
    return null;

  var values = [];
  var value = 0;

  for (var i = 0; i < b.length; i++) {
    var byte = b[i] & 0xff;

    value <<= 7;
    value += byte & 0x7f;
    if ((byte & 0x80) == 0) {
      values.push(value);
      value = 0;
    }
  }

  value = values.shift();
  values.unshift(value % 40);
  values.unshift((value / 40) >> 0);

  return values.join('.');
};


Reader.prototype._readTag = function(tag) {
  assert.ok(tag !== undefined);

  var b = this.peek();

  if (b === null)
    return null;

  if (b !== tag)
    throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) +
                              ': got 0x' + b.toString(16));

  var o = this.readLength(this._offset + 1); // stored in `length`
  if (o === null)
    return null;

  if (this.length > 4)
    throw newInvalidAsn1Error('Integer too long: ' + this.length);

  if (this.length > this._size - o)
    return null;
  this._offset = o;

  var fb = this._buf[this._offset];
  var value = 0;

  for (var i = 0; i < this.length; i++) {
    value <<= 8;
    value |= (this._buf[this._offset++] & 0xff);
  }

  if ((fb & 0x80) == 0x80 && i !== 4)
    value -= (1 << (i * 8));

  return value >> 0;
};



///--- Exported API

module.exports = Reader;
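`readLength` above handles both BER length forms: a first byte below 0x80 is the length itself (short form), while a first byte with the high bit set says how many length bytes follow (long form). A minimal sketch of the two cases, using only the exported `BerReader` and the default OctetString tag:

```javascript
// Sketch: short-form vs long-form BER lengths as parsed by readLength().
var BerReader = require('asn1').BerReader;

// Short form: 0x03 is the length of the string 'foo'.
var short = new BerReader(new Buffer([0x04, 0x03, 0x66, 0x6f, 0x6f]));
console.log(short.readString());   // 'foo'

// Long form: 0x81 means "one length byte follows", 0x02 is the actual length.
var long = new BerReader(new Buffer([0x04, 0x81, 0x02, 0x68, 0x69]));
console.log(long.readString());    // 'hi'
```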
36
node_modules/asn1/lib/ber/types.js
generated
vendored
Normal file
@ -0,0 +1,36 @@
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.


module.exports = {
  EOC: 0,
  Boolean: 1,
  Integer: 2,
  BitString: 3,
  OctetString: 4,
  Null: 5,
  OID: 6,
  ObjectDescriptor: 7,
  External: 8,
  Real: 9, // float
  Enumeration: 10,
  PDV: 11,
  Utf8String: 12,
  RelativeOID: 13,
  Sequence: 16,
  Set: 17,
  NumericString: 18,
  PrintableString: 19,
  T61String: 20,
  VideotexString: 21,
  IA5String: 22,
  UTCTime: 23,
  GeneralizedTime: 24,
  GraphicString: 25,
  VisibleString: 26,
  GeneralString: 28,
  UniversalString: 29,
  CharacterString: 30,
  BMPString: 31,
  Constructor: 32,
  Context: 128
};
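These tag numbers are copied onto the `Ber` namespace by `lib/ber/index.js` above, so they can be passed as the optional `tag` argument of the reader/writer helpers. A minimal sketch (the string value is arbitrary) that writes and reads a UTF8String (tag 12) instead of the default OctetString:

```javascript
// Sketch: using the tag constants re-exported on require('asn1').Ber.
var Ber = require('asn1').Ber;

var writer = new Ber.Writer();
writer.writeString('zone-1', Ber.Utf8String);    // emits tag byte 0x0c, not 0x04

var reader = new Ber.Reader(writer.buffer);
console.log(reader.readString(Ber.Utf8String));  // 'zone-1'
```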
316
node_modules/asn1/lib/ber/writer.js
generated
vendored
Normal file
@ -0,0 +1,316 @@
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.

var assert = require('assert');
var ASN1 = require('./types');
var errors = require('./errors');


///--- Globals

var newInvalidAsn1Error = errors.newInvalidAsn1Error;

var DEFAULT_OPTS = {
  size: 1024,
  growthFactor: 8
};


///--- Helpers

function merge(from, to) {
  assert.ok(from);
  assert.equal(typeof(from), 'object');
  assert.ok(to);
  assert.equal(typeof(to), 'object');

  var keys = Object.getOwnPropertyNames(from);
  keys.forEach(function(key) {
    if (to[key])
      return;

    var value = Object.getOwnPropertyDescriptor(from, key);
    Object.defineProperty(to, key, value);
  });

  return to;
}



///--- API

function Writer(options) {
  options = merge(DEFAULT_OPTS, options || {});

  this._buf = new Buffer(options.size || 1024);
  this._size = this._buf.length;
  this._offset = 0;
  this._options = options;

  // A list of offsets in the buffer where we need to insert
  // sequence tag/len pairs.
  this._seq = [];
}

Object.defineProperty(Writer.prototype, 'buffer', {
  get: function () {
    if (this._seq.length)
      throw newInvalidAsn1Error(this._seq.length + ' unended sequence(s)');

    return (this._buf.slice(0, this._offset));
  }
});

Writer.prototype.writeByte = function(b) {
  if (typeof(b) !== 'number')
    throw new TypeError('argument must be a Number');

  this._ensure(1);
  this._buf[this._offset++] = b;
};


Writer.prototype.writeInt = function(i, tag) {
  if (typeof(i) !== 'number')
    throw new TypeError('argument must be a Number');
  if (typeof(tag) !== 'number')
    tag = ASN1.Integer;

  var sz = 4;

  while ((((i & 0xff800000) === 0) || ((i & 0xff800000) === 0xff800000 >> 0)) &&
         (sz > 1)) {
    sz--;
    i <<= 8;
  }

  if (sz > 4)
    throw newInvalidAsn1Error('BER ints cannot be > 0xffffffff');

  this._ensure(2 + sz);
  this._buf[this._offset++] = tag;
  this._buf[this._offset++] = sz;

  while (sz-- > 0) {
    this._buf[this._offset++] = ((i & 0xff000000) >>> 24);
    i <<= 8;
  }

};


Writer.prototype.writeNull = function() {
  this.writeByte(ASN1.Null);
  this.writeByte(0x00);
};


Writer.prototype.writeEnumeration = function(i, tag) {
  if (typeof(i) !== 'number')
    throw new TypeError('argument must be a Number');
  if (typeof(tag) !== 'number')
    tag = ASN1.Enumeration;

  return this.writeInt(i, tag);
};


Writer.prototype.writeBoolean = function(b, tag) {
  if (typeof(b) !== 'boolean')
    throw new TypeError('argument must be a Boolean');
  if (typeof(tag) !== 'number')
    tag = ASN1.Boolean;

  this._ensure(3);
  this._buf[this._offset++] = tag;
  this._buf[this._offset++] = 0x01;
  this._buf[this._offset++] = b ? 0xff : 0x00;
};


Writer.prototype.writeString = function(s, tag) {
  if (typeof(s) !== 'string')
    throw new TypeError('argument must be a string (was: ' + typeof(s) + ')');
  if (typeof(tag) !== 'number')
    tag = ASN1.OctetString;

  var len = Buffer.byteLength(s);
  this.writeByte(tag);
  this.writeLength(len);
  if (len) {
    this._ensure(len);
    this._buf.write(s, this._offset);
    this._offset += len;
  }
};


Writer.prototype.writeBuffer = function(buf, tag) {
  if (typeof(tag) !== 'number')
    throw new TypeError('tag must be a number');
  if (!Buffer.isBuffer(buf))
    throw new TypeError('argument must be a buffer');

  this.writeByte(tag);
  this.writeLength(buf.length);
  this._ensure(buf.length);
  buf.copy(this._buf, this._offset, 0, buf.length);
  this._offset += buf.length;
};


Writer.prototype.writeStringArray = function(strings) {
  if (!(strings instanceof Array))
    throw new TypeError('argument must be an Array[String]');

  var self = this;
  strings.forEach(function(s) {
    self.writeString(s);
  });
};

// This is really to solve DER cases, but whatever for now
Writer.prototype.writeOID = function(s, tag) {
  if (typeof(s) !== 'string')
    throw new TypeError('argument must be a string');
  if (typeof(tag) !== 'number')
    tag = ASN1.OID;

  if (!/^([0-9]+\.){3,}[0-9]+$/.test(s))
    throw new Error('argument is not a valid OID string');

  function encodeOctet(bytes, octet) {
    if (octet < 128) {
      bytes.push(octet);
    } else if (octet < 16384) {
      bytes.push((octet >>> 7) | 0x80);
      bytes.push(octet & 0x7F);
    } else if (octet < 2097152) {
      bytes.push((octet >>> 14) | 0x80);
      bytes.push(((octet >>> 7) | 0x80) & 0xFF);
      bytes.push(octet & 0x7F);
    } else if (octet < 268435456) {
      bytes.push((octet >>> 21) | 0x80);
      bytes.push(((octet >>> 14) | 0x80) & 0xFF);
      bytes.push(((octet >>> 7) | 0x80) & 0xFF);
      bytes.push(octet & 0x7F);
    } else {
      bytes.push(((octet >>> 28) | 0x80) & 0xFF);
      bytes.push(((octet >>> 21) | 0x80) & 0xFF);
      bytes.push(((octet >>> 14) | 0x80) & 0xFF);
      bytes.push(((octet >>> 7) | 0x80) & 0xFF);
      bytes.push(octet & 0x7F);
    }
  }

  var tmp = s.split('.');
  var bytes = [];
  bytes.push(parseInt(tmp[0], 10) * 40 + parseInt(tmp[1], 10));
  tmp.slice(2).forEach(function(b) {
    encodeOctet(bytes, parseInt(b, 10));
  });

  var self = this;
  this._ensure(2 + bytes.length);
  this.writeByte(tag);
  this.writeLength(bytes.length);
  bytes.forEach(function(b) {
    self.writeByte(b);
  });
};


Writer.prototype.writeLength = function(len) {
  if (typeof(len) !== 'number')
    throw new TypeError('argument must be a Number');

  this._ensure(4);

  if (len <= 0x7f) {
    this._buf[this._offset++] = len;
  } else if (len <= 0xff) {
    this._buf[this._offset++] = 0x81;
    this._buf[this._offset++] = len;
  } else if (len <= 0xffff) {
    this._buf[this._offset++] = 0x82;
    this._buf[this._offset++] = len >> 8;
    this._buf[this._offset++] = len;
  } else if (len <= 0xffffff) {
    this._buf[this._offset++] = 0x83;
    this._buf[this._offset++] = len >> 16;
    this._buf[this._offset++] = len >> 8;
    this._buf[this._offset++] = len;
  } else {
    throw newInvalidAsn1Error('Length too long (> 4 bytes)');
  }
};

Writer.prototype.startSequence = function(tag) {
  if (typeof(tag) !== 'number')
    tag = ASN1.Sequence | ASN1.Constructor;

  this.writeByte(tag);
  this._seq.push(this._offset);
  this._ensure(3);
  this._offset += 3;
};


Writer.prototype.endSequence = function() {
  var seq = this._seq.pop();
  var start = seq + 3;
  var len = this._offset - start;

  if (len <= 0x7f) {
    this._shift(start, len, -2);
    this._buf[seq] = len;
  } else if (len <= 0xff) {
    this._shift(start, len, -1);
    this._buf[seq] = 0x81;
    this._buf[seq + 1] = len;
  } else if (len <= 0xffff) {
    this._buf[seq] = 0x82;
    this._buf[seq + 1] = len >> 8;
    this._buf[seq + 2] = len;
  } else if (len <= 0xffffff) {
    this._shift(start, len, 1);
    this._buf[seq] = 0x83;
    this._buf[seq + 1] = len >> 16;
    this._buf[seq + 2] = len >> 8;
    this._buf[seq + 3] = len;
  } else {
    throw newInvalidAsn1Error('Sequence too long');
  }
};


Writer.prototype._shift = function(start, len, shift) {
  assert.ok(start !== undefined);
  assert.ok(len !== undefined);
  assert.ok(shift);

  this._buf.copy(this._buf, start + shift, start, start + len);
  this._offset += shift;
};

Writer.prototype._ensure = function(len) {
  assert.ok(len);

  if (this._size - this._offset < len) {
    var sz = this._size * this._options.growthFactor;
    if (sz - this._offset < len)
      sz += len;

    var buf = new Buffer(sz);

    this._buf.copy(buf, 0, 0, this._offset);
    this._buf = buf;
    this._size = sz;
  }
};



///--- Exported API

module.exports = Writer;
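`writeOID` above packs the first two arcs into a single byte (40 times the first arc plus the second) and base-128 encodes every remaining arc, setting the high bit on all but the last byte of each arc. A minimal sketch of the byte layout for the RSA OID that the writer tests further down also use:

```javascript
// Sketch: what writeOID('1.2.840.113549.1.1.1') produces, byte by byte.
//   1.2     -> 1*40 + 2        = 0x2a
//   840     -> 0x86 0x48       (840    = 6*128 + 72)
//   113549  -> 0x86 0xf7 0x0d  (113549 = 6*128^2 + 119*128 + 13)
//   1, 1, 1 -> 0x01 0x01 0x01
var BerWriter = require('asn1').BerWriter;

var writer = new BerWriter();
writer.writeOID('1.2.840.113549.1.1.1');
console.log(writer.buffer);
// <Buffer 06 09 2a 86 48 86 f7 0d 01 01 01>  (tag 0x06, length 9, then the arcs)
```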
20
node_modules/asn1/lib/index.js
generated
vendored
Normal file
@ -0,0 +1,20 @@
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.

// If you have no idea what ASN.1 or BER is, see this:
// ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc

var Ber = require('./ber/index');



///--- Exported API

module.exports = {

  Ber: Ber,

  BerReader: Ber.Reader,

  BerWriter: Ber.Writer

};
98
node_modules/asn1/package.json
generated
vendored
Normal file
@ -0,0 +1,98 @@
{
  "_args": [
    [
      {
        "raw": "asn1@~0.2.3",
        "scope": null,
        "escapedName": "asn1",
        "name": "asn1",
        "rawSpec": "~0.2.3",
        "spec": ">=0.2.3 <0.3.0",
        "type": "range"
      },
      "/tank/data/SERVER/zoneadm-master/node_modules/sshpk"
    ]
  ],
  "_from": "asn1@>=0.2.3 <0.3.0",
  "_id": "asn1@0.2.3",
  "_inCache": true,
  "_location": "/asn1",
  "_npmUser": {
    "name": "pfmooney",
    "email": "patrick.f.mooney@gmail.com"
  },
  "_npmVersion": "1.4.28",
  "_phantomChildren": {},
  "_requested": {
    "raw": "asn1@~0.2.3",
    "scope": null,
    "escapedName": "asn1",
    "name": "asn1",
    "rawSpec": "~0.2.3",
    "spec": ">=0.2.3 <0.3.0",
    "type": "range"
  },
  "_requiredBy": [
    "/sshpk"
  ],
  "_resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz",
  "_shasum": "dac8787713c9966849fc8180777ebe9c1ddf3b86",
  "_shrinkwrap": null,
  "_spec": "asn1@~0.2.3",
  "_where": "/tank/data/SERVER/zoneadm-master/node_modules/sshpk",
  "author": {
    "name": "Mark Cavage",
    "email": "mcavage@gmail.com"
  },
  "bugs": {
    "url": "https://github.com/mcavage/node-asn1/issues"
  },
  "contributors": [
    {
      "name": "David Gwynne",
      "email": "loki@animata.net"
    },
    {
      "name": "Yunong Xiao",
      "email": "yunong@joyent.com"
    },
    {
      "name": "Alex Wilson",
      "email": "alex.wilson@joyent.com"
    }
  ],
  "dependencies": {},
  "description": "Contains parsers and serializers for ASN.1 (currently BER only)",
  "devDependencies": {
    "tap": "0.4.8"
  },
  "directories": {},
  "dist": {
    "shasum": "dac8787713c9966849fc8180777ebe9c1ddf3b86",
    "tarball": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz"
  },
  "homepage": "https://github.com/mcavage/node-asn1",
  "license": "MIT",
  "main": "lib/index.js",
  "maintainers": [
    {
      "name": "mcavage",
      "email": "mcavage@gmail.com"
    },
    {
      "name": "pfmooney",
      "email": "patrick.f.mooney@gmail.com"
    }
  ],
  "name": "asn1",
  "optionalDependencies": {},
  "readme": "ERROR: No README data found!",
  "repository": {
    "type": "git",
    "url": "git://github.com/mcavage/node-asn1.git"
  },
  "scripts": {
    "test": "tap ./tst"
  },
  "version": "0.2.3"
}
208
node_modules/asn1/tst/ber/reader.test.js
generated
vendored
Normal file
@ -0,0 +1,208 @@
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.

var test = require('tap').test;



///--- Globals

var BerReader;



///--- Tests

test('load library', function(t) {
  BerReader = require('../../lib/index').BerReader;
  t.ok(BerReader);
  try {
    new BerReader();
    t.fail('Should have thrown');
  } catch (e) {
    t.ok(e instanceof TypeError, 'Should have been a type error');
  }
  t.end();
});


test('read byte', function(t) {
  var reader = new BerReader(new Buffer([0xde]));
  t.ok(reader);
  t.equal(reader.readByte(), 0xde, 'wrong value');
  t.end();
});


test('read 1 byte int', function(t) {
  var reader = new BerReader(new Buffer([0x02, 0x01, 0x03]));
  t.ok(reader);
  t.equal(reader.readInt(), 0x03, 'wrong value');
  t.equal(reader.length, 0x01, 'wrong length');
  t.end();
});


test('read 2 byte int', function(t) {
  var reader = new BerReader(new Buffer([0x02, 0x02, 0x7e, 0xde]));
  t.ok(reader);
  t.equal(reader.readInt(), 0x7ede, 'wrong value');
  t.equal(reader.length, 0x02, 'wrong length');
  t.end();
});


test('read 3 byte int', function(t) {
  var reader = new BerReader(new Buffer([0x02, 0x03, 0x7e, 0xde, 0x03]));
  t.ok(reader);
  t.equal(reader.readInt(), 0x7ede03, 'wrong value');
  t.equal(reader.length, 0x03, 'wrong length');
  t.end();
});


test('read 4 byte int', function(t) {
  var reader = new BerReader(new Buffer([0x02, 0x04, 0x7e, 0xde, 0x03, 0x01]));
  t.ok(reader);
  t.equal(reader.readInt(), 0x7ede0301, 'wrong value');
  t.equal(reader.length, 0x04, 'wrong length');
  t.end();
});


test('read 1 byte negative int', function(t) {
  var reader = new BerReader(new Buffer([0x02, 0x01, 0xdc]));
  t.ok(reader);
  t.equal(reader.readInt(), -36, 'wrong value');
  t.equal(reader.length, 0x01, 'wrong length');
  t.end();
});


test('read 2 byte negative int', function(t) {
  var reader = new BerReader(new Buffer([0x02, 0x02, 0xc0, 0x4e]));
  t.ok(reader);
  t.equal(reader.readInt(), -16306, 'wrong value');
  t.equal(reader.length, 0x02, 'wrong length');
  t.end();
});


test('read 3 byte negative int', function(t) {
  var reader = new BerReader(new Buffer([0x02, 0x03, 0xff, 0x00, 0x19]));
  t.ok(reader);
  t.equal(reader.readInt(), -65511, 'wrong value');
  t.equal(reader.length, 0x03, 'wrong length');
  t.end();
});


test('read 4 byte negative int', function(t) {
  var reader = new BerReader(new Buffer([0x02, 0x04, 0x91, 0x7c, 0x22, 0x1f]));
  t.ok(reader);
  t.equal(reader.readInt(), -1854135777, 'wrong value');
  t.equal(reader.length, 0x04, 'wrong length');
  t.end();
});


test('read boolean true', function(t) {
  var reader = new BerReader(new Buffer([0x01, 0x01, 0xff]));
  t.ok(reader);
  t.equal(reader.readBoolean(), true, 'wrong value');
  t.equal(reader.length, 0x01, 'wrong length');
  t.end();
});


test('read boolean false', function(t) {
  var reader = new BerReader(new Buffer([0x01, 0x01, 0x00]));
  t.ok(reader);
  t.equal(reader.readBoolean(), false, 'wrong value');
  t.equal(reader.length, 0x01, 'wrong length');
  t.end();
});


test('read enumeration', function(t) {
  var reader = new BerReader(new Buffer([0x0a, 0x01, 0x20]));
  t.ok(reader);
  t.equal(reader.readEnumeration(), 0x20, 'wrong value');
  t.equal(reader.length, 0x01, 'wrong length');
  t.end();
});


test('read string', function(t) {
  var dn = 'cn=foo,ou=unit,o=test';
  var buf = new Buffer(dn.length + 2);
  buf[0] = 0x04;
  buf[1] = Buffer.byteLength(dn);
  buf.write(dn, 2);
  var reader = new BerReader(buf);
  t.ok(reader);
  t.equal(reader.readString(), dn, 'wrong value');
  t.equal(reader.length, dn.length, 'wrong length');
  t.end();
});


test('read sequence', function(t) {
  var reader = new BerReader(new Buffer([0x30, 0x03, 0x01, 0x01, 0xff]));
  t.ok(reader);
  t.equal(reader.readSequence(), 0x30, 'wrong value');
  t.equal(reader.length, 0x03, 'wrong length');
  t.equal(reader.readBoolean(), true, 'wrong value');
  t.equal(reader.length, 0x01, 'wrong length');
  t.end();
});


test('anonymous LDAPv3 bind', function(t) {
  var BIND = new Buffer(14);
  BIND[0] = 0x30;  // Sequence
  BIND[1] = 12;    // len
  BIND[2] = 0x02;  // ASN.1 Integer
  BIND[3] = 1;     // len
  BIND[4] = 0x04;  // msgid (make up 4)
  BIND[5] = 0x60;  // Bind Request
  BIND[6] = 7;     // len
  BIND[7] = 0x02;  // ASN.1 Integer
  BIND[8] = 1;     // len
  BIND[9] = 0x03;  // v3
  BIND[10] = 0x04; // String (bind dn)
  BIND[11] = 0;    // len
  BIND[12] = 0x80; // ContextSpecific (choice)
  BIND[13] = 0;    // simple bind

  // Start testing ^^
  var ber = new BerReader(BIND);
  t.equal(ber.readSequence(), 48, 'Not an ASN.1 Sequence');
  t.equal(ber.length, 12, 'Message length should be 12');
  t.equal(ber.readInt(), 4, 'Message id should have been 4');
  t.equal(ber.readSequence(), 96, 'Bind Request should have been 96');
  t.equal(ber.length, 7, 'Bind length should have been 7');
  t.equal(ber.readInt(), 3, 'LDAP version should have been 3');
  t.equal(ber.readString(), '', 'Bind DN should have been empty');
  t.equal(ber.length, 0, 'string length should have been 0');
  t.equal(ber.readByte(), 0x80, 'Should have been ContextSpecific (choice)');
  t.equal(ber.readByte(), 0, 'Should have been simple bind');
  t.equal(null, ber.readByte(), 'Should be out of data');
  t.end();
});


test('long string', function(t) {
  var buf = new Buffer(256);
  var o;
  var s =
    '2;649;CN=Red Hat CS 71GA Demo,O=Red Hat CS 71GA Demo,C=US;' +
    'CN=RHCS Agent - admin01,UID=admin01,O=redhat,C=US [1] This is ' +
    'Teena Vradmin\'s description.';
  buf[0] = 0x04;
  buf[1] = 0x81;
  buf[2] = 0x94;
  buf.write(s, 3);
  var ber = new BerReader(buf.slice(0, 3 + s.length));
  t.equal(ber.readString(), s);
  t.end();
});
370
node_modules/asn1/tst/ber/writer.test.js
generated
vendored
Normal file
@ -0,0 +1,370 @@
// Copyright 2011 Mark Cavage <mcavage@gmail.com> All rights reserved.

var test = require('tap').test;
var sys = require('sys');

///--- Globals

var BerWriter;

var BerReader;


///--- Tests

test('load library', function(t) {
  BerWriter = require('../../lib/index').BerWriter;
  t.ok(BerWriter);
  t.ok(new BerWriter());
  t.end();
});


test('write byte', function(t) {
  var writer = new BerWriter();

  writer.writeByte(0xC2);
  var ber = writer.buffer;

  t.ok(ber);
  t.equal(ber.length, 1, 'Wrong length');
  t.equal(ber[0], 0xC2, 'value wrong');

  t.end();
});


test('write 1 byte int', function(t) {
  var writer = new BerWriter();

  writer.writeInt(0x7f);
  var ber = writer.buffer;

  t.ok(ber);
  t.equal(ber.length, 3, 'Wrong length for an int: ' + ber.length);
  t.equal(ber[0], 0x02, 'ASN.1 tag wrong (2) -> ' + ber[0]);
  t.equal(ber[1], 0x01, 'length wrong(1) -> ' + ber[1]);
  t.equal(ber[2], 0x7f, 'value wrong(3) -> ' + ber[2]);

  t.end();
});


test('write 2 byte int', function(t) {
  var writer = new BerWriter();

  writer.writeInt(0x7ffe);
  var ber = writer.buffer;

  t.ok(ber);
  t.equal(ber.length, 4, 'Wrong length for an int');
  t.equal(ber[0], 0x02, 'ASN.1 tag wrong');
  t.equal(ber[1], 0x02, 'length wrong');
  t.equal(ber[2], 0x7f, 'value wrong (byte 1)');
  t.equal(ber[3], 0xfe, 'value wrong (byte 2)');

  t.end();
});


test('write 3 byte int', function(t) {
  var writer = new BerWriter();

  writer.writeInt(0x7ffffe);
  var ber = writer.buffer;

  t.ok(ber);
  t.equal(ber.length, 5, 'Wrong length for an int');
  t.equal(ber[0], 0x02, 'ASN.1 tag wrong');
  t.equal(ber[1], 0x03, 'length wrong');
  t.equal(ber[2], 0x7f, 'value wrong (byte 1)');
  t.equal(ber[3], 0xff, 'value wrong (byte 2)');
  t.equal(ber[4], 0xfe, 'value wrong (byte 3)');

  t.end();
});


test('write 4 byte int', function(t) {
  var writer = new BerWriter();

  writer.writeInt(0x7ffffffe);
  var ber = writer.buffer;

  t.ok(ber);

  t.equal(ber.length, 6, 'Wrong length for an int');
  t.equal(ber[0], 0x02, 'ASN.1 tag wrong');
  t.equal(ber[1], 0x04, 'length wrong');
  t.equal(ber[2], 0x7f, 'value wrong (byte 1)');
  t.equal(ber[3], 0xff, 'value wrong (byte 2)');
  t.equal(ber[4], 0xff, 'value wrong (byte 3)');
  t.equal(ber[5], 0xfe, 'value wrong (byte 4)');

  t.end();
});


test('write 1 byte negative int', function(t) {
  var writer = new BerWriter();

  writer.writeInt(-128);
  var ber = writer.buffer;

  t.ok(ber);

  t.equal(ber.length, 3, 'Wrong length for an int');
  t.equal(ber[0], 0x02, 'ASN.1 tag wrong');
  t.equal(ber[1], 0x01, 'length wrong');
  t.equal(ber[2], 0x80, 'value wrong (byte 1)');

  t.end();
});


test('write 2 byte negative int', function(t) {
  var writer = new BerWriter();

  writer.writeInt(-22400);
  var ber = writer.buffer;

  t.ok(ber);

  t.equal(ber.length, 4, 'Wrong length for an int');
  t.equal(ber[0], 0x02, 'ASN.1 tag wrong');
  t.equal(ber[1], 0x02, 'length wrong');
  t.equal(ber[2], 0xa8, 'value wrong (byte 1)');
  t.equal(ber[3], 0x80, 'value wrong (byte 2)');

  t.end();
});


test('write 3 byte negative int', function(t) {
  var writer = new BerWriter();

  writer.writeInt(-481653);
  var ber = writer.buffer;

  t.ok(ber);

  t.equal(ber.length, 5, 'Wrong length for an int');
  t.equal(ber[0], 0x02, 'ASN.1 tag wrong');
  t.equal(ber[1], 0x03, 'length wrong');
  t.equal(ber[2], 0xf8, 'value wrong (byte 1)');
  t.equal(ber[3], 0xa6, 'value wrong (byte 2)');
  t.equal(ber[4], 0x8b, 'value wrong (byte 3)');

  t.end();
});


test('write 4 byte negative int', function(t) {
  var writer = new BerWriter();

  writer.writeInt(-1522904131);
  var ber = writer.buffer;

  t.ok(ber);

  t.equal(ber.length, 6, 'Wrong length for an int');
  t.equal(ber[0], 0x02, 'ASN.1 tag wrong');
  t.equal(ber[1], 0x04, 'length wrong');
  t.equal(ber[2], 0xa5, 'value wrong (byte 1)');
  t.equal(ber[3], 0x3a, 'value wrong (byte 2)');
  t.equal(ber[4], 0x53, 'value wrong (byte 3)');
  t.equal(ber[5], 0xbd, 'value wrong (byte 4)');

  t.end();
});


test('write boolean', function(t) {
  var writer = new BerWriter();

  writer.writeBoolean(true);
  writer.writeBoolean(false);
  var ber = writer.buffer;

  t.ok(ber);
  t.equal(ber.length, 6, 'Wrong length');
  t.equal(ber[0], 0x01, 'tag wrong');
  t.equal(ber[1], 0x01, 'length wrong');
  t.equal(ber[2], 0xff, 'value wrong');
  t.equal(ber[3], 0x01, 'tag wrong');
  t.equal(ber[4], 0x01, 'length wrong');
  t.equal(ber[5], 0x00, 'value wrong');

  t.end();
});


test('write string', function(t) {
  var writer = new BerWriter();
  writer.writeString('hello world');
  var ber = writer.buffer;

  t.ok(ber);
  t.equal(ber.length, 13, 'wrong length');
  t.equal(ber[0], 0x04, 'wrong tag');
  t.equal(ber[1], 11, 'wrong length');
  t.equal(ber.slice(2).toString('utf8'), 'hello world', 'wrong value');

  t.end();
});

test('write buffer', function(t) {
  var writer = new BerWriter();
  // write some stuff to start with
  writer.writeString('hello world');
  var ber = writer.buffer;
  var buf = new Buffer([0x04, 0x0b, 0x30, 0x09, 0x02, 0x01, 0x0f, 0x01, 0x01,
    0xff, 0x01, 0x01, 0xff]);
  writer.writeBuffer(buf.slice(2, buf.length), 0x04);
  ber = writer.buffer;

  t.ok(ber);
  t.equal(ber.length, 26, 'wrong length');
  t.equal(ber[0], 0x04, 'wrong tag');
  t.equal(ber[1], 11, 'wrong length');
  t.equal(ber.slice(2, 13).toString('utf8'), 'hello world', 'wrong value');
  t.equal(ber[13], buf[0], 'wrong tag');
  t.equal(ber[14], buf[1], 'wrong length');
  for (var i = 13, j = 0; i < ber.length && j < buf.length; i++, j++) {
    t.equal(ber[i], buf[j], 'buffer contents not identical');
  }
  t.end();
});

test('write string array', function(t) {
  var writer = new BerWriter();
  writer.writeStringArray(['hello world', 'fubar!']);
  var ber = writer.buffer;

  t.ok(ber);

  t.equal(ber.length, 21, 'wrong length');
  t.equal(ber[0], 0x04, 'wrong tag');
  t.equal(ber[1], 11, 'wrong length');
  t.equal(ber.slice(2, 13).toString('utf8'), 'hello world', 'wrong value');

  t.equal(ber[13], 0x04, 'wrong tag');
  t.equal(ber[14], 6, 'wrong length');
  t.equal(ber.slice(15).toString('utf8'), 'fubar!', 'wrong value');

  t.end();
});


test('resize internal buffer', function(t) {
  var writer = new BerWriter({size: 2});
  writer.writeString('hello world');
  var ber = writer.buffer;

  t.ok(ber);
  t.equal(ber.length, 13, 'wrong length');
  t.equal(ber[0], 0x04, 'wrong tag');
  t.equal(ber[1], 11, 'wrong length');
  t.equal(ber.slice(2).toString('utf8'), 'hello world', 'wrong value');

  t.end();
});


test('sequence', function(t) {
  var writer = new BerWriter({size: 25});
  writer.startSequence();
  writer.writeString('hello world');
  writer.endSequence();
  var ber = writer.buffer;

  t.ok(ber);
  console.log(ber);
  t.equal(ber.length, 15, 'wrong length');
  t.equal(ber[0], 0x30, 'wrong tag');
  t.equal(ber[1], 13, 'wrong length');
  t.equal(ber[2], 0x04, 'wrong tag');
  t.equal(ber[3], 11, 'wrong length');
  t.equal(ber.slice(4).toString('utf8'), 'hello world', 'wrong value');

  t.end();
});


test('nested sequence', function(t) {
  var writer = new BerWriter({size: 25});
  writer.startSequence();
  writer.writeString('hello world');
  writer.startSequence();
  writer.writeString('hello world');
  writer.endSequence();
  writer.endSequence();
  var ber = writer.buffer;

  t.ok(ber);
  t.equal(ber.length, 30, 'wrong length');
  t.equal(ber[0], 0x30, 'wrong tag');
  t.equal(ber[1], 28, 'wrong length');
  t.equal(ber[2], 0x04, 'wrong tag');
  t.equal(ber[3], 11, 'wrong length');
  t.equal(ber.slice(4, 15).toString('utf8'), 'hello world', 'wrong value');
  t.equal(ber[15], 0x30, 'wrong tag');
  t.equal(ber[16], 13, 'wrong length');
  t.equal(ber[17], 0x04, 'wrong tag');
  t.equal(ber[18], 11, 'wrong length');
  t.equal(ber.slice(19, 30).toString('utf8'), 'hello world', 'wrong value');

  t.end();
});


test('LDAP bind message', function(t) {
  var dn = 'cn=foo,ou=unit,o=test';
  var writer = new BerWriter();
  writer.startSequence();
  writer.writeInt(3);          // msgid = 3
  writer.startSequence(0x60);  // ldap bind
  writer.writeInt(3);          // ldap v3
  writer.writeString(dn);
  writer.writeByte(0x80);
  writer.writeByte(0x00);
  writer.endSequence();
  writer.endSequence();
  var ber = writer.buffer;

  t.ok(ber);
  t.equal(ber.length, 35, 'wrong length (buffer)');
  t.equal(ber[0], 0x30, 'wrong tag');
  t.equal(ber[1], 33, 'wrong length');
  t.equal(ber[2], 0x02, 'wrong tag');
  t.equal(ber[3], 1, 'wrong length');
  t.equal(ber[4], 0x03, 'wrong value');
  t.equal(ber[5], 0x60, 'wrong tag');
  t.equal(ber[6], 28, 'wrong length');
  t.equal(ber[7], 0x02, 'wrong tag');
  t.equal(ber[8], 1, 'wrong length');
  t.equal(ber[9], 0x03, 'wrong value');
  t.equal(ber[10], 0x04, 'wrong tag');
  t.equal(ber[11], dn.length, 'wrong length');
  t.equal(ber.slice(12, 33).toString('utf8'), dn, 'wrong value');
  t.equal(ber[33], 0x80, 'wrong tag');
  t.equal(ber[34], 0x00, 'wrong len');

  t.end();
});


test('Write OID', function(t) {
  var oid = '1.2.840.113549.1.1.1';
  var writer = new BerWriter();
  writer.writeOID(oid);

  var ber = writer.buffer;
  t.ok(ber);
  console.log(require('util').inspect(ber));
  console.log(require('util').inspect(new Buffer([0x06, 0x09, 0x2a, 0x86,
    0x48, 0x86, 0xf7, 0x0d,
    0x01, 0x01, 0x01])));

  t.end();
});
6
node_modules/assert-plus/AUTHORS
generated
vendored
Normal file
@ -0,0 +1,6 @@
Dave Eddy <dave@daveeddy.com>
Fred Kuo <fred.kuo@joyent.com>
Lars-Magnus Skog <ralphtheninja@riseup.net>
Mark Cavage <mcavage@gmail.com>
Patrick Mooney <pmooney@pfmooney.com>
Rob Gulewich <robert.gulewich@joyent.com>
8
node_modules/assert-plus/CHANGES.md
generated
vendored
Normal file
@ -0,0 +1,8 @@
# assert-plus Changelog

## 0.2.0

- Fix `assert.object(null)` so it throws
- Fix optional/arrayOf exports for non-type-of asserts
- Add optional/arrayOf exports for Stream/Date/Regex/uuid
- Add basic unit test coverage
155
node_modules/assert-plus/README.md
generated
vendored
Normal file
@ -0,0 +1,155 @@
# assert-plus

This library is a super small wrapper over node's assert module that has two
things: (1) the ability to disable assertions with the environment variable
NODE\_NDEBUG, and (2) some API wrappers for argument testing. Like
`assert.string(myArg, 'myArg')`. As a simple example, most of my code looks
like this:

```javascript
var assert = require('assert-plus');

function fooAccount(options, callback) {
    assert.object(options, 'options');
    assert.number(options.id, 'options.id');
    assert.bool(options.isManager, 'options.isManager');
    assert.string(options.name, 'options.name');
    assert.arrayOfString(options.email, 'options.email');
    assert.func(callback, 'callback');

    // Do stuff
    callback(null, {});
}
```

# API

All methods that *aren't* part of node's core assert API are simply assumed to
take an argument, and then a string 'name' that's not a message; `AssertionError`
will be thrown if the assertion fails with a message like:

    AssertionError: foo (string) is required
        at test (/home/mark/work/foo/foo.js:3:9)
        at Object.<anonymous> (/home/mark/work/foo/foo.js:15:1)
        at Module._compile (module.js:446:26)
        at Object..js (module.js:464:10)
        at Module.load (module.js:353:31)
        at Function._load (module.js:311:12)
        at Array.0 (module.js:484:10)
        at EventEmitter._tickCallback (node.js:190:38)

from:

```javascript
function test(foo) {
    assert.string(foo, 'foo');
}
```

There you go. You can check that arrays are of a homogeneous type with `Arrayof$Type`:

```javascript
function test(foo) {
    assert.arrayOfString(foo, 'foo');
}
```

You can assert IFF an argument is not `undefined` (i.e., an optional arg):

```javascript
assert.optionalString(foo, 'foo');
```

Lastly, you can opt-out of assertion checking altogether by setting the
environment variable `NODE_NDEBUG=1`. This is pseudo-useful if you have
lots of assertions, and don't want to pay `typeof ()` taxes to v8 in
production. Be advised: The standard functions re-exported from `assert` are
also disabled in assert-plus if NDEBUG is specified. Using them directly from
the `assert` module avoids this behavior.
The complete list of APIs is:
|
||||||
|
|
||||||
|
* assert.array
|
||||||
|
* assert.bool
|
||||||
|
* assert.buffer
|
||||||
|
* assert.func
|
||||||
|
* assert.number
|
||||||
|
* assert.object
|
||||||
|
* assert.string
|
||||||
|
* assert.stream
|
||||||
|
* assert.date
|
||||||
|
* assert.regex
|
||||||
|
* assert.uuid
|
||||||
|
* assert.arrayOfArray
|
||||||
|
* assert.arrayOfBool
|
||||||
|
* assert.arrayOfBuffer
|
||||||
|
* assert.arrayOfFunc
|
||||||
|
* assert.arrayOfNumber
|
||||||
|
* assert.arrayOfObject
|
||||||
|
* assert.arrayOfString
|
||||||
|
* assert.arrayOfStream
|
||||||
|
* assert.arrayOfDate
|
||||||
|
* assert.arrayOfUuid
|
||||||
|
* assert.optionalArray
|
||||||
|
* assert.optionalBool
|
||||||
|
* assert.optionalBuffer
|
||||||
|
* assert.optionalFunc
|
||||||
|
* assert.optionalNumber
|
||||||
|
* assert.optionalObject
|
||||||
|
* assert.optionalString
|
||||||
|
* assert.optionalStream
|
||||||
|
* assert.optionalDate
|
||||||
|
* assert.optionalUuid
|
||||||
|
* assert.optionalArrayOfArray
|
||||||
|
* assert.optionalArrayOfBool
|
||||||
|
* assert.optionalArrayOfBuffer
|
||||||
|
* assert.optionalArrayOfFunc
|
||||||
|
* assert.optionalArrayOfNumber
|
||||||
|
* assert.optionalArrayOfObject
|
||||||
|
* assert.optionalArrayOfString
|
||||||
|
* assert.optionalArrayOfStream
|
||||||
|
* assert.optionalArrayOfDate
|
||||||
|
* assert.optionalArrayOfUuid
|
||||||
|
* assert.AssertionError
|
||||||
|
* assert.fail
|
||||||
|
* assert.ok
|
||||||
|
* assert.equal
|
||||||
|
* assert.notEqual
|
||||||
|
* assert.deepEqual
|
||||||
|
* assert.notDeepEqual
|
||||||
|
* assert.strictEqual
|
||||||
|
* assert.notStrictEqual
|
||||||
|
* assert.throws
|
||||||
|
* assert.doesNotThrow
|
||||||
|
* assert.ifError
|
||||||
|
|
||||||
|
# Installation
|
||||||
|
|
||||||
|
npm install assert-plus
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
The MIT License (MIT)
|
||||||
|
Copyright (c) 2012 Mark Cavage
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||||
|
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||||
|
subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
|
||||||
|
## Bugs
|
||||||
|
|
||||||
|
See <https://github.com/mcavage/node-assert-plus/issues>.
|
206
node_modules/assert-plus/assert.js
generated
vendored
Normal file
206
node_modules/assert-plus/assert.js
generated
vendored
Normal file
@ -0,0 +1,206 @@
|
|||||||
|
// Copyright (c) 2012, Mark Cavage. All rights reserved.
|
||||||
|
// Copyright 2015 Joyent, Inc.
|
||||||
|
|
||||||
|
var assert = require('assert');
|
||||||
|
var Stream = require('stream').Stream;
|
||||||
|
var util = require('util');
|
||||||
|
|
||||||
|
|
||||||
|
///--- Globals
|
||||||
|
|
||||||
|
/* JSSTYLED */
|
||||||
|
var UUID_REGEXP = /^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/;
|
||||||
|
|
||||||
|
|
||||||
|
///--- Internal
|
||||||
|
|
||||||
|
function _capitalize(str) {
|
||||||
|
return (str.charAt(0).toUpperCase() + str.slice(1));
|
||||||
|
}
|
||||||
|
|
||||||
|
function _toss(name, expected, oper, arg, actual) {
|
||||||
|
throw new assert.AssertionError({
|
||||||
|
message: util.format('%s (%s) is required', name, expected),
|
||||||
|
actual: (actual === undefined) ? typeof (arg) : actual(arg),
|
||||||
|
expected: expected,
|
||||||
|
operator: oper || '===',
|
||||||
|
stackStartFunction: _toss.caller
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function _getClass(arg) {
|
||||||
|
return (Object.prototype.toString.call(arg).slice(8, -1));
|
||||||
|
}
|
||||||
|
|
||||||
|
function noop() {
|
||||||
|
// Why even bother with asserts?
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
///--- Exports
|
||||||
|
|
||||||
|
var types = {
|
||||||
|
bool: {
|
||||||
|
check: function (arg) { return typeof (arg) === 'boolean'; }
|
||||||
|
},
|
||||||
|
func: {
|
||||||
|
check: function (arg) { return typeof (arg) === 'function'; }
|
||||||
|
},
|
||||||
|
string: {
|
||||||
|
check: function (arg) { return typeof (arg) === 'string'; }
|
||||||
|
},
|
||||||
|
object: {
|
||||||
|
check: function (arg) {
|
||||||
|
return typeof (arg) === 'object' && arg !== null;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
number: {
|
||||||
|
check: function (arg) {
|
||||||
|
return typeof (arg) === 'number' && !isNaN(arg) && isFinite(arg);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
buffer: {
|
||||||
|
check: function (arg) { return Buffer.isBuffer(arg); },
|
||||||
|
operator: 'Buffer.isBuffer'
|
||||||
|
},
|
||||||
|
array: {
|
||||||
|
check: function (arg) { return Array.isArray(arg); },
|
||||||
|
operator: 'Array.isArray'
|
||||||
|
},
|
||||||
|
stream: {
|
||||||
|
check: function (arg) { return arg instanceof Stream; },
|
||||||
|
operator: 'instanceof',
|
||||||
|
actual: _getClass
|
||||||
|
},
|
||||||
|
date: {
|
||||||
|
check: function (arg) { return arg instanceof Date; },
|
||||||
|
operator: 'instanceof',
|
||||||
|
actual: _getClass
|
||||||
|
},
|
||||||
|
regexp: {
|
||||||
|
check: function (arg) { return arg instanceof RegExp; },
|
||||||
|
operator: 'instanceof',
|
||||||
|
actual: _getClass
|
||||||
|
},
|
||||||
|
uuid: {
|
||||||
|
check: function (arg) {
|
||||||
|
return typeof (arg) === 'string' && UUID_REGEXP.test(arg);
|
||||||
|
},
|
||||||
|
operator: 'isUUID'
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
function _setExports(ndebug) {
|
||||||
|
var keys = Object.keys(types);
|
||||||
|
var out;
|
||||||
|
|
||||||
|
/* re-export standard assert */
|
||||||
|
if (process.env.NODE_NDEBUG) {
|
||||||
|
out = noop;
|
||||||
|
} else {
|
||||||
|
out = function (arg, msg) {
|
||||||
|
if (!arg) {
|
||||||
|
_toss(msg, 'true', arg);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/* standard checks */
|
||||||
|
keys.forEach(function (k) {
|
||||||
|
if (ndebug) {
|
||||||
|
out[k] = noop;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
var type = types[k];
|
||||||
|
out[k] = function (arg, msg) {
|
||||||
|
if (!type.check(arg)) {
|
||||||
|
_toss(msg, k, type.operator, arg, type.actual);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
/* optional checks */
|
||||||
|
keys.forEach(function (k) {
|
||||||
|
var name = 'optional' + _capitalize(k);
|
||||||
|
if (ndebug) {
|
||||||
|
out[name] = noop;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
var type = types[k];
|
||||||
|
out[name] = function (arg, msg) {
|
||||||
|
if (arg === undefined || arg === null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (!type.check(arg)) {
|
||||||
|
_toss(msg, k, type.operator, arg, type.actual);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
/* arrayOf checks */
|
||||||
|
keys.forEach(function (k) {
|
||||||
|
var name = 'arrayOf' + _capitalize(k);
|
||||||
|
if (ndebug) {
|
||||||
|
out[name] = noop;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
var type = types[k];
|
||||||
|
var expected = '[' + k + ']';
|
||||||
|
out[name] = function (arg, msg) {
|
||||||
|
if (!Array.isArray(arg)) {
|
||||||
|
_toss(msg, expected, type.operator, arg, type.actual);
|
||||||
|
}
|
||||||
|
var i;
|
||||||
|
for (i = 0; i < arg.length; i++) {
|
||||||
|
if (!type.check(arg[i])) {
|
||||||
|
_toss(msg, expected, type.operator, arg, type.actual);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
/* optionalArrayOf checks */
|
||||||
|
keys.forEach(function (k) {
|
||||||
|
var name = 'optionalArrayOf' + _capitalize(k);
|
||||||
|
if (ndebug) {
|
||||||
|
out[name] = noop;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
var type = types[k];
|
||||||
|
var expected = '[' + k + ']';
|
||||||
|
out[name] = function (arg, msg) {
|
||||||
|
if (arg === undefined || arg === null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (!Array.isArray(arg)) {
|
||||||
|
_toss(msg, expected, type.operator, arg, type.actual);
|
||||||
|
}
|
||||||
|
var i;
|
||||||
|
for (i = 0; i < arg.length; i++) {
|
||||||
|
if (!type.check(arg[i])) {
|
||||||
|
_toss(msg, expected, type.operator, arg, type.actual);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
/* re-export built-in assertions */
|
||||||
|
Object.keys(assert).forEach(function (k) {
|
||||||
|
if (k === 'AssertionError') {
|
||||||
|
out[k] = assert[k];
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (ndebug) {
|
||||||
|
out[k] = noop;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
out[k] = assert[k];
|
||||||
|
});
|
||||||
|
|
||||||
|
/* export ourselves (for unit tests _only_) */
|
||||||
|
out._setExports = _setExports;
|
||||||
|
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = _setExports(process.env.NODE_NDEBUG);
|
115
node_modules/assert-plus/package.json
generated
vendored
Normal file
115
node_modules/assert-plus/package.json
generated
vendored
Normal file
@ -0,0 +1,115 @@
|
|||||||
|
{
|
||||||
|
"_args": [
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"raw": "assert-plus@^0.2.0",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "assert-plus",
|
||||||
|
"name": "assert-plus",
|
||||||
|
"rawSpec": "^0.2.0",
|
||||||
|
"spec": ">=0.2.0 <0.3.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"/tank/data/SERVER/zoneadm-master/node_modules/http-signature"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"_from": "assert-plus@>=0.2.0 <0.3.0",
|
||||||
|
"_id": "assert-plus@0.2.0",
|
||||||
|
"_inCache": true,
|
||||||
|
"_location": "/assert-plus",
|
||||||
|
"_nodeVersion": "0.10.36",
|
||||||
|
"_npmUser": {
|
||||||
|
"name": "pfmooney",
|
||||||
|
"email": "patrick.f.mooney@gmail.com"
|
||||||
|
},
|
||||||
|
"_npmVersion": "3.3.8",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"raw": "assert-plus@^0.2.0",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "assert-plus",
|
||||||
|
"name": "assert-plus",
|
||||||
|
"rawSpec": "^0.2.0",
|
||||||
|
"spec": ">=0.2.0 <0.3.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/http-signature"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz",
|
||||||
|
"_shasum": "d74e1b87e7affc0db8aadb7021f3fe48101ab234",
|
||||||
|
"_shrinkwrap": null,
|
||||||
|
"_spec": "assert-plus@^0.2.0",
|
||||||
|
"_where": "/tank/data/SERVER/zoneadm-master/node_modules/http-signature",
|
||||||
|
"author": {
|
||||||
|
"name": "Mark Cavage",
|
||||||
|
"email": "mcavage@gmail.com"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/mcavage/node-assert-plus/issues"
|
||||||
|
},
|
||||||
|
"contributors": [
|
||||||
|
{
|
||||||
|
"name": "Dave Eddy",
|
||||||
|
"email": "dave@daveeddy.com"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Fred Kuo",
|
||||||
|
"email": "fred.kuo@joyent.com"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Lars-Magnus Skog",
|
||||||
|
"email": "ralphtheninja@riseup.net"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Mark Cavage",
|
||||||
|
"email": "mcavage@gmail.com"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Patrick Mooney",
|
||||||
|
"email": "pmooney@pfmooney.com"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Rob Gulewich",
|
||||||
|
"email": "robert.gulewich@joyent.com"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"dependencies": {},
|
||||||
|
"description": "Extra assertions on top of node's assert module",
|
||||||
|
"devDependencies": {
|
||||||
|
"faucet": "0.0.1",
|
||||||
|
"tape": "4.2.2"
|
||||||
|
},
|
||||||
|
"directories": {},
|
||||||
|
"dist": {
|
||||||
|
"shasum": "d74e1b87e7affc0db8aadb7021f3fe48101ab234",
|
||||||
|
"tarball": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.2.0.tgz"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=0.8"
|
||||||
|
},
|
||||||
|
"homepage": "https://github.com/mcavage/node-assert-plus#readme",
|
||||||
|
"license": "MIT",
|
||||||
|
"main": "./assert.js",
|
||||||
|
"maintainers": [
|
||||||
|
{
|
||||||
|
"name": "mcavage",
|
||||||
|
"email": "mcavage@gmail.com"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "pfmooney",
|
||||||
|
"email": "patrick.f.mooney@gmail.com"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "assert-plus",
|
||||||
|
"optionalDependencies": {},
|
||||||
|
"readme": "ERROR: No README data found!",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/mcavage/node-assert-plus.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "tape tests/*.js | ./node_modules/.bin/faucet"
|
||||||
|
},
|
||||||
|
"version": "0.2.0"
|
||||||
|
}
|
125
node_modules/async/CHANGELOG.md
generated
vendored
Normal file
125
node_modules/async/CHANGELOG.md
generated
vendored
Normal file
@ -0,0 +1,125 @@
|
|||||||
|
# v1.5.2
|
||||||
|
- Allow using `"consructor"` as an argument in `memoize` (#998)
|
||||||
|
- Give a better error messsage when `auto` dependency checking fails (#994)
|
||||||
|
- Various doc updates (#936, #956, #979, #1002)
|
||||||
|
|
||||||
|
# v1.5.1
|
||||||
|
- Fix issue with `pause` in `queue` with concurrency enabled (#946)
|
||||||
|
- `while` and `until` now pass the final result to callback (#963)
|
||||||
|
- `auto` will properly handle concurrency when there is no callback (#966)
|
||||||
|
- `auto` will now properly stop execution when an error occurs (#988, #993)
|
||||||
|
- Various doc fixes (#971, #980)
|
||||||
|
|
||||||
|
# v1.5.0
|
||||||
|
|
||||||
|
- Added `transform`, analogous to [`_.transform`](http://lodash.com/docs#transform) (#892)
|
||||||
|
- `map` now returns an object when an object is passed in, rather than array with non-numeric keys. `map` will begin always returning an array with numeric indexes in the next major release. (#873)
|
||||||
|
- `auto` now accepts an optional `concurrency` argument to limit the number of running tasks (#637)
|
||||||
|
- Added `queue#workersList()`, to retrieve the list of currently running tasks. (#891)
|
||||||
|
- Various code simplifications (#896, #904)
|
||||||
|
- Various doc fixes :scroll: (#890, #894, #903, #905, #912)
|
||||||
|
|
||||||
|
# v1.4.2
|
||||||
|
|
||||||
|
- Ensure coverage files don't get published on npm (#879)
|
||||||
|
|
||||||
|
# v1.4.1
|
||||||
|
|
||||||
|
- Add in overlooked `detectLimit` method (#866)
|
||||||
|
- Removed unnecessary files from npm releases (#861)
|
||||||
|
- Removed usage of a reserved word to prevent :boom: in older environments (#870)
|
||||||
|
|
||||||
|
# v1.4.0
|
||||||
|
|
||||||
|
- `asyncify` now supports promises (#840)
|
||||||
|
- Added `Limit` versions of `filter` and `reject` (#836)
|
||||||
|
- Add `Limit` versions of `detect`, `some` and `every` (#828, #829)
|
||||||
|
- `some`, `every` and `detect` now short circuit early (#828, #829)
|
||||||
|
- Improve detection of the global object (#804), enabling use in WebWorkers
|
||||||
|
- `whilst` now called with arguments from iterator (#823)
|
||||||
|
- `during` now gets called with arguments from iterator (#824)
|
||||||
|
- Code simplifications and optimizations aplenty ([diff](https://github.com/caolan/async/compare/v1.3.0...v1.4.0))
|
||||||
|
|
||||||
|
|
||||||
|
# v1.3.0
|
||||||
|
|
||||||
|
New Features:
|
||||||
|
- Added `constant`
|
||||||
|
- Added `asyncify`/`wrapSync` for making sync functions work with callbacks. (#671, #806)
|
||||||
|
- Added `during` and `doDuring`, which are like `whilst` with an async truth test. (#800)
|
||||||
|
- `retry` now accepts an `interval` parameter to specify a delay between retries. (#793)
|
||||||
|
- `async` should work better in Web Workers due to better `root` detection (#804)
|
||||||
|
- Callbacks are now optional in `whilst`, `doWhilst`, `until`, and `doUntil` (#642)
|
||||||
|
- Various internal updates (#786, #801, #802, #803)
|
||||||
|
- Various doc fixes (#790, #794)
|
||||||
|
|
||||||
|
Bug Fixes:
|
||||||
|
- `cargo` now exposes the `payload` size, and `cargo.payload` can be changed on the fly after the `cargo` is created. (#740, #744, #783)
|
||||||
|
|
||||||
|
|
||||||
|
# v1.2.1
|
||||||
|
|
||||||
|
Bug Fix:
|
||||||
|
|
||||||
|
- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. (#782)
|
||||||
|
|
||||||
|
|
||||||
|
# v1.2.0
|
||||||
|
|
||||||
|
New Features:
|
||||||
|
|
||||||
|
- Added `timesLimit` (#743)
|
||||||
|
- `concurrency` can be changed after initialization in `queue` by setting `q.concurrency`. The new concurrency will be reflected the next time a task is processed. (#747, #772)
|
||||||
|
|
||||||
|
Bug Fixes:
|
||||||
|
|
||||||
|
- Fixed a regression in `each` and family with empty arrays that have additional properties. (#775, #777)
|
||||||
|
|
||||||
|
|
||||||
|
# v1.1.1
|
||||||
|
|
||||||
|
Bug Fix:
|
||||||
|
|
||||||
|
- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. (#782)
|
||||||
|
|
||||||
|
|
||||||
|
# v1.1.0
|
||||||
|
|
||||||
|
New Features:
|
||||||
|
|
||||||
|
- `cargo` now supports all of the same methods and event callbacks as `queue`.
|
||||||
|
- Added `ensureAsync` - A wrapper that ensures an async function calls its callback on a later tick. (#769)
|
||||||
|
- Optimized `map`, `eachOf`, and `waterfall` families of functions
|
||||||
|
- Passing a `null` or `undefined` array to `map`, `each`, `parallel` and families will be treated as an empty array (#667).
|
||||||
|
- The callback is now optional for the composed results of `compose` and `seq`. (#618)
|
||||||
|
- Reduced file size by 4kb, (minified version by 1kb)
|
||||||
|
- Added code coverage through `nyc` and `coveralls` (#768)
|
||||||
|
|
||||||
|
Bug Fixes:
|
||||||
|
|
||||||
|
- `forever` will no longer stack overflow with a synchronous iterator (#622)
|
||||||
|
- `eachLimit` and other limit functions will stop iterating once an error occurs (#754)
|
||||||
|
- Always pass `null` in callbacks when there is no error (#439)
|
||||||
|
- Ensure proper conditions when calling `drain()` after pushing an empty data set to a queue (#668)
|
||||||
|
- `each` and family will properly handle an empty array (#578)
|
||||||
|
- `eachSeries` and family will finish if the underlying array is modified during execution (#557)
|
||||||
|
- `queue` will throw if a non-function is passed to `q.push()` (#593)
|
||||||
|
- Doc fixes (#629, #766)
|
||||||
|
|
||||||
|
|
||||||
|
# v1.0.0
|
||||||
|
|
||||||
|
No known breaking changes, we are simply complying with semver from here on out.
|
||||||
|
|
||||||
|
Changes:
|
||||||
|
|
||||||
|
- Start using a changelog!
|
||||||
|
- Add `forEachOf` for iterating over Objects (or to iterate Arrays with indexes available) (#168 #704 #321)
|
||||||
|
- Detect deadlocks in `auto` (#663)
|
||||||
|
- Better support for require.js (#527)
|
||||||
|
- Throw if queue created with concurrency `0` (#714)
|
||||||
|
- Fix unneeded iteration in `queue.resume()` (#758)
|
||||||
|
- Guard against timer mocking overriding `setImmediate` (#609 #611)
|
||||||
|
- Miscellaneous doc fixes (#542 #596 #615 #628 #631 #690 #729)
|
||||||
|
- Use single noop function internally (#546)
|
||||||
|
- Optimize internal `_each`, `_map` and `_keys` functions.
|
19
node_modules/async/LICENSE
generated
vendored
Normal file
19
node_modules/async/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
Copyright (c) 2010-2014 Caolan McMahon
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
1877
node_modules/async/README.md
generated
vendored
Normal file
1877
node_modules/async/README.md
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1265
node_modules/async/dist/async.js
generated
vendored
Normal file
1265
node_modules/async/dist/async.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
2
node_modules/async/dist/async.min.js
generated
vendored
Normal file
2
node_modules/async/dist/async.min.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1265
node_modules/async/lib/async.js
generated
vendored
Normal file
1265
node_modules/async/lib/async.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
157
node_modules/async/package.json
generated
vendored
Normal file
157
node_modules/async/package.json
generated
vendored
Normal file
@ -0,0 +1,157 @@
|
|||||||
|
{
|
||||||
|
"_args": [
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"raw": "async@^1.4.0",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "async",
|
||||||
|
"name": "async",
|
||||||
|
"rawSpec": "^1.4.0",
|
||||||
|
"spec": ">=1.4.0 <2.0.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"/tank/data/SERVER/zoneadm-master/node_modules/nconf"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"_from": "async@>=1.4.0 <2.0.0",
|
||||||
|
"_id": "async@1.5.2",
|
||||||
|
"_inCache": true,
|
||||||
|
"_location": "/async",
|
||||||
|
"_nodeVersion": "4.2.3",
|
||||||
|
"_npmUser": {
|
||||||
|
"name": "aearly",
|
||||||
|
"email": "alexander.early@gmail.com"
|
||||||
|
},
|
||||||
|
"_npmVersion": "3.5.2",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"raw": "async@^1.4.0",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "async",
|
||||||
|
"name": "async",
|
||||||
|
"rawSpec": "^1.4.0",
|
||||||
|
"spec": ">=1.4.0 <2.0.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/nconf"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz",
|
||||||
|
"_shasum": "ec6a61ae56480c0c3cb241c95618e20892f9672a",
|
||||||
|
"_shrinkwrap": null,
|
||||||
|
"_spec": "async@^1.4.0",
|
||||||
|
"_where": "/tank/data/SERVER/zoneadm-master/node_modules/nconf",
|
||||||
|
"author": {
|
||||||
|
"name": "Caolan McMahon"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/caolan/async/issues"
|
||||||
|
},
|
||||||
|
"dependencies": {},
|
||||||
|
"description": "Higher-order functions and common patterns for asynchronous code",
|
||||||
|
"devDependencies": {
|
||||||
|
"benchmark": "github:bestiejs/benchmark.js",
|
||||||
|
"bluebird": "^2.9.32",
|
||||||
|
"chai": "^3.1.0",
|
||||||
|
"coveralls": "^2.11.2",
|
||||||
|
"es6-promise": "^2.3.0",
|
||||||
|
"jscs": "^1.13.1",
|
||||||
|
"jshint": "~2.8.0",
|
||||||
|
"karma": "^0.13.2",
|
||||||
|
"karma-browserify": "^4.2.1",
|
||||||
|
"karma-firefox-launcher": "^0.1.6",
|
||||||
|
"karma-mocha": "^0.2.0",
|
||||||
|
"karma-mocha-reporter": "^1.0.2",
|
||||||
|
"lodash": "^3.9.0",
|
||||||
|
"mkdirp": "~0.5.1",
|
||||||
|
"mocha": "^2.2.5",
|
||||||
|
"native-promise-only": "^0.8.0-a",
|
||||||
|
"nodeunit": ">0.0.0",
|
||||||
|
"nyc": "^2.1.0",
|
||||||
|
"rsvp": "^3.0.18",
|
||||||
|
"semver": "^4.3.6",
|
||||||
|
"uglify-js": "~2.4.0",
|
||||||
|
"xyz": "^0.5.0",
|
||||||
|
"yargs": "~3.9.1"
|
||||||
|
},
|
||||||
|
"directories": {},
|
||||||
|
"dist": {
|
||||||
|
"shasum": "ec6a61ae56480c0c3cb241c95618e20892f9672a",
|
||||||
|
"tarball": "https://registry.npmjs.org/async/-/async-1.5.2.tgz"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"lib",
|
||||||
|
"dist/async.js",
|
||||||
|
"dist/async.min.js"
|
||||||
|
],
|
||||||
|
"gitHead": "9ab5c67b7cb3a4c3dad4a2d4552a2f6775545d6c",
|
||||||
|
"homepage": "https://github.com/caolan/async#readme",
|
||||||
|
"jam": {
|
||||||
|
"main": "lib/async.js",
|
||||||
|
"include": [
|
||||||
|
"lib/async.js",
|
||||||
|
"README.md",
|
||||||
|
"LICENSE"
|
||||||
|
],
|
||||||
|
"categories": [
|
||||||
|
"Utilities"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"keywords": [
|
||||||
|
"async",
|
||||||
|
"callback",
|
||||||
|
"utility",
|
||||||
|
"module"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"main": "lib/async.js",
|
||||||
|
"maintainers": [
|
||||||
|
{
|
||||||
|
"name": "caolan",
|
||||||
|
"email": "caolan.mcmahon@gmail.com"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "beaugunderson",
|
||||||
|
"email": "beau@beaugunderson.com"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "aearly",
|
||||||
|
"email": "alexander.early@gmail.com"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "megawac",
|
||||||
|
"email": "megawac@gmail.com"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "async",
|
||||||
|
"optionalDependencies": {},
|
||||||
|
"readme": "ERROR: No README data found!",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/caolan/async.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"coverage": "nyc npm test && nyc report",
|
||||||
|
"coveralls": "nyc npm test && nyc report --reporter=text-lcov | coveralls",
|
||||||
|
"lint": "jshint lib/*.js test/*.js perf/*.js && jscs lib/*.js test/*.js perf/*.js",
|
||||||
|
"mocha-browser-test": "karma start",
|
||||||
|
"mocha-node-test": "mocha mocha_test/",
|
||||||
|
"mocha-test": "npm run mocha-node-test && npm run mocha-browser-test",
|
||||||
|
"nodeunit-test": "nodeunit test/test-async.js",
|
||||||
|
"test": "npm run-script lint && npm run nodeunit-test && npm run mocha-test"
|
||||||
|
},
|
||||||
|
"spm": {
|
||||||
|
"main": "lib/async.js"
|
||||||
|
},
|
||||||
|
"version": "1.5.2",
|
||||||
|
"volo": {
|
||||||
|
"main": "lib/async.js",
|
||||||
|
"ignore": [
|
||||||
|
"**/.*",
|
||||||
|
"node_modules",
|
||||||
|
"bower_components",
|
||||||
|
"test",
|
||||||
|
"tests"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
21
node_modules/asynckit/LICENSE
generated
vendored
Normal file
21
node_modules/asynckit/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2016 Alex Indigo
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
233
node_modules/asynckit/README.md
generated
vendored
Normal file
233
node_modules/asynckit/README.md
generated
vendored
Normal file
@ -0,0 +1,233 @@
|
|||||||
|
# asynckit [](https://www.npmjs.com/package/asynckit)
|
||||||
|
|
||||||
|
Minimal async jobs utility library, with streams support.
|
||||||
|
|
||||||
|
[](https://travis-ci.org/alexindigo/asynckit)
|
||||||
|
[](https://travis-ci.org/alexindigo/asynckit)
|
||||||
|
[](https://ci.appveyor.com/project/alexindigo/asynckit)
|
||||||
|
|
||||||
|
[](https://coveralls.io/github/alexindigo/asynckit?branch=master)
|
||||||
|
[](https://david-dm.org/alexindigo/asynckit)
|
||||||
|
[](https://www.bithound.io/github/alexindigo/asynckit)
|
||||||
|
|
||||||
|
<!-- [](https://www.npmjs.com/package/reamde) -->
|
||||||
|
|
||||||
|
AsyncKit provides harness for `parallel` and `serial` iterators over list of items represented by arrays or objects.
|
||||||
|
Optionally it accepts abort function (should be synchronously return by iterator for each item), and terminates left over jobs upon an error event. For specific iteration order built-in (`ascending` and `descending`) and custom sort helpers also supported, via `asynckit.serialOrdered` method.
|
||||||
|
|
||||||
|
It ensures async operations to keep behavior more stable and prevent `Maximum call stack size exceeded` errors, from sync iterators.
|
||||||
|
|
||||||
|
| compression | size |
|
||||||
|
| :----------------- | -------: |
|
||||||
|
| asynckit.js | 12.34 kB |
|
||||||
|
| asynckit.min.js | 4.11 kB |
|
||||||
|
| asynckit.min.js.gz | 1.47 kB |
|
||||||
|
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```sh
|
||||||
|
$ npm install --save asynckit
|
||||||
|
```
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
### Parallel Jobs
|
||||||
|
|
||||||
|
Runs iterator over provided array in parallel. Stores output in the `result` array,
|
||||||
|
on the matching positions. In unlikely event of an error from one of the jobs,
|
||||||
|
will terminate rest of the active jobs (if abort function is provided)
|
||||||
|
and return error along with salvaged data to the main callback function.
|
||||||
|
|
||||||
|
#### Input Array
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var parallel = require('asynckit').parallel
|
||||||
|
, assert = require('assert')
|
||||||
|
;
|
||||||
|
|
||||||
|
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
||||||
|
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
|
||||||
|
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
|
||||||
|
, target = []
|
||||||
|
;
|
||||||
|
|
||||||
|
parallel(source, asyncJob, function(err, result)
|
||||||
|
{
|
||||||
|
assert.deepEqual(result, expectedResult);
|
||||||
|
assert.deepEqual(target, expectedTarget);
|
||||||
|
});
|
||||||
|
|
||||||
|
// async job accepts one element from the array
|
||||||
|
// and a callback function
|
||||||
|
function asyncJob(item, cb)
|
||||||
|
{
|
||||||
|
// different delays (in ms) per item
|
||||||
|
var delay = item * 25;
|
||||||
|
|
||||||
|
// pretend different jobs take different time to finish
|
||||||
|
// and not in consequential order
|
||||||
|
var timeoutId = setTimeout(function() {
|
||||||
|
target.push(item);
|
||||||
|
cb(null, item * 2);
|
||||||
|
}, delay);
|
||||||
|
|
||||||
|
// allow to cancel "leftover" jobs upon error
|
||||||
|
// return function, invoking of which will abort this job
|
||||||
|
return clearTimeout.bind(null, timeoutId);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js).
|
||||||
|
|
||||||
|
#### Input Object
|
||||||
|
|
||||||
|
Also it supports named jobs, listed via object.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var parallel = require('asynckit/parallel')
|
||||||
|
, assert = require('assert')
|
||||||
|
;
|
||||||
|
|
||||||
|
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
|
||||||
|
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
|
||||||
|
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
|
||||||
|
, expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ]
|
||||||
|
, target = []
|
||||||
|
, keys = []
|
||||||
|
;
|
||||||
|
|
||||||
|
parallel(source, asyncJob, function(err, result)
|
||||||
|
{
|
||||||
|
assert.deepEqual(result, expectedResult);
|
||||||
|
assert.deepEqual(target, expectedTarget);
|
||||||
|
assert.deepEqual(keys, expectedKeys);
|
||||||
|
});
|
||||||
|
|
||||||
|
// supports full value, key, callback (shortcut) interface
|
||||||
|
function asyncJob(item, key, cb)
|
||||||
|
{
|
||||||
|
// different delays (in ms) per item
|
||||||
|
var delay = item * 25;
|
||||||
|
|
||||||
|
// pretend different jobs take different time to finish
|
||||||
|
// and not in consequential order
|
||||||
|
var timeoutId = setTimeout(function() {
|
||||||
|
keys.push(key);
|
||||||
|
target.push(item);
|
||||||
|
cb(null, item * 2);
|
||||||
|
}, delay);
|
||||||
|
|
||||||
|
// allow to cancel "leftover" jobs upon error
|
||||||
|
// return function, invoking of which will abort this job
|
||||||
|
return clearTimeout.bind(null, timeoutId);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js).
|
||||||
|
|
||||||
|
### Serial Jobs
|
||||||
|
|
||||||
|
Runs iterator over provided array sequentially. Stores output in the `result` array,
|
||||||
|
on the matching positions. In unlikely event of an error from one of the jobs,
|
||||||
|
will not proceed to the rest of the items in the list
|
||||||
|
and return error along with salvaged data to the main callback function.
|
||||||
|
|
||||||
|
#### Input Array
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var serial = require('asynckit/serial')
|
||||||
|
, assert = require('assert')
|
||||||
|
;
|
||||||
|
|
||||||
|
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
||||||
|
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
|
||||||
|
, expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ]
|
||||||
|
, target = []
|
||||||
|
;
|
||||||
|
|
||||||
|
serial(source, asyncJob, function(err, result)
|
||||||
|
{
|
||||||
|
assert.deepEqual(result, expectedResult);
|
||||||
|
assert.deepEqual(target, expectedTarget);
|
||||||
|
});
|
||||||
|
|
||||||
|
// extended interface (item, key, callback)
|
||||||
|
// also supported for arrays
|
||||||
|
function asyncJob(item, key, cb)
|
||||||
|
{
|
||||||
|
target.push(key);
|
||||||
|
|
||||||
|
// it will be automatically made async
|
||||||
|
// even it iterator "returns" in the same event loop
|
||||||
|
cb(null, item * 2);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
More examples could be found in [test/test-serial-array.js](test/test-serial-array.js).
|
||||||
|
|
||||||
|
#### Input Object
|
||||||
|
|
||||||
|
Also it supports named jobs, listed via object.
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var serial = require('asynckit').serial
|
||||||
|
, assert = require('assert')
|
||||||
|
;
|
||||||
|
|
||||||
|
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
||||||
|
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
|
||||||
|
, expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ]
|
||||||
|
, target = []
|
||||||
|
;
|
||||||
|
|
||||||
|
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
|
||||||
|
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
|
||||||
|
, expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
|
||||||
|
, target = []
|
||||||
|
;
|
||||||
|
|
||||||
|
|
||||||
|
serial(source, asyncJob, function(err, result)
|
||||||
|
{
|
||||||
|
assert.deepEqual(result, expectedResult);
|
||||||
|
assert.deepEqual(target, expectedTarget);
|
||||||
|
});
|
||||||
|
|
||||||
|
// shortcut interface (item, callback)
|
||||||
|
// works for object as well as for the arrays
|
||||||
|
function asyncJob(item, cb)
|
||||||
|
{
|
||||||
|
target.push(item);
|
||||||
|
|
||||||
|
// it will be automatically made async
|
||||||
|
// even it iterator "returns" in the same event loop
|
||||||
|
cb(null, item * 2);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
More examples could be found in [test/test-serial-object.js](test/test-serial-object.js).
|
||||||
|
|
||||||
|
_Note: Since _object_ is an _unordered_ collection of properties,
|
||||||
|
it may produce unexpected results with sequential iterations.
|
||||||
|
Whenever order of the jobs' execution is important please use `serialOrdered` method._
|
||||||
|
|
||||||
|
### Ordered Serial Iterations
|
||||||
|
|
||||||
|
TBD
|
||||||
|
|
||||||
|
For example [compare-property](compare-property) package.
|
||||||
|
|
||||||
|
### Streaming interface
|
||||||
|
|
||||||
|
TBD
|
||||||
|
|
||||||
|
## Want to Know More?
|
||||||
|
|
||||||
|
More examples can be found in [test folder](test/).
|
||||||
|
|
||||||
|
Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions.
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
AsyncKit is licensed under the MIT license.
|
76
node_modules/asynckit/bench.js
generated
vendored
Normal file
76
node_modules/asynckit/bench.js
generated
vendored
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
/* eslint no-console: "off" */
|
||||||
|
|
||||||
|
var asynckit = require('./')
|
||||||
|
, async = require('async')
|
||||||
|
, assert = require('assert')
|
||||||
|
, expected = 0
|
||||||
|
;
|
||||||
|
|
||||||
|
var Benchmark = require('benchmark');
|
||||||
|
var suite = new Benchmark.Suite;
|
||||||
|
|
||||||
|
var source = [];
|
||||||
|
for (var z = 1; z < 100; z++)
|
||||||
|
{
|
||||||
|
source.push(z);
|
||||||
|
expected += z;
|
||||||
|
}
|
||||||
|
|
||||||
|
suite
|
||||||
|
// add tests
|
||||||
|
|
||||||
|
.add('async.map', function(deferred)
|
||||||
|
{
|
||||||
|
var total = 0;
|
||||||
|
|
||||||
|
async.map(source,
|
||||||
|
function(i, cb)
|
||||||
|
{
|
||||||
|
setImmediate(function()
|
||||||
|
{
|
||||||
|
total += i;
|
||||||
|
cb(null, total);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
function(err, result)
|
||||||
|
{
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.equal(result[result.length - 1], expected);
|
||||||
|
deferred.resolve();
|
||||||
|
});
|
||||||
|
}, {'defer': true})
|
||||||
|
|
||||||
|
|
||||||
|
.add('asynckit.parallel', function(deferred)
|
||||||
|
{
|
||||||
|
var total = 0;
|
||||||
|
|
||||||
|
asynckit.parallel(source,
|
||||||
|
function(i, cb)
|
||||||
|
{
|
||||||
|
setImmediate(function()
|
||||||
|
{
|
||||||
|
total += i;
|
||||||
|
cb(null, total);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
function(err, result)
|
||||||
|
{
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.equal(result[result.length - 1], expected);
|
||||||
|
deferred.resolve();
|
||||||
|
});
|
||||||
|
}, {'defer': true})
|
||||||
|
|
||||||
|
|
||||||
|
// add listeners
|
||||||
|
.on('cycle', function(ev)
|
||||||
|
{
|
||||||
|
console.log(String(ev.target));
|
||||||
|
})
|
||||||
|
.on('complete', function()
|
||||||
|
{
|
||||||
|
console.log('Fastest is ' + this.filter('fastest').map('name'));
|
||||||
|
})
|
||||||
|
// run async
|
||||||
|
.run({ 'async': true });
|
6
node_modules/asynckit/index.js
generated
vendored
Normal file
6
node_modules/asynckit/index.js
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
module.exports =
|
||||||
|
{
|
||||||
|
parallel : require('./parallel.js'),
|
||||||
|
serial : require('./serial.js'),
|
||||||
|
serialOrdered : require('./serialOrdered.js')
|
||||||
|
};
|
29
node_modules/asynckit/lib/abort.js
generated
vendored
Normal file
29
node_modules/asynckit/lib/abort.js
generated
vendored
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
// API
|
||||||
|
module.exports = abort;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Aborts leftover active jobs
|
||||||
|
*
|
||||||
|
* @param {object} state - current state object
|
||||||
|
*/
|
||||||
|
function abort(state)
|
||||||
|
{
|
||||||
|
Object.keys(state.jobs).forEach(clean.bind(state));
|
||||||
|
|
||||||
|
// reset leftover jobs
|
||||||
|
state.jobs = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cleans up leftover job by invoking abort function for the provided job id
|
||||||
|
*
|
||||||
|
* @this state
|
||||||
|
* @param {string|number} key - job id to abort
|
||||||
|
*/
|
||||||
|
function clean(key)
|
||||||
|
{
|
||||||
|
if (typeof this.jobs[key] == 'function')
|
||||||
|
{
|
||||||
|
this.jobs[key]();
|
||||||
|
}
|
||||||
|
}
|
34
node_modules/asynckit/lib/async.js
generated
vendored
Normal file
34
node_modules/asynckit/lib/async.js
generated
vendored
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
var defer = require('./defer.js');
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = async;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs provided callback asynchronously
|
||||||
|
* even if callback itself is not
|
||||||
|
*
|
||||||
|
* @param {function} callback - callback to invoke
|
||||||
|
* @returns {function} - augmented callback
|
||||||
|
*/
|
||||||
|
function async(callback)
|
||||||
|
{
|
||||||
|
var isAsync = false;
|
||||||
|
|
||||||
|
// check if async happened
|
||||||
|
defer(function() { isAsync = true; });
|
||||||
|
|
||||||
|
return function async_callback(err, result)
|
||||||
|
{
|
||||||
|
if (isAsync)
|
||||||
|
{
|
||||||
|
callback(err, result);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
defer(function nextTick_callback()
|
||||||
|
{
|
||||||
|
callback(err, result);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
26
node_modules/asynckit/lib/defer.js
generated
vendored
Normal file
26
node_modules/asynckit/lib/defer.js
generated
vendored
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
module.exports = defer;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs provided function on next iteration of the event loop
|
||||||
|
*
|
||||||
|
* @param {function} fn - function to run
|
||||||
|
*/
|
||||||
|
function defer(fn)
|
||||||
|
{
|
||||||
|
var nextTick = typeof setImmediate == 'function'
|
||||||
|
? setImmediate
|
||||||
|
: (
|
||||||
|
typeof process == 'object' && typeof process.nextTick == 'function'
|
||||||
|
? process.nextTick
|
||||||
|
: null
|
||||||
|
);
|
||||||
|
|
||||||
|
if (nextTick)
|
||||||
|
{
|
||||||
|
nextTick(fn);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
setTimeout(fn, 0);
|
||||||
|
}
|
||||||
|
}
|
75
node_modules/asynckit/lib/iterate.js
generated
vendored
Normal file
75
node_modules/asynckit/lib/iterate.js
generated
vendored
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
var async = require('./async.js')
|
||||||
|
, abort = require('./abort.js')
|
||||||
|
;
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = iterate;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Iterates over each job object
|
||||||
|
*
|
||||||
|
* @param {array|object} list - array or object (named list) to iterate over
|
||||||
|
* @param {function} iterator - iterator to run
|
||||||
|
* @param {object} state - current job status
|
||||||
|
* @param {function} callback - invoked when all elements processed
|
||||||
|
*/
|
||||||
|
function iterate(list, iterator, state, callback)
|
||||||
|
{
|
||||||
|
// store current index
|
||||||
|
var key = state['keyedList'] ? state['keyedList'][state.index] : state.index;
|
||||||
|
|
||||||
|
state.jobs[key] = runJob(iterator, key, list[key], function(error, output)
|
||||||
|
{
|
||||||
|
// don't repeat yourself
|
||||||
|
// skip secondary callbacks
|
||||||
|
if (!(key in state.jobs))
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// clean up jobs
|
||||||
|
delete state.jobs[key];
|
||||||
|
|
||||||
|
if (error)
|
||||||
|
{
|
||||||
|
// don't process rest of the results
|
||||||
|
// stop still active jobs
|
||||||
|
// and reset the list
|
||||||
|
abort(state);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
state.results[key] = output;
|
||||||
|
}
|
||||||
|
|
||||||
|
// return salvaged results
|
||||||
|
callback(error, state.results);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs iterator over provided job element
|
||||||
|
*
|
||||||
|
* @param {function} iterator - iterator to invoke
|
||||||
|
* @param {string|number} key - key/index of the element in the list of jobs
|
||||||
|
* @param {mixed} item - job description
|
||||||
|
* @param {function} callback - invoked after iterator is done with the job
|
||||||
|
* @returns {function|mixed} - job abort function or something else
|
||||||
|
*/
|
||||||
|
function runJob(iterator, key, item, callback)
|
||||||
|
{
|
||||||
|
var aborter;
|
||||||
|
|
||||||
|
// allow shortcut if iterator expects only two arguments
|
||||||
|
if (iterator.length == 2)
|
||||||
|
{
|
||||||
|
aborter = iterator(item, async(callback));
|
||||||
|
}
|
||||||
|
// otherwise go with full three arguments
|
||||||
|
else
|
||||||
|
{
|
||||||
|
aborter = iterator(item, key, async(callback));
|
||||||
|
}
|
||||||
|
|
||||||
|
return aborter;
|
||||||
|
}
|
91
node_modules/asynckit/lib/readable_asynckit.js
generated
vendored
Normal file
91
node_modules/asynckit/lib/readable_asynckit.js
generated
vendored
Normal file
@ -0,0 +1,91 @@
|
|||||||
|
var streamify = require('./streamify.js')
|
||||||
|
, defer = require('./defer.js')
|
||||||
|
;
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = ReadableAsyncKit;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Base constructor for all streams
|
||||||
|
* used to hold properties/methods
|
||||||
|
*/
|
||||||
|
function ReadableAsyncKit()
|
||||||
|
{
|
||||||
|
ReadableAsyncKit.super_.apply(this, arguments);
|
||||||
|
|
||||||
|
// list of active jobs
|
||||||
|
this.jobs = {};
|
||||||
|
|
||||||
|
// add stream methods
|
||||||
|
this.destroy = destroy;
|
||||||
|
this._start = _start;
|
||||||
|
this._read = _read;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Destroys readable stream,
|
||||||
|
* by aborting outstanding jobs
|
||||||
|
*
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
function destroy()
|
||||||
|
{
|
||||||
|
if (this.destroyed)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.destroyed = true;
|
||||||
|
|
||||||
|
if (typeof this.terminator == 'function')
|
||||||
|
{
|
||||||
|
this.terminator();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Starts provided jobs in async manner
|
||||||
|
*
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
function _start()
|
||||||
|
{
|
||||||
|
// first argument – runner function
|
||||||
|
var runner = arguments[0]
|
||||||
|
// take away first argument
|
||||||
|
, args = Array.prototype.slice.call(arguments, 1)
|
||||||
|
// second argument - input data
|
||||||
|
, input = args[0]
|
||||||
|
// last argument - result callback
|
||||||
|
, endCb = streamify.callback.call(this, args[args.length - 1])
|
||||||
|
;
|
||||||
|
|
||||||
|
args[args.length - 1] = endCb;
|
||||||
|
// third argument - iterator
|
||||||
|
args[1] = streamify.iterator.call(this, args[1]);
|
||||||
|
|
||||||
|
// allow time for proper setup
|
||||||
|
defer(function()
|
||||||
|
{
|
||||||
|
if (!this.destroyed)
|
||||||
|
{
|
||||||
|
this.terminator = runner.apply(null, args);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
endCb(null, Array.isArray(input) ? [] : {});
|
||||||
|
}
|
||||||
|
}.bind(this));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Implement _read to comply with Readable streams
|
||||||
|
* Doesn't really make sense for flowing object mode
|
||||||
|
*
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
function _read()
|
||||||
|
{
|
||||||
|
|
||||||
|
}
|
25
node_modules/asynckit/lib/readable_parallel.js
generated
vendored
Normal file
25
node_modules/asynckit/lib/readable_parallel.js
generated
vendored
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
var parallel = require('../parallel.js');
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = ReadableParallel;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Streaming wrapper to `asynckit.parallel`
|
||||||
|
*
|
||||||
|
* @param {array|object} list - array or object (named list) to iterate over
|
||||||
|
* @param {function} iterator - iterator to run
|
||||||
|
* @param {function} callback - invoked when all elements processed
|
||||||
|
* @returns {stream.Readable#}
|
||||||
|
*/
|
||||||
|
function ReadableParallel(list, iterator, callback)
|
||||||
|
{
|
||||||
|
if (!(this instanceof ReadableParallel))
|
||||||
|
{
|
||||||
|
return new ReadableParallel(list, iterator, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
// turn on object mode
|
||||||
|
ReadableParallel.super_.call(this, {objectMode: true});
|
||||||
|
|
||||||
|
this._start(parallel, list, iterator, callback);
|
||||||
|
}
|
25
node_modules/asynckit/lib/readable_serial.js
generated
vendored
Normal file
25
node_modules/asynckit/lib/readable_serial.js
generated
vendored
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
var serial = require('../serial.js');
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = ReadableSerial;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Streaming wrapper to `asynckit.serial`
|
||||||
|
*
|
||||||
|
* @param {array|object} list - array or object (named list) to iterate over
|
||||||
|
* @param {function} iterator - iterator to run
|
||||||
|
* @param {function} callback - invoked when all elements processed
|
||||||
|
* @returns {stream.Readable#}
|
||||||
|
*/
|
||||||
|
function ReadableSerial(list, iterator, callback)
|
||||||
|
{
|
||||||
|
if (!(this instanceof ReadableSerial))
|
||||||
|
{
|
||||||
|
return new ReadableSerial(list, iterator, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
// turn on object mode
|
||||||
|
ReadableSerial.super_.call(this, {objectMode: true});
|
||||||
|
|
||||||
|
this._start(serial, list, iterator, callback);
|
||||||
|
}
|
29
node_modules/asynckit/lib/readable_serial_ordered.js
generated
vendored
Normal file
29
node_modules/asynckit/lib/readable_serial_ordered.js
generated
vendored
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
var serialOrdered = require('../serialOrdered.js');
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = ReadableSerialOrdered;
|
||||||
|
// expose sort helpers
|
||||||
|
module.exports.ascending = serialOrdered.ascending;
|
||||||
|
module.exports.descending = serialOrdered.descending;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Streaming wrapper to `asynckit.serialOrdered`
|
||||||
|
*
|
||||||
|
* @param {array|object} list - array or object (named list) to iterate over
|
||||||
|
* @param {function} iterator - iterator to run
|
||||||
|
* @param {function} sortMethod - custom sort function
|
||||||
|
* @param {function} callback - invoked when all elements processed
|
||||||
|
* @returns {stream.Readable#}
|
||||||
|
*/
|
||||||
|
function ReadableSerialOrdered(list, iterator, sortMethod, callback)
|
||||||
|
{
|
||||||
|
if (!(this instanceof ReadableSerialOrdered))
|
||||||
|
{
|
||||||
|
return new ReadableSerialOrdered(list, iterator, sortMethod, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
// turn on object mode
|
||||||
|
ReadableSerialOrdered.super_.call(this, {objectMode: true});
|
||||||
|
|
||||||
|
this._start(serialOrdered, list, iterator, sortMethod, callback);
|
||||||
|
}
|
37
node_modules/asynckit/lib/state.js
generated
vendored
Normal file
37
node_modules/asynckit/lib/state.js
generated
vendored
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
// API
|
||||||
|
module.exports = state;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates initial state object
|
||||||
|
* for iteration over list
|
||||||
|
*
|
||||||
|
* @param {array|object} list - list to iterate over
|
||||||
|
* @param {function|null} sortMethod - function to use for keys sort,
|
||||||
|
* or `null` to keep them as is
|
||||||
|
* @returns {object} - initial state object
|
||||||
|
*/
|
||||||
|
function state(list, sortMethod)
|
||||||
|
{
|
||||||
|
var isNamedList = !Array.isArray(list)
|
||||||
|
, initState =
|
||||||
|
{
|
||||||
|
index : 0,
|
||||||
|
keyedList: isNamedList || sortMethod ? Object.keys(list) : null,
|
||||||
|
jobs : {},
|
||||||
|
results : isNamedList ? {} : [],
|
||||||
|
size : isNamedList ? Object.keys(list).length : list.length
|
||||||
|
}
|
||||||
|
;
|
||||||
|
|
||||||
|
if (sortMethod)
|
||||||
|
{
|
||||||
|
// sort array keys based on it's values
|
||||||
|
// sort object's keys just on own merit
|
||||||
|
initState.keyedList.sort(isNamedList ? sortMethod : function(a, b)
|
||||||
|
{
|
||||||
|
return sortMethod(list[a], list[b]);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return initState;
|
||||||
|
}
|
141
node_modules/asynckit/lib/streamify.js
generated
vendored
Normal file
141
node_modules/asynckit/lib/streamify.js
generated
vendored
Normal file
@ -0,0 +1,141 @@
|
|||||||
|
var async = require('./async.js');
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = {
|
||||||
|
iterator: wrapIterator,
|
||||||
|
callback: wrapCallback
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wraps iterators with long signature
|
||||||
|
*
|
||||||
|
* @this ReadableAsyncKit#
|
||||||
|
* @param {function} iterator - function to wrap
|
||||||
|
* @returns {function} - wrapped function
|
||||||
|
*/
|
||||||
|
function wrapIterator(iterator)
|
||||||
|
{
|
||||||
|
var stream = this;
|
||||||
|
|
||||||
|
return function(item, key, cb)
|
||||||
|
{
|
||||||
|
var aborter
|
||||||
|
, wrappedCb = async(wrapIteratorCallback.call(stream, cb, key))
|
||||||
|
;
|
||||||
|
|
||||||
|
stream.jobs[key] = wrappedCb;
|
||||||
|
|
||||||
|
// it's either shortcut (item, cb)
|
||||||
|
if (iterator.length == 2)
|
||||||
|
{
|
||||||
|
aborter = iterator(item, wrappedCb);
|
||||||
|
}
|
||||||
|
// or long format (item, key, cb)
|
||||||
|
else
|
||||||
|
{
|
||||||
|
aborter = iterator(item, key, wrappedCb);
|
||||||
|
}
|
||||||
|
|
||||||
|
return aborter;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wraps provided callback function
|
||||||
|
* allowing to execute snitch function before
|
||||||
|
* real callback
|
||||||
|
*
|
||||||
|
* @this ReadableAsyncKit#
|
||||||
|
* @param {function} callback - function to wrap
|
||||||
|
* @returns {function} - wrapped function
|
||||||
|
*/
|
||||||
|
function wrapCallback(callback)
|
||||||
|
{
|
||||||
|
var stream = this;
|
||||||
|
|
||||||
|
var wrapped = function(error, result)
|
||||||
|
{
|
||||||
|
return finisher.call(stream, error, result, callback);
|
||||||
|
};
|
||||||
|
|
||||||
|
return wrapped;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wraps provided iterator callback function
|
||||||
|
* makes sure snitch only called once,
|
||||||
|
* but passes secondary calls to the original callback
|
||||||
|
*
|
||||||
|
* @this ReadableAsyncKit#
|
||||||
|
* @param {function} callback - callback to wrap
|
||||||
|
* @param {number|string} key - iteration key
|
||||||
|
* @returns {function} wrapped callback
|
||||||
|
*/
|
||||||
|
function wrapIteratorCallback(callback, key)
|
||||||
|
{
|
||||||
|
var stream = this;
|
||||||
|
|
||||||
|
return function(error, output)
|
||||||
|
{
|
||||||
|
// don't repeat yourself
|
||||||
|
if (!(key in stream.jobs))
|
||||||
|
{
|
||||||
|
callback(error, output);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// clean up jobs
|
||||||
|
delete stream.jobs[key];
|
||||||
|
|
||||||
|
return streamer.call(stream, error, {key: key, value: output}, callback);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stream wrapper for iterator callback
|
||||||
|
*
|
||||||
|
* @this ReadableAsyncKit#
|
||||||
|
* @param {mixed} error - error response
|
||||||
|
* @param {mixed} output - iterator output
|
||||||
|
* @param {function} callback - callback that expects iterator results
|
||||||
|
*/
|
||||||
|
function streamer(error, output, callback)
|
||||||
|
{
|
||||||
|
if (error && !this.error)
|
||||||
|
{
|
||||||
|
this.error = error;
|
||||||
|
this.pause();
|
||||||
|
this.emit('error', error);
|
||||||
|
// send back value only, as expected
|
||||||
|
callback(error, output && output.value);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// stream stuff
|
||||||
|
this.push(output);
|
||||||
|
|
||||||
|
// back to original track
|
||||||
|
// send back value only, as expected
|
||||||
|
callback(error, output && output.value);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stream wrapper for finishing callback
|
||||||
|
*
|
||||||
|
* @this ReadableAsyncKit#
|
||||||
|
* @param {mixed} error - error response
|
||||||
|
* @param {mixed} output - iterator output
|
||||||
|
* @param {function} callback - callback that expects final results
|
||||||
|
*/
|
||||||
|
function finisher(error, output, callback)
|
||||||
|
{
|
||||||
|
// signal end of the stream
|
||||||
|
// only for successfully finished streams
|
||||||
|
if (!error)
|
||||||
|
{
|
||||||
|
this.push(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
// back to original track
|
||||||
|
callback(error, output);
|
||||||
|
}
|
29
node_modules/asynckit/lib/terminator.js
generated
vendored
Normal file
29
node_modules/asynckit/lib/terminator.js
generated
vendored
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
var abort = require('./abort.js')
|
||||||
|
, async = require('./async.js')
|
||||||
|
;
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports = terminator;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Terminates jobs in the attached state context
|
||||||
|
*
|
||||||
|
* @this AsyncKitState#
|
||||||
|
* @param {function} callback - final callback to invoke after termination
|
||||||
|
*/
|
||||||
|
function terminator(callback)
|
||||||
|
{
|
||||||
|
if (!Object.keys(this.jobs).length)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// fast forward iteration index
|
||||||
|
this.index = this.size;
|
||||||
|
|
||||||
|
// abort jobs
|
||||||
|
abort(this);
|
||||||
|
|
||||||
|
// send back results we have so far
|
||||||
|
async(callback)(null, this.results);
|
||||||
|
}
|
126
node_modules/asynckit/package.json
generated
vendored
Normal file
126
node_modules/asynckit/package.json
generated
vendored
Normal file
@ -0,0 +1,126 @@
|
|||||||
|
{
|
||||||
|
"_args": [
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"raw": "asynckit@^0.4.0",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "asynckit",
|
||||||
|
"name": "asynckit",
|
||||||
|
"rawSpec": "^0.4.0",
|
||||||
|
"spec": ">=0.4.0 <0.5.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"/tank/data/SERVER/zoneadm-master/node_modules/form-data"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"_from": "asynckit@>=0.4.0 <0.5.0",
|
||||||
|
"_id": "asynckit@0.4.0",
|
||||||
|
"_inCache": true,
|
||||||
|
"_location": "/asynckit",
|
||||||
|
"_nodeVersion": "0.12.11",
|
||||||
|
"_npmOperationalInternal": {
|
||||||
|
"host": "packages-16-east.internal.npmjs.com",
|
||||||
|
"tmp": "tmp/asynckit-0.4.0.tgz_1465928940169_0.8008207362145185"
|
||||||
|
},
|
||||||
|
"_npmUser": {
|
||||||
|
"name": "alexindigo",
|
||||||
|
"email": "iam@alexindigo.com"
|
||||||
|
},
|
||||||
|
"_npmVersion": "2.15.6",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"raw": "asynckit@^0.4.0",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "asynckit",
|
||||||
|
"name": "asynckit",
|
||||||
|
"rawSpec": "^0.4.0",
|
||||||
|
"spec": ">=0.4.0 <0.5.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/form-data"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||||
|
"_shasum": "c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79",
|
||||||
|
"_shrinkwrap": null,
|
||||||
|
"_spec": "asynckit@^0.4.0",
|
||||||
|
"_where": "/tank/data/SERVER/zoneadm-master/node_modules/form-data",
|
||||||
|
"author": {
|
||||||
|
"name": "Alex Indigo",
|
||||||
|
"email": "iam@alexindigo.com"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/alexindigo/asynckit/issues"
|
||||||
|
},
|
||||||
|
"dependencies": {},
|
||||||
|
"description": "Minimal async jobs utility library, with streams support",
|
||||||
|
"devDependencies": {
|
||||||
|
"browserify": "^13.0.0",
|
||||||
|
"browserify-istanbul": "^2.0.0",
|
||||||
|
"coveralls": "^2.11.9",
|
||||||
|
"eslint": "^2.9.0",
|
||||||
|
"istanbul": "^0.4.3",
|
||||||
|
"obake": "^0.1.2",
|
||||||
|
"phantomjs-prebuilt": "^2.1.7",
|
||||||
|
"pre-commit": "^1.1.3",
|
||||||
|
"reamde": "^1.1.0",
|
||||||
|
"rimraf": "^2.5.2",
|
||||||
|
"size-table": "^0.2.0",
|
||||||
|
"tap-spec": "^4.1.1",
|
||||||
|
"tape": "^4.5.1"
|
||||||
|
},
|
||||||
|
"directories": {},
|
||||||
|
"dist": {
|
||||||
|
"shasum": "c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79",
|
||||||
|
"tarball": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz"
|
||||||
|
},
|
||||||
|
"gitHead": "583a75ed4fe41761b66416bb6e703ebb1f8963bf",
|
||||||
|
"homepage": "https://github.com/alexindigo/asynckit#readme",
|
||||||
|
"keywords": [
|
||||||
|
"async",
|
||||||
|
"jobs",
|
||||||
|
"parallel",
|
||||||
|
"serial",
|
||||||
|
"iterator",
|
||||||
|
"array",
|
||||||
|
"object",
|
||||||
|
"stream",
|
||||||
|
"destroy",
|
||||||
|
"terminate",
|
||||||
|
"abort"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"main": "index.js",
|
||||||
|
"maintainers": [
|
||||||
|
{
|
||||||
|
"name": "alexindigo",
|
||||||
|
"email": "iam@alexindigo.com"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "asynckit",
|
||||||
|
"optionalDependencies": {},
|
||||||
|
"pre-commit": [
|
||||||
|
"clean",
|
||||||
|
"lint",
|
||||||
|
"test",
|
||||||
|
"browser",
|
||||||
|
"report",
|
||||||
|
"size"
|
||||||
|
],
|
||||||
|
"readme": "ERROR: No README data found!",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/alexindigo/asynckit.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec",
|
||||||
|
"clean": "rimraf coverage",
|
||||||
|
"debug": "tape test/test-*.js",
|
||||||
|
"lint": "eslint *.js lib/*.js test/*.js",
|
||||||
|
"report": "istanbul report",
|
||||||
|
"size": "browserify index.js | size-table asynckit",
|
||||||
|
"test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec",
|
||||||
|
"win-test": "tape test/test-*.js"
|
||||||
|
},
|
||||||
|
"version": "0.4.0"
|
||||||
|
}
|
43
node_modules/asynckit/parallel.js
generated
vendored
Normal file
43
node_modules/asynckit/parallel.js
generated
vendored
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
var iterate = require('./lib/iterate.js')
|
||||||
|
, initState = require('./lib/state.js')
|
||||||
|
, terminator = require('./lib/terminator.js')
|
||||||
|
;
|
||||||
|
|
||||||
|
// Public API
|
||||||
|
module.exports = parallel;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs iterator over provided array elements in parallel
|
||||||
|
*
|
||||||
|
* @param {array|object} list - array or object (named list) to iterate over
|
||||||
|
* @param {function} iterator - iterator to run
|
||||||
|
* @param {function} callback - invoked when all elements processed
|
||||||
|
* @returns {function} - jobs terminator
|
||||||
|
*/
|
||||||
|
function parallel(list, iterator, callback)
|
||||||
|
{
|
||||||
|
var state = initState(list);
|
||||||
|
|
||||||
|
while (state.index < (state['keyedList'] || list).length)
|
||||||
|
{
|
||||||
|
iterate(list, iterator, state, function(error, result)
|
||||||
|
{
|
||||||
|
if (error)
|
||||||
|
{
|
||||||
|
callback(error, result);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// looks like it's the last one
|
||||||
|
if (Object.keys(state.jobs).length === 0)
|
||||||
|
{
|
||||||
|
callback(null, state.results);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
state.index++;
|
||||||
|
}
|
||||||
|
|
||||||
|
return terminator.bind(state, callback);
|
||||||
|
}
|
17
node_modules/asynckit/serial.js
generated
vendored
Normal file
17
node_modules/asynckit/serial.js
generated
vendored
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
var serialOrdered = require('./serialOrdered.js');
|
||||||
|
|
||||||
|
// Public API
|
||||||
|
module.exports = serial;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs iterator over provided array elements in series
|
||||||
|
*
|
||||||
|
* @param {array|object} list - array or object (named list) to iterate over
|
||||||
|
* @param {function} iterator - iterator to run
|
||||||
|
* @param {function} callback - invoked when all elements processed
|
||||||
|
* @returns {function} - jobs terminator
|
||||||
|
*/
|
||||||
|
function serial(list, iterator, callback)
|
||||||
|
{
|
||||||
|
return serialOrdered(list, iterator, null, callback);
|
||||||
|
}
|
75
node_modules/asynckit/serialOrdered.js
generated
vendored
Normal file
75
node_modules/asynckit/serialOrdered.js
generated
vendored
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
var iterate = require('./lib/iterate.js')
|
||||||
|
, initState = require('./lib/state.js')
|
||||||
|
, terminator = require('./lib/terminator.js')
|
||||||
|
;
|
||||||
|
|
||||||
|
// Public API
|
||||||
|
module.exports = serialOrdered;
|
||||||
|
// sorting helpers
|
||||||
|
module.exports.ascending = ascending;
|
||||||
|
module.exports.descending = descending;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Runs iterator over provided sorted array elements in series
|
||||||
|
*
|
||||||
|
* @param {array|object} list - array or object (named list) to iterate over
|
||||||
|
* @param {function} iterator - iterator to run
|
||||||
|
* @param {function} sortMethod - custom sort function
|
||||||
|
* @param {function} callback - invoked when all elements processed
|
||||||
|
* @returns {function} - jobs terminator
|
||||||
|
*/
|
||||||
|
function serialOrdered(list, iterator, sortMethod, callback)
|
||||||
|
{
|
||||||
|
var state = initState(list, sortMethod);
|
||||||
|
|
||||||
|
iterate(list, iterator, state, function iteratorHandler(error, result)
|
||||||
|
{
|
||||||
|
if (error)
|
||||||
|
{
|
||||||
|
callback(error, result);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
state.index++;
|
||||||
|
|
||||||
|
// are we there yet?
|
||||||
|
if (state.index < (state['keyedList'] || list).length)
|
||||||
|
{
|
||||||
|
iterate(list, iterator, state, iteratorHandler);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// done here
|
||||||
|
callback(null, state.results);
|
||||||
|
});
|
||||||
|
|
||||||
|
return terminator.bind(state, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* -- Sort methods
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* sort helper to sort array elements in ascending order
|
||||||
|
*
|
||||||
|
* @param {mixed} a - an item to compare
|
||||||
|
* @param {mixed} b - an item to compare
|
||||||
|
* @returns {number} - comparison result
|
||||||
|
*/
|
||||||
|
function ascending(a, b)
|
||||||
|
{
|
||||||
|
return a < b ? -1 : a > b ? 1 : 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* sort helper to sort array elements in descending order
|
||||||
|
*
|
||||||
|
* @param {mixed} a - an item to compare
|
||||||
|
* @param {mixed} b - an item to compare
|
||||||
|
* @returns {number} - comparison result
|
||||||
|
*/
|
||||||
|
function descending(a, b)
|
||||||
|
{
|
||||||
|
return -1 * ascending(a, b);
|
||||||
|
}
|
21
node_modules/asynckit/stream.js
generated
vendored
Normal file
21
node_modules/asynckit/stream.js
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
var inherits = require('util').inherits
|
||||||
|
, Readable = require('stream').Readable
|
||||||
|
, ReadableAsyncKit = require('./lib/readable_asynckit.js')
|
||||||
|
, ReadableParallel = require('./lib/readable_parallel.js')
|
||||||
|
, ReadableSerial = require('./lib/readable_serial.js')
|
||||||
|
, ReadableSerialOrdered = require('./lib/readable_serial_ordered.js')
|
||||||
|
;
|
||||||
|
|
||||||
|
// API
|
||||||
|
module.exports =
|
||||||
|
{
|
||||||
|
parallel : ReadableParallel,
|
||||||
|
serial : ReadableSerial,
|
||||||
|
serialOrdered : ReadableSerialOrdered,
|
||||||
|
};
|
||||||
|
|
||||||
|
inherits(ReadableAsyncKit, Readable);
|
||||||
|
|
||||||
|
inherits(ReadableParallel, ReadableAsyncKit);
|
||||||
|
inherits(ReadableSerial, ReadableAsyncKit);
|
||||||
|
inherits(ReadableSerialOrdered, ReadableAsyncKit);
|
55
node_modules/aws-sign2/LICENSE
generated
vendored
Normal file
55
node_modules/aws-sign2/LICENSE
generated
vendored
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
Apache License
|
||||||
|
|
||||||
|
Version 2.0, January 2004
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
|
||||||
|
|
||||||
|
You must give any other recipients of the Work or Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
You must cause any modified files to carry prominent notices stating that You changed the files; and
|
||||||
|
|
||||||
|
You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
|
||||||
|
|
||||||
|
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
4
node_modules/aws-sign2/README.md
generated
vendored
Normal file
4
node_modules/aws-sign2/README.md
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
aws-sign
|
||||||
|
========
|
||||||
|
|
||||||
|
AWS signing. Originally pulled from LearnBoost/knox, maintained as vendor in request, now a standalone module.
|
212
node_modules/aws-sign2/index.js
generated
vendored
Normal file
212
node_modules/aws-sign2/index.js
generated
vendored
Normal file
@ -0,0 +1,212 @@
|
|||||||
|
|
||||||
|
/*!
|
||||||
|
* Copyright 2010 LearnBoost <dev@learnboost.com>
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module dependencies.
|
||||||
|
*/
|
||||||
|
|
||||||
|
var crypto = require('crypto')
|
||||||
|
, parse = require('url').parse
|
||||||
|
;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Valid keys.
|
||||||
|
*/
|
||||||
|
|
||||||
|
var keys =
|
||||||
|
[ 'acl'
|
||||||
|
, 'location'
|
||||||
|
, 'logging'
|
||||||
|
, 'notification'
|
||||||
|
, 'partNumber'
|
||||||
|
, 'policy'
|
||||||
|
, 'requestPayment'
|
||||||
|
, 'torrent'
|
||||||
|
, 'uploadId'
|
||||||
|
, 'uploads'
|
||||||
|
, 'versionId'
|
||||||
|
, 'versioning'
|
||||||
|
, 'versions'
|
||||||
|
, 'website'
|
||||||
|
]
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return an "Authorization" header value with the given `options`
|
||||||
|
* in the form of "AWS <key>:<signature>"
|
||||||
|
*
|
||||||
|
* @param {Object} options
|
||||||
|
* @return {String}
|
||||||
|
* @api private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function authorization (options) {
|
||||||
|
return 'AWS ' + options.key + ':' + sign(options)
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = authorization
|
||||||
|
module.exports.authorization = authorization
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Simple HMAC-SHA1 Wrapper
|
||||||
|
*
|
||||||
|
* @param {Object} options
|
||||||
|
* @return {String}
|
||||||
|
* @api private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function hmacSha1 (options) {
|
||||||
|
return crypto.createHmac('sha1', options.secret).update(options.message).digest('base64')
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports.hmacSha1 = hmacSha1
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a base64 sha1 HMAC for `options`.
|
||||||
|
*
|
||||||
|
* @param {Object} options
|
||||||
|
* @return {String}
|
||||||
|
* @api private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function sign (options) {
|
||||||
|
options.message = stringToSign(options)
|
||||||
|
return hmacSha1(options)
|
||||||
|
}
|
||||||
|
module.exports.sign = sign
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a base64 sha1 HMAC for `options`.
|
||||||
|
*
|
||||||
|
* Specifically to be used with S3 presigned URLs
|
||||||
|
*
|
||||||
|
* @param {Object} options
|
||||||
|
* @return {String}
|
||||||
|
* @api private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function signQuery (options) {
|
||||||
|
options.message = queryStringToSign(options)
|
||||||
|
return hmacSha1(options)
|
||||||
|
}
|
||||||
|
module.exports.signQuery= signQuery
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return a string for sign() with the given `options`.
|
||||||
|
*
|
||||||
|
* Spec:
|
||||||
|
*
|
||||||
|
* <verb>\n
|
||||||
|
* <md5>\n
|
||||||
|
* <content-type>\n
|
||||||
|
* <date>\n
|
||||||
|
* [headers\n]
|
||||||
|
* <resource>
|
||||||
|
*
|
||||||
|
* @param {Object} options
|
||||||
|
* @return {String}
|
||||||
|
* @api private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function stringToSign (options) {
|
||||||
|
var headers = options.amazonHeaders || ''
|
||||||
|
if (headers) headers += '\n'
|
||||||
|
var r =
|
||||||
|
[ options.verb
|
||||||
|
, options.md5
|
||||||
|
, options.contentType
|
||||||
|
, options.date ? options.date.toUTCString() : ''
|
||||||
|
, headers + options.resource
|
||||||
|
]
|
||||||
|
return r.join('\n')
|
||||||
|
}
|
||||||
|
module.exports.queryStringToSign = stringToSign
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return a string for sign() with the given `options`, but is meant exclusively
|
||||||
|
* for S3 presigned URLs
|
||||||
|
*
|
||||||
|
* Spec:
|
||||||
|
*
|
||||||
|
* <date>\n
|
||||||
|
* <resource>
|
||||||
|
*
|
||||||
|
* @param {Object} options
|
||||||
|
* @return {String}
|
||||||
|
* @api private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function queryStringToSign (options){
|
||||||
|
return 'GET\n\n\n' + options.date + '\n' + options.resource
|
||||||
|
}
|
||||||
|
module.exports.queryStringToSign = queryStringToSign
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Perform the following:
|
||||||
|
*
|
||||||
|
* - ignore non-amazon headers
|
||||||
|
* - lowercase fields
|
||||||
|
* - sort lexicographically
|
||||||
|
* - trim whitespace between ":"
|
||||||
|
* - join with newline
|
||||||
|
*
|
||||||
|
* @param {Object} headers
|
||||||
|
* @return {String}
|
||||||
|
* @api private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function canonicalizeHeaders (headers) {
|
||||||
|
var buf = []
|
||||||
|
, fields = Object.keys(headers)
|
||||||
|
;
|
||||||
|
for (var i = 0, len = fields.length; i < len; ++i) {
|
||||||
|
var field = fields[i]
|
||||||
|
, val = headers[field]
|
||||||
|
, field = field.toLowerCase()
|
||||||
|
;
|
||||||
|
if (0 !== field.indexOf('x-amz')) continue
|
||||||
|
buf.push(field + ':' + val)
|
||||||
|
}
|
||||||
|
return buf.sort().join('\n')
|
||||||
|
}
|
||||||
|
module.exports.canonicalizeHeaders = canonicalizeHeaders
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Perform the following:
|
||||||
|
*
|
||||||
|
* - ignore non sub-resources
|
||||||
|
* - sort lexicographically
|
||||||
|
*
|
||||||
|
* @param {String} resource
|
||||||
|
* @return {String}
|
||||||
|
* @api private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function canonicalizeResource (resource) {
|
||||||
|
var url = parse(resource, true)
|
||||||
|
, path = url.pathname
|
||||||
|
, buf = []
|
||||||
|
;
|
||||||
|
|
||||||
|
Object.keys(url.query).forEach(function(key){
|
||||||
|
if (!~keys.indexOf(key)) return
|
||||||
|
var val = '' == url.query[key] ? '' : '=' + encodeURIComponent(url.query[key])
|
||||||
|
buf.push(key + val)
|
||||||
|
})
|
||||||
|
|
||||||
|
return path + (buf.length ? '?' + buf.sort().join('&') : '')
|
||||||
|
}
|
||||||
|
module.exports.canonicalizeResource = canonicalizeResource
|
81
node_modules/aws-sign2/package.json
generated
vendored
Normal file
81
node_modules/aws-sign2/package.json
generated
vendored
Normal file
@ -0,0 +1,81 @@
|
|||||||
|
{
|
||||||
|
"_args": [
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"raw": "aws-sign2@~0.6.0",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "aws-sign2",
|
||||||
|
"name": "aws-sign2",
|
||||||
|
"rawSpec": "~0.6.0",
|
||||||
|
"spec": ">=0.6.0 <0.7.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"/tank/data/SERVER/zoneadm-master/node_modules/request"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"_from": "aws-sign2@>=0.6.0 <0.7.0",
|
||||||
|
"_id": "aws-sign2@0.6.0",
|
||||||
|
"_inCache": true,
|
||||||
|
"_location": "/aws-sign2",
|
||||||
|
"_nodeVersion": "4.1.2",
|
||||||
|
"_npmUser": {
|
||||||
|
"name": "mikeal",
|
||||||
|
"email": "mikeal.rogers@gmail.com"
|
||||||
|
},
|
||||||
|
"_npmVersion": "2.14.4",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"raw": "aws-sign2@~0.6.0",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "aws-sign2",
|
||||||
|
"name": "aws-sign2",
|
||||||
|
"rawSpec": "~0.6.0",
|
||||||
|
"spec": ">=0.6.0 <0.7.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/request"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz",
|
||||||
|
"_shasum": "14342dd38dbcc94d0e5b87d763cd63612c0e794f",
|
||||||
|
"_shrinkwrap": null,
|
||||||
|
"_spec": "aws-sign2@~0.6.0",
|
||||||
|
"_where": "/tank/data/SERVER/zoneadm-master/node_modules/request",
|
||||||
|
"author": {
|
||||||
|
"name": "Mikeal Rogers",
|
||||||
|
"email": "mikeal.rogers@gmail.com",
|
||||||
|
"url": "http://www.futurealoof.com"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/mikeal/aws-sign/issues"
|
||||||
|
},
|
||||||
|
"dependencies": {},
|
||||||
|
"description": "AWS signing. Originally pulled from LearnBoost/knox, maintained as vendor in request, now a standalone module.",
|
||||||
|
"devDependencies": {},
|
||||||
|
"directories": {},
|
||||||
|
"dist": {
|
||||||
|
"shasum": "14342dd38dbcc94d0e5b87d763cd63612c0e794f",
|
||||||
|
"tarball": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.6.0.tgz"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "*"
|
||||||
|
},
|
||||||
|
"gitHead": "8554bdb41268fa295eb1ee300f4adaa9f7f07fec",
|
||||||
|
"homepage": "https://github.com/mikeal/aws-sign#readme",
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"main": "index.js",
|
||||||
|
"maintainers": [
|
||||||
|
{
|
||||||
|
"name": "mikeal",
|
||||||
|
"email": "mikeal.rogers@gmail.com"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "aws-sign2",
|
||||||
|
"optionalDependencies": {},
|
||||||
|
"readme": "ERROR: No README data found!",
|
||||||
|
"repository": {
|
||||||
|
"url": "git+https://github.com/mikeal/aws-sign.git"
|
||||||
|
},
|
||||||
|
"scripts": {},
|
||||||
|
"version": "0.6.0"
|
||||||
|
}
|
4
node_modules/aws4/.npmignore
generated
vendored
Normal file
4
node_modules/aws4/.npmignore
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
test
|
||||||
|
examples
|
||||||
|
example.js
|
||||||
|
browser
|
1
node_modules/aws4/.tern-port
generated
vendored
Normal file
1
node_modules/aws4/.tern-port
generated
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
62638
|
5
node_modules/aws4/.travis.yml
generated
vendored
Normal file
5
node_modules/aws4/.travis.yml
generated
vendored
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
language: node_js
|
||||||
|
node_js:
|
||||||
|
- "0.10"
|
||||||
|
- "0.12"
|
||||||
|
- "4.2"
|
19
node_modules/aws4/LICENSE
generated
vendored
Normal file
19
node_modules/aws4/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
Copyright 2013 Michael Hart (michael.hart.au@gmail.com)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||||
|
of the Software, and to permit persons to whom the Software is furnished to do
|
||||||
|
so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
523
node_modules/aws4/README.md
generated
vendored
Normal file
523
node_modules/aws4/README.md
generated
vendored
Normal file
@ -0,0 +1,523 @@
|
|||||||
|
aws4
|
||||||
|
----
|
||||||
|
|
||||||
|
[](http://travis-ci.org/mhart/aws4)
|
||||||
|
|
||||||
|
A small utility to sign vanilla node.js http(s) request options using Amazon's
|
||||||
|
[AWS Signature Version 4](http://docs.amazonwebservices.com/general/latest/gr/signature-version-4.html).
|
||||||
|
|
||||||
|
Can also be used [in the browser](./browser).
|
||||||
|
|
||||||
|
This signature is supported by nearly all Amazon services, including
|
||||||
|
[S3](http://docs.aws.amazon.com/AmazonS3/latest/API/),
|
||||||
|
[EC2](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/),
|
||||||
|
[DynamoDB](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/API.html),
|
||||||
|
[Kinesis](http://docs.aws.amazon.com/kinesis/latest/APIReference/),
|
||||||
|
[Lambda](http://docs.aws.amazon.com/lambda/latest/dg/API_Reference.html),
|
||||||
|
[SQS](http://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/),
|
||||||
|
[SNS](http://docs.aws.amazon.com/sns/latest/api/),
|
||||||
|
[IAM](http://docs.aws.amazon.com/IAM/latest/APIReference/),
|
||||||
|
[STS](http://docs.aws.amazon.com/STS/latest/APIReference/),
|
||||||
|
[RDS](http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/),
|
||||||
|
[CloudWatch](http://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/),
|
||||||
|
[CloudWatch Logs](http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/),
|
||||||
|
[CodeDeploy](http://docs.aws.amazon.com/codedeploy/latest/APIReference/),
|
||||||
|
[CloudFront](http://docs.aws.amazon.com/AmazonCloudFront/latest/APIReference/),
|
||||||
|
[CloudTrail](http://docs.aws.amazon.com/awscloudtrail/latest/APIReference/),
|
||||||
|
[ElastiCache](http://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/),
|
||||||
|
[EMR](http://docs.aws.amazon.com/ElasticMapReduce/latest/API/),
|
||||||
|
[Glacier](http://docs.aws.amazon.com/amazonglacier/latest/dev/amazon-glacier-api.html),
|
||||||
|
[CloudSearch](http://docs.aws.amazon.com/cloudsearch/latest/developerguide/APIReq.html),
|
||||||
|
[Elastic Load Balancing](http://docs.aws.amazon.com/ElasticLoadBalancing/latest/APIReference/),
|
||||||
|
[Elastic Transcoder](http://docs.aws.amazon.com/elastictranscoder/latest/developerguide/api-reference.html),
|
||||||
|
[CloudFormation](http://docs.aws.amazon.com/AWSCloudFormation/latest/APIReference/),
|
||||||
|
[Elastic Beanstalk](http://docs.aws.amazon.com/elasticbeanstalk/latest/api/),
|
||||||
|
[Storage Gateway](http://docs.aws.amazon.com/storagegateway/latest/userguide/AWSStorageGatewayAPI.html),
|
||||||
|
[Data Pipeline](http://docs.aws.amazon.com/datapipeline/latest/APIReference/),
|
||||||
|
[Direct Connect](http://docs.aws.amazon.com/directconnect/latest/APIReference/),
|
||||||
|
[Redshift](http://docs.aws.amazon.com/redshift/latest/APIReference/),
|
||||||
|
[OpsWorks](http://docs.aws.amazon.com/opsworks/latest/APIReference/),
|
||||||
|
[SES](http://docs.aws.amazon.com/ses/latest/APIReference/),
|
||||||
|
[SWF](http://docs.aws.amazon.com/amazonswf/latest/apireference/),
|
||||||
|
[AutoScaling](http://docs.aws.amazon.com/AutoScaling/latest/APIReference/),
|
||||||
|
[Mobile Analytics](http://docs.aws.amazon.com/mobileanalytics/latest/ug/server-reference.html),
|
||||||
|
[Cognito Identity](http://docs.aws.amazon.com/cognitoidentity/latest/APIReference/),
|
||||||
|
[Cognito Sync](http://docs.aws.amazon.com/cognitosync/latest/APIReference/),
|
||||||
|
[Container Service](http://docs.aws.amazon.com/AmazonECS/latest/APIReference/),
|
||||||
|
[AppStream](http://docs.aws.amazon.com/appstream/latest/developerguide/appstream-api-rest.html),
|
||||||
|
[Key Management Service](http://docs.aws.amazon.com/kms/latest/APIReference/),
|
||||||
|
[Config](http://docs.aws.amazon.com/config/latest/APIReference/),
|
||||||
|
[CloudHSM](http://docs.aws.amazon.com/cloudhsm/latest/dg/api-ref.html),
|
||||||
|
[Route53](http://docs.aws.amazon.com/Route53/latest/APIReference/requests-rest.html) and
|
||||||
|
[Route53 Domains](http://docs.aws.amazon.com/Route53/latest/APIReference/requests-rpc.html).
|
||||||
|
|
||||||
|
Indeed, the only AWS services that *don't* support v4 as of 2014-12-30 are
|
||||||
|
[Import/Export](http://docs.aws.amazon.com/AWSImportExport/latest/DG/api-reference.html) and
|
||||||
|
[SimpleDB](http://docs.aws.amazon.com/AmazonSimpleDB/latest/DeveloperGuide/SDB_API.html)
|
||||||
|
(they only support [AWS Signature Version 2](https://github.com/mhart/aws2)).
|
||||||
|
|
||||||
|
It also provides defaults for a number of core AWS headers and
|
||||||
|
request parameters, making it very easy to query AWS services, or
|
||||||
|
build out a fully-featured AWS library.
|
||||||
|
|
||||||
|
Example
|
||||||
|
-------
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var http = require('http'),
|
||||||
|
https = require('https'),
|
||||||
|
aws4 = require('aws4')
|
||||||
|
|
||||||
|
// given an options object you could pass to http.request
|
||||||
|
var opts = {host: 'sqs.us-east-1.amazonaws.com', path: '/?Action=ListQueues'}
|
||||||
|
|
||||||
|
// alternatively (as aws4 can infer the host):
|
||||||
|
opts = {service: 'sqs', region: 'us-east-1', path: '/?Action=ListQueues'}
|
||||||
|
|
||||||
|
// alternatively (as us-east-1 is default):
|
||||||
|
opts = {service: 'sqs', path: '/?Action=ListQueues'}
|
||||||
|
|
||||||
|
aws4.sign(opts) // assumes AWS credentials are available in process.env
|
||||||
|
|
||||||
|
console.log(opts)
|
||||||
|
/*
|
||||||
|
{
|
||||||
|
host: 'sqs.us-east-1.amazonaws.com',
|
||||||
|
path: '/?Action=ListQueues',
|
||||||
|
headers: {
|
||||||
|
Host: 'sqs.us-east-1.amazonaws.com',
|
||||||
|
'X-Amz-Date': '20121226T061030Z',
|
||||||
|
Authorization: 'AWS4-HMAC-SHA256 Credential=ABCDEF/20121226/us-east-1/sqs/aws4_request, ...'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
// we can now use this to query AWS using the standard node.js http API
|
||||||
|
http.request(opts, function(res) { res.pipe(process.stdout) }).end()
|
||||||
|
/*
|
||||||
|
<?xml version="1.0"?>
|
||||||
|
<ListQueuesResponse xmlns="http://queue.amazonaws.com/doc/2012-11-05/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
```
|
||||||
|
|
||||||
|
More options
|
||||||
|
------------
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// you can also pass AWS credentials in explicitly (otherwise taken from process.env)
|
||||||
|
aws4.sign(opts, {accessKeyId: '', secretAccessKey: ''})
|
||||||
|
|
||||||
|
// can also add the signature to query strings
|
||||||
|
aws4.sign({service: 's3', path: '/my-bucket?X-Amz-Expires=12345', signQuery: true})
|
||||||
|
|
||||||
|
// create a utility function to pipe to stdout (with https this time)
|
||||||
|
function request(o) { https.request(o, function(res) { res.pipe(process.stdout) }).end(o.body || '') }
|
||||||
|
|
||||||
|
// aws4 can infer the HTTP method if a body is passed in
|
||||||
|
// method will be POST and Content-Type: 'application/x-www-form-urlencoded; charset=utf-8'
|
||||||
|
request(aws4.sign({service: 'iam', body: 'Action=ListGroups&Version=2010-05-08'}))
|
||||||
|
/*
|
||||||
|
<ListGroupsResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
// can specify any custom option or header as per usual
|
||||||
|
request(aws4.sign({
|
||||||
|
service: 'dynamodb',
|
||||||
|
region: 'ap-southeast-2',
|
||||||
|
method: 'POST',
|
||||||
|
path: '/',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.0',
|
||||||
|
'X-Amz-Target': 'DynamoDB_20120810.ListTables'
|
||||||
|
},
|
||||||
|
body: '{}'
|
||||||
|
}))
|
||||||
|
/*
|
||||||
|
{"TableNames":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
// works with all other services that support Signature Version 4
|
||||||
|
|
||||||
|
request(aws4.sign({service: 's3', path: '/', signQuery: true}))
|
||||||
|
/*
|
||||||
|
<ListAllMyBucketsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'ec2', path: '/?Action=DescribeRegions&Version=2014-06-15'}))
|
||||||
|
/*
|
||||||
|
<DescribeRegionsResponse xmlns="http://ec2.amazonaws.com/doc/2014-06-15/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'sns', path: '/?Action=ListTopics&Version=2010-03-31'}))
|
||||||
|
/*
|
||||||
|
<ListTopicsResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'sts', path: '/?Action=GetSessionToken&Version=2011-06-15'}))
|
||||||
|
/*
|
||||||
|
<GetSessionTokenResponse xmlns="https://sts.amazonaws.com/doc/2011-06-15/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'cloudsearch', path: '/?Action=ListDomainNames&Version=2013-01-01'}))
|
||||||
|
/*
|
||||||
|
<ListDomainNamesResponse xmlns="http://cloudsearch.amazonaws.com/doc/2013-01-01/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'ses', path: '/?Action=ListIdentities&Version=2010-12-01'}))
|
||||||
|
/*
|
||||||
|
<ListIdentitiesResponse xmlns="http://ses.amazonaws.com/doc/2010-12-01/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'autoscaling', path: '/?Action=DescribeAutoScalingInstances&Version=2011-01-01'}))
|
||||||
|
/*
|
||||||
|
<DescribeAutoScalingInstancesResponse xmlns="http://autoscaling.amazonaws.com/doc/2011-01-01/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'elasticloadbalancing', path: '/?Action=DescribeLoadBalancers&Version=2012-06-01'}))
|
||||||
|
/*
|
||||||
|
<DescribeLoadBalancersResponse xmlns="http://elasticloadbalancing.amazonaws.com/doc/2012-06-01/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'cloudformation', path: '/?Action=ListStacks&Version=2010-05-15'}))
|
||||||
|
/*
|
||||||
|
<ListStacksResponse xmlns="http://cloudformation.amazonaws.com/doc/2010-05-15/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'elasticbeanstalk', path: '/?Action=ListAvailableSolutionStacks&Version=2010-12-01'}))
|
||||||
|
/*
|
||||||
|
<ListAvailableSolutionStacksResponse xmlns="http://elasticbeanstalk.amazonaws.com/docs/2010-12-01/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'rds', path: '/?Action=DescribeDBInstances&Version=2012-09-17'}))
|
||||||
|
/*
|
||||||
|
<DescribeDBInstancesResponse xmlns="http://rds.amazonaws.com/doc/2012-09-17/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'monitoring', path: '/?Action=ListMetrics&Version=2010-08-01'}))
|
||||||
|
/*
|
||||||
|
<ListMetricsResponse xmlns="http://monitoring.amazonaws.com/doc/2010-08-01/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'redshift', path: '/?Action=DescribeClusters&Version=2012-12-01'}))
|
||||||
|
/*
|
||||||
|
<DescribeClustersResponse xmlns="http://redshift.amazonaws.com/doc/2012-12-01/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'cloudfront', path: '/2014-05-31/distribution'}))
|
||||||
|
/*
|
||||||
|
<DistributionList xmlns="http://cloudfront.amazonaws.com/doc/2014-05-31/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'elasticache', path: '/?Action=DescribeCacheClusters&Version=2014-07-15'}))
|
||||||
|
/*
|
||||||
|
<DescribeCacheClustersResponse xmlns="http://elasticache.amazonaws.com/doc/2014-07-15/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'elasticmapreduce', path: '/?Action=DescribeJobFlows&Version=2009-03-31'}))
|
||||||
|
/*
|
||||||
|
<DescribeJobFlowsResponse xmlns="http://elasticmapreduce.amazonaws.com/doc/2009-03-31">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'route53', path: '/2013-04-01/hostedzone'}))
|
||||||
|
/*
|
||||||
|
<ListHostedZonesResponse xmlns="https://route53.amazonaws.com/doc/2013-04-01/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'appstream', path: '/applications'}))
|
||||||
|
/*
|
||||||
|
{"_links":{"curie":[{"href":"http://docs.aws.amazon.com/appstream/latest/...
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'cognito-sync', path: '/identitypools'}))
|
||||||
|
/*
|
||||||
|
{"Count":0,"IdentityPoolUsages":[],"MaxResults":16,"NextToken":null}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'elastictranscoder', path: '/2012-09-25/pipelines'}))
|
||||||
|
/*
|
||||||
|
{"NextPageToken":null,"Pipelines":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'lambda', path: '/2014-11-13/functions/'}))
|
||||||
|
/*
|
||||||
|
{"Functions":[],"NextMarker":null}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'ecs', path: '/?Action=ListClusters&Version=2014-11-13'}))
|
||||||
|
/*
|
||||||
|
<ListClustersResponse xmlns="http://ecs.amazonaws.com/doc/2014-11-13/">
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'glacier', path: '/-/vaults', headers: {'X-Amz-Glacier-Version': '2012-06-01'}}))
|
||||||
|
/*
|
||||||
|
{"Marker":null,"VaultList":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'storagegateway', body: '{}', headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.1',
|
||||||
|
'X-Amz-Target': 'StorageGateway_20120630.ListGateways'
|
||||||
|
}}))
|
||||||
|
/*
|
||||||
|
{"Gateways":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'datapipeline', body: '{}', headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.1',
|
||||||
|
'X-Amz-Target': 'DataPipeline.ListPipelines'
|
||||||
|
}}))
|
||||||
|
/*
|
||||||
|
{"hasMoreResults":false,"pipelineIdList":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'opsworks', body: '{}', headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.1',
|
||||||
|
'X-Amz-Target': 'OpsWorks_20130218.DescribeStacks'
|
||||||
|
}}))
|
||||||
|
/*
|
||||||
|
{"Stacks":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'route53domains', body: '{}', headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.1',
|
||||||
|
'X-Amz-Target': 'Route53Domains_v20140515.ListDomains'
|
||||||
|
}}))
|
||||||
|
/*
|
||||||
|
{"Domains":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'kinesis', body: '{}', headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.1',
|
||||||
|
'X-Amz-Target': 'Kinesis_20131202.ListStreams'
|
||||||
|
}}))
|
||||||
|
/*
|
||||||
|
{"HasMoreStreams":false,"StreamNames":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'cloudtrail', body: '{}', headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.1',
|
||||||
|
'X-Amz-Target': 'CloudTrail_20131101.DescribeTrails'
|
||||||
|
}}))
|
||||||
|
/*
|
||||||
|
{"trailList":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'logs', body: '{}', headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.1',
|
||||||
|
'X-Amz-Target': 'Logs_20140328.DescribeLogGroups'
|
||||||
|
}}))
|
||||||
|
/*
|
||||||
|
{"logGroups":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'codedeploy', body: '{}', headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.1',
|
||||||
|
'X-Amz-Target': 'CodeDeploy_20141006.ListApplications'
|
||||||
|
}}))
|
||||||
|
/*
|
||||||
|
{"applications":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'directconnect', body: '{}', headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.1',
|
||||||
|
'X-Amz-Target': 'OvertureService.DescribeConnections'
|
||||||
|
}}))
|
||||||
|
/*
|
||||||
|
{"connections":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'kms', body: '{}', headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.1',
|
||||||
|
'X-Amz-Target': 'TrentService.ListKeys'
|
||||||
|
}}))
|
||||||
|
/*
|
||||||
|
{"Keys":[],"Truncated":false}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'config', body: '{}', headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.1',
|
||||||
|
'X-Amz-Target': 'StarlingDoveService.DescribeDeliveryChannels'
|
||||||
|
}}))
|
||||||
|
/*
|
||||||
|
{"DeliveryChannels":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({service: 'cloudhsm', body: '{}', headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.1',
|
||||||
|
'X-Amz-Target': 'CloudHsmFrontendService.ListAvailableZones'
|
||||||
|
}}))
|
||||||
|
/*
|
||||||
|
{"AZList":["us-east-1a","us-east-1b","us-east-1c"]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({
|
||||||
|
service: 'swf',
|
||||||
|
body: '{"registrationStatus":"REGISTERED"}',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.0',
|
||||||
|
'X-Amz-Target': 'SimpleWorkflowService.ListDomains'
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
/*
|
||||||
|
{"domainInfos":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({
|
||||||
|
service: 'cognito-identity',
|
||||||
|
body: '{"MaxResults": 1}',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/x-amz-json-1.1',
|
||||||
|
'X-Amz-Target': 'AWSCognitoIdentityService.ListIdentityPools'
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
/*
|
||||||
|
{"IdentityPools":[]}
|
||||||
|
...
|
||||||
|
*/
|
||||||
|
|
||||||
|
request(aws4.sign({
|
||||||
|
service: 'mobileanalytics',
|
||||||
|
path: '/2014-06-05/events',
|
||||||
|
body: JSON.stringify({events:[{
|
||||||
|
eventType: 'a',
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
session: {},
|
||||||
|
}]}),
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'X-Amz-Client-Context': JSON.stringify({
|
||||||
|
client: {client_id: 'a', app_title: 'a'},
|
||||||
|
custom: {},
|
||||||
|
env: {platform: 'a'},
|
||||||
|
services: {},
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
/*
|
||||||
|
(HTTP 202, empty response)
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Generate CodeCommit Git access password
|
||||||
|
var signer = new aws4.RequestSigner({
|
||||||
|
service: 'codecommit',
|
||||||
|
host: 'git-codecommit.us-east-1.amazonaws.com',
|
||||||
|
method: 'GIT',
|
||||||
|
path: '/v1/repos/MyAwesomeRepo',
|
||||||
|
})
|
||||||
|
var password = signer.getDateTime() + 'Z' + signer.signature()
|
||||||
|
```
|
||||||
|
|
||||||
|
API
|
||||||
|
---
|
||||||
|
|
||||||
|
### aws4.sign(requestOptions, [credentials])
|
||||||
|
|
||||||
|
This calculates and populates the `Authorization` header of
|
||||||
|
`requestOptions`, and any other necessary AWS headers and/or request
|
||||||
|
options. Returns `requestOptions` as a convenience for chaining.
|
||||||
|
|
||||||
|
`requestOptions` is an object holding the same options that the node.js
|
||||||
|
[http.request](http://nodejs.org/docs/latest/api/http.html#http_http_request_options_callback)
|
||||||
|
function takes.
|
||||||
|
|
||||||
|
The following properties of `requestOptions` are used in the signing or
|
||||||
|
populated if they don't already exist:
|
||||||
|
|
||||||
|
- `hostname` or `host` (will be determined from `service` and `region` if not given)
|
||||||
|
- `method` (will use `'GET'` if not given or `'POST'` if there is a `body`)
|
||||||
|
- `path` (will use `'/'` if not given)
|
||||||
|
- `body` (will use `''` if not given)
|
||||||
|
- `service` (will be calculated from `hostname` or `host` if not given)
|
||||||
|
- `region` (will be calculated from `hostname` or `host` or use `'us-east-1'` if not given)
|
||||||
|
- `headers['Host']` (will use `hostname` or `host` or be calculated if not given)
|
||||||
|
- `headers['Content-Type']` (will use `'application/x-www-form-urlencoded; charset=utf-8'`
|
||||||
|
if not given and there is a `body`)
|
||||||
|
- `headers['Date']` (used to calculate the signature date if given, otherwise `new Date` is used)
|
||||||
|
|
||||||
|
Your AWS credentials (which can be found in your
[AWS console](https://portal.aws.amazon.com/gp/aws/securityCredentials))
can be specified in one of two ways:

- As the second argument, like this:

  ```javascript
  aws4.sign(requestOptions, {
    secretAccessKey: "<your-secret-access-key>",
    accessKeyId: "<your-access-key-id>",
    sessionToken: "<your-session-token>"
  })
  ```

- From `process.env`, such as this:

  ```
  export AWS_SECRET_ACCESS_KEY="<your-secret-access-key>"
  export AWS_ACCESS_KEY_ID="<your-access-key-id>"
  export AWS_SESSION_TOKEN="<your-session-token>"
  ```

  (will also use `AWS_ACCESS_KEY` and `AWS_SECRET_KEY` if available)

The `sessionToken` property and `AWS_SESSION_TOKEN` environment variable are
optional for signing with
[IAM STS temporary credentials](http://docs.aws.amazon.com/STS/latest/UsingSTS/using-temp-creds.html).

Installation
------------

With [npm](http://npmjs.org/) do:

```
npm install aws4
```

Can also be used [in the browser](./browser).

Thanks
------

Thanks to [@jed](https://github.com/jed) for his
[dynamo-client](https://github.com/jed/dynamo-client) lib where I first
committed and subsequently extracted this code.

Also thanks to the
[official node.js AWS SDK](https://github.com/aws/aws-sdk-js) for giving
me a start on implementing the v4 signature.

323
node_modules/aws4/aws4.js
generated
vendored
Normal file
323
node_modules/aws4/aws4.js
generated
vendored
Normal file
@ -0,0 +1,323 @@
|
|||||||
|
var aws4 = exports,
|
||||||
|
url = require('url'),
|
||||||
|
querystring = require('querystring'),
|
||||||
|
crypto = require('crypto'),
|
||||||
|
lru = require('./lru'),
|
||||||
|
credentialsCache = lru(1000)
|
||||||
|
|
||||||
|
// http://docs.amazonwebservices.com/general/latest/gr/signature-version-4.html
|
||||||
|
|
||||||
|
function hmac(key, string, encoding) {
|
||||||
|
return crypto.createHmac('sha256', key).update(string, 'utf8').digest(encoding)
|
||||||
|
}
|
||||||
|
|
||||||
|
function hash(string, encoding) {
|
||||||
|
return crypto.createHash('sha256').update(string, 'utf8').digest(encoding)
|
||||||
|
}
|
||||||
|
|
||||||
|
// This function assumes the string has already been percent encoded
|
||||||
|
function encodeRfc3986(urlEncodedString) {
|
||||||
|
return urlEncodedString.replace(/[!'()*]/g, function(c) {
|
||||||
|
return '%' + c.charCodeAt(0).toString(16).toUpperCase()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// request: { path | body, [host], [method], [headers], [service], [region] }
|
||||||
|
// credentials: { accessKeyId, secretAccessKey, [sessionToken] }
|
||||||
|
function RequestSigner(request, credentials) {
|
||||||
|
|
||||||
|
if (typeof request === 'string') request = url.parse(request)
|
||||||
|
|
||||||
|
var headers = request.headers = (request.headers || {}),
|
||||||
|
hostParts = this.matchHost(request.hostname || request.host || headers.Host || headers.host)
|
||||||
|
|
||||||
|
this.request = request
|
||||||
|
this.credentials = credentials || this.defaultCredentials()
|
||||||
|
|
||||||
|
this.service = request.service || hostParts[0] || ''
|
||||||
|
this.region = request.region || hostParts[1] || 'us-east-1'
|
||||||
|
|
||||||
|
// SES uses a different domain from the service name
|
||||||
|
if (this.service === 'email') this.service = 'ses'
|
||||||
|
|
||||||
|
if (!request.method && request.body)
|
||||||
|
request.method = 'POST'
|
||||||
|
|
||||||
|
if (!headers.Host && !headers.host) {
|
||||||
|
headers.Host = request.hostname || request.host || this.createHost()
|
||||||
|
|
||||||
|
// If a port is specified explicitly, use it as is
|
||||||
|
if (request.port)
|
||||||
|
headers.Host += ':' + request.port
|
||||||
|
}
|
||||||
|
if (!request.hostname && !request.host)
|
||||||
|
request.hostname = headers.Host || headers.host
|
||||||
|
|
||||||
|
this.isCodeCommitGit = this.service === 'codecommit' && request.method === 'GIT'
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.matchHost = function(host) {
|
||||||
|
var match = (host || '').match(/([^\.]+)\.(?:([^\.]*)\.)?amazonaws\.com$/)
|
||||||
|
var hostParts = (match || []).slice(1, 3)
|
||||||
|
|
||||||
|
// ES's hostParts are sometimes the other way round, if the value that is expected
|
||||||
|
// to be region equals ‘es’ switch them back
|
||||||
|
// e.g. search-cluster-name-aaaa00aaaa0aaa0aaaaaaa0aaa.us-east-1.es.amazonaws.com
|
||||||
|
if (hostParts[1] === 'es')
|
||||||
|
hostParts = hostParts.reverse()
|
||||||
|
|
||||||
|
return hostParts
|
||||||
|
}
|
||||||
|
|
||||||
|
// http://docs.aws.amazon.com/general/latest/gr/rande.html
|
||||||
|
RequestSigner.prototype.isSingleRegion = function() {
|
||||||
|
// Special case for S3 and SimpleDB in us-east-1
|
||||||
|
if (['s3', 'sdb'].indexOf(this.service) >= 0 && this.region === 'us-east-1') return true
|
||||||
|
|
||||||
|
return ['cloudfront', 'ls', 'route53', 'iam', 'importexport', 'sts']
|
||||||
|
.indexOf(this.service) >= 0
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.createHost = function() {
|
||||||
|
var region = this.isSingleRegion() ? '' :
|
||||||
|
(this.service === 's3' && this.region !== 'us-east-1' ? '-' : '.') + this.region,
|
||||||
|
service = this.service === 'ses' ? 'email' : this.service
|
||||||
|
return service + region + '.amazonaws.com'
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.prepareRequest = function() {
|
||||||
|
this.parsePath()
|
||||||
|
|
||||||
|
var request = this.request, headers = request.headers, query
|
||||||
|
|
||||||
|
if (request.signQuery) {
|
||||||
|
|
||||||
|
this.parsedPath.query = query = this.parsedPath.query || {}
|
||||||
|
|
||||||
|
if (this.credentials.sessionToken)
|
||||||
|
query['X-Amz-Security-Token'] = this.credentials.sessionToken
|
||||||
|
|
||||||
|
if (this.service === 's3' && !query['X-Amz-Expires'])
|
||||||
|
query['X-Amz-Expires'] = 86400
|
||||||
|
|
||||||
|
if (query['X-Amz-Date'])
|
||||||
|
this.datetime = query['X-Amz-Date']
|
||||||
|
else
|
||||||
|
query['X-Amz-Date'] = this.getDateTime()
|
||||||
|
|
||||||
|
query['X-Amz-Algorithm'] = 'AWS4-HMAC-SHA256'
|
||||||
|
query['X-Amz-Credential'] = this.credentials.accessKeyId + '/' + this.credentialString()
|
||||||
|
query['X-Amz-SignedHeaders'] = this.signedHeaders()
|
||||||
|
|
||||||
|
} else {
|
||||||
|
|
||||||
|
if (!request.doNotModifyHeaders && !this.isCodeCommitGit) {
|
||||||
|
if (request.body && !headers['Content-Type'] && !headers['content-type'])
|
||||||
|
headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=utf-8'
|
||||||
|
|
||||||
|
if (request.body && !headers['Content-Length'] && !headers['content-length'])
|
||||||
|
headers['Content-Length'] = Buffer.byteLength(request.body)
|
||||||
|
|
||||||
|
if (this.credentials.sessionToken)
|
||||||
|
headers['X-Amz-Security-Token'] = this.credentials.sessionToken
|
||||||
|
|
||||||
|
if (this.service === 's3')
|
||||||
|
headers['X-Amz-Content-Sha256'] = hash(this.request.body || '', 'hex')
|
||||||
|
|
||||||
|
if (headers['X-Amz-Date'])
|
||||||
|
this.datetime = headers['X-Amz-Date']
|
||||||
|
else
|
||||||
|
headers['X-Amz-Date'] = this.getDateTime()
|
||||||
|
}
|
||||||
|
|
||||||
|
delete headers.Authorization
|
||||||
|
delete headers.authorization
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.sign = function() {
|
||||||
|
if (!this.parsedPath) this.prepareRequest()
|
||||||
|
|
||||||
|
if (this.request.signQuery) {
|
||||||
|
this.parsedPath.query['X-Amz-Signature'] = this.signature()
|
||||||
|
} else {
|
||||||
|
this.request.headers.Authorization = this.authHeader()
|
||||||
|
}
|
||||||
|
|
||||||
|
this.request.path = this.formatPath()
|
||||||
|
|
||||||
|
return this.request
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.getDateTime = function() {
|
||||||
|
if (!this.datetime) {
|
||||||
|
var headers = this.request.headers,
|
||||||
|
date = new Date(headers.Date || headers.date || new Date)
|
||||||
|
|
||||||
|
this.datetime = date.toISOString().replace(/[:\-]|\.\d{3}/g, '')
|
||||||
|
|
||||||
|
// Remove the trailing 'Z' on the timestamp string for CodeCommit git access
|
||||||
|
if (this.isCodeCommitGit) this.datetime = this.datetime.slice(0, -1)
|
||||||
|
}
|
||||||
|
return this.datetime
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.getDate = function() {
|
||||||
|
return this.getDateTime().substr(0, 8)
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.authHeader = function() {
|
||||||
|
return [
|
||||||
|
'AWS4-HMAC-SHA256 Credential=' + this.credentials.accessKeyId + '/' + this.credentialString(),
|
||||||
|
'SignedHeaders=' + this.signedHeaders(),
|
||||||
|
'Signature=' + this.signature(),
|
||||||
|
].join(', ')
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.signature = function() {
|
||||||
|
var date = this.getDate(),
|
||||||
|
cacheKey = [this.credentials.secretAccessKey, date, this.region, this.service].join(),
|
||||||
|
kDate, kRegion, kService, kCredentials = credentialsCache.get(cacheKey)
|
||||||
|
if (!kCredentials) {
|
||||||
|
kDate = hmac('AWS4' + this.credentials.secretAccessKey, date)
|
||||||
|
kRegion = hmac(kDate, this.region)
|
||||||
|
kService = hmac(kRegion, this.service)
|
||||||
|
kCredentials = hmac(kService, 'aws4_request')
|
||||||
|
credentialsCache.set(cacheKey, kCredentials)
|
||||||
|
}
|
||||||
|
return hmac(kCredentials, this.stringToSign(), 'hex')
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.stringToSign = function() {
|
||||||
|
return [
|
||||||
|
'AWS4-HMAC-SHA256',
|
||||||
|
this.getDateTime(),
|
||||||
|
this.credentialString(),
|
||||||
|
hash(this.canonicalString(), 'hex'),
|
||||||
|
].join('\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.canonicalString = function() {
|
||||||
|
if (!this.parsedPath) this.prepareRequest()
|
||||||
|
|
||||||
|
var pathStr = this.parsedPath.path,
|
||||||
|
query = this.parsedPath.query,
|
||||||
|
queryStr = '',
|
||||||
|
normalizePath = this.service !== 's3',
|
||||||
|
decodePath = this.service === 's3' || this.request.doNotEncodePath,
|
||||||
|
decodeSlashesInPath = this.service === 's3',
|
||||||
|
firstValOnly = this.service === 's3',
|
||||||
|
bodyHash = this.service === 's3' && this.request.signQuery ? 'UNSIGNED-PAYLOAD' :
|
||||||
|
(this.isCodeCommitGit ? '' : hash(this.request.body || '', 'hex'))
|
||||||
|
|
||||||
|
if (query) {
|
||||||
|
queryStr = encodeRfc3986(querystring.stringify(Object.keys(query).sort().reduce(function(obj, key) {
|
||||||
|
if (!key) return obj
|
||||||
|
obj[key] = !Array.isArray(query[key]) ? query[key] :
|
||||||
|
(firstValOnly ? query[key][0] : query[key].slice().sort())
|
||||||
|
return obj
|
||||||
|
}, {})))
|
||||||
|
}
|
||||||
|
if (pathStr !== '/') {
|
||||||
|
if (normalizePath) pathStr = pathStr.replace(/\/{2,}/g, '/')
|
||||||
|
pathStr = pathStr.split('/').reduce(function(path, piece) {
|
||||||
|
if (normalizePath && piece === '..') {
|
||||||
|
path.pop()
|
||||||
|
} else if (!normalizePath || piece !== '.') {
|
||||||
|
if (decodePath) piece = querystring.unescape(piece)
|
||||||
|
path.push(encodeRfc3986(querystring.escape(piece)))
|
||||||
|
}
|
||||||
|
return path
|
||||||
|
}, []).join('/')
|
||||||
|
if (pathStr[0] !== '/') pathStr = '/' + pathStr
|
||||||
|
if (decodeSlashesInPath) pathStr = pathStr.replace(/%2F/g, '/')
|
||||||
|
}
|
||||||
|
|
||||||
|
return [
|
||||||
|
this.request.method || 'GET',
|
||||||
|
pathStr,
|
||||||
|
queryStr,
|
||||||
|
this.canonicalHeaders() + '\n',
|
||||||
|
this.signedHeaders(),
|
||||||
|
bodyHash,
|
||||||
|
].join('\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.canonicalHeaders = function() {
|
||||||
|
var headers = this.request.headers
|
||||||
|
function trimAll(header) {
|
||||||
|
return header.toString().trim().replace(/\s+/g, ' ')
|
||||||
|
}
|
||||||
|
return Object.keys(headers)
|
||||||
|
.sort(function(a, b) { return a.toLowerCase() < b.toLowerCase() ? -1 : 1 })
|
||||||
|
.map(function(key) { return key.toLowerCase() + ':' + trimAll(headers[key]) })
|
||||||
|
.join('\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.signedHeaders = function() {
|
||||||
|
return Object.keys(this.request.headers)
|
||||||
|
.map(function(key) { return key.toLowerCase() })
|
||||||
|
.sort()
|
||||||
|
.join(';')
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.credentialString = function() {
|
||||||
|
return [
|
||||||
|
this.getDate(),
|
||||||
|
this.region,
|
||||||
|
this.service,
|
||||||
|
'aws4_request',
|
||||||
|
].join('/')
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.defaultCredentials = function() {
|
||||||
|
var env = process.env
|
||||||
|
return {
|
||||||
|
accessKeyId: env.AWS_ACCESS_KEY_ID || env.AWS_ACCESS_KEY,
|
||||||
|
secretAccessKey: env.AWS_SECRET_ACCESS_KEY || env.AWS_SECRET_KEY,
|
||||||
|
sessionToken: env.AWS_SESSION_TOKEN,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.parsePath = function() {
|
||||||
|
var path = this.request.path || '/',
|
||||||
|
queryIx = path.indexOf('?'),
|
||||||
|
query = null
|
||||||
|
|
||||||
|
if (queryIx >= 0) {
|
||||||
|
query = querystring.parse(path.slice(queryIx + 1))
|
||||||
|
path = path.slice(0, queryIx)
|
||||||
|
}
|
||||||
|
|
||||||
|
// S3 doesn't always encode characters > 127 correctly and
|
||||||
|
// all services don't encode characters > 255 correctly
|
||||||
|
// So if there are non-reserved chars (and it's not already all % encoded), just encode them all
|
||||||
|
if (/[^0-9A-Za-z!'()*\-._~%/]/.test(path)) {
|
||||||
|
path = path.split('/').map(function(piece) {
|
||||||
|
return querystring.escape(querystring.unescape(piece))
|
||||||
|
}).join('/')
|
||||||
|
}
|
||||||
|
|
||||||
|
this.parsedPath = {
|
||||||
|
path: path,
|
||||||
|
query: query,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
RequestSigner.prototype.formatPath = function() {
|
||||||
|
var path = this.parsedPath.path,
|
||||||
|
query = this.parsedPath.query
|
||||||
|
|
||||||
|
if (!query) return path
|
||||||
|
|
||||||
|
// Services don't support empty query string keys
|
||||||
|
if (query[''] != null) delete query['']
|
||||||
|
|
||||||
|
return path + '?' + encodeRfc3986(querystring.stringify(query))
|
||||||
|
}
|
||||||
|
|
||||||
|
aws4.RequestSigner = RequestSigner
|
||||||
|
|
||||||
|
aws4.sign = function(request, credentials) {
|
||||||
|
return new RequestSigner(request, credentials).sign()
|
||||||
|
}
|
96
node_modules/aws4/lru.js
generated
vendored
Normal file
96
node_modules/aws4/lru.js
generated
vendored
Normal file
@ -0,0 +1,96 @@
module.exports = function(size) {
  return new LruCache(size)
}

function LruCache(size) {
  this.capacity = size | 0
  this.map = Object.create(null)
  this.list = new DoublyLinkedList()
}

LruCache.prototype.get = function(key) {
  var node = this.map[key]
  if (node == null) return undefined
  this.used(node)
  return node.val
}

LruCache.prototype.set = function(key, val) {
  var node = this.map[key]
  if (node != null) {
    node.val = val
  } else {
    if (!this.capacity) this.prune()
    if (!this.capacity) return false
    node = new DoublyLinkedNode(key, val)
    this.map[key] = node
    this.capacity--
  }
  this.used(node)
  return true
}

LruCache.prototype.used = function(node) {
  this.list.moveToFront(node)
}

LruCache.prototype.prune = function() {
  var node = this.list.pop()
  if (node != null) {
    delete this.map[node.key]
    this.capacity++
  }
}


function DoublyLinkedList() {
  this.firstNode = null
  this.lastNode = null
}

DoublyLinkedList.prototype.moveToFront = function(node) {
  if (this.firstNode == node) return

  this.remove(node)

  if (this.firstNode == null) {
    this.firstNode = node
    this.lastNode = node
    node.prev = null
    node.next = null
  } else {
    node.prev = null
    node.next = this.firstNode
    node.next.prev = node
    this.firstNode = node
  }
}

DoublyLinkedList.prototype.pop = function() {
  var lastNode = this.lastNode
  if (lastNode != null) {
    this.remove(lastNode)
  }
  return lastNode
}

DoublyLinkedList.prototype.remove = function(node) {
  if (this.firstNode == node) {
    this.firstNode = node.next
  } else if (node.prev != null) {
    node.prev.next = node.next
  }
  if (this.lastNode == node) {
    this.lastNode = node.prev
  } else if (node.next != null) {
    node.next.prev = node.prev
  }
}


function DoublyLinkedNode(key, val) {
  this.key = key
  this.val = val
  this.prev = null
  this.next = null
}
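For orientation, `aws4.js` above instantiates this cache as `credentialsCache = lru(1000)` to memoise derived signing keys. A minimal standalone sketch of the cache's behaviour (the keys, values and capacity here are illustrative, not taken from the diff):

```javascript
var lru = require('./lru')

// Sketch only: a capacity-two cache to show the eviction order.
var cache = lru(2)

cache.set('a', 1)
cache.set('b', 2)
cache.get('a')               // touch 'a' so it is most recently used
cache.set('c', 3)            // evicts the least recently used entry ('b')

console.log(cache.get('a'))  // 1
console.log(cache.get('b'))  // undefined (evicted)
console.log(cache.get('c'))  // 3
```
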
140
node_modules/aws4/package.json
generated
vendored
Normal file
140
node_modules/aws4/package.json
generated
vendored
Normal file
@ -0,0 +1,140 @@
|
|||||||
|
{
|
||||||
|
"_args": [
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"raw": "aws4@^1.2.1",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "aws4",
|
||||||
|
"name": "aws4",
|
||||||
|
"rawSpec": "^1.2.1",
|
||||||
|
"spec": ">=1.2.1 <2.0.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"/tank/data/SERVER/zoneadm-master/node_modules/request"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"_from": "aws4@>=1.2.1 <2.0.0",
|
||||||
|
"_id": "aws4@1.5.0",
|
||||||
|
"_inCache": true,
|
||||||
|
"_location": "/aws4",
|
||||||
|
"_nodeVersion": "4.5.0",
|
||||||
|
"_npmOperationalInternal": {
|
||||||
|
"host": "packages-16-east.internal.npmjs.com",
|
||||||
|
"tmp": "tmp/aws4-1.5.0.tgz_1476226259635_0.2796843808609992"
|
||||||
|
},
|
||||||
|
"_npmUser": {
|
||||||
|
"name": "hichaelmart",
|
||||||
|
"email": "michael.hart.au@gmail.com"
|
||||||
|
},
|
||||||
|
"_npmVersion": "2.15.11",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"raw": "aws4@^1.2.1",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "aws4",
|
||||||
|
"name": "aws4",
|
||||||
|
"rawSpec": "^1.2.1",
|
||||||
|
"spec": ">=1.2.1 <2.0.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/request"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/aws4/-/aws4-1.5.0.tgz",
|
||||||
|
"_shasum": "0a29ffb79c31c9e712eeb087e8e7a64b4a56d755",
|
||||||
|
"_shrinkwrap": null,
|
||||||
|
"_spec": "aws4@^1.2.1",
|
||||||
|
"_where": "/tank/data/SERVER/zoneadm-master/node_modules/request",
|
||||||
|
"author": {
|
||||||
|
"name": "Michael Hart",
|
||||||
|
"email": "michael.hart.au@gmail.com",
|
||||||
|
"url": "http://github.com/mhart"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/mhart/aws4/issues"
|
||||||
|
},
|
||||||
|
"dependencies": {},
|
||||||
|
"description": "Signs and prepares requests using AWS Signature Version 4",
|
||||||
|
"devDependencies": {
|
||||||
|
"mocha": "^2.4.5",
|
||||||
|
"should": "^8.2.2"
|
||||||
|
},
|
||||||
|
"directories": {},
|
||||||
|
"dist": {
|
||||||
|
"shasum": "0a29ffb79c31c9e712eeb087e8e7a64b4a56d755",
|
||||||
|
"tarball": "https://registry.npmjs.org/aws4/-/aws4-1.5.0.tgz"
|
||||||
|
},
|
||||||
|
"gitHead": "ba136334ee08884c6042c8578a22e376233eef34",
|
||||||
|
"homepage": "https://github.com/mhart/aws4#readme",
|
||||||
|
"keywords": [
|
||||||
|
"amazon",
|
||||||
|
"aws",
|
||||||
|
"signature",
|
||||||
|
"s3",
|
||||||
|
"ec2",
|
||||||
|
"autoscaling",
|
||||||
|
"cloudformation",
|
||||||
|
"elasticloadbalancing",
|
||||||
|
"elb",
|
||||||
|
"elasticbeanstalk",
|
||||||
|
"cloudsearch",
|
||||||
|
"dynamodb",
|
||||||
|
"kinesis",
|
||||||
|
"lambda",
|
||||||
|
"glacier",
|
||||||
|
"sqs",
|
||||||
|
"sns",
|
||||||
|
"iam",
|
||||||
|
"sts",
|
||||||
|
"ses",
|
||||||
|
"swf",
|
||||||
|
"storagegateway",
|
||||||
|
"datapipeline",
|
||||||
|
"directconnect",
|
||||||
|
"redshift",
|
||||||
|
"opsworks",
|
||||||
|
"rds",
|
||||||
|
"monitoring",
|
||||||
|
"cloudtrail",
|
||||||
|
"cloudfront",
|
||||||
|
"codedeploy",
|
||||||
|
"elasticache",
|
||||||
|
"elasticmapreduce",
|
||||||
|
"elastictranscoder",
|
||||||
|
"emr",
|
||||||
|
"cloudwatch",
|
||||||
|
"mobileanalytics",
|
||||||
|
"cognitoidentity",
|
||||||
|
"cognitosync",
|
||||||
|
"cognito",
|
||||||
|
"containerservice",
|
||||||
|
"ecs",
|
||||||
|
"appstream",
|
||||||
|
"keymanagementservice",
|
||||||
|
"kms",
|
||||||
|
"config",
|
||||||
|
"cloudhsm",
|
||||||
|
"route53",
|
||||||
|
"route53domains",
|
||||||
|
"logs"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"main": "aws4.js",
|
||||||
|
"maintainers": [
|
||||||
|
{
|
||||||
|
"name": "hichaelmart",
|
||||||
|
"email": "michael.hart.au@gmail.com"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "aws4",
|
||||||
|
"optionalDependencies": {},
|
||||||
|
"readme": "ERROR: No README data found!",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/mhart/aws4.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "mocha ./test/fast.js ./test/slow.js -b -t 100s -R list"
|
||||||
|
},
|
||||||
|
"version": "1.5.0"
|
||||||
|
}
|
35
node_modules/basic-auth/HISTORY.md
generated
vendored
Normal file
35
node_modules/basic-auth/HISTORY.md
generated
vendored
Normal file
@ -0,0 +1,35 @@
1.0.4 / 2016-05-10
==================

  * Improve error message when `req` argument is not an object
  * Improve error message when `req` missing `headers` property

1.0.3 / 2015-07-01
==================

  * Fix regression accepting a Koa context

1.0.2 / 2015-06-12
==================

  * Improve error message when `req` argument missing
  * perf: enable strict mode
  * perf: hoist regular expression
  * perf: parse with regular expressions
  * perf: remove argument reassignment

1.0.1 / 2015-05-04
==================

  * Update readme

1.0.0 / 2014-07-01
==================

  * Support empty password
  * Support empty username

0.0.1 / 2013-11-30
==================

  * Initial release
24
node_modules/basic-auth/LICENSE
generated
vendored
Normal file
24
node_modules/basic-auth/LICENSE
generated
vendored
Normal file
@ -0,0 +1,24 @@
(The MIT License)

Copyright (c) 2013 TJ Holowaychuk
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com>
Copyright (c) 2015 Douglas Christopher Wilson <doug@somethingdoug.com>

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
78
node_modules/basic-auth/README.md
generated
vendored
Normal file
78
node_modules/basic-auth/README.md
generated
vendored
Normal file
@ -0,0 +1,78 @@
# basic-auth

[![NPM Version][npm-image]][npm-url]
[![NPM Downloads][downloads-image]][downloads-url]
[![Node.js Version][node-version-image]][node-version-url]
[![Build Status][travis-image]][travis-url]
[![Test Coverage][coveralls-image]][coveralls-url]

Generic basic auth Authorization header field parser for whatever.

## Installation

```
$ npm install basic-auth
```

## API

```js
var auth = require('basic-auth')
```

### auth(req)

Get the basic auth credentials from the given request. The `Authorization`
header is parsed and if the header is invalid, `undefined` is returned,
otherwise an object with `name` and `pass` properties.

## Example

Pass a node request or koa Context object to the module exported. If
parsing fails `undefined` is returned, otherwise an object with
`.name` and `.pass`.

```js
var auth = require('basic-auth');
var user = auth(req);
// => { name: 'something', pass: 'whatever' }
```

### With vanilla node.js http server

```js
var http = require('http')
var auth = require('basic-auth')

// Create server
var server = http.createServer(function (req, res) {
  var credentials = auth(req)

  if (!credentials || credentials.name !== 'john' || credentials.pass !== 'secret') {
    res.statusCode = 401
    res.setHeader('WWW-Authenticate', 'Basic realm="example"')
    res.end('Access denied')
  } else {
    res.end('Access granted')
  }
})

// Listen
server.listen(3000)
```

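Since the application this module is vendored into is an Express app, the same check can also be written as connect/Express-style middleware. A minimal sketch (the hard-coded user, password, realm and route are placeholders, not part of the upstream README):

```js
var auth = require('basic-auth')

// Sketch only: guard a route with HTTP Basic auth.
function requireAuth (req, res, next) {
  var credentials = auth(req)

  if (!credentials || credentials.name !== 'john' || credentials.pass !== 'secret') {
    res.statusCode = 401
    res.setHeader('WWW-Authenticate', 'Basic realm="example"')
    res.end('Access denied')
  } else {
    next()
  }
}

// e.g. app.use('/admin', requireAuth)
```
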
# License

[MIT](LICENSE)

[npm-image]: https://img.shields.io/npm/v/basic-auth.svg
[npm-url]: https://npmjs.org/package/basic-auth
[node-version-image]: https://img.shields.io/node/v/basic-auth.svg
[node-version-url]: https://nodejs.org/en/download
[travis-image]: https://img.shields.io/travis/jshttp/basic-auth/master.svg
[travis-url]: https://travis-ci.org/jshttp/basic-auth
[coveralls-image]: https://img.shields.io/coveralls/jshttp/basic-auth/master.svg
[coveralls-url]: https://coveralls.io/r/jshttp/basic-auth?branch=master
[downloads-image]: https://img.shields.io/npm/dm/basic-auth.svg
[downloads-url]: https://npmjs.org/package/basic-auth
108
node_modules/basic-auth/index.js
generated
vendored
Normal file
108
node_modules/basic-auth/index.js
generated
vendored
Normal file
@ -0,0 +1,108 @@
/*!
 * basic-auth
 * Copyright(c) 2013 TJ Holowaychuk
 * Copyright(c) 2014 Jonathan Ong
 * Copyright(c) 2015 Douglas Christopher Wilson
 * MIT Licensed
 */

'use strict'

/**
 * Module exports.
 * @public
 */

module.exports = auth

/**
 * RegExp for basic auth credentials
 *
 * credentials = auth-scheme 1*SP token68
 * auth-scheme = "Basic" ; case insensitive
 * token68     = 1*( ALPHA / DIGIT / "-" / "." / "_" / "~" / "+" / "/" ) *"="
 * @private
 */

var credentialsRegExp = /^ *(?:[Bb][Aa][Ss][Ii][Cc]) +([A-Za-z0-9\-\._~\+\/]+=*) *$/

/**
 * RegExp for basic auth user/pass
 *
 * user-pass   = userid ":" password
 * userid      = *<TEXT excluding ":">
 * password    = *TEXT
 * @private
 */

var userPassRegExp = /^([^:]*):(.*)$/

/**
 * Parse the Authorization header field of a request.
 *
 * @param {object} req
 * @return {object} with .name and .pass
 * @public
 */

function auth(req) {
  if (!req) {
    throw new TypeError('argument req is required')
  }

  if (typeof req !== 'object') {
    throw new TypeError('argument req is required to be an object')
  }

  // get header
  var header = getAuthorization(req.req || req)

  // parse header
  var match = credentialsRegExp.exec(header || '')

  if (!match) {
    return
  }

  // decode user pass
  var userPass = userPassRegExp.exec(decodeBase64(match[1]))

  if (!userPass) {
    return
  }

  // return credentials object
  return new Credentials(userPass[1], userPass[2])
}

/**
 * Decode base64 string.
 * @private
 */

function decodeBase64(str) {
  return new Buffer(str, 'base64').toString()
}

/**
 * Get the Authorization header from request object.
 * @private
 */

function getAuthorization(req) {
  if (!req.headers || typeof req.headers !== 'object') {
    throw new TypeError('argument req is required to have headers property')
  }

  return req.headers.authorization
}

/**
 * Object to represent user credentials.
 * @private
 */

function Credentials(name, pass) {
  this.name = name
  this.pass = pass
}
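A quick way to see the parser above in action is to hand it a minimal request-like object, since `getAuthorization` only reads `req.headers.authorization`. A sketch (the user name and password are placeholders):

```js
var auth = require('basic-auth')

// Sketch only: exercising the parser with a hand-built request-like object.
var token = Buffer.from('john:secret').toString('base64')
var req = { headers: { authorization: 'Basic ' + token } }

console.log(auth(req))  // Credentials { name: 'john', pass: 'secret' }
```
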
110
node_modules/basic-auth/package.json
generated
vendored
Normal file
110
node_modules/basic-auth/package.json
generated
vendored
Normal file
@ -0,0 +1,110 @@
|
|||||||
|
{
|
||||||
|
"_args": [
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"raw": "basic-auth@~1.0.3",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "basic-auth",
|
||||||
|
"name": "basic-auth",
|
||||||
|
"rawSpec": "~1.0.3",
|
||||||
|
"spec": ">=1.0.3 <1.1.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"/tank/data/SERVER/zoneadm-master/node_modules/morgan"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"_from": "basic-auth@>=1.0.3 <1.1.0",
|
||||||
|
"_id": "basic-auth@1.0.4",
|
||||||
|
"_inCache": true,
|
||||||
|
"_location": "/basic-auth",
|
||||||
|
"_nodeVersion": "4.4.3",
|
||||||
|
"_npmOperationalInternal": {
|
||||||
|
"host": "packages-16-east.internal.npmjs.com",
|
||||||
|
"tmp": "tmp/basic-auth-1.0.4.tgz_1462938878912_0.717464140150696"
|
||||||
|
},
|
||||||
|
"_npmUser": {
|
||||||
|
"name": "dougwilson",
|
||||||
|
"email": "doug@somethingdoug.com"
|
||||||
|
},
|
||||||
|
"_npmVersion": "2.15.1",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"raw": "basic-auth@~1.0.3",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "basic-auth",
|
||||||
|
"name": "basic-auth",
|
||||||
|
"rawSpec": "~1.0.3",
|
||||||
|
"spec": ">=1.0.3 <1.1.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/morgan"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-1.0.4.tgz",
|
||||||
|
"_shasum": "030935b01de7c9b94a824b29f3fccb750d3a5290",
|
||||||
|
"_shrinkwrap": null,
|
||||||
|
"_spec": "basic-auth@~1.0.3",
|
||||||
|
"_where": "/tank/data/SERVER/zoneadm-master/node_modules/morgan",
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/jshttp/basic-auth/issues"
|
||||||
|
},
|
||||||
|
"dependencies": {},
|
||||||
|
"description": "node.js basic auth parser",
|
||||||
|
"devDependencies": {
|
||||||
|
"istanbul": "0.4.3",
|
||||||
|
"mocha": "1.21.5"
|
||||||
|
},
|
||||||
|
"directories": {},
|
||||||
|
"dist": {
|
||||||
|
"shasum": "030935b01de7c9b94a824b29f3fccb750d3a5290",
|
||||||
|
"tarball": "https://registry.npmjs.org/basic-auth/-/basic-auth-1.0.4.tgz"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">= 0.6"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"HISTORY.md",
|
||||||
|
"LICENSE",
|
||||||
|
"index.js"
|
||||||
|
],
|
||||||
|
"gitHead": "4682d99600383bad5a266efbaa5055657dd9891d",
|
||||||
|
"homepage": "https://github.com/jshttp/basic-auth#readme",
|
||||||
|
"keywords": [
|
||||||
|
"basic",
|
||||||
|
"auth",
|
||||||
|
"authorization",
|
||||||
|
"basicauth"
|
||||||
|
],
|
||||||
|
"license": "MIT",
|
||||||
|
"maintainers": [
|
||||||
|
{
|
||||||
|
"name": "dougwilson",
|
||||||
|
"email": "doug@somethingdoug.com"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "jonathanong",
|
||||||
|
"email": "jonathanrichardong@gmail.com"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "jongleberry",
|
||||||
|
"email": "jonathanrichardong@gmail.com"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "tjholowaychuk",
|
||||||
|
"email": "tj@vision-media.ca"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "basic-auth",
|
||||||
|
"optionalDependencies": {},
|
||||||
|
"readme": "ERROR: No README data found!",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/jshttp/basic-auth.git"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"test": "mocha --check-leaks --reporter spec --bail",
|
||||||
|
"test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/",
|
||||||
|
"test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/"
|
||||||
|
},
|
||||||
|
"version": "1.0.4"
|
||||||
|
}
|
39
node_modules/bcrypt-pbkdf/README.md
generated
vendored
Normal file
39
node_modules/bcrypt-pbkdf/README.md
generated
vendored
Normal file
@ -0,0 +1,39 @@
Port of the OpenBSD `bcrypt_pbkdf` function to pure Javascript. `npm`-ified
version of
[Devi Mandiri's port](https://github.com/devi/tmp/blob/master/js/bcrypt_pbkdf.js),
with some minor performance improvements. The code is copied verbatim (and
un-styled) from Devi's work.

This product includes software developed by Niels Provos.

## API

### `bcrypt_pbkdf.pbkdf(pass, passlen, salt, saltlen, key, keylen, rounds)`

Derive a cryptographic key of arbitrary length from a given password and salt,
using the OpenBSD `bcrypt_pbkdf` function. This is a combination of Blowfish and
SHA-512.

See [this article](http://www.tedunangst.com/flak/post/bcrypt-pbkdf) for
further information.

Parameters:

 * `pass`, a Uint8Array of length `passlen`
 * `passlen`, an integer Number
 * `salt`, a Uint8Array of length `saltlen`
 * `saltlen`, an integer Number
 * `key`, a Uint8Array of length `keylen`, will be filled with output
 * `keylen`, an integer Number
 * `rounds`, an integer Number, number of rounds of the PBKDF to run

### `bcrypt_pbkdf.hash(sha2pass, sha2salt, out)`

Calculate a Blowfish hash, given SHA2-512 output of a password and salt. Used as
part of the inner round function in the PBKDF.

Parameters:

 * `sha2pass`, a Uint8Array of length 64
 * `sha2salt`, a Uint8Array of length 64
 * `out`, a Uint8Array of length 32, will be filled with output
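A minimal usage sketch of `pbkdf` as described above (the password, salt, key length and round count are illustrative, not taken from this repository; in real use the salt should be random bytes):

```js
var bcrypt_pbkdf = require('bcrypt-pbkdf')

// Sketch only: derive a 32-byte key from a password and a 16-byte salt.
var pass = new Uint8Array(Buffer.from('correct horse battery staple'))
var salt = new Uint8Array(16)        // placeholder; use random bytes in practice
var key = new Uint8Array(32)         // will be filled with the derived key

bcrypt_pbkdf.pbkdf(pass, pass.length, salt, salt.length, key, key.length, 8)

console.log(Buffer.from(key).toString('hex'))
```
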
559
node_modules/bcrypt-pbkdf/index.js
generated
vendored
Normal file
559
node_modules/bcrypt-pbkdf/index.js
generated
vendored
Normal file
@ -0,0 +1,559 @@
|
|||||||
|
'use strict';
|
||||||
|
|
||||||
|
var crypto_hash_sha512 = require('tweetnacl').lowlevel.crypto_hash;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* This file is a 1:1 port from the OpenBSD blowfish.c and bcrypt_pbkdf.c. As a
|
||||||
|
* result, it retains the original copyright and license. The two files are
|
||||||
|
* under slightly different (but compatible) licenses, and are here combined in
|
||||||
|
* one file.
|
||||||
|
*
|
||||||
|
* Credit for the actual porting work goes to:
|
||||||
|
* Devi Mandiri <me@devi.web.id>
|
||||||
|
*/
|
||||||
|
|
||||||
|
/*
|
||||||
|
* The Blowfish portions are under the following license:
|
||||||
|
*
|
||||||
|
* Blowfish block cipher for OpenBSD
|
||||||
|
* Copyright 1997 Niels Provos <provos@physnet.uni-hamburg.de>
|
||||||
|
* All rights reserved.
|
||||||
|
*
|
||||||
|
* Implementation advice by David Mazieres <dm@lcs.mit.edu>.
|
||||||
|
*
|
||||||
|
* Redistribution and use in source and binary forms, with or without
|
||||||
|
* modification, are permitted provided that the following conditions
|
||||||
|
* are met:
|
||||||
|
* 1. Redistributions of source code must retain the above copyright
|
||||||
|
* notice, this list of conditions and the following disclaimer.
|
||||||
|
* 2. Redistributions in binary form must reproduce the above copyright
|
||||||
|
* notice, this list of conditions and the following disclaimer in the
|
||||||
|
* documentation and/or other materials provided with the distribution.
|
||||||
|
* 3. All advertising materials mentioning features or use of this software
|
||||||
|
* must display the following acknowledgement:
|
||||||
|
* This product includes software developed by Niels Provos.
|
||||||
|
* 4. The name of the author may not be used to endorse or promote products
|
||||||
|
* derived from this software without specific prior written permission.
|
||||||
|
*
|
||||||
|
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
|
||||||
|
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
|
||||||
|
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||||
|
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||||
|
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
|
||||||
|
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||||
|
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/*
|
||||||
|
* The bcrypt_pbkdf portions are under the following license:
|
||||||
|
*
|
||||||
|
* Copyright (c) 2013 Ted Unangst <tedu@openbsd.org>
|
||||||
|
*
|
||||||
|
* Permission to use, copy, modify, and distribute this software for any
|
||||||
|
* purpose with or without fee is hereby granted, provided that the above
|
||||||
|
* copyright notice and this permission notice appear in all copies.
|
||||||
|
*
|
||||||
|
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Performance improvements (Javascript-specific):
|
||||||
|
*
|
||||||
|
* Copyright 2016, Joyent Inc
|
||||||
|
* Author: Alex Wilson <alex.wilson@joyent.com>
|
||||||
|
*
|
||||||
|
* Permission to use, copy, modify, and distribute this software for any
|
||||||
|
* purpose with or without fee is hereby granted, provided that the above
|
||||||
|
* copyright notice and this permission notice appear in all copies.
|
||||||
|
*
|
||||||
|
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Ported from OpenBSD bcrypt_pbkdf.c v1.9
|
||||||
|
|
||||||
|
var BLF_J = 0;
|
||||||
|
|
||||||
|
var Blowfish = function() {
|
||||||
|
this.S = [
|
||||||
|
new Uint32Array([
|
||||||
|
0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7,
|
||||||
|
0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99,
|
||||||
|
0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16,
|
||||||
|
0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e,
|
||||||
|
0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee,
|
||||||
|
0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013,
|
||||||
|
0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef,
|
||||||
|
0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e,
|
||||||
|
0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60,
|
||||||
|
0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440,
|
||||||
|
0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce,
|
||||||
|
0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a,
|
||||||
|
0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e,
|
||||||
|
0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677,
|
||||||
|
0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193,
|
||||||
|
0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032,
|
||||||
|
0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88,
|
||||||
|
0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239,
|
||||||
|
0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e,
|
||||||
|
0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0,
|
||||||
|
0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3,
|
||||||
|
0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98,
|
||||||
|
0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88,
|
||||||
|
0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe,
|
||||||
|
0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6,
|
||||||
|
0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d,
|
||||||
|
0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b,
|
||||||
|
0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7,
|
||||||
|
0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba,
|
||||||
|
0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463,
|
||||||
|
0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f,
|
||||||
|
0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09,
|
||||||
|
0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3,
|
||||||
|
0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb,
|
||||||
|
0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279,
|
||||||
|
0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8,
|
||||||
|
0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab,
|
||||||
|
0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82,
|
||||||
|
0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db,
|
||||||
|
0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573,
|
||||||
|
0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0,
|
||||||
|
0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b,
|
||||||
|
0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790,
|
||||||
|
0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8,
|
||||||
|
0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4,
|
||||||
|
0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0,
|
||||||
|
0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7,
|
||||||
|
0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c,
|
||||||
|
0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad,
|
||||||
|
0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1,
|
||||||
|
0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299,
|
||||||
|
0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9,
|
||||||
|
0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477,
|
||||||
|
0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf,
|
||||||
|
0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49,
|
||||||
|
0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af,
|
||||||
|
0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa,
|
||||||
|
0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5,
|
||||||
|
0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41,
|
||||||
|
0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915,
|
||||||
|
0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400,
|
||||||
|
0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915,
|
||||||
|
0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664,
|
||||||
|
0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a]),
|
||||||
|
new Uint32Array([
|
||||||
|
0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623,
|
||||||
|
0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266,
|
||||||
|
0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1,
|
||||||
|
0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e,
|
||||||
|
0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6,
|
||||||
|
0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1,
|
||||||
|
0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e,
|
||||||
|
0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1,
|
||||||
|
0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737,
|
||||||
|
0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8,
|
||||||
|
0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff,
|
||||||
|
0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd,
|
||||||
|
0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701,
|
||||||
|
0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7,
|
||||||
|
0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41,
|
||||||
|
0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331,
|
||||||
|
0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf,
|
||||||
|
0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af,
|
||||||
|
0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e,
|
||||||
|
0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87,
|
||||||
|
0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c,
|
||||||
|
0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2,
|
||||||
|
0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16,
|
||||||
|
0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd,
|
||||||
|
0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b,
|
||||||
|
0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509,
|
||||||
|
0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e,
|
||||||
|
0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3,
|
||||||
|
0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f,
|
||||||
|
0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a,
|
||||||
|
0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4,
|
||||||
|
0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960,
|
||||||
|
0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66,
|
||||||
|
0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28,
|
||||||
|
0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802,
|
||||||
|
0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84,
|
||||||
|
0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510,
|
||||||
|
0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf,
|
||||||
|
0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14,
|
||||||
|
0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e,
|
||||||
|
0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50,
|
||||||
|
0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7,
|
||||||
|
0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8,
|
||||||
|
0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281,
|
||||||
|
0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99,
|
||||||
|
0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696,
|
||||||
|
0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128,
|
||||||
|
0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73,
|
||||||
|
0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0,
|
||||||
|
0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0,
|
||||||
|
0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105,
|
||||||
|
0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250,
|
||||||
|
0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3,
|
||||||
|
0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285,
|
||||||
|
0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00,
|
||||||
|
0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061,
|
||||||
|
0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb,
|
||||||
|
0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e,
|
||||||
|
0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735,
|
||||||
|
0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc,
|
||||||
|
0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9,
|
||||||
|
0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340,
|
||||||
|
0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20,
|
||||||
|
0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7]),
|
||||||
|
new Uint32Array([
|
||||||
|
0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934,
|
||||||
|
0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068,
|
||||||
|
0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af,
|
||||||
|
0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840,
|
||||||
|
0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45,
|
||||||
|
0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504,
|
||||||
|
0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a,
|
||||||
|
0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb,
|
||||||
|
0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee,
|
||||||
|
0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6,
|
||||||
|
0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42,
|
||||||
|
0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b,
|
||||||
|
0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2,
|
||||||
|
0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb,
|
||||||
|
0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527,
|
||||||
|
0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b,
|
||||||
|
0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33,
|
||||||
|
0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c,
|
||||||
|
0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3,
|
||||||
|
0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc,
|
||||||
|
0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17,
|
||||||
|
0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564,
|
||||||
|
0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b,
|
||||||
|
0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115,
|
||||||
|
0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922,
|
||||||
|
0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728,
|
||||||
|
0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0,
|
||||||
|
0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e,
|
||||||
|
0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37,
|
||||||
|
0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d,
|
||||||
|
0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804,
|
||||||
|
0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b,
|
||||||
|
0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3,
|
||||||
|
0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb,
|
||||||
|
0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d,
|
||||||
|
0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c,
|
||||||
|
0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350,
|
||||||
|
0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9,
|
||||||
|
0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a,
|
||||||
|
0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe,
|
||||||
|
0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d,
|
||||||
|
0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc,
|
||||||
|
0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f,
|
||||||
|
0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61,
|
||||||
|
0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2,
|
||||||
|
0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9,
|
||||||
|
0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2,
|
||||||
|
0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c,
|
||||||
|
0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e,
|
||||||
|
0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633,
|
||||||
|
0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10,
|
||||||
|
0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169,
|
||||||
|
0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52,
|
||||||
|
0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027,
|
||||||
|
0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5,
|
||||||
|
0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62,
|
||||||
|
0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634,
|
||||||
|
0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76,
|
||||||
|
0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24,
|
||||||
|
0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc,
|
||||||
|
0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4,
|
||||||
|
0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c,
|
||||||
|
0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837,
|
||||||
|
0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0]),
|
||||||
|
new Uint32Array([
|
||||||
|
0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b,
|
||||||
|
0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe,
|
||||||
|
0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b,
|
||||||
|
0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4,
|
||||||
|
0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8,
|
||||||
|
0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6,
|
||||||
|
0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304,
|
||||||
|
0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22,
|
||||||
|
0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4,
|
||||||
|
0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6,
|
||||||
|
0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9,
|
||||||
|
0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59,
|
||||||
|
0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593,
|
||||||
|
0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51,
|
||||||
|
0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28,
|
||||||
|
0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c,
|
||||||
|
0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b,
|
||||||
|
0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28,
|
||||||
|
0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c,
|
||||||
|
0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd,
|
||||||
|
0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a,
|
||||||
|
0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319,
|
||||||
|
0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb,
|
||||||
|
0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f,
|
||||||
|
0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991,
|
||||||
|
0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32,
|
||||||
|
0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680,
|
||||||
|
0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166,
|
||||||
|
0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae,
|
||||||
|
0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb,
|
||||||
|
0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5,
|
||||||
|
0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47,
|
||||||
|
0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370,
|
||||||
|
0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d,
|
||||||
|
0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84,
|
||||||
|
0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048,
|
||||||
|
0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8,
|
||||||
|
0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd,
|
||||||
|
0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9,
|
||||||
|
0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7,
|
||||||
|
0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38,
|
||||||
|
0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f,
|
||||||
|
0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c,
|
||||||
|
0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525,
|
||||||
|
0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1,
|
||||||
|
0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442,
|
||||||
|
0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964,
|
||||||
|
0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e,
|
||||||
|
0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8,
|
||||||
|
0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d,
|
||||||
|
0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f,
|
||||||
|
0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299,
|
||||||
|
0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02,
|
||||||
|
0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc,
|
||||||
|
0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614,
|
||||||
|
0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a,
|
||||||
|
0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6,
|
||||||
|
0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b,
|
||||||
|
0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0,
|
||||||
|
0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060,
|
||||||
|
0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e,
|
||||||
|
0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9,
|
||||||
|
0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f,
|
||||||
|
0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6])
|
||||||
|
];
|
||||||
|
this.P = new Uint32Array([
|
||||||
|
0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344,
|
||||||
|
0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89,
|
||||||
|
0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c,
|
||||||
|
0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917,
|
||||||
|
0x9216d5d9, 0x8979fb1b]);
|
||||||
|
};
|
||||||
|
|
||||||
|
function F(S, x8, i) {
|
||||||
|
return (((S[0][x8[i+3]] +
|
||||||
|
S[1][x8[i+2]]) ^
|
||||||
|
S[2][x8[i+1]]) +
|
||||||
|
S[3][x8[i]]);
|
||||||
|
};
|
||||||
|
|
||||||
|
Blowfish.prototype.encipher = function(x, x8) {
|
||||||
|
if (x8 === undefined) {
|
||||||
|
x8 = new Uint8Array(x.buffer);
|
||||||
|
if (x.byteOffset !== 0)
|
||||||
|
x8 = x8.subarray(x.byteOffset);
|
||||||
|
}
|
||||||
|
x[0] ^= this.P[0];
|
||||||
|
for (var i = 1; i < 16; i += 2) {
|
||||||
|
x[1] ^= F(this.S, x8, 0) ^ this.P[i];
|
||||||
|
x[0] ^= F(this.S, x8, 4) ^ this.P[i+1];
|
||||||
|
}
|
||||||
|
var t = x[0];
|
||||||
|
x[0] = x[1] ^ this.P[17];
|
||||||
|
x[1] = t;
|
||||||
|
};
|
||||||
|
|
||||||
|
Blowfish.prototype.decipher = function(x) {
|
||||||
|
var x8 = new Uint8Array(x.buffer);
|
||||||
|
if (x.byteOffset !== 0)
|
||||||
|
x8 = x8.subarray(x.byteOffset);
|
||||||
|
x[0] ^= this.P[17];
|
||||||
|
for (var i = 16; i > 0; i -= 2) {
|
||||||
|
x[1] ^= F(this.S, x8, 0) ^ this.P[i];
|
||||||
|
x[0] ^= F(this.S, x8, 4) ^ this.P[i-1];
|
||||||
|
}
|
||||||
|
var t = x[0];
|
||||||
|
x[0] = x[1] ^ this.P[0];
|
||||||
|
x[1] = t;
|
||||||
|
};
|
||||||
|
|
||||||
|
function stream2word(data, databytes){
|
||||||
|
var i, temp = 0;
|
||||||
|
for (i = 0; i < 4; i++, BLF_J++) {
|
||||||
|
if (BLF_J >= databytes) BLF_J = 0;
|
||||||
|
temp = (temp << 8) | data[BLF_J];
|
||||||
|
}
|
||||||
|
return temp;
|
||||||
|
};
|
||||||
|
|
||||||
|
Blowfish.prototype.expand0state = function(key, keybytes) {
|
||||||
|
var d = new Uint32Array(2), i, k;
|
||||||
|
var d8 = new Uint8Array(d.buffer);
|
||||||
|
|
||||||
|
for (i = 0, BLF_J = 0; i < 18; i++) {
|
||||||
|
this.P[i] ^= stream2word(key, keybytes);
|
||||||
|
}
|
||||||
|
BLF_J = 0;
|
||||||
|
|
||||||
|
for (i = 0; i < 18; i += 2) {
|
||||||
|
this.encipher(d, d8);
|
||||||
|
this.P[i] = d[0];
|
||||||
|
this.P[i+1] = d[1];
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i = 0; i < 4; i++) {
|
||||||
|
for (k = 0; k < 256; k += 2) {
|
||||||
|
this.encipher(d, d8);
|
||||||
|
this.S[i][k] = d[0];
|
||||||
|
this.S[i][k+1] = d[1];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Blowfish.prototype.expandstate = function(data, databytes, key, keybytes) {
|
||||||
|
var d = new Uint32Array(2), i, k;
|
||||||
|
|
||||||
|
for (i = 0, BLF_J = 0; i < 18; i++) {
|
||||||
|
this.P[i] ^= stream2word(key, keybytes);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i = 0, BLF_J = 0; i < 18; i += 2) {
|
||||||
|
d[0] ^= stream2word(data, databytes);
|
||||||
|
d[1] ^= stream2word(data, databytes);
|
||||||
|
this.encipher(d);
|
||||||
|
this.P[i] = d[0];
|
||||||
|
this.P[i+1] = d[1];
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i = 0; i < 4; i++) {
|
||||||
|
for (k = 0; k < 256; k += 2) {
|
||||||
|
d[0] ^= stream2word(data, databytes);
|
||||||
|
d[1] ^= stream2word(data, databytes);
|
||||||
|
this.encipher(d);
|
||||||
|
this.S[i][k] = d[0];
|
||||||
|
this.S[i][k+1] = d[1];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
BLF_J = 0;
|
||||||
|
};
|
||||||
|
|
||||||
|
Blowfish.prototype.enc = function(data, blocks) {
|
||||||
|
for (var i = 0; i < blocks; i++) {
|
||||||
|
this.encipher(data.subarray(i*2));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Blowfish.prototype.dec = function(data, blocks) {
|
||||||
|
for (var i = 0; i < blocks; i++) {
|
||||||
|
this.decipher(data.subarray(i*2));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
var BCRYPT_BLOCKS = 8,
|
||||||
|
BCRYPT_HASHSIZE = 32;
|
||||||
|
|
||||||
|
function bcrypt_hash(sha2pass, sha2salt, out) {
|
||||||
|
var state = new Blowfish(),
|
||||||
|
cdata = new Uint32Array(BCRYPT_BLOCKS), i,
|
||||||
|
ciphertext = new Uint8Array([79,120,121,99,104,114,111,109,97,116,105,
|
||||||
|
99,66,108,111,119,102,105,115,104,83,119,97,116,68,121,110,97,109,
|
||||||
|
105,116,101]); //"OxychromaticBlowfishSwatDynamite"
|
||||||
|
|
||||||
|
state.expandstate(sha2salt, 64, sha2pass, 64);
|
||||||
|
for (i = 0; i < 64; i++) {
|
||||||
|
state.expand0state(sha2salt, 64);
|
||||||
|
state.expand0state(sha2pass, 64);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i = 0; i < BCRYPT_BLOCKS; i++)
|
||||||
|
cdata[i] = stream2word(ciphertext, ciphertext.byteLength);
|
||||||
|
for (i = 0; i < 64; i++)
|
||||||
|
state.enc(cdata, cdata.byteLength / 8);
|
||||||
|
|
||||||
|
for (i = 0; i < BCRYPT_BLOCKS; i++) {
|
||||||
|
out[4*i+3] = cdata[i] >>> 24;
|
||||||
|
out[4*i+2] = cdata[i] >>> 16;
|
||||||
|
out[4*i+1] = cdata[i] >>> 8;
|
||||||
|
out[4*i+0] = cdata[i];
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
function bcrypt_pbkdf(pass, passlen, salt, saltlen, key, keylen, rounds) {
|
||||||
|
var sha2pass = new Uint8Array(64),
|
||||||
|
sha2salt = new Uint8Array(64),
|
||||||
|
out = new Uint8Array(BCRYPT_HASHSIZE),
|
||||||
|
tmpout = new Uint8Array(BCRYPT_HASHSIZE),
|
||||||
|
countsalt = new Uint8Array(saltlen+4),
|
||||||
|
i, j, amt, stride, dest, count,
|
||||||
|
origkeylen = keylen;
|
||||||
|
|
||||||
|
if (rounds < 1)
|
||||||
|
return -1;
|
||||||
|
if (passlen === 0 || saltlen === 0 || keylen === 0 ||
|
||||||
|
keylen > (out.byteLength * out.byteLength) || saltlen > (1<<20))
|
||||||
|
return -1;
|
||||||
|
|
||||||
|
stride = Math.floor((keylen + out.byteLength - 1) / out.byteLength);
|
||||||
|
amt = Math.floor((keylen + stride - 1) / stride);
|
||||||
|
|
||||||
|
for (i = 0; i < saltlen; i++)
|
||||||
|
countsalt[i] = salt[i];
|
||||||
|
|
||||||
|
crypto_hash_sha512(sha2pass, pass, passlen);
|
||||||
|
|
||||||
|
for (count = 1; keylen > 0; count++) {
|
||||||
|
countsalt[saltlen+0] = count >>> 24;
|
||||||
|
countsalt[saltlen+1] = count >>> 16;
|
||||||
|
countsalt[saltlen+2] = count >>> 8;
|
||||||
|
countsalt[saltlen+3] = count;
|
||||||
|
|
||||||
|
crypto_hash_sha512(sha2salt, countsalt, saltlen + 4);
|
||||||
|
bcrypt_hash(sha2pass, sha2salt, tmpout);
|
||||||
|
for (i = out.byteLength; i--;)
|
||||||
|
out[i] = tmpout[i];
|
||||||
|
|
||||||
|
for (i = 1; i < rounds; i++) {
|
||||||
|
crypto_hash_sha512(sha2salt, tmpout, tmpout.byteLength);
|
||||||
|
bcrypt_hash(sha2pass, sha2salt, tmpout);
|
||||||
|
for (j = 0; j < out.byteLength; j++)
|
||||||
|
out[j] ^= tmpout[j];
|
||||||
|
}
|
||||||
|
|
||||||
|
amt = Math.min(amt, keylen);
|
||||||
|
for (i = 0; i < amt; i++) {
|
||||||
|
dest = i * stride + (count - 1);
|
||||||
|
if (dest >= origkeylen)
|
||||||
|
break;
|
||||||
|
key[dest] = out[i];
|
||||||
|
}
|
||||||
|
keylen -= i;
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
};
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
BLOCKS: BCRYPT_BLOCKS,
|
||||||
|
HASHSIZE: BCRYPT_HASHSIZE,
|
||||||
|
hash: bcrypt_hash,
|
||||||
|
pbkdf: bcrypt_pbkdf
|
||||||
|
};
|
72
node_modules/bcrypt-pbkdf/package.json
generated
vendored
Normal file
72
node_modules/bcrypt-pbkdf/package.json
generated
vendored
Normal file
@ -0,0 +1,72 @@
|
|||||||
|
{
|
||||||
|
"_args": [
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"raw": "bcrypt-pbkdf@^1.0.0",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "bcrypt-pbkdf",
|
||||||
|
"name": "bcrypt-pbkdf",
|
||||||
|
"rawSpec": "^1.0.0",
|
||||||
|
"spec": ">=1.0.0 <2.0.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"/tank/data/SERVER/zoneadm-master/node_modules/sshpk"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"_from": "bcrypt-pbkdf@>=1.0.0 <2.0.0",
|
||||||
|
"_id": "bcrypt-pbkdf@1.0.0",
|
||||||
|
"_inCache": true,
|
||||||
|
"_location": "/bcrypt-pbkdf",
|
||||||
|
"_nodeVersion": "0.12.15",
|
||||||
|
"_npmOperationalInternal": {
|
||||||
|
"host": "packages-16-east.internal.npmjs.com",
|
||||||
|
"tmp": "tmp/bcrypt-pbkdf-1.0.0.tgz_1471381825814_0.06877309852279723"
|
||||||
|
},
|
||||||
|
"_npmUser": {
|
||||||
|
"name": "arekinath",
|
||||||
|
"email": "alex@cooperi.net"
|
||||||
|
},
|
||||||
|
"_npmVersion": "3.10.3",
|
||||||
|
"_phantomChildren": {},
|
||||||
|
"_requested": {
|
||||||
|
"raw": "bcrypt-pbkdf@^1.0.0",
|
||||||
|
"scope": null,
|
||||||
|
"escapedName": "bcrypt-pbkdf",
|
||||||
|
"name": "bcrypt-pbkdf",
|
||||||
|
"rawSpec": "^1.0.0",
|
||||||
|
"spec": ">=1.0.0 <2.0.0",
|
||||||
|
"type": "range"
|
||||||
|
},
|
||||||
|
"_requiredBy": [
|
||||||
|
"/sshpk"
|
||||||
|
],
|
||||||
|
"_resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.0.tgz",
|
||||||
|
"_shasum": "3ca76b85241c7170bf7d9703e7b9aa74630040d4",
|
||||||
|
"_shrinkwrap": null,
|
||||||
|
"_spec": "bcrypt-pbkdf@^1.0.0",
|
||||||
|
"_where": "/tank/data/SERVER/zoneadm-master/node_modules/sshpk",
|
||||||
|
"dependencies": {
|
||||||
|
"tweetnacl": "^0.14.3"
|
||||||
|
},
|
||||||
|
"description": "Port of the OpenBSD bcrypt_pbkdf function to pure JS",
|
||||||
|
"devDependencies": {},
|
||||||
|
"directories": {},
|
||||||
|
"dist": {
|
||||||
|
"shasum": "3ca76b85241c7170bf7d9703e7b9aa74630040d4",
|
||||||
|
"tarball": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.0.tgz"
|
||||||
|
},
|
||||||
|
"gitHead": "e88be37d3cd25395b4aa496ac468b33671368be6",
|
||||||
|
"license": "BSD-4-Clause",
|
||||||
|
"main": "index.js",
|
||||||
|
"maintainers": [
|
||||||
|
{
|
||||||
|
"name": "arekinath",
|
||||||
|
"email": "alex@cooperi.net"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "bcrypt-pbkdf",
|
||||||
|
"optionalDependencies": {},
|
||||||
|
"readme": "ERROR: No README data found!",
|
||||||
|
"scripts": {},
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
464
node_modules/body-parser/HISTORY.md
generated
vendored
Normal file
464
node_modules/body-parser/HISTORY.md
generated
vendored
Normal file
@ -0,0 +1,464 @@
|
|||||||
|
1.15.2 / 2016-06-19
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: bytes@2.4.0
|
||||||
|
* deps: content-type@~1.0.2
|
||||||
|
- perf: enable strict mode
|
||||||
|
* deps: http-errors@~1.5.0
|
||||||
|
- Use `setprototypeof` module to replace `__proto__` setting
|
||||||
|
- deps: statuses@'>= 1.3.0 < 2'
|
||||||
|
- perf: enable strict mode
|
||||||
|
* deps: qs@6.2.0
|
||||||
|
* deps: raw-body@~2.1.7
|
||||||
|
- deps: bytes@2.4.0
|
||||||
|
- perf: remove double-cleanup on happy path
|
||||||
|
* deps: type-is@~1.6.13
|
||||||
|
- deps: mime-types@~2.1.11
|
||||||
|
|
||||||
|
1.15.1 / 2016-05-05
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: bytes@2.3.0
|
||||||
|
- Drop partial bytes on all parsed units
|
||||||
|
- Fix parsing byte string that looks like hex
|
||||||
|
* deps: raw-body@~2.1.6
|
||||||
|
- deps: bytes@2.3.0
|
||||||
|
* deps: type-is@~1.6.12
|
||||||
|
- deps: mime-types@~2.1.10
|
||||||
|
|
||||||
|
1.15.0 / 2016-02-10
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: http-errors@~1.4.0
|
||||||
|
- Add `HttpError` export, for `err instanceof createError.HttpError`
|
||||||
|
- deps: inherits@2.0.1
|
||||||
|
- deps: statuses@'>= 1.2.1 < 2'
|
||||||
|
* deps: qs@6.1.0
|
||||||
|
* deps: type-is@~1.6.11
|
||||||
|
- deps: mime-types@~2.1.9
|
||||||
|
|
||||||
|
1.14.2 / 2015-12-16
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: bytes@2.2.0
|
||||||
|
* deps: iconv-lite@0.4.13
|
||||||
|
* deps: qs@5.2.0
|
||||||
|
* deps: raw-body@~2.1.5
|
||||||
|
- deps: bytes@2.2.0
|
||||||
|
- deps: iconv-lite@0.4.13
|
||||||
|
* deps: type-is@~1.6.10
|
||||||
|
- deps: mime-types@~2.1.8
|
||||||
|
|
||||||
|
1.14.1 / 2015-09-27
|
||||||
|
===================
|
||||||
|
|
||||||
|
* Fix issue where invalid charset results in 400 when `verify` used
|
||||||
|
* deps: iconv-lite@0.4.12
|
||||||
|
- Fix CESU-8 decoding in Node.js 4.x
|
||||||
|
* deps: raw-body@~2.1.4
|
||||||
|
- Fix masking critical errors from `iconv-lite`
|
||||||
|
- deps: iconv-lite@0.4.12
|
||||||
|
* deps: type-is@~1.6.9
|
||||||
|
- deps: mime-types@~2.1.7
|
||||||
|
|
||||||
|
1.14.0 / 2015-09-16
|
||||||
|
===================
|
||||||
|
|
||||||
|
* Fix JSON strict parse error to match syntax errors
|
||||||
|
* Provide static `require` analysis in `urlencoded` parser
|
||||||
|
* deps: depd@~1.1.0
|
||||||
|
- Support web browser loading
|
||||||
|
* deps: qs@5.1.0
|
||||||
|
* deps: raw-body@~2.1.3
|
||||||
|
- Fix sync callback when attaching data listener causes sync read
|
||||||
|
* deps: type-is@~1.6.8
|
||||||
|
- Fix type error when given invalid type to match against
|
||||||
|
- deps: mime-types@~2.1.6
|
||||||
|
|
||||||
|
1.13.3 / 2015-07-31
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: type-is@~1.6.6
|
||||||
|
- deps: mime-types@~2.1.4
|
||||||
|
|
||||||
|
1.13.2 / 2015-07-05
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: iconv-lite@0.4.11
|
||||||
|
* deps: qs@4.0.0
|
||||||
|
- Fix dropping parameters like `hasOwnProperty`
|
||||||
|
- Fix user-visible incompatibilities from 3.1.0
|
||||||
|
- Fix various parsing edge cases
|
||||||
|
* deps: raw-body@~2.1.2
|
||||||
|
- Fix error stack traces to skip `makeError`
|
||||||
|
- deps: iconv-lite@0.4.11
|
||||||
|
* deps: type-is@~1.6.4
|
||||||
|
- deps: mime-types@~2.1.2
|
||||||
|
- perf: enable strict mode
|
||||||
|
- perf: remove argument reassignment
|
||||||
|
|
||||||
|
1.13.1 / 2015-06-16
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: qs@2.4.2
|
||||||
|
- Downgraded from 3.1.0 because of user-visible incompatibilities
|
||||||
|
|
||||||
|
1.13.0 / 2015-06-14
|
||||||
|
===================
|
||||||
|
|
||||||
|
* Add `statusCode` property on `Error`s, in addition to `status`
|
||||||
|
* Change `type` default to `application/json` for JSON parser
|
||||||
|
* Change `type` default to `application/x-www-form-urlencoded` for urlencoded parser
|
||||||
|
* Provide static `require` analysis
|
||||||
|
* Use the `http-errors` module to generate errors
|
||||||
|
* deps: bytes@2.1.0
|
||||||
|
- Slight optimizations
|
||||||
|
* deps: iconv-lite@0.4.10
|
||||||
|
- The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails
|
||||||
|
- Leading BOM is now removed when decoding
|
||||||
|
* deps: on-finished@~2.3.0
|
||||||
|
- Add defined behavior for HTTP `CONNECT` requests
|
||||||
|
- Add defined behavior for HTTP `Upgrade` requests
|
||||||
|
- deps: ee-first@1.1.1
|
||||||
|
* deps: qs@3.1.0
|
||||||
|
- Fix dropping parameters like `hasOwnProperty`
|
||||||
|
- Fix various parsing edge cases
|
||||||
|
- Parsed object now has `null` prototype
|
||||||
|
* deps: raw-body@~2.1.1
|
||||||
|
- Use `unpipe` module for unpiping requests
|
||||||
|
- deps: iconv-lite@0.4.10
|
||||||
|
* deps: type-is@~1.6.3
|
||||||
|
- deps: mime-types@~2.1.1
|
||||||
|
- perf: reduce try block size
|
||||||
|
- perf: remove bitwise operations
|
||||||
|
* perf: enable strict mode
|
||||||
|
* perf: remove argument reassignment
|
||||||
|
* perf: remove delete call
|
||||||
|
|
||||||
|
1.12.4 / 2015-05-10
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: debug@~2.2.0
|
||||||
|
* deps: qs@2.4.2
|
||||||
|
- Fix allowing parameters like `constructor`
|
||||||
|
* deps: on-finished@~2.2.1
|
||||||
|
* deps: raw-body@~2.0.1
|
||||||
|
- Fix a false-positive when unpiping in Node.js 0.8
|
||||||
|
- deps: bytes@2.0.1
|
||||||
|
* deps: type-is@~1.6.2
|
||||||
|
- deps: mime-types@~2.0.11
|
||||||
|
|
||||||
|
1.12.3 / 2015-04-15
|
||||||
|
===================
|
||||||
|
|
||||||
|
* Slight efficiency improvement when not debugging
|
||||||
|
* deps: depd@~1.0.1
|
||||||
|
* deps: iconv-lite@0.4.8
|
||||||
|
- Add encoding alias UNICODE-1-1-UTF-7
|
||||||
|
* deps: raw-body@1.3.4
|
||||||
|
- Fix hanging callback if request aborts during read
|
||||||
|
- deps: iconv-lite@0.4.8
|
||||||
|
|
||||||
|
1.12.2 / 2015-03-16
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: qs@2.4.1
|
||||||
|
- Fix error when parameter `hasOwnProperty` is present
|
||||||
|
|
||||||
|
1.12.1 / 2015-03-15
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: debug@~2.1.3
|
||||||
|
- Fix high intensity foreground color for bold
|
||||||
|
- deps: ms@0.7.0
|
||||||
|
* deps: type-is@~1.6.1
|
||||||
|
- deps: mime-types@~2.0.10
|
||||||
|
|
||||||
|
1.12.0 / 2015-02-13
|
||||||
|
===================
|
||||||
|
|
||||||
|
* add `debug` messages
|
||||||
|
* accept a function for the `type` option
|
||||||
|
* use `content-type` to parse `Content-Type` headers
|
||||||
|
* deps: iconv-lite@0.4.7
|
||||||
|
- Gracefully support enumerables on `Object.prototype`
|
||||||
|
* deps: raw-body@1.3.3
|
||||||
|
- deps: iconv-lite@0.4.7
|
||||||
|
* deps: type-is@~1.6.0
|
||||||
|
- fix argument reassignment
|
||||||
|
- fix false-positives in `hasBody` `Transfer-Encoding` check
|
||||||
|
- support wildcard for both type and subtype (`*/*`)
|
||||||
|
- deps: mime-types@~2.0.9
|
||||||
|
|
||||||
|
1.11.0 / 2015-01-30
|
||||||
|
===================
|
||||||
|
|
||||||
|
* make internal `extended: true` depth limit infinity
|
||||||
|
* deps: type-is@~1.5.6
|
||||||
|
- deps: mime-types@~2.0.8
|
||||||
|
|
||||||
|
1.10.2 / 2015-01-20
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: iconv-lite@0.4.6
|
||||||
|
- Fix rare aliases of single-byte encodings
|
||||||
|
* deps: raw-body@1.3.2
|
||||||
|
- deps: iconv-lite@0.4.6
|
||||||
|
|
||||||
|
1.10.1 / 2015-01-01
|
||||||
|
===================
|
||||||
|
|
||||||
|
* deps: on-finished@~2.2.0
|
||||||
|
* deps: type-is@~1.5.5
|
||||||
|
- deps: mime-types@~2.0.7
|
||||||
|
|
||||||
|
1.10.0 / 2014-12-02
|
||||||
|
===================
|
||||||
|
|
||||||
|
* make internal `extended: true` array limit dynamic
|
||||||
|
|
||||||
|
1.9.3 / 2014-11-21
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: iconv-lite@0.4.5
|
||||||
|
- Fix Windows-31J and X-SJIS encoding support
|
||||||
|
* deps: qs@2.3.3
|
||||||
|
- Fix `arrayLimit` behavior
|
||||||
|
* deps: raw-body@1.3.1
|
||||||
|
- deps: iconv-lite@0.4.5
|
||||||
|
* deps: type-is@~1.5.3
|
||||||
|
- deps: mime-types@~2.0.3
|
||||||
|
|
||||||
|
1.9.2 / 2014-10-27
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: qs@2.3.2
|
||||||
|
- Fix parsing of mixed objects and values
|
||||||
|
|
||||||
|
1.9.1 / 2014-10-22
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: on-finished@~2.1.1
|
||||||
|
- Fix handling of pipelined requests
|
||||||
|
* deps: qs@2.3.0
|
||||||
|
- Fix parsing of mixed implicit and explicit arrays
|
||||||
|
* deps: type-is@~1.5.2
|
||||||
|
- deps: mime-types@~2.0.2
|
||||||
|
|
||||||
|
1.9.0 / 2014-09-24
|
||||||
|
==================
|
||||||
|
|
||||||
|
* include the charset in "unsupported charset" error message
|
||||||
|
* include the encoding in "unsupported content encoding" error message
|
||||||
|
* deps: depd@~1.0.0
|
||||||
|
|
||||||
|
1.8.4 / 2014-09-23
|
||||||
|
==================
|
||||||
|
|
||||||
|
* fix content encoding to be case-insensitive
|
||||||
|
|
||||||
|
1.8.3 / 2014-09-19
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: qs@2.2.4
|
||||||
|
- Fix issue with object keys starting with numbers truncated
|
||||||
|
|
||||||
|
1.8.2 / 2014-09-15
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: depd@0.4.5
|
||||||
|
|
||||||
|
1.8.1 / 2014-09-07
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: media-typer@0.3.0
|
||||||
|
* deps: type-is@~1.5.1
|
||||||
|
|
||||||
|
1.8.0 / 2014-09-05
|
||||||
|
==================
|
||||||
|
|
||||||
|
* make empty-body-handling consistent between chunked requests
|
||||||
|
- empty `json` produces `{}`
|
||||||
|
- empty `raw` produces `new Buffer(0)`
|
||||||
|
- empty `text` produces `''`
|
||||||
|
- empty `urlencoded` produces `{}`
|
||||||
|
* deps: qs@2.2.3
|
||||||
|
- Fix issue where first empty value in array is discarded
|
||||||
|
* deps: type-is@~1.5.0
|
||||||
|
- fix `hasbody` to be true for `content-length: 0`
|
||||||
|
|
||||||
|
1.7.0 / 2014-09-01
|
||||||
|
==================
|
||||||
|
|
||||||
|
* add `parameterLimit` option to `urlencoded` parser
|
||||||
|
* change `urlencoded` extended array limit to 100
|
||||||
|
* respond with 413 when over `parameterLimit` in `urlencoded`
|
||||||
|
|
||||||
|
1.6.7 / 2014-08-29
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: qs@2.2.2
|
||||||
|
- Remove unnecessary cloning
|
||||||
|
|
||||||
|
1.6.6 / 2014-08-27
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: qs@2.2.0
|
||||||
|
- Array parsing fix
|
||||||
|
- Performance improvements
|
||||||
|
|
||||||
|
1.6.5 / 2014-08-16
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: on-finished@2.1.0
|
||||||
|
|
||||||
|
1.6.4 / 2014-08-14
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: qs@1.2.2
|
||||||
|
|
||||||
|
1.6.3 / 2014-08-10
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: qs@1.2.1
|
||||||
|
|
||||||
|
1.6.2 / 2014-08-07
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: qs@1.2.0
|
||||||
|
- Fix parsing array of objects
|
||||||
|
|
||||||
|
1.6.1 / 2014-08-06
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: qs@1.1.0
|
||||||
|
- Accept urlencoded square brackets
|
||||||
|
- Accept empty values in implicit array notation
|
||||||
|
|
||||||
|
1.6.0 / 2014-08-05
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: qs@1.0.2
|
||||||
|
- Complete rewrite
|
||||||
|
- Limits array length to 20
|
||||||
|
- Limits object depth to 5
|
||||||
|
- Limits parameters to 1,000
|
||||||
|
|
||||||
|
1.5.2 / 2014-07-27
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: depd@0.4.4
|
||||||
|
- Work-around v8 generating empty stack traces
|
||||||
|
|
||||||
|
1.5.1 / 2014-07-26
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: depd@0.4.3
|
||||||
|
- Fix exception when global `Error.stackTraceLimit` is too low
|
||||||
|
|
||||||
|
1.5.0 / 2014-07-20
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: depd@0.4.2
|
||||||
|
- Add `TRACE_DEPRECATION` environment variable
|
||||||
|
- Remove non-standard grey color from color output
|
||||||
|
- Support `--no-deprecation` argument
|
||||||
|
- Support `--trace-deprecation` argument
|
||||||
|
* deps: iconv-lite@0.4.4
|
||||||
|
- Added encoding UTF-7
|
||||||
|
* deps: raw-body@1.3.0
|
||||||
|
- deps: iconv-lite@0.4.4
|
||||||
|
- Added encoding UTF-7
|
||||||
|
- Fix `Cannot switch to old mode now` error on Node.js 0.10+
|
||||||
|
* deps: type-is@~1.3.2
|
||||||
|
|
||||||
|
1.4.3 / 2014-06-19
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: type-is@1.3.1
|
||||||
|
- fix global variable leak
|
||||||
|
|
||||||
|
1.4.2 / 2014-06-19
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: type-is@1.3.0
|
||||||
|
- improve type parsing
|
||||||
|
|
||||||
|
1.4.1 / 2014-06-19
|
||||||
|
==================
|
||||||
|
|
||||||
|
* fix urlencoded extended deprecation message
|
||||||
|
|
||||||
|
1.4.0 / 2014-06-19
|
||||||
|
==================
|
||||||
|
|
||||||
|
* add `text` parser
|
||||||
|
* add `raw` parser
|
||||||
|
* check accepted charset in content-type (accepts utf-8)
|
||||||
|
* check accepted encoding in content-encoding (accepts identity)
|
||||||
|
* deprecate `bodyParser()` middleware; use `.json()` and `.urlencoded()` as needed
|
||||||
|
* deprecate `urlencoded()` without provided `extended` option
|
||||||
|
* lazy-load urlencoded parsers
|
||||||
|
* parsers split into files for reduced mem usage
|
||||||
|
* support gzip and deflate bodies
|
||||||
|
- set `inflate: false` to turn off
|
||||||
|
* deps: raw-body@1.2.2
|
||||||
|
- Support all encodings from `iconv-lite`
|
||||||
|
|
||||||
|
1.3.1 / 2014-06-11
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: type-is@1.2.1
|
||||||
|
- Switch dependency from mime to mime-types@1.0.0
|
||||||
|
|
||||||
|
1.3.0 / 2014-05-31
|
||||||
|
==================
|
||||||
|
|
||||||
|
* add `extended` option to urlencoded parser
|
||||||
|
|
||||||
|
1.2.2 / 2014-05-27
|
||||||
|
==================
|
||||||
|
|
||||||
|
* deps: raw-body@1.1.6
|
||||||
|
- assert stream encoding on node.js 0.8
|
||||||
|
- assert stream encoding on node.js < 0.10.6
|
||||||
|
- deps: bytes@1
|
||||||
|
|
||||||
|
1.2.1 / 2014-05-26
|
||||||
|
==================
|
||||||
|
|
||||||
|
* invoke `next(err)` after request fully read
|
||||||
|
- prevents hung responses and socket hang ups
|
||||||
|
|
||||||
|
1.2.0 / 2014-05-11
|
||||||
|
==================
|
||||||
|
|
||||||
|
* add `verify` option
|
||||||
|
* deps: type-is@1.2.0
|
||||||
|
- support suffix matching
|
||||||
|
|
||||||
|
1.1.2 / 2014-05-11
|
||||||
|
==================
|
||||||
|
|
||||||
|
* improve json parser speed
|
||||||
|
|
||||||
|
1.1.1 / 2014-05-11
|
||||||
|
==================
|
||||||
|
|
||||||
|
* fix repeated limit parsing with every request
|
||||||
|
|
||||||
|
1.1.0 / 2014-05-10
|
||||||
|
==================
|
||||||
|
|
||||||
|
* add `type` option
|
||||||
|
* deps: pin for safety and consistency
|
||||||
|
|
||||||
|
1.0.2 / 2014-04-14
|
||||||
|
==================
|
||||||
|
|
||||||
|
* use `type-is` module
|
||||||
|
|
||||||
|
1.0.1 / 2014-03-20
|
||||||
|
==================
|
||||||
|
|
||||||
|
* lower default limits to 100kb
|
23
node_modules/body-parser/LICENSE
generated
vendored
Normal file
23
node_modules/body-parser/LICENSE
generated
vendored
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
(The MIT License)
|
||||||
|
|
||||||
|
Copyright (c) 2014 Jonathan Ong <me@jongleberry.com>
|
||||||
|
Copyright (c) 2014-2015 Douglas Christopher Wilson <doug@somethingdoug.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining
|
||||||
|
a copy of this software and associated documentation files (the
|
||||||
|
'Software'), to deal in the Software without restriction, including
|
||||||
|
without limitation the rights to use, copy, modify, merge, publish,
|
||||||
|
distribute, sublicense, and/or sell copies of the Software, and to
|
||||||
|
permit persons to whom the Software is furnished to do so, subject to
|
||||||
|
the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be
|
||||||
|
included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||||
|
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||||
|
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||||
|
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||||
|
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
409
node_modules/body-parser/README.md
generated
vendored
Normal file
409
node_modules/body-parser/README.md
generated
vendored
Normal file
@ -0,0 +1,409 @@
|
|||||||
|
# body-parser
|
||||||
|
|
||||||
|
[![NPM Version][npm-image]][npm-url]
|
||||||
|
[![NPM Downloads][downloads-image]][downloads-url]
|
||||||
|
[![Build Status][travis-image]][travis-url]
|
||||||
|
[![Test Coverage][coveralls-image]][coveralls-url]
|
||||||
|
[![Gratipay][gratipay-image]][gratipay-url]
|
||||||
|
|
||||||
|
Node.js body parsing middleware.
|
||||||
|
|
||||||
|
Parse incoming request bodies in a middleware before your handlers, availabe
|
||||||
|
under the `req.body` property.
|
||||||
|
|
||||||
|
[Learn about the anatomy of an HTTP transaction in Node.js](https://nodejs.org/en/docs/guides/anatomy-of-an-http-transaction/).
|
||||||
|
|
||||||
|
_This does not handle multipart bodies_, due to their complex and typically
|
||||||
|
large nature. For multipart bodies, you may be interested in the following
|
||||||
|
modules:
|
||||||
|
|
||||||
|
* [busboy](https://www.npmjs.org/package/busboy#readme) and
|
||||||
|
[connect-busboy](https://www.npmjs.org/package/connect-busboy#readme)
|
||||||
|
* [multiparty](https://www.npmjs.org/package/multiparty#readme) and
|
||||||
|
[connect-multiparty](https://www.npmjs.org/package/connect-multiparty#readme)
|
||||||
|
* [formidable](https://www.npmjs.org/package/formidable#readme)
|
||||||
|
* [multer](https://www.npmjs.org/package/multer#readme)
|
||||||
|
|
||||||
|
This module provides the following parsers:
|
||||||
|
|
||||||
|
* [JSON body parser](#bodyparserjsonoptions)
|
||||||
|
* [Raw body parser](#bodyparserrawoptions)
|
||||||
|
* [Text body parser](#bodyparsertextoptions)
|
||||||
|
* [URL-encoded form body parser](#bodyparserurlencodedoptions)
|
||||||
|
|
||||||
|
Other body parsers you might be interested in:
|
||||||
|
|
||||||
|
- [body](https://www.npmjs.org/package/body#readme)
|
||||||
|
- [co-body](https://www.npmjs.org/package/co-body#readme)
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
$ npm install body-parser
|
||||||
|
```
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
```js
|
||||||
|
var bodyParser = require('body-parser')
|
||||||
|
```
|
||||||
|
|
||||||
|
The `bodyParser` object exposes various factories to create middlewares. All
|
||||||
|
middlewares will populate the `req.body` property with the parsed body, or an
|
||||||
|
empty object (`{}`) if there was no body to parse (or an error was returned).
|
||||||
|
|
||||||
|
The various errors returned by this module are described in the
|
||||||
|
[errors section](#errors).
|
||||||
|
|
||||||
|
### bodyParser.json(options)
|
||||||
|
|
||||||
|
Returns middleware that only parses `json`. This parser accepts any Unicode
|
||||||
|
encoding of the body and supports automatic inflation of `gzip` and `deflate`
|
||||||
|
encodings.
|
||||||
|
|
||||||
|
A new `body` object containing the parsed data is populated on the `request`
|
||||||
|
object after the middleware (i.e. `req.body`).
|
||||||
|
|
||||||
|
#### Options
|
||||||
|
|
||||||
|
The `json` function takes an option `options` object that may contain any of
|
||||||
|
the following keys:
|
||||||
|
|
||||||
|
##### inflate
|
||||||
|
|
||||||
|
When set to `true`, then deflated (compressed) bodies will be inflated; when
|
||||||
|
`false`, deflated bodies are rejected. Defaults to `true`.
|
||||||
|
|
||||||
|
##### limit
|
||||||
|
|
||||||
|
Controls the maximum request body size. If this is a number, then the value
|
||||||
|
specifies the number of bytes; if it is a string, the value is passed to the
|
||||||
|
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
|
||||||
|
to `'100kb'`.
|
||||||
|
|
||||||
|
##### reviver
|
||||||
|
|
||||||
|
The `reviver` option is passed directly to `JSON.parse` as the second
|
||||||
|
argument. You can find more information on this argument
|
||||||
|
[in the MDN documentation about JSON.parse](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Example.3A_Using_the_reviver_parameter).
|
||||||
|
|
||||||
|
##### strict
|
||||||
|
|
||||||
|
When set to `true`, will only accept arrays and objects; when `false` will
|
||||||
|
accept anything `JSON.parse` accepts. Defaults to `true`.
|
||||||
|
|
||||||
|
##### type
|
||||||
|
|
||||||
|
The `type` option is used to determine what media type the middleware will
|
||||||
|
parse. This option can be a function or a string. If a string, `type` option
|
||||||
|
is passed directly to the [type-is](https://www.npmjs.org/package/type-is#readme)
|
||||||
|
library and this can be an extension name (like `json`), a mime type (like
|
||||||
|
`application/json`), or a mime type with a wildcard (like `*/*` or `*/json`).
|
||||||
|
If a function, the `type` option is called as `fn(req)` and the request is
|
||||||
|
parsed if it returns a truthy value. Defaults to `application/json`.
|
||||||
|
|
||||||
|
##### verify
|
||||||
|
|
||||||
|
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
|
||||||
|
where `buf` is a `Buffer` of the raw request body and `encoding` is the
|
||||||
|
encoding of the request. The parsing can be aborted by throwing an error.
|
||||||
|
|
||||||
|
### bodyParser.raw(options)
|
||||||
|
|
||||||
|
Returns middleware that parses all bodies as a `Buffer`. This parser
|
||||||
|
supports automatic inflation of `gzip` and `deflate` encodings.
|
||||||
|
|
||||||
|
A new `body` object containing the parsed data is populated on the `request`
|
||||||
|
object after the middleware (i.e. `req.body`). This will be a `Buffer` object
|
||||||
|
of the body.
|
||||||
|
|
||||||
|
#### Options
|
||||||
|
|
||||||
|
The `raw` function takes an option `options` object that may contain any of
|
||||||
|
the following keys:
|
||||||
|
|
||||||
|
##### inflate
|
||||||
|
|
||||||
|
When set to `true`, then deflated (compressed) bodies will be inflated; when
|
||||||
|
`false`, deflated bodies are rejected. Defaults to `true`.
|
||||||
|
|
||||||
|
##### limit
|
||||||
|
|
||||||
|
Controls the maximum request body size. If this is a number, then the value
|
||||||
|
specifies the number of bytes; if it is a string, the value is passed to the
|
||||||
|
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
|
||||||
|
to `'100kb'`.
|
||||||
|
|
||||||
|
##### type
|
||||||
|
|
||||||
|
The `type` option is used to determine what media type the middleware will
|
||||||
|
parse. This option can be a function or a string. If a string, `type` option
|
||||||
|
is passed directly to the [type-is](https://www.npmjs.org/package/type-is#readme)
|
||||||
|
library and this can be an extension name (like `bin`), a mime type (like
|
||||||
|
`application/octet-stream`), or a mime type with a wildcard (like `*/*` or
|
||||||
|
`application/*`). If a function, the `type` option is called as `fn(req)`
|
||||||
|
and the request is parsed if it returns a truthy value. Defaults to
|
||||||
|
`application/octet-stream`.
|
||||||
|
|
||||||
|
##### verify
|
||||||
|
|
||||||
|
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
|
||||||
|
where `buf` is a `Buffer` of the raw request body and `encoding` is the
|
||||||
|
encoding of the request. The parsing can be aborted by throwing an error.
|
||||||
|
|
||||||
|
### bodyParser.text(options)
|
||||||
|
|
||||||
|
Returns middleware that parses all bodies as a string. This parser supports
|
||||||
|
automatic inflation of `gzip` and `deflate` encodings.
|
||||||
|
|
||||||
|
A new `body` string containing the parsed data is populated on the `request`
|
||||||
|
object after the middleware (i.e. `req.body`). This will be a string of the
|
||||||
|
body.
|
||||||
|
|
||||||
|
#### Options
|
||||||
|
|
||||||
|
The `text` function takes an option `options` object that may contain any of
|
||||||
|
the following keys:
|
||||||
|
|
||||||
|
##### defaultCharset
|
||||||
|
|
||||||
|
Specify the default character set for the text content if the charset is not
|
||||||
|
specified in the `Content-Type` header of the request. Defaults to `utf-8`.
|
||||||
|
|
||||||
|
##### inflate
|
||||||
|
|
||||||
|
When set to `true`, then deflated (compressed) bodies will be inflated; when
|
||||||
|
`false`, deflated bodies are rejected. Defaults to `true`.
|
||||||
|
|
||||||
|
##### limit
|
||||||
|
|
||||||
|
Controls the maximum request body size. If this is a number, then the value
|
||||||
|
specifies the number of bytes; if it is a string, the value is passed to the
|
||||||
|
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
|
||||||
|
to `'100kb'`.
|
||||||
|
|
||||||
|
##### type
|
||||||
|
|
||||||
|
The `type` option is used to determine what media type the middleware will
|
||||||
|
parse. This option can be a function or a string. If a string, `type` option
|
||||||
|
is passed directly to the [type-is](https://www.npmjs.org/package/type-is#readme)
|
||||||
|
library and this can be an extension name (like `txt`), a mime type (like
|
||||||
|
`text/plain`), or a mime type with a wildcard (like `*/*` or `text/*`).
|
||||||
|
If a function, the `type` option is called as `fn(req)` and the request is
|
||||||
|
parsed if it returns a truthy value. Defaults to `text/plain`.
|
||||||
|
|
||||||
|
##### verify
|
||||||
|
|
||||||
|
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
|
||||||
|
where `buf` is a `Buffer` of the raw request body and `encoding` is the
|
||||||
|
encoding of the request. The parsing can be aborted by throwing an error.
|
||||||
|
|
||||||
|
### bodyParser.urlencoded(options)
|
||||||
|
|
||||||
|
Returns middleware that only parses `urlencoded` bodies. This parser accepts
|
||||||
|
only UTF-8 encoding of the body and supports automatic inflation of `gzip`
|
||||||
|
and `deflate` encodings.
|
||||||
|
|
||||||
|
A new `body` object containing the parsed data is populated on the `request`
|
||||||
|
object after the middleware (i.e. `req.body`). This object will contain
|
||||||
|
key-value pairs, where the value can be a string or array (when `extended` is
|
||||||
|
`false`), or any type (when `extended` is `true`).
|
||||||
|
|
||||||
|
#### Options
|
||||||
|
|
||||||
|
The `urlencoded` function takes an option `options` object that may contain
|
||||||
|
any of the following keys:
|
||||||
|
|
||||||
|
##### extended
|
||||||
|
|
||||||
|
The `extended` option allows to choose between parsing the URL-encoded data
|
||||||
|
with the `querystring` library (when `false`) or the `qs` library (when
|
||||||
|
`true`). The "extended" syntax allows for rich objects and arrays to be
|
||||||
|
encoded into the URL-encoded format, allowing for a JSON-like experience
|
||||||
|
with URL-encoded. For more information, please
|
||||||
|
[see the qs library](https://www.npmjs.org/package/qs#readme).
|
||||||
|
|
||||||
|
Defaults to `true`, but using the default has been deprecated. Please
|
||||||
|
research into the difference between `qs` and `querystring` and choose the
|
||||||
|
appropriate setting.
|
||||||
|
|
||||||
|
##### inflate
|
||||||
|
|
||||||
|
When set to `true`, then deflated (compressed) bodies will be inflated; when
|
||||||
|
`false`, deflated bodies are rejected. Defaults to `true`.
|
||||||
|
|
||||||
|
##### limit
|
||||||
|
|
||||||
|
Controls the maximum request body size. If this is a number, then the value
|
||||||
|
specifies the number of bytes; if it is a string, the value is passed to the
|
||||||
|
[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults
|
||||||
|
to `'100kb'`.
|
||||||
|
|
||||||
|
##### parameterLimit
|
||||||
|
|
||||||
|
The `parameterLimit` option controls the maximum number of parameters that
|
||||||
|
are allowed in the URL-encoded data. If a request contains more parameters
|
||||||
|
than this value, a 413 will be returned to the client. Defaults to `1000`.
|
||||||
|
|
||||||
|
##### type
|
||||||
|
|
||||||
|
The `type` option is used to determine what media type the middleware will
|
||||||
|
parse. This option can be a function or a string. If a string, `type` option
|
||||||
|
is passed directly to the [type-is](https://www.npmjs.org/package/type-is#readme)
|
||||||
|
library and this can be an extension name (like `urlencoded`), a mime type (like
|
||||||
|
`application/x-www-form-urlencoded`), or a mime type with a wildcard (like
|
||||||
|
`*/x-www-form-urlencoded`). If a function, the `type` option is called as
|
||||||
|
`fn(req)` and the request is parsed if it returns a truthy value. Defaults
|
||||||
|
to `application/x-www-form-urlencoded`.
|
||||||
|
|
||||||
|
##### verify
|
||||||
|
|
||||||
|
The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`,
|
||||||
|
where `buf` is a `Buffer` of the raw request body and `encoding` is the
|
||||||
|
encoding of the request. The parsing can be aborted by throwing an error.
|
||||||
|
|
||||||
|
## Errors
|
||||||
|
|
||||||
|
The middlewares provided by this module create errors depending on the error
|
||||||
|
condition during parsing. The errors will typically have a `status` property
|
||||||
|
that contains the suggested HTTP response code and a `body` property containing
|
||||||
|
the read body, if available.
|
||||||
|
|
||||||
|
The following are the common errors emitted, though any error can come through
|
||||||
|
for various reasons.
|
||||||
|
|
||||||
|
### content encoding unsupported
|
||||||
|
|
||||||
|
This error will occur when the request had a `Content-Encoding` header that
|
||||||
|
contained an encoding but the "inflation" option was set to `false`. The
|
||||||
|
`status` property is set to `415`.
|
||||||
|
|
||||||
|
### request aborted
|
||||||
|
|
||||||
|
This error will occur when the request is aborted by the client before reading
|
||||||
|
the body has finished. The `received` property will be set to the number of
|
||||||
|
bytes received before the request was aborted and the `expected` property is
|
||||||
|
set to the number of expected bytes. The `status` property is set to `400`.
|
||||||
|
|
||||||
|
### request entity too large
|
||||||
|
|
||||||
|
This error will occur when the request body's size is larger than the "limit"
|
||||||
|
option. The `limit` property will be set to the byte limit and the `length`
|
||||||
|
property will be set to the request body's length. The `status` property is
|
||||||
|
set to `413`.
|
||||||
|
|
||||||
|
### request size did not match content length
|
||||||
|
|
||||||
|
This error will occur when the request's length did not match the length from
|
||||||
|
the `Content-Length` header. This typically occurs when the request is malformed,
|
||||||
|
typically when the `Content-Length` header was calculated based on characters
|
||||||
|
instead of bytes. The `status` property is set to `400`.
|
||||||
|
|
||||||
|
### stream encoding should not be set
|
||||||
|
|
||||||
|
This error will occur when something called the `req.setEncoding` method prior
|
||||||
|
to this middleware. This module operates directly on bytes only and you cannot
|
||||||
|
call `req.setEncoding` when using this module. The `status` property is set to
|
||||||
|
`500`.
|
||||||
|
|
||||||
|
### unsupported charset "BOGUS"
|
||||||
|
|
||||||
|
This error will occur when the request had a charset parameter in the
|
||||||
|
`Content-Type` header, but the `iconv-lite` module does not support it OR the
|
||||||
|
parser does not support it. The charset is contained in the message as well
|
||||||
|
as in the `charset` property. The `status` property is set to `415`.
|
||||||
|
|
||||||
|
### unsupported content encoding "bogus"
|
||||||
|
|
||||||
|
This error will occur when the request had a `Content-Encoding` header that
|
||||||
|
contained an unsupported encoding. The encoding is contained in the message
|
||||||
|
as well as in the `encoding` property. The `status` property is set to `415`.
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
### Express/Connect top-level generic
|
||||||
|
|
||||||
|
This example demonstrates adding a generic JSON and URL-encoded parser as a
|
||||||
|
top-level middleware, which will parse the bodies of all incoming requests.
|
||||||
|
This is the simplest setup.
|
||||||
|
|
||||||
|
```js
|
||||||
|
var express = require('express')
|
||||||
|
var bodyParser = require('body-parser')
|
||||||
|
|
||||||
|
var app = express()
|
||||||
|
|
||||||
|
// parse application/x-www-form-urlencoded
|
||||||
|
app.use(bodyParser.urlencoded({ extended: false }))
|
||||||
|
|
||||||
|
// parse application/json
|
||||||
|
app.use(bodyParser.json())
|
||||||
|
|
||||||
|
app.use(function (req, res) {
|
||||||
|
res.setHeader('Content-Type', 'text/plain')
|
||||||
|
res.write('you posted:\n')
|
||||||
|
res.end(JSON.stringify(req.body, null, 2))
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### Express route-specific
|
||||||
|
|
||||||
|
This example demonstrates adding body parsers specifically to the routes that
|
||||||
|
need them. In general, this is the most recommended way to use body-parser with
|
||||||
|
Express.
|
||||||
|
|
||||||
|
```js
|
||||||
|
var express = require('express')
|
||||||
|
var bodyParser = require('body-parser')
|
||||||
|
|
||||||
|
var app = express()
|
||||||
|
|
||||||
|
// create application/json parser
|
||||||
|
var jsonParser = bodyParser.json()
|
||||||
|
|
||||||
|
// create application/x-www-form-urlencoded parser
|
||||||
|
var urlencodedParser = bodyParser.urlencoded({ extended: false })
|
||||||
|
|
||||||
|
// POST /login gets urlencoded bodies
|
||||||
|
app.post('/login', urlencodedParser, function (req, res) {
|
||||||
|
if (!req.body) return res.sendStatus(400)
|
||||||
|
res.send('welcome, ' + req.body.username)
|
||||||
|
})
|
||||||
|
|
||||||
|
// POST /api/users gets JSON bodies
|
||||||
|
app.post('/api/users', jsonParser, function (req, res) {
|
||||||
|
if (!req.body) return res.sendStatus(400)
|
||||||
|
// create user in req.body
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### Change accepted type for parsers
|
||||||
|
|
||||||
|
All the parsers accept a `type` option which allows you to change the
|
||||||
|
`Content-Type` that the middleware will parse.
|
||||||
|
|
||||||
|
```js
|
||||||
|
// parse various different custom JSON types as JSON
|
||||||
|
app.use(bodyParser.json({ type: 'application/*+json' }))
|
||||||
|
|
||||||
|
// parse some custom thing into a Buffer
|
||||||
|
app.use(bodyParser.raw({ type: 'application/vnd.custom-type' }))
|
||||||
|
|
||||||
|
// parse an HTML body into a string
|
||||||
|
app.use(bodyParser.text({ type: 'text/html' }))
|
||||||
|
```
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
[MIT](LICENSE)
|
||||||
|
|
||||||
|
[npm-image]: https://img.shields.io/npm/v/body-parser.svg
|
||||||
|
[npm-url]: https://npmjs.org/package/body-parser
|
||||||
|
[travis-image]: https://img.shields.io/travis/expressjs/body-parser/master.svg
|
||||||
|
[travis-url]: https://travis-ci.org/expressjs/body-parser
|
||||||
|
[coveralls-image]: https://img.shields.io/coveralls/expressjs/body-parser/master.svg
|
||||||
|
[coveralls-url]: https://coveralls.io/r/expressjs/body-parser?branch=master
|
||||||
|
[downloads-image]: https://img.shields.io/npm/dm/body-parser.svg
|
||||||
|
[downloads-url]: https://npmjs.org/package/body-parser
|
||||||
|
[gratipay-image]: https://img.shields.io/gratipay/dougwilson.svg
|
||||||
|
[gratipay-url]: https://www.gratipay.com/dougwilson/
|
157
node_modules/body-parser/index.js
generated
vendored
Normal file
157
node_modules/body-parser/index.js
generated
vendored
Normal file
@ -0,0 +1,157 @@
|
|||||||
|
/*!
|
||||||
|
* body-parser
|
||||||
|
* Copyright(c) 2014-2015 Douglas Christopher Wilson
|
||||||
|
* MIT Licensed
|
||||||
|
*/
|
||||||
|
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module dependencies.
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
var deprecate = require('depd')('body-parser')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cache of loaded parsers.
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
var parsers = Object.create(null)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @typedef Parsers
|
||||||
|
* @type {function}
|
||||||
|
* @property {function} json
|
||||||
|
* @property {function} raw
|
||||||
|
* @property {function} text
|
||||||
|
* @property {function} urlencoded
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module exports.
|
||||||
|
* @type {Parsers}
|
||||||
|
*/
|
||||||
|
|
||||||
|
exports = module.exports = deprecate.function(bodyParser,
|
||||||
|
'bodyParser: use individual json/urlencoded middlewares')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* JSON parser.
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Object.defineProperty(exports, 'json', {
|
||||||
|
configurable: true,
|
||||||
|
enumerable: true,
|
||||||
|
get: createParserGetter('json')
|
||||||
|
})
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Raw parser.
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Object.defineProperty(exports, 'raw', {
|
||||||
|
configurable: true,
|
||||||
|
enumerable: true,
|
||||||
|
get: createParserGetter('raw')
|
||||||
|
})
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Text parser.
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Object.defineProperty(exports, 'text', {
|
||||||
|
configurable: true,
|
||||||
|
enumerable: true,
|
||||||
|
get: createParserGetter('text')
|
||||||
|
})
|
||||||
|
|
||||||
|
/**
|
||||||
|
* URL-encoded parser.
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
Object.defineProperty(exports, 'urlencoded', {
|
||||||
|
configurable: true,
|
||||||
|
enumerable: true,
|
||||||
|
get: createParserGetter('urlencoded')
|
||||||
|
})
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a middleware to parse json and urlencoded bodies.
|
||||||
|
*
|
||||||
|
* @param {object} [options]
|
||||||
|
* @return {function}
|
||||||
|
* @deprecated
|
||||||
|
* @public
|
||||||
|
*/
|
||||||
|
|
||||||
|
function bodyParser (options) {
|
||||||
|
var opts = {}
|
||||||
|
|
||||||
|
// exclude type option
|
||||||
|
if (options) {
|
||||||
|
for (var prop in options) {
|
||||||
|
if (prop !== 'type') {
|
||||||
|
opts[prop] = options[prop]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var _urlencoded = exports.urlencoded(opts)
|
||||||
|
var _json = exports.json(opts)
|
||||||
|
|
||||||
|
return function bodyParser (req, res, next) {
|
||||||
|
_json(req, res, function (err) {
|
||||||
|
if (err) return next(err)
|
||||||
|
_urlencoded(req, res, next)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a getter for loading a parser.
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function createParserGetter (name) {
|
||||||
|
return function get () {
|
||||||
|
return loadParser(name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load a parser module.
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function loadParser (parserName) {
|
||||||
|
var parser = parsers[parserName]
|
||||||
|
|
||||||
|
if (parser !== undefined) {
|
||||||
|
return parser
|
||||||
|
}
|
||||||
|
|
||||||
|
// this uses a switch for static require analysis
|
||||||
|
switch (parserName) {
|
||||||
|
case 'json':
|
||||||
|
parser = require('./lib/types/json')
|
||||||
|
break
|
||||||
|
case 'raw':
|
||||||
|
parser = require('./lib/types/raw')
|
||||||
|
break
|
||||||
|
case 'text':
|
||||||
|
parser = require('./lib/types/text')
|
||||||
|
break
|
||||||
|
case 'urlencoded':
|
||||||
|
parser = require('./lib/types/urlencoded')
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
// store to prevent invoking require()
|
||||||
|
return (parsers[parserName] = parser)
|
||||||
|
}
|
188
node_modules/body-parser/lib/read.js
generated
vendored
Normal file
188
node_modules/body-parser/lib/read.js
generated
vendored
Normal file
@ -0,0 +1,188 @@
|
|||||||
|
/*!
|
||||||
|
* body-parser
|
||||||
|
* Copyright(c) 2014-2015 Douglas Christopher Wilson
|
||||||
|
* MIT Licensed
|
||||||
|
*/
|
||||||
|
|
||||||
|
'use strict'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module dependencies.
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
var createError = require('http-errors')
|
||||||
|
var getBody = require('raw-body')
|
||||||
|
var iconv = require('iconv-lite')
|
||||||
|
var onFinished = require('on-finished')
|
||||||
|
var zlib = require('zlib')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Module exports.
|
||||||
|
*/
|
||||||
|
|
||||||
|
module.exports = read
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Read a request into a buffer and parse.
|
||||||
|
*
|
||||||
|
* @param {object} req
|
||||||
|
* @param {object} res
|
||||||
|
* @param {function} next
|
||||||
|
* @param {function} parse
|
||||||
|
* @param {function} debug
|
||||||
|
* @param {object} [options]
|
||||||
|
* @api private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function read (req, res, next, parse, debug, options) {
|
||||||
|
var length
|
||||||
|
var opts = options || {}
|
||||||
|
var stream
|
||||||
|
|
||||||
|
// flag as parsed
|
||||||
|
req._body = true
|
||||||
|
|
||||||
|
// read options
|
||||||
|
var encoding = opts.encoding !== null
|
||||||
|
? opts.encoding || 'utf-8'
|
||||||
|
: null
|
||||||
|
var verify = opts.verify
|
||||||
|
|
||||||
|
try {
|
||||||
|
// get the content stream
|
||||||
|
stream = contentstream(req, debug, opts.inflate)
|
||||||
|
length = stream.length
|
||||||
|
stream.length = undefined
|
||||||
|
} catch (err) {
|
||||||
|
return next(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// set raw-body options
|
||||||
|
opts.length = length
|
||||||
|
opts.encoding = verify
|
||||||
|
? null
|
||||||
|
: encoding
|
||||||
|
|
||||||
|
// assert charset is supported
|
||||||
|
if (opts.encoding === null && encoding !== null && !iconv.encodingExists(encoding)) {
|
||||||
|
return next(createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
|
||||||
|
charset: encoding.toLowerCase()
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
// read body
|
||||||
|
debug('read body')
|
||||||
|
getBody(stream, opts, function (err, body) {
|
||||||
|
if (err) {
|
||||||
|
// default to 400
|
||||||
|
setErrorStatus(err, 400)
|
||||||
|
|
||||||
|
// echo back charset
|
||||||
|
if (err.type === 'encoding.unsupported') {
|
||||||
|
err = createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', {
|
||||||
|
charset: encoding.toLowerCase()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// read off entire request
|
||||||
|
stream.resume()
|
||||||
|
onFinished(req, function onfinished () {
|
||||||
|
next(err)
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// verify
|
||||||
|
if (verify) {
|
||||||
|
try {
|
||||||
|
debug('verify body')
|
||||||
|
verify(req, res, body, encoding)
|
||||||
|
} catch (err) {
|
||||||
|
// default to 403
|
||||||
|
setErrorStatus(err, 403)
|
||||||
|
next(err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// parse
|
||||||
|
var str
|
||||||
|
try {
|
||||||
|
debug('parse body')
|
||||||
|
str = typeof body !== 'string' && encoding !== null
|
||||||
|
? iconv.decode(body, encoding)
|
||||||
|
: body
|
||||||
|
req.body = parse(str)
|
||||||
|
} catch (err) {
|
||||||
|
err.body = str === undefined
|
||||||
|
? body
|
||||||
|
: str
|
||||||
|
|
||||||
|
// default to 400
|
||||||
|
setErrorStatus(err, 400)
|
||||||
|
|
||||||
|
next(err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
next()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the content stream of the request.
|
||||||
|
*
|
||||||
|
* @param {object} req
|
||||||
|
* @param {function} debug
|
||||||
|
* @param {boolean} [inflate=true]
|
||||||
|
* @return {object}
|
||||||
|
* @api private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function contentstream (req, debug, inflate) {
|
||||||
|
var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase()
|
||||||
|
var length = req.headers['content-length']
|
||||||
|
var stream
|
||||||
|
|
||||||
|
debug('content-encoding "%s"', encoding)
|
||||||
|
|
||||||
|
if (inflate === false && encoding !== 'identity') {
|
||||||
|
throw createError(415, 'content encoding unsupported')
|
||||||
|
}
|
||||||
|
|
||||||
|
switch (encoding) {
|
||||||
|
case 'deflate':
|
||||||
|
stream = zlib.createInflate()
|
||||||
|
debug('inflate body')
|
||||||
|
req.pipe(stream)
|
||||||
|
break
|
||||||
|
case 'gzip':
|
||||||
|
stream = zlib.createGunzip()
|
||||||
|
debug('gunzip body')
|
||||||
|
req.pipe(stream)
|
||||||
|
break
|
||||||
|
case 'identity':
|
||||||
|
stream = req
|
||||||
|
stream.length = length
|
||||||
|
break
|
||||||
|
default:
|
||||||
|
throw createError(415, 'unsupported content encoding "' + encoding + '"', {
|
||||||
|
encoding: encoding
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return stream
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set a status on an error object, if ones does not exist
|
||||||
|
* @private
|
||||||
|
*/
|
||||||
|
|
||||||
|
function setErrorStatus (error, status) {
|
||||||
|
if (!error.status && !error.statusCode) {
|
||||||
|
error.status = status
|
||||||
|
error.statusCode = status
|
||||||
|
}
|
||||||
|
}
|
175
node_modules/body-parser/lib/types/json.js
generated
vendored
Normal file
175
node_modules/body-parser/lib/types/json.js
generated
vendored
Normal file
@ -0,0 +1,175 @@
/*!
 * body-parser
 * Copyright(c) 2014 Jonathan Ong
 * Copyright(c) 2014-2015 Douglas Christopher Wilson
 * MIT Licensed
 */

'use strict'

/**
 * Module dependencies.
 * @private
 */

var bytes = require('bytes')
var contentType = require('content-type')
var createError = require('http-errors')
var debug = require('debug')('body-parser:json')
var read = require('../read')
var typeis = require('type-is')

/**
 * Module exports.
 */

module.exports = json

/**
 * RegExp to match the first non-space in a string.
 *
 * Allowed whitespace is defined in RFC 7159:
 *
 *    ws = *(
 *            %x20 /              ; Space
 *            %x09 /              ; Horizontal tab
 *            %x0A /              ; Line feed or New line
 *            %x0D )              ; Carriage return
 */

var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*(.)/ // eslint-disable-line no-control-regex

/**
 * Create a middleware to parse JSON bodies.
 *
 * @param {object} [options]
 * @return {function}
 * @public
 */

function json (options) {
  var opts = options || {}

  var limit = typeof opts.limit !== 'number'
    ? bytes.parse(opts.limit || '100kb')
    : opts.limit
  var inflate = opts.inflate !== false
  var reviver = opts.reviver
  var strict = opts.strict !== false
  var type = opts.type || 'application/json'
  var verify = opts.verify || false

  if (verify !== false && typeof verify !== 'function') {
    throw new TypeError('option verify must be function')
  }

  // create the appropriate type checking function
  var shouldParse = typeof type !== 'function'
    ? typeChecker(type)
    : type

  function parse (body) {
    if (body.length === 0) {
      // special-case empty json body, as it's a common client-side mistake
      // TODO: maybe make this configurable or part of "strict" option
      return {}
    }

    if (strict) {
      var first = firstchar(body)

      if (first !== '{' && first !== '[') {
        debug('strict violation')
        throw new SyntaxError('Unexpected token ' + first)
      }
    }

    debug('parse json')
    return JSON.parse(body, reviver)
  }

  return function jsonParser (req, res, next) {
    if (req._body) {
      debug('body already parsed')
      next()
      return
    }

    req.body = req.body || {}

    // skip requests without bodies
    if (!typeis.hasBody(req)) {
      debug('skip empty body')
      next()
      return
    }

    debug('content-type %j', req.headers['content-type'])

    // determine if request should be parsed
    if (!shouldParse(req)) {
      debug('skip parsing')
      next()
      return
    }

    // assert charset per RFC 7159 sec 8.1
    var charset = getCharset(req) || 'utf-8'
    if (charset.substr(0, 4) !== 'utf-') {
      debug('invalid charset')
      next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', {
        charset: charset
      }))
      return
    }

    // read
    read(req, res, next, parse, debug, {
      encoding: charset,
      inflate: inflate,
      limit: limit,
      verify: verify
    })
  }
}

/**
 * Get the first non-whitespace character in a string.
 *
 * @param {string} str
 * @return {string}
 * @api public
 */

function firstchar (str) {
  var match = FIRST_CHAR_REGEXP.exec(str)
  return match ? match[1] : ''
}

/**
 * Get the charset of a request.
 *
 * @param {object} req
 * @api private
 */

function getCharset (req) {
  try {
    return contentType.parse(req).parameters.charset.toLowerCase()
  } catch (e) {
    return undefined
  }
}

/**
 * Get the simple type checker.
 *
 * @param {string} type
 * @return {function}
 */

function typeChecker (type) {
  return function checkType (req) {
    return Boolean(typeis(req, type))
  }
}
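For reference, a hedged sketch of how the options read at the top of json() map onto middleware behaviour. The routes and option values below are illustrative assumptions, not defaults taken from this commit, except where the comment says otherwise.

var express = require('express')
var bodyParser = require('body-parser')

var app = express()

app.use(bodyParser.json({
  limit: '10kb',              // parsed by bytes.parse(); the code above defaults to '100kb'
  strict: true,               // firstchar() must see '{' or '[' before JSON.parse runs
  inflate: true,              // allow gzip/deflate bodies via contentstream() in read.js
  type: 'application/json'    // wrapped by typeChecker() into a type-is match
}))

app.post('/data', function (req, res) {
  // by this point req.body is the parsed object (or {} for an empty body)
  res.json(req.body)
})

app.listen(3000)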
Some files were not shown because too many files have changed in this diff.