This commit is contained in:
hatuhn
2019-09-13 09:44:33 +07:00
parent 1f1633e801
commit f14a34ba19
16798 changed files with 1652961 additions and 4327 deletions

233
node_modules/gulp/CHANGELOG.md generated vendored Normal file

@@ -0,0 +1,233 @@
# gulp changelog
## 3.9.0
- add babel support
- add transpiler fallback support
- add support for some renamed transpilers (livescript, etc)
- add JSCS
- update dependencies (liftoff, interpret)
- documentation tweaks
## 3.8.11
- fix node 0.12/iojs problems
- add node 0.12 and iojs to travis
- update dependencies (liftoff, v8flags)
- documentation tweaks
## 3.8.10
- add link to spanish docs
- update dependencies (archy, semver, mocha, etc)
- documentation tweaks
## 3.8.9
- fix local version undefined output
- add completion for fish shell
- fix powershell completion line splitting
- add support for arbitrary node flags (oops, should have been a minor bump)
- add v8flags dependency
- update dependencies (liftoff)
- documentation tweaks
## 3.8.8
- update dependencies (minimist, tildify)
- documentation tweaks
## 3.8.7
- handle errors a bit better
- update dependencies (gulp-util, semver, etc)
- documentation tweaks
## 3.8.6
- remove executable flag from LICENSE
- update dependencies (chalk, minimist, liftoff, etc)
- documentation tweaks
## 3.8.5
- simplify --silent and --tasks-simple
- fix bug in autocomplete where errors would come out
## 3.8.4
- CLI will use exit code 1 on exit when any task fails during the lifetime of the process
## 3.8.3
- Tweak error formatting to work better with PluginErrors and strings
## 3.8.2
- add manpage generation
## 3.8.1
- the CLI now adds process.env.INIT_CWD which is the original cwd it was launched from
## 3.8.0
- update vinyl-fs
- gulp.src is now a writable passthrough, this means you can use it to add files to your pipeline at any point
- gulp.dest can now take a function to determine the folder
This is now possible!
```js
gulp.src('lib/*.js')
.pipe(uglify())
.pipe(gulp.src('styles/*.css'))
.pipe(gulp.dest(function(file){
// I don't know, you can do something cool here
return 'build/whatever';
}));
```
## 3.7.0
- update vinyl-fs to remove BOM from UTF8 files
- add --tasks-simple flag for plaintext task listings
- updated autocomplete scripts to be simpler and use new --tasks-simple flag
- added support for transpilers via liftoff 0.11 and interpret
- just npm install your compiler (coffee-script for example) and it will work out of the box
## 3.5.5
- update deps
- gulp.dest now supports a mode option, uses source file mode by default (file.stat.mode)
- use chalk for colors in bin
- update gulp.env deprecation msg to be more helpful
## 3.5.2
- add -V for version on CLI (unix standard)
- -v is deprecated, use -V
- add -T as an alias for --tasks
- documentation
## 3.5
- added `gulp.watch(globs, tasksArray)` sugar
- remove gulp.taskQueue
- deprecate gulp.run
- deprecate gulp.env
- add engineStrict to prevent people with node < 0.9 from installing
## 3.4
- added `--tasks` that prints out the tree of tasks + deps
- global cli + local install mismatch is no longer fatal
- remove tests for fs stuff
- switch core src, dest, and watch to vinyl-fs
- internal cleaning
## 3.3.4
- `--base` is now `--cwd`
## 3.3.3
- support for `--base` CLI arg to change where the search for gulpfile/`--require`s starts
- support for `--gulpfile` CLI arg to point to a gulpfile specifically
## 3.3.0
- file.contents streams are no longer paused coming out of src
- dest now passes files through before they are empty to fix passing to multiple dests
## 3.2.4
- Bug fix - we didn't have any CLI tests
## 3.2.3
- Update dependencies for bug fixes
- autocomplete stuff in the completion folder
## 3.2
- File object is now [vinyl](https://github.com/wearefractal/vinyl)
- .watch() is now [glob-watcher](https://github.com/wearefractal/glob-watcher)
- Fix CLI -v when no gulpfile found
- gulp-util updated
- Logging moved to CLI bin file
- Will cause double logging if you update global CLI to 3.2 but not local
- Will cause no logging if you update local to 3.1 but not global CLI
- Drop support for < 0.9
## 3.1.3
- Move isStream and isBuffer to gulp-util
## 3.1
- Move file class to gulp-util
## 3.0
- Ability to pass multiple globs and glob negations to glob-stream
- Breaking change to the way glob-stream works
- File object is now a class
- file.shortened changed to file.relative
- file.cwd added
- Break out getStats to avoid nesting
- Major code reorganization
## 2.7
- Breaking change to the way options are passed to glob-stream
- Introduce new File object to ease pain of computing shortened names (now a getter)
## 2.4 - 2.6
- Moved stuff to gulp-util
- Quit exposing createGlobStream (just use the glob-stream module)
- More logging
- Prettier time durations
- Tons of documentation changes
- gulp.trigger(tasks...) as a through stream
## 1.2-2.4 (11/12/13)
- src buffer=false fixed for 0.8 and 0.9 (remember to .resume() on these versions before consuming)
- CLI completely rewritten
- Colorful logging
- Uses local version of gulp to run tasks
- Uses findup to locate gulpfile (so you can run it anywhere in your project)
- chdir to gulpfile directory before loading it
- Correct exit codes on errors
- silent flag added to gulp to disable logging
- Fixes to task orchestration (3rd party)
- Better support for globbed directories (thanks @robrich)
## 1.2 (10/28/13)
- Can specify buffer=false on src streams to make file.content a stream
- Can specify read=false on src streams to disable file.content
## 1.1 (10/21/13)
- Can specify run callback
- Can specify task dependencies
- Tasks can accept callback or return promise
- `gulp.verbose` exposes run-time internals
## 1.0 (9/26/13)
- Specify dependency versions
- Updated docs
## 0.2 (8/6/13)
- Rename .files() to .src() and .folder() to .dest()
## 0.1 (7/18/13)
- Initial Release

22
node_modules/gulp/LICENSE generated vendored Normal file

@@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2013-2016 Fractal <contact@wearefractal.com>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

103
node_modules/gulp/README.md generated vendored Normal file

@@ -0,0 +1,103 @@
<p align="center">
<a href="http://gulpjs.com">
<img height="257" width="114" src="https://raw.githubusercontent.com/gulpjs/artwork/master/gulp-2x.png">
</a>
<p align="center">The streaming build system</p>
</p>
[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Build Status][travis-image]][travis-url] [![Coveralls Status][coveralls-image]][coveralls-url] [![Gitter chat][gitter-image]][gitter-url]
## What is gulp?
- **Automation** - gulp is a toolkit that helps you automate painful or time-consuming tasks in your development workflow.
- **Platform-agnostic** - Integrations are built into all major IDEs and people are using gulp with PHP, .NET, Node.js, Java, and other platforms.
- **Strong Ecosystem** - Use npm modules to do anything you want + over 2000 curated plugins for streaming file transformations.
- **Simple** - By providing only a minimal API surface, gulp is easy to learn and simple to use.
## Documentation
For a Getting Started guide, API docs, recipes, making a plugin, etc., check out our docs!
- Need something reliable? Check out the [documentation for the current release](/docs/README.md)!
- Want to help us test the latest and greatest? Check out the [documentation for the next release](https://github.com/gulpjs/gulp/tree/4.0)!
## Sample `gulpfile.js`
This file will give you a taste of what gulp does.
```js
var gulp = require('gulp');
var coffee = require('gulp-coffee');
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');
var imagemin = require('gulp-imagemin');
var sourcemaps = require('gulp-sourcemaps');
var del = require('del');
var paths = {
scripts: ['client/js/**/*.coffee', '!client/external/**/*.coffee'],
images: 'client/img/**/*'
};
// Not all tasks need to use streams
// A gulpfile is just another node program and you can use any package available on npm
gulp.task('clean', function() {
// You can use multiple globbing patterns as you would with `gulp.src`
return del(['build']);
});
gulp.task('scripts', ['clean'], function() {
// Minify and copy all JavaScript (except vendor scripts)
// with sourcemaps all the way down
return gulp.src(paths.scripts)
.pipe(sourcemaps.init())
.pipe(coffee())
.pipe(uglify())
.pipe(concat('all.min.js'))
.pipe(sourcemaps.write())
.pipe(gulp.dest('build/js'));
});
// Copy all static images
gulp.task('images', ['clean'], function() {
return gulp.src(paths.images)
// Pass in options to the task
.pipe(imagemin({optimizationLevel: 5}))
.pipe(gulp.dest('build/img'));
});
// Rerun the task when a file changes
gulp.task('watch', function() {
gulp.watch(paths.scripts, ['scripts']);
gulp.watch(paths.images, ['images']);
});
// The default task (called when you run `gulp` from cli)
gulp.task('default', ['watch', 'scripts', 'images']);
```
## Incremental Builds
We recommend these plugins:
- [gulp-changed](https://github.com/sindresorhus/gulp-changed) - only pass through changed files
- [gulp-cached](https://github.com/contra/gulp-cached) - in-memory file cache, not for operation on sets of files
- [gulp-remember](https://github.com/ahaurw01/gulp-remember) - pairs nicely with gulp-cached
- [gulp-newer](https://github.com/tschaub/gulp-newer) - pass through newer source files only, supports many:1 source:dest
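As an illustration only, a minimal incremental image task built on gulp-newer might look like the sketch below (the `src/img`/`build/img` paths and the task name are placeholders):
```js
var gulp = require('gulp');
var newer = require('gulp-newer');
// Only images newer than their counterparts in build/img pass downstream,
// so unchanged files are skipped on repeat runs.
gulp.task('images-incremental', function() {
  return gulp.src('src/img/**/*')
    .pipe(newer('build/img'))
    .pipe(gulp.dest('build/img'));
});
```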
## Want to contribute?
Anyone can help make this project better - check out our [Contributing guide](/CONTRIBUTING.md)!
[downloads-image]: https://img.shields.io/npm/dm/gulp.svg
[npm-url]: https://www.npmjs.com/package/gulp
[npm-image]: https://img.shields.io/npm/v/gulp.svg
[travis-url]: https://travis-ci.org/gulpjs/gulp
[travis-image]: https://img.shields.io/travis/gulpjs/gulp.svg
[coveralls-url]: https://coveralls.io/r/gulpjs/gulp
[coveralls-image]: https://img.shields.io/coveralls/gulpjs/gulp/master.svg
[gitter-url]: https://gitter.im/gulpjs/gulp
[gitter-image]: https://badges.gitter.im/gulpjs/gulp.png

212
node_modules/gulp/bin/gulp.js generated vendored Executable file

@@ -0,0 +1,212 @@
#!/usr/bin/env node
'use strict';
var gutil = require('gulp-util');
var prettyTime = require('pretty-hrtime');
var chalk = require('chalk');
var semver = require('semver');
var archy = require('archy');
var Liftoff = require('liftoff');
var tildify = require('tildify');
var interpret = require('interpret');
var v8flags = require('v8flags');
var completion = require('../lib/completion');
var argv = require('minimist')(process.argv.slice(2));
var taskTree = require('../lib/taskTree');
// Set env var for ORIGINAL cwd
// before anything touches it
process.env.INIT_CWD = process.cwd();
var cli = new Liftoff({
name: 'gulp',
completions: completion,
extensions: interpret.jsVariants,
v8flags: v8flags,
});
// Exit with 0 or 1
var failed = false;
process.once('exit', function(code) {
if (code === 0 && failed) {
process.exit(1);
}
});
// Parse those args m8
var cliPackage = require('../package');
var versionFlag = argv.v || argv.version;
var tasksFlag = argv.T || argv.tasks;
var tasks = argv._;
var toRun = tasks.length ? tasks : ['default'];
// This is a hold-over until we have a better logging system
// with log levels
var simpleTasksFlag = argv['tasks-simple'];
var shouldLog = !argv.silent && !simpleTasksFlag;
if (!shouldLog) {
gutil.log = function() {};
}
cli.on('require', function(name) {
gutil.log('Requiring external module', chalk.magenta(name));
});
cli.on('requireFail', function(name) {
gutil.log(chalk.red('Failed to load external module'), chalk.magenta(name));
});
cli.on('respawn', function(flags, child) {
var nodeFlags = chalk.magenta(flags.join(', '));
var pid = chalk.magenta(child.pid);
gutil.log('Node flags detected:', nodeFlags);
gutil.log('Respawned to PID:', pid);
});
cli.launch({
cwd: argv.cwd,
configPath: argv.gulpfile,
require: argv.require,
completion: argv.completion,
}, handleArguments);
// The actual logic
function handleArguments(env) {
if (versionFlag && tasks.length === 0) {
gutil.log('CLI version', cliPackage.version);
if (env.modulePackage && typeof env.modulePackage.version !== 'undefined') {
gutil.log('Local version', env.modulePackage.version);
}
process.exit(0);
}
if (!env.modulePath) {
gutil.log(
chalk.red('Local gulp not found in'),
chalk.magenta(tildify(env.cwd))
);
gutil.log(chalk.red('Try running: npm install gulp'));
process.exit(1);
}
if (!env.configPath) {
gutil.log(chalk.red('No gulpfile found'));
process.exit(1);
}
// Check for semver difference between cli and local installation
if (semver.gt(cliPackage.version, env.modulePackage.version)) {
gutil.log(chalk.red('Warning: gulp version mismatch:'));
gutil.log(chalk.red('Global gulp is', cliPackage.version));
gutil.log(chalk.red('Local gulp is', env.modulePackage.version));
}
// Chdir before requiring gulpfile to make sure
// we let them chdir as needed
if (process.cwd() !== env.cwd) {
process.chdir(env.cwd);
gutil.log(
'Working directory changed to',
chalk.magenta(tildify(env.cwd))
);
}
// This is what actually loads up the gulpfile
require(env.configPath);
gutil.log('Using gulpfile', chalk.magenta(tildify(env.configPath)));
var gulpInst = require(env.modulePath);
logEvents(gulpInst);
process.nextTick(function() {
if (simpleTasksFlag) {
return logTasksSimple(env, gulpInst);
}
if (tasksFlag) {
return logTasks(env, gulpInst);
}
gulpInst.start.apply(gulpInst, toRun);
});
}
function logTasks(env, localGulp) {
var tree = taskTree(localGulp.tasks);
tree.label = 'Tasks for ' + chalk.magenta(tildify(env.configPath));
archy(tree)
.split('\n')
.forEach(function(v) {
if (v.trim().length === 0) {
return;
}
gutil.log(v);
});
}
function logTasksSimple(env, localGulp) {
console.log(Object.keys(localGulp.tasks)
.join('\n')
.trim());
}
// Format orchestrator errors
function formatError(e) {
if (!e.err) {
return e.message;
}
// PluginError
if (typeof e.err.showStack === 'boolean') {
return e.err.toString();
}
// Normal error
if (e.err.stack) {
return e.err.stack;
}
// Unknown (string, number, etc.)
return new Error(String(e.err)).stack;
}
// Wire up logging events
function logEvents(gulpInst) {
// Total hack due to poor error management in orchestrator
gulpInst.on('err', function() {
failed = true;
});
gulpInst.on('task_start', function(e) {
// TODO: batch these
// so when 5 tasks start at once it only logs one time with all 5
gutil.log('Starting', '\'' + chalk.cyan(e.task) + '\'...');
});
gulpInst.on('task_stop', function(e) {
var time = prettyTime(e.hrDuration);
gutil.log(
'Finished', '\'' + chalk.cyan(e.task) + '\'',
'after', chalk.magenta(time)
);
});
gulpInst.on('task_err', function(e) {
var msg = formatError(e);
var time = prettyTime(e.hrDuration);
gutil.log(
'\'' + chalk.cyan(e.task) + '\'',
chalk.red('errored after'),
chalk.magenta(time)
);
gutil.log(msg);
});
gulpInst.on('task_not_found', function(err) {
gutil.log(
chalk.red('Task \'' + err.task + '\' is not in your gulpfile')
);
gutil.log('Please check the documentation for proper gulpfile formatting');
process.exit(1);
});
}

20
node_modules/gulp/completion/README.md generated vendored Normal file

@@ -0,0 +1,20 @@
# Completion for gulp
> Thanks to grunt team and Tyler Kellen
To enable task auto-completion in your shell, add `eval "$(gulp --completion=shell)"` to your `.shellrc` file.
## Bash
Add `eval "$(gulp --completion=bash)"` to `~/.bashrc`.
## Zsh
Add `eval "$(gulp --completion=zsh)"` to `~/.zshrc`.
## Powershell
Add `Invoke-Expression ((gulp --completion=powershell) -join [System.Environment]::NewLine)` to `$PROFILE`.
## Fish
Add `gulp --completion=fish | source` to `~/.config/fish/config.fish`.

27
node_modules/gulp/completion/bash generated vendored Normal file

@@ -0,0 +1,27 @@
#!/bin/bash
# Borrowed from grunt-cli
# http://gruntjs.com/
#
# Copyright (c) 2012 Tyler Kellen, contributors
# Licensed under the MIT license.
# https://github.com/gruntjs/grunt/blob/master/LICENSE-MIT
# Usage:
#
# To enable bash <tab> completion for gulp, add the following line (minus the
# leading #, which is the bash comment character) to your ~/.bashrc file:
#
# eval "$(gulp --completion=bash)"
# Enable bash autocompletion.
function _gulp_completions() {
# The currently-being-completed word.
local cur="${COMP_WORDS[COMP_CWORD]}"
#Grab tasks
local compls=$(gulp --tasks-simple)
# Tell complete what stuff to show.
COMPREPLY=($(compgen -W "$compls" -- "$cur"))
}
complete -o default -F _gulp_completions gulp

10
node_modules/gulp/completion/fish generated vendored Normal file

@@ -0,0 +1,10 @@
#!/usr/bin/env fish
# Usage:
#
# To enable fish <tab> completion for gulp, add the following line to
# your ~/.config/fish/config.fish file:
#
# gulp --completion=fish | source
complete -c gulp -a "(gulp --tasks-simple)" -f

61
node_modules/gulp/completion/powershell generated vendored Normal file

@@ -0,0 +1,61 @@
# Copyright (c) 2014 Jason Jarrett
#
# Tab completion for the `gulp`
#
# Usage:
#
# To enable powershell <tab> completion for gulp you need to be running
# at least PowerShell v3 or greater and add the below to your $PROFILE
#
# Invoke-Expression ((gulp --completion=powershell) -join [System.Environment]::NewLine)
#
#
$gulp_completion_Process = {
param($commandName, $parameterName, $wordToComplete, $commandAst, $fakeBoundParameter)
# Load up an assembly to read the gulpfile's sha1
if(-not $global:GulpSHA1Managed) {
[Reflection.Assembly]::LoadWithPartialName("System.Security") | out-null
$global:GulpSHA1Managed = new-Object System.Security.Cryptography.SHA1Managed
}
# setup a global (in-memory) cache
if(-not $global:GulpfileShaCache) {
$global:GulpfileShaCache = @{};
}
$cache = $global:GulpfileShaCache;
# Get the gulpfile's sha1
$sha1gulpFile = (resolve-path gulpfile.js -ErrorAction Ignore | %{
$file = [System.IO.File]::Open($_.Path, "open", "read")
[string]::join('', ($global:GulpSHA1Managed.ComputeHash($file) | %{ $_.ToString("x2") }))
$file.Dispose()
})
# lookup the sha1 for previously cached task lists.
if($cache.ContainsKey($sha1gulpFile)){
$tasks = $cache[$sha1gulpFile];
} else {
$tasks = (gulp --tasks-simple).split("`n");
$cache[$sha1gulpFile] = $tasks;
}
$tasks |
where { $_.startswith($commandName) } |
Sort-Object |
foreach { New-Object System.Management.Automation.CompletionResult $_, $_, 'ParameterValue', ('{0}' -f $_) }
}
if (-not $global:options) {
$global:options = @{
CustomArgumentCompleters = @{};
NativeArgumentCompleters = @{}
}
}
$global:options['NativeArgumentCompleters']['gulp'] = $gulp_completion_Process
$function:tabexpansion2 = $function:tabexpansion2 -replace 'End\r\n{','End { if ($null -ne $options) { $options += $global:options} else {$options = $global:options}'

25
node_modules/gulp/completion/zsh generated vendored Normal file

@@ -0,0 +1,25 @@
#!/bin/zsh
# Borrowed from grunt-cli
# http://gruntjs.com/
#
# Copyright (c) 2012 Tyler Kellen, contributors
# Licensed under the MIT license.
# https://github.com/gruntjs/grunt/blob/master/LICENSE-MIT
# Usage:
#
# To enable zsh <tab> completion for gulp, add the following line (minus the
# leading #, which is the zsh comment character) to your ~/.zshrc file:
#
# eval "$(gulp --completion=zsh)"
# Enable zsh autocompletion.
function _gulp_completion() {
# Grab tasks
compls=$(gulp --tasks-simple)
completions=(${=compls})
compadd -- $completions
}
compdef _gulp_completion gulp

40
node_modules/gulp/gulp.1 generated vendored Normal file

@@ -0,0 +1,40 @@
.TH "GULP" "" "February 2016" "" ""
.SH "NAME"
\fBgulp\fR
.SH gulp CLI docs
.SS Flags
.P
gulp has very few flags to know about\. All other flags are for tasks to use if needed\.
.RS 0
.IP \(bu 2
\fB\-v\fP or \fB\-\-version\fP will display the global and local gulp versions
.IP \(bu 2
\fB\-\-require <module path>\fP will require a module before running the gulpfile\. This is useful for transpilers but also has other applications\. You can use multiple \fB\-\-require\fP flags
.IP \(bu 2
\fB\-\-gulpfile <gulpfile path>\fP will manually set path of gulpfile\. Useful if you have multiple gulpfiles\. This will set the CWD to the gulpfile directory as well
.IP \(bu 2
\fB\-\-cwd <dir path>\fP will manually set the CWD\. The search for the gulpfile, as well as the relativity of all requires will be from here
.IP \(bu 2
\fB\-T\fP or \fB\-\-tasks\fP will display the task dependency tree for the loaded gulpfile
.IP \(bu 2
\fB\-\-tasks\-simple\fP will display a plaintext list of tasks for the loaded gulpfile
.IP \(bu 2
\fB\-\-color\fP will force gulp and gulp plugins to display colors even when no color support is detected
.IP \(bu 2
\fB\-\-no\-color\fP will force gulp and gulp plugins to not display colors even when color support is detected
.IP \(bu 2
\fB\-\-silent\fP will disable all gulp logging
.RE
.P
The CLI adds process\.env\.INIT_CWD which is the original cwd it was launched from\.
.SS Task specific flags
.P
Refer to this StackOverflow \fIhttp://stackoverflow\.com/questions/23023650/is\-it\-possible\-to\-pass\-a\-flag\-to\-gulp\-to\-have\-it\-run\-tasks\-in\-different\-ways\fR link for how to add task specific flags
.SS Tasks
.P
Tasks can be executed by running \fBgulp <task> <othertask>\fP\|\. Just running \fBgulp\fP will execute the task you registered called \fBdefault\fP\|\. If there is no \fBdefault\fP task gulp will error\.
.SS Compilers
.P
You can find a list of supported languages at interpret \fIhttps://github\.com/tkellen/node\-interpret#jsvariants\fR\|\. If you would like to add support for a new language send pull request/open issues there\.

63
node_modules/gulp/index.js generated vendored Normal file

@@ -0,0 +1,63 @@
'use strict';
var util = require('util');
var Orchestrator = require('orchestrator');
var gutil = require('gulp-util');
var deprecated = require('deprecated');
var vfs = require('vinyl-fs');
function Gulp() {
Orchestrator.call(this);
}
util.inherits(Gulp, Orchestrator);
Gulp.prototype.task = Gulp.prototype.add;
Gulp.prototype.run = function() {
// `run()` is deprecated as of 3.5 and will be removed in 4.0
// Use task dependencies instead
// Impose our opinion of "default" tasks onto orchestrator
var tasks = arguments.length ? arguments : ['default'];
this.start.apply(this, tasks);
};
Gulp.prototype.src = vfs.src;
Gulp.prototype.dest = vfs.dest;
Gulp.prototype.watch = function(glob, opt, fn) {
if (typeof opt === 'function' || Array.isArray(opt)) {
fn = opt;
opt = null;
}
// Array of tasks given
if (Array.isArray(fn)) {
return vfs.watch(glob, opt, function() {
this.start.apply(this, fn);
}.bind(this));
}
return vfs.watch(glob, opt, fn);
};
// Let people use this class from our instance
Gulp.prototype.Gulp = Gulp;
// Deprecations
deprecated.field('gulp.env has been deprecated. ' +
'Use your own CLI parser instead. ' +
'We recommend using yargs or minimist.',
console.warn,
Gulp.prototype,
'env',
gutil.env
);
Gulp.prototype.run = deprecated.method('gulp.run() has been deprecated. ' +
'Use task dependencies or gulp.watch task triggering instead.',
console.warn,
Gulp.prototype.run
);
var inst = new Gulp();
module.exports = inst;

22
node_modules/gulp/lib/completion.js generated vendored Normal file

@@ -0,0 +1,22 @@
'use strict';
var fs = require('fs');
var path = require('path');
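// Given a shell name (bash, zsh, fish or powershell), print the matching script
// from ../completion so that `gulp --completion=<shell>` output can be eval'd.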
module.exports = function(name) {
if (typeof name !== 'string') {
throw new Error('Missing completion type');
}
var file = path.join(__dirname, '../completion', name);
try {
console.log(fs.readFileSync(file, 'utf8'));
process.exit(0);
} catch (err) {
console.log(
'echo "gulp autocompletion rules for',
'\'' + name + '\'',
'not found"'
);
process.exit(5);
}
};

14
node_modules/gulp/lib/taskTree.js generated vendored Normal file

@@ -0,0 +1,14 @@
'use strict';
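// Convert gulp's task registry ({ name: { dep: [...] } }) into the { label, nodes }
// tree shape that archy renders for the --tasks listing.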
module.exports = function(tasks) {
return Object.keys(tasks)
.reduce(function(prev, task) {
prev.nodes.push({
label: task,
nodes: tasks[task].dep,
});
return prev;
}, {
nodes: [],
});
};

1
node_modules/gulp/node_modules/.bin/semver generated vendored Symbolic link

@@ -0,0 +1 @@
../semver/bin/semver

1
node_modules/gulp/node_modules/.bin/strip-bom generated vendored Symbolic link

@@ -0,0 +1 @@
../strip-bom/cli.js

21
node_modules/gulp/node_modules/clone-stats/LICENSE.md generated vendored Normal file

@@ -0,0 +1,21 @@
## The MIT License (MIT) ##
Copyright (c) 2014 Hugh Kennedy
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

17
node_modules/gulp/node_modules/clone-stats/README.md generated vendored Normal file

@@ -0,0 +1,17 @@
# clone-stats [![Flattr this!](https://api.flattr.com/button/flattr-badge-large.png)](https://flattr.com/submit/auto?user_id=hughskennedy&url=http://github.com/hughsk/clone-stats&title=clone-stats&description=hughsk/clone-stats%20on%20GitHub&language=en_GB&tags=flattr,github,javascript&category=software)[![experimental](http://hughsk.github.io/stability-badges/dist/experimental.svg)](http://github.com/hughsk/stability-badges) #
Safely clone node's
[`fs.Stats`](http://nodejs.org/api/fs.html#fs_class_fs_stats) instances without
losing their class methods, i.e. `stat.isDirectory()` and co.
## Usage ##
[![clone-stats](https://nodei.co/npm/clone-stats.png?mini=true)](https://nodei.co/npm/clone-stats)
### `copy = require('clone-stats')(stat)` ###
Returns a clone of the original `fs.Stats` instance (`stat`).
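For instance (a minimal sketch, statting this file itself):
```js
var cloneStats = require('clone-stats');
var fs = require('fs');
var stat = fs.statSync(__filename);
var copy = cloneStats(stat);
// the copy keeps its fs.Stats prototype, so class methods still work
console.log(copy.isFile()); // true
```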
## License ##
MIT. See [LICENSE.md](http://github.com/hughsk/clone-stats/blob/master/LICENSE.md) for details.

13
node_modules/gulp/node_modules/clone-stats/index.js generated vendored Normal file

@@ -0,0 +1,13 @@
var Stat = require('fs').Stats
module.exports = cloneStats
function cloneStats(stats) {
var replacement = new Stat
Object.keys(stats).forEach(function(key) {
replacement[key] = stats[key]
})
return replacement
}

60
node_modules/gulp/node_modules/clone-stats/package.json generated vendored Normal file

@@ -0,0 +1,60 @@
{
"_from": "clone-stats@^0.0.1",
"_id": "clone-stats@0.0.1",
"_inBundle": false,
"_integrity": "sha1-uI+UqCzzi4eR1YBG6kAprYjKmdE=",
"_location": "/gulp/clone-stats",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "clone-stats@^0.0.1",
"name": "clone-stats",
"escapedName": "clone-stats",
"rawSpec": "^0.0.1",
"saveSpec": null,
"fetchSpec": "^0.0.1"
},
"_requiredBy": [
"/gulp/vinyl"
],
"_resolved": "https://registry.npmjs.org/clone-stats/-/clone-stats-0.0.1.tgz",
"_shasum": "b88f94a82cf38b8791d58046ea4029ad88ca99d1",
"_spec": "clone-stats@^0.0.1",
"_where": "/Applications/XAMPP/xamppfiles/htdocs/wordpress/t-latehome/wp-content/plugins/opal-estate-pro/node_modules/gulp/node_modules/vinyl",
"author": {
"name": "Hugh Kennedy",
"email": "hughskennedy@gmail.com",
"url": "http://hughsk.io/"
},
"browser": "index.js",
"bugs": {
"url": "https://github.com/hughsk/clone-stats/issues"
},
"bundleDependencies": false,
"dependencies": {},
"deprecated": false,
"description": "Safely clone node's fs.Stats instances without losing their class methods",
"devDependencies": {
"tape": "~2.3.2"
},
"homepage": "https://github.com/hughsk/clone-stats",
"keywords": [
"stats",
"fs",
"clone",
"copy",
"prototype"
],
"license": "MIT",
"main": "index.js",
"name": "clone-stats",
"repository": {
"type": "git",
"url": "git://github.com/hughsk/clone-stats.git"
},
"scripts": {
"test": "node test"
},
"version": "0.0.1"
}

36
node_modules/gulp/node_modules/clone-stats/test.js generated vendored Normal file

@@ -0,0 +1,36 @@
var test = require('tape')
var clone = require('./')
var fs = require('fs')
test('file', function(t) {
compare(t, fs.statSync(__filename))
t.end()
})
test('directory', function(t) {
compare(t, fs.statSync(__dirname))
t.end()
})
function compare(t, stat) {
var copy = clone(stat)
t.deepEqual(stat, copy, 'clone has equal properties')
t.ok(stat instanceof fs.Stats, 'original is an fs.Stat')
t.ok(copy instanceof fs.Stats, 'copy is an fs.Stat')
;['isDirectory'
, 'isFile'
, 'isBlockDevice'
, 'isCharacterDevice'
, 'isSymbolicLink'
, 'isFIFO'
, 'isSocket'
].forEach(function(method) {
t.equal(
stat[method].call(stat)
, copy[method].call(copy)
, 'equal value for stat.' + method + '()'
)
})
}

1
node_modules/gulp/node_modules/clone/.npmignore generated vendored Normal file

@@ -0,0 +1 @@
node_modules/

5
node_modules/gulp/node_modules/clone/.travis.yml generated vendored Normal file

@@ -0,0 +1,5 @@
language: node_js
node_js:
- 0.6
- 0.8
- 0.10

18
node_modules/gulp/node_modules/clone/LICENSE generated vendored Normal file

@@ -0,0 +1,18 @@
Copyright © 2011-2014 Paul Vorbach <paul@vorba.ch>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the “Software”), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

126
node_modules/gulp/node_modules/clone/README.md generated vendored Normal file

@@ -0,0 +1,126 @@
# clone
[![build status](https://secure.travis-ci.org/pvorb/node-clone.png)](http://travis-ci.org/pvorb/node-clone)
offers foolproof _deep cloning_ of variables in JavaScript.
## Installation
npm install clone
or
ender build clone
## Example
~~~ javascript
var clone = require('clone');
var a, b;
a = { foo: { bar: 'baz' } }; // initial value of a
b = clone(a); // clone a -> b
a.foo.bar = 'foo'; // change a
console.log(a); // show a
console.log(b); // show b
~~~
This will print:
~~~ javascript
{ foo: { bar: 'foo' } }
{ foo: { bar: 'baz' } }
~~~
**clone** masters cloning simple objects (even with custom prototype), arrays,
Date objects, and RegExp objects. Everything is cloned recursively, so that you
can clone dates in arrays in objects, for example.
## API
`clone(val, circular, depth)`
* `val` -- the value that you want to clone, any type allowed
* `circular` -- boolean
Call `clone` with `circular` set to `false` if you are certain that `obj`
contains no circular references. This will give better performance if needed.
There is no error if `undefined` or `null` is passed as `obj`.
* `depth` -- depth to which the object is to be cloned (optional,
defaults to infinity)
`clone.clonePrototype(obj)`
* `obj` -- the object that you want to clone
Does a prototype clone as
[described by Oran Looney](http://oranlooney.com/functional-javascript/).
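A quick sketch of the optional arguments (values are illustrative):
~~~ javascript
var clone = require('clone');
var src = { a: { b: { c: 1 } } };
var fast = clone(src, false);      // skip circular-reference tracking for speed
var shallow = clone(src, true, 1); // clone only one level deep
// shallow.a is the very same object as src.a because depth was 1
var flat = clone.clonePrototype(src); // prototype-based flat copy
~~~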
## Circular References
~~~ javascript
var a, b;
a = { hello: 'world' };
a.myself = a;
b = clone(a);
console.log(b);
~~~
This will print:
~~~ javascript
{ hello: "world", myself: [Circular] }
~~~
So, `b.myself` points to `b`, not `a`. Neat!
## Test
npm test
## Caveat
Some special objects like a socket or `process.stdout`/`stderr` are known to not
be cloneable. If you find other objects that cannot be cloned, please [open an
issue](https://github.com/pvorb/node-clone/issues/new).
## Bugs and Issues
If you encounter any bugs or issues, feel free to [open an issue at
github](https://github.com/pvorb/node-clone/issues) or send me an email to
<paul@vorba.ch>. I also always like to hear from you, if you're using my code.
## License
Copyright © 2011-2014 [Paul Vorbach](http://paul.vorba.ch/) and
[contributors](https://github.com/pvorb/node-clone/graphs/contributors).
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the “Software”), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

144
node_modules/gulp/node_modules/clone/clone.js generated vendored Normal file

@@ -0,0 +1,144 @@
'use strict';
function objectToString(o) {
return Object.prototype.toString.call(o);
}
// shim for Node's 'util' package
// DO NOT REMOVE THIS! It is required for compatibility with EnderJS (http://enderjs.com/).
var util = {
isArray: function (ar) {
return Array.isArray(ar) || (typeof ar === 'object' && objectToString(ar) === '[object Array]');
},
isDate: function (d) {
return typeof d === 'object' && objectToString(d) === '[object Date]';
},
isRegExp: function (re) {
return typeof re === 'object' && objectToString(re) === '[object RegExp]';
},
getRegExpFlags: function (re) {
var flags = '';
re.global && (flags += 'g');
re.ignoreCase && (flags += 'i');
re.multiline && (flags += 'm');
return flags;
}
};
if (typeof module === 'object')
module.exports = clone;
/**
* Clones (copies) an Object using deep copying.
*
* This function supports circular references by default, but if you are certain
* there are no circular references in your object, you can save some CPU time
* by calling clone(obj, false).
*
* Caution: if `circular` is false and `parent` contains circular references,
* your program may enter an infinite loop and crash.
*
* @param `parent` - the object to be cloned
* @param `circular` - set to true if the object to be cloned may contain
* circular references. (optional - true by default)
* @param `depth` - set to a number if the object is only to be cloned to
* a particular depth. (optional - defaults to Infinity)
* @param `prototype` - sets the prototype to be used when cloning an object.
* (optional - defaults to parent prototype).
*/
function clone(parent, circular, depth, prototype) {
// maintain two arrays for circular references, where corresponding parents
// and children have the same index
var allParents = [];
var allChildren = [];
var useBuffer = typeof Buffer != 'undefined';
if (typeof circular == 'undefined')
circular = true;
if (typeof depth == 'undefined')
depth = Infinity;
// recurse this function so we don't reset allParents and allChildren
function _clone(parent, depth) {
// cloning null always returns null
if (parent === null)
return null;
if (depth == 0)
return parent;
var child;
var proto;
if (typeof parent != 'object') {
return parent;
}
if (util.isArray(parent)) {
child = [];
} else if (util.isRegExp(parent)) {
child = new RegExp(parent.source, util.getRegExpFlags(parent));
if (parent.lastIndex) child.lastIndex = parent.lastIndex;
} else if (util.isDate(parent)) {
child = new Date(parent.getTime());
} else if (useBuffer && Buffer.isBuffer(parent)) {
child = new Buffer(parent.length);
parent.copy(child);
return child;
} else {
if (typeof prototype == 'undefined') {
proto = Object.getPrototypeOf(parent);
child = Object.create(proto);
}
else {
child = Object.create(prototype);
proto = prototype;
}
}
if (circular) {
var index = allParents.indexOf(parent);
if (index != -1) {
return allChildren[index];
}
allParents.push(parent);
allChildren.push(child);
}
for (var i in parent) {
var attrs;
if (proto) {
attrs = Object.getOwnPropertyDescriptor(proto, i);
}
if (attrs && attrs.set == null) {
continue;
}
child[i] = _clone(parent[i], depth - 1);
}
return child;
}
return _clone(parent, depth);
}
/**
* Simple flat clone using prototype, accepts only objects, useful for property
* override on FLAT configuration object (no nested props).
*
* USE WITH CAUTION! This may not behave as you wish if you do not know how this
* works.
*/
clone.clonePrototype = function(parent) {
if (parent === null)
return null;
var c = function () {};
c.prototype = parent;
return new c();
};

130
node_modules/gulp/node_modules/clone/package.json generated vendored Normal file

@@ -0,0 +1,130 @@
{
"_from": "clone@^0.2.0",
"_id": "clone@0.2.0",
"_inBundle": false,
"_integrity": "sha1-xhJqkK1Pctv1rNskPMN3JP6T/B8=",
"_location": "/gulp/clone",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "clone@^0.2.0",
"name": "clone",
"escapedName": "clone",
"rawSpec": "^0.2.0",
"saveSpec": null,
"fetchSpec": "^0.2.0"
},
"_requiredBy": [
"/gulp/vinyl"
],
"_resolved": "https://registry.npmjs.org/clone/-/clone-0.2.0.tgz",
"_shasum": "c6126a90ad4f72dbf5acdb243cc37724fe93fc1f",
"_spec": "clone@^0.2.0",
"_where": "/Applications/XAMPP/xamppfiles/htdocs/wordpress/t-latehome/wp-content/plugins/opal-estate-pro/node_modules/gulp/node_modules/vinyl",
"author": {
"name": "Paul Vorbach",
"email": "paul@vorba.ch",
"url": "http://paul.vorba.ch/"
},
"bugs": {
"url": "https://github.com/pvorb/node-clone/issues"
},
"bundleDependencies": false,
"contributors": [
{
"name": "Blake Miner",
"email": "miner.blake@gmail.com",
"url": "http://www.blakeminer.com/"
},
{
"name": "Tian You",
"email": "axqd001@gmail.com",
"url": "http://blog.axqd.net/"
},
{
"name": "George Stagas",
"email": "gstagas@gmail.com",
"url": "http://stagas.com/"
},
{
"name": "Tobiasz Cudnik",
"email": "tobiasz.cudnik@gmail.com",
"url": "https://github.com/TobiaszCudnik"
},
{
"name": "Pavel Lang",
"email": "langpavel@phpskelet.org",
"url": "https://github.com/langpavel"
},
{
"name": "Dan MacTough",
"url": "http://yabfog.com/"
},
{
"name": "w1nk",
"url": "https://github.com/w1nk"
},
{
"name": "Hugh Kennedy",
"url": "http://twitter.com/hughskennedy"
},
{
"name": "Dustin Diaz",
"url": "http://dustindiaz.com"
},
{
"name": "Ilya Shaisultanov",
"url": "https://github.com/diversario"
},
{
"name": "Nathan MacInnes",
"email": "nathan@macinn.es",
"url": "http://macinn.es/"
},
{
"name": "Benjamin E. Coe",
"email": "ben@npmjs.com",
"url": "https://twitter.com/benjamincoe"
},
{
"name": "Nathan Zadoks",
"url": "https://github.com/nathan7"
},
{
"name": "Róbert Oroszi",
"email": "robert+gh@oroszi.net",
"url": "https://github.com/oroce"
}
],
"dependencies": {},
"deprecated": false,
"description": "deep cloning of objects and arrays",
"devDependencies": {
"nodeunit": "*",
"underscore": "*"
},
"engines": {
"node": "*"
},
"homepage": "https://github.com/pvorb/node-clone#readme",
"license": "MIT",
"main": "clone.js",
"name": "clone",
"optionalDependencies": {},
"repository": {
"type": "git",
"url": "git://github.com/pvorb/node-clone.git"
},
"scripts": {
"test": "nodeunit test.js"
},
"tags": [
"clone",
"object",
"array",
"function",
"date"
],
"version": "0.2.0"
}

289
node_modules/gulp/node_modules/clone/test.js generated vendored Normal file

@@ -0,0 +1,289 @@
if(module.parent === null) {
console.log('Run this test file with nodeunit:');
console.log('$ nodeunit test.js');
}
var clone = require('./');
var util = require('util');
var _ = require('underscore');
exports["clone string"] = function(test) {
test.expect(2); // how many tests?
var a = "foo";
test.strictEqual(clone(a), a);
a = "";
test.strictEqual(clone(a), a);
test.done();
};
exports["clone number"] = function(test) {
test.expect(5); // how many tests?
var a = 0;
test.strictEqual(clone(a), a);
a = 1;
test.strictEqual(clone(a), a);
a = -1000;
test.strictEqual(clone(a), a);
a = 3.1415927;
test.strictEqual(clone(a), a);
a = -3.1415927;
test.strictEqual(clone(a), a);
test.done();
};
exports["clone date"] = function(test) {
test.expect(3); // how many tests?
var a = new Date;
var c = clone(a);
test.ok(a instanceof Date);
test.ok(c instanceof Date);
test.equal(c.getTime(), a.getTime());
test.done();
};
exports["clone object"] = function(test) {
test.expect(2); // how many tests?
var a = { foo: { bar: "baz" } };
var b = clone(a);
test.ok(_(a).isEqual(b), "underscore equal");
test.deepEqual(b, a);
test.done();
};
exports["clone array"] = function(test) {
test.expect(2); // how many tests?
var a = [
{ foo: "bar" },
"baz"
];
var b = clone(a);
test.ok(_(a).isEqual(b), "underscore equal");
test.deepEqual(b, a);
test.done();
};
exports["clone buffer"] = function(test) {
test.expect(1);
var a = new Buffer("this is a test buffer");
var b = clone(a);
// no underscore equal since it has no concept of Buffers
test.deepEqual(b, a);
test.done();
};
exports["clone regexp"] = function(test) {
test.expect(5);
var a = /abc123/gi;
var b = clone(a);
test.deepEqual(b, a);
var c = /a/g;
test.ok(c.lastIndex === 0);
c.exec('123a456a');
test.ok(c.lastIndex === 4);
var d = clone(c);
test.ok(d.global);
test.ok(d.lastIndex === 4);
test.done();
};
exports["clone object containing array"] = function(test) {
test.expect(2); // how many tests?
var a = {
arr1: [ { a: '1234', b: '2345' } ],
arr2: [ { c: '345', d: '456' } ]
};
var b = clone(a);
test.ok(_(a).isEqual(b), "underscore equal");
test.deepEqual(b, a);
test.done();
};
exports["clone object with circular reference"] = function(test) {
test.expect(8); // how many tests?
var _ = test.ok;
var c = [1, "foo", {'hello': 'bar'}, function() {}, false, [2]];
var b = [c, 2, 3, 4];
var a = {'b': b, 'c': c};
a.loop = a;
a.loop2 = a;
c.loop = c;
c.aloop = a;
var aCopy = clone(a);
_(a != aCopy);
_(a.c != aCopy.c);
_(aCopy.c == aCopy.b[0]);
_(aCopy.c.loop.loop.aloop == aCopy);
_(aCopy.c[0] == a.c[0]);
//console.log(util.inspect(aCopy, true, null) );
//console.log("------------------------------------------------------------");
//console.log(util.inspect(a, true, null) );
_(eq(a, aCopy));
aCopy.c[0] = 2;
_(!eq(a, aCopy));
aCopy.c = "2";
_(!eq(a, aCopy));
//console.log("------------------------------------------------------------");
//console.log(util.inspect(aCopy, true, null) );
function eq(x, y) {
return util.inspect(x, true, null) === util.inspect(y, true, null);
}
test.done();
};
exports['clonePrototype'] = function(test) {
test.expect(3); // how many tests?
var a = {
a: "aaa",
x: 123,
y: 45.65
};
var b = clone.clonePrototype(a);
test.strictEqual(b.a, a.a);
test.strictEqual(b.x, a.x);
test.strictEqual(b.y, a.y);
test.done();
}
exports['cloneWithinNewVMContext'] = function(test) {
test.expect(3);
var vm = require('vm');
var ctx = vm.createContext({ clone: clone });
var script = "clone( {array: [1, 2, 3], date: new Date(), regex: /^foo$/ig} );";
var results = vm.runInContext(script, ctx);
test.ok(results.array instanceof Array);
test.ok(results.date instanceof Date);
test.ok(results.regex instanceof RegExp);
test.done();
}
exports['cloneObjectWithNoConstructor'] = function(test) {
test.expect(3);
var n = null;
var a = { foo: 'bar' };
a.__proto__ = n;
test.ok(typeof a === 'object');
test.ok(typeof a !== null);
var b = clone(a);
test.ok(a.foo, b.foo);
test.done();
}
exports['clone object with depth argument'] = function (test) {
test.expect(6);
var a = {
foo: {
bar : {
baz : 'qux'
}
}
};
var b = clone(a, false, 1);
test.deepEqual(b, a);
test.notEqual(b, a);
test.strictEqual(b.foo, a.foo);
b = clone(a, true, 2);
test.deepEqual(b, a);
test.notEqual(b.foo, a.foo);
test.strictEqual(b.foo.bar, a.foo.bar);
test.done();
}
exports['maintain prototype chain in clones'] = function (test) {
test.expect(1);
function Constructor() {}
var a = new Constructor();
var b = clone(a);
test.strictEqual(Object.getPrototypeOf(a), Object.getPrototypeOf(b));
test.done();
}
exports['parent prototype is overriden with prototype provided'] = function (test) {
test.expect(1);
function Constructor() {}
var a = new Constructor();
var b = clone(a, true, Infinity, null);
test.strictEqual(b.__defineSetter__, undefined);
test.done();
}
exports['clone object with null children'] = function(test) {
test.expect(1);
var a = {
foo: {
bar: null,
baz: {
qux: false
}
}
};
var b = clone(a);
test.deepEqual(b, a);
test.done();
}
exports['clone instance with getter'] = function(test) {
test.expect(1);
function Ctor() {};
Object.defineProperty(Ctor.prototype, 'prop', {
configurable: true,
enumerable: true,
get: function() {
return 'value';
}
});
var a = new Ctor();
var b = clone(a);
test.strictEqual(b.prop, 'value');
test.done();
};

20
node_modules/gulp/node_modules/glob-stream/LICENSE generated vendored Executable file

@@ -0,0 +1,20 @@
Copyright (c) 2013 Fractal <contact@wearefractal.com>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

67
node_modules/gulp/node_modules/glob-stream/README.md generated vendored Normal file

@@ -0,0 +1,67 @@
# glob-stream [![NPM version][npm-image]][npm-url] [![Build Status][travis-image]][travis-url] [![Coveralls Status][coveralls-image]][coveralls-url] [![Dependency Status][david-image]][david-url]
## Information
<table>
<tr>
<td>Package</td><td>glob-stream</td>
</tr>
<tr>
<td>Description</td>
<td>File system globs as a stream</td>
</tr>
<tr>
<td>Node Version</td>
<td>>= 0.9</td>
</tr>
</table>
This is a simple wrapper around node-glob to make it streamy.
## Usage
```javascript
var gs = require('glob-stream');
var stream = gs.create("./files/**/*.coffee", {options});
stream.on('data', function(file){
// file has path, base, and cwd attrs
});
```
You can pass any combination of globs. One caveat: you cannot pass only glob negations; you must give at least one positive glob so it knows where to start. All given patterns must match for the file to be returned.
### Options
- cwd
- Default is `process.cwd()`
- base
- Default is everything before a glob starts (see [glob2base](https://github.com/wearefractal/glob2base))
- cwdbase
- Default is `false`
- When true it is the same as saying opt.base = opt.cwd
This argument is passed directly to [node-glob](https://github.com/isaacs/node-glob) so check there for more options
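For example, a small sketch combining these options (the paths are illustrative):
```javascript
var gs = require('glob-stream');
// cwdbase:true makes base equal to cwd; dot:true is forwarded to node-glob
var stream = gs.create('./src/**/*.js', { cwd: '/path/to/project', cwdbase: true, dot: true });
```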
#### Glob
```javascript
var stream = gs.create(["./**/*.js", "!./node_modules/**/*.*"]);
```
[npm-url]: https://npmjs.org/package/glob-stream
[npm-image]: https://badge.fury.io/js/glob-stream.png
[travis-url]: https://travis-ci.org/wearefractal/glob-stream
[travis-image]: https://travis-ci.org/wearefractal/glob-stream.png?branch=master
[coveralls-url]: https://coveralls.io/r/wearefractal/glob-stream
[coveralls-image]: https://coveralls.io/repos/wearefractal/glob-stream/badge.png
[depstat-url]: https://david-dm.org/wearefractal/glob-stream
[depstat-image]: https://david-dm.org/wearefractal/glob-stream.png
[david-url]: https://david-dm.org/wearefractal/glob-stream
[david-image]: https://david-dm.org/wearefractal/glob-stream.png?theme=shields.io

117
node_modules/gulp/node_modules/glob-stream/index.js generated vendored Normal file

@@ -0,0 +1,117 @@
/*jslint node: true */
'use strict';
var through2 = require('through2');
var Combine = require('ordered-read-streams');
var unique = require('unique-stream');
var glob = require('glob');
var minimatch = require('minimatch');
var glob2base = require('glob2base');
var path = require('path');
var gs = {
// creates a stream for a single glob or filter
createStream: function(ourGlob, negatives, opt) {
if (!negatives) negatives = [];
if (!opt) opt = {};
if (typeof opt.cwd !== 'string') opt.cwd = process.cwd();
if (typeof opt.dot !== 'boolean') opt.dot = false;
if (typeof opt.silent !== 'boolean') opt.silent = true;
if (typeof opt.nonull !== 'boolean') opt.nonull = false;
if (typeof opt.cwdbase !== 'boolean') opt.cwdbase = false;
if (opt.cwdbase) opt.base = opt.cwd;
// remove path relativity to make globs make sense
ourGlob = unrelative(opt.cwd, ourGlob);
negatives = negatives.map(unrelative.bind(null, opt.cwd));
// create globbing stuff
var globber = new glob.Glob(ourGlob, opt);
// extract base path from glob
var basePath = opt.base ? opt.base : glob2base(globber);
// create stream and map events from globber to it
var stream = through2.obj(negatives.length ? filterNegatives : undefined);
globber.on('error', stream.emit.bind(stream, 'error'));
globber.on('end', function(/* some args here so can't use bind directly */){
stream.end();
});
globber.on('match', function(filename) {
stream.write({
cwd: opt.cwd,
base: basePath,
path: path.resolve(opt.cwd, filename)
});
});
return stream;
function filterNegatives(filename, enc, cb) {
var matcha = isMatch.bind(null, filename, opt);
if (negatives.every(matcha)) {
cb(null, filename); // pass
} else {
cb(); // ignore
}
}
},
// creates a stream for multiple globs or filters
create: function(globs, opt) {
if (!opt) opt = {};
// only one glob no need to aggregate
if (!Array.isArray(globs)) return gs.createStream(globs, null, opt);
var positives = globs.filter(isPositive);
var negatives = globs.filter(isNegative);
if (positives.length === 0) throw new Error("Missing positive glob");
// only one positive glob no need to aggregate
if (positives.length === 1) return gs.createStream(positives[0], negatives, opt);
// create all individual streams
var streams = positives.map(function(glob){
return gs.createStream(glob, negatives, opt);
});
// then just pipe them to a single unique stream and return it
var aggregate = new Combine(streams);
var uniqueStream = unique('path');
return aggregate.pipe(uniqueStream);
}
};
function isMatch(file, opt, pattern) {
if (typeof pattern === 'string') return minimatch(file.path, pattern, opt);
if (pattern instanceof RegExp) return pattern.test(file.path);
return true; // unknown glob type?
}
function isNegative(pattern) {
if (typeof pattern !== 'string') return true;
if (pattern[0] === '!') return true;
return false;
}
function isPositive(pattern) {
return !isNegative(pattern);
}
function unrelative(cwd, glob) {
var mod = '';
if (glob[0] === '!') {
mod = glob[0];
glob = glob.slice(1);
}
return mod+path.resolve(cwd, glob);
}
module.exports = gs;

78
node_modules/gulp/node_modules/glob-stream/package.json generated vendored Normal file

@@ -0,0 +1,78 @@
{
"_from": "glob-stream@^3.1.5",
"_id": "glob-stream@3.1.18",
"_inBundle": false,
"_integrity": "sha1-kXCl8St5Awb9/lmPMT+PeVT9FDs=",
"_location": "/gulp/glob-stream",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "glob-stream@^3.1.5",
"name": "glob-stream",
"escapedName": "glob-stream",
"rawSpec": "^3.1.5",
"saveSpec": null,
"fetchSpec": "^3.1.5"
},
"_requiredBy": [
"/gulp/vinyl-fs"
],
"_resolved": "https://registry.npmjs.org/glob-stream/-/glob-stream-3.1.18.tgz",
"_shasum": "9170a5f12b790306fdfe598f313f8f7954fd143b",
"_spec": "glob-stream@^3.1.5",
"_where": "/Applications/XAMPP/xamppfiles/htdocs/wordpress/t-latehome/wp-content/plugins/opal-estate-pro/node_modules/gulp/node_modules/vinyl-fs",
"author": {
"name": "Fractal",
"email": "contact@wearefractal.com",
"url": "http://wearefractal.com/"
},
"bugs": {
"url": "https://github.com/wearefractal/glob-stream/issues"
},
"bundleDependencies": false,
"dependencies": {
"glob": "^4.3.1",
"glob2base": "^0.0.12",
"minimatch": "^2.0.1",
"ordered-read-streams": "^0.1.0",
"through2": "^0.6.1",
"unique-stream": "^1.0.0"
},
"deprecated": false,
"description": "File system globs as a stream",
"devDependencies": {
"coveralls": "^2.11.2",
"istanbul": "^0.3.0",
"istanbul-coveralls": "^1.0.1",
"jshint": "^2.5.10",
"mocha": "^2.0.0",
"mocha-lcov-reporter": "0.0.1",
"rimraf": "^2.2.5",
"should": "^4.3.0"
},
"engines": {
"node": ">= 0.9"
},
"files": [
"index.js"
],
"homepage": "http://github.com/wearefractal/glob-stream",
"licenses": [
{
"type": "MIT",
"url": "http://github.com/wearefractal/glob-stream/raw/master/LICENSE"
}
],
"main": "./index.js",
"name": "glob-stream",
"repository": {
"type": "git",
"url": "git://github.com/wearefractal/glob-stream.git"
},
"scripts": {
"coveralls": "istanbul cover _mocha --report lcovonly -- -R spec && istanbul-coveralls",
"test": "mocha --reporter spec && jshint"
},
"version": "3.1.18"
}

15
node_modules/gulp/node_modules/glob/LICENSE generated vendored Normal file

@@ -0,0 +1,15 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

369
node_modules/gulp/node_modules/glob/README.md generated vendored Normal file

@@ -0,0 +1,369 @@
[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Dependency Status](https://david-dm.org/isaacs/node-glob.svg)](https://david-dm.org/isaacs/node-glob) [![devDependency Status](https://david-dm.org/isaacs/node-glob/dev-status.svg)](https://david-dm.org/isaacs/node-glob#info=devDependencies) [![optionalDependency Status](https://david-dm.org/isaacs/node-glob/optional-status.svg)](https://david-dm.org/isaacs/node-glob#info=optionalDependencies)
# Glob
Match files using the patterns the shell uses, like stars and stuff.
This is a glob implementation in JavaScript. It uses the `minimatch`
library to do its matching.
![](oh-my-glob.gif)
## Usage
```javascript
var glob = require("glob")
// options is optional
glob("**/*.js", options, function (er, files) {
// files is an array of filenames.
// If the `nonull` option is set, and nothing
// was found, then files is ["**/*.js"]
// er is an error object or null.
})
```
## Glob Primer
"Globs" are the patterns you type when you do stuff like `ls *.js` on
the command line, or put `build/*` in a `.gitignore` file.
Before parsing the path part patterns, braced sections are expanded
into a set. Braced sections start with `{` and end with `}`, with any
number of comma-delimited sections within. Braced sections may contain
slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`.
The following characters have special magic meaning when used in a
path portion:
* `*` Matches 0 or more characters in a single path portion
* `?` Matches 1 character
* `[...]` Matches a range of characters, similar to a RegExp range.
If the first character of the range is `!` or `^` then it matches
any character not in the range.
* `!(pattern|pattern|pattern)` Matches anything that does not match
any of the patterns provided.
* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the
patterns provided.
* `+(pattern|pattern|pattern)` Matches one or more occurrences of the
patterns provided.
* `*(a|b|c)` Matches zero or more occurrences of the patterns provided
* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns
provided
* `**` If a "globstar" is alone in a path portion, then it matches
zero or more directories and subdirectories searching for matches.
It does not crawl symlinked directories.
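As a quick illustration of the patterns listed above, here is a minimal sketch; the file layout (`a/b/c.js`, `a/x/y/z.js`) is hypothetical:
```javascript
var glob = require("glob")
// assuming only a/b/c.js and a/x/y/z.js exist:
glob.sync("a/*/c.js")        // ["a/b/c.js"]                 "*" stays inside one path portion
glob.sync("a/**/*.js")       // ["a/b/c.js", "a/x/y/z.js"]   "**" crosses directories
glob.sync("a/{b,x/y}/*.js")  // ["a/b/c.js", "a/x/y/z.js"]   braces expand before matching
```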
### Dots
If a file or directory path portion has a `.` as the first character,
then it will not match any glob pattern unless that pattern's
corresponding path part also has a `.` as its first character.
For example, the pattern `a/.*/c` would match the file at `a/.b/c`.
However, the pattern `a/*/c` would not, because `*` does not start with
a dot character.
You can make glob treat dots as normal characters by setting
`dot:true` in the options.
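A small sketch of this behaviour, assuming a file at `a/.b/c` exists:
```javascript
var glob = require("glob")
glob.sync("a/*/c")                 // []           "*" does not match the dot directory ".b"
glob.sync("a/.*/c")                // ["a/.b/c"]   this pattern part starts with a dot
glob.sync("a/*/c", { dot: true })  // ["a/.b/c"]   dot:true treats dots as normal characters
```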
### Basename Matching
If you set `matchBase:true` in the options, and the pattern has no
slashes in it, then it will seek for any file anywhere in the tree
with a matching basename. For example, `*.js` would match
`test/simple/basic.js`.
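For example, a minimal sketch (`test/simple/basic.js` is the path mentioned above):
```javascript
var glob = require("glob")
glob("*.js", { matchBase: true }, function (er, files) {
  if (er) throw er
  // behaves like "**/*.js": files can include "test/simple/basic.js"
})
```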
### Negation
The intent for negation would be for a pattern starting with `!` to
match everything that *doesn't* match the supplied pattern. However,
the implementation is weird, and for the time being, this should be
avoided. The behavior will change or be deprecated in version 5.
### Empty Sets
If no matching files are found, then an empty array is returned. This
differs from the shell, where the pattern itself is returned. For
example:
```
$ echo a*s*d*f
a*s*d*f
```
To get the bash-style behavior, set `nonull:true` in the options.
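A short sketch of the difference, assuming the pattern matches nothing:
```javascript
var glob = require("glob")
glob.sync("no/such/*.thing")                    // []                   empty set by default
glob.sync("no/such/*.thing", { nonull: true })  // ["no/such/*.thing"]  bash-style fallback
```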
### See Also:
* `man sh`
* `man bash` (Search for "Pattern Matching")
* `man 3 fnmatch`
* `man 5 gitignore`
* [minimatch documentation](https://github.com/isaacs/minimatch)
## glob.hasMagic(pattern, [options])
Returns `true` if there are any special characters in the pattern, and
`false` otherwise.
Note that the options affect the results. If `noext:true` is set in
the options object, then `+(a|b)` will not be considered a magic
pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}`
then that is considered magical, unless `nobrace:true` is set in the
options.
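For instance, a sketch of the calls described above:
```javascript
var glob = require("glob")
glob.hasMagic("a/b/c.js")                        // false  plain path, no special characters
glob.hasMagic("a/*.js")                          // true
glob.hasMagic("+(a|b)")                          // true
glob.hasMagic("+(a|b)", { noext: true })         // false  extglobs are not magic with noext
glob.hasMagic("a/{b/c,x/y}")                     // true   brace expansion counts as magic
glob.hasMagic("a/{b/c,x/y}", { nobrace: true })  // false  braces are taken literally
```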
## glob(pattern, [options], cb)
* `pattern` {String} Pattern to be matched
* `options` {Object}
* `cb` {Function}
* `err` {Error | null}
* `matches` {Array<String>} filenames found matching the pattern
Perform an asynchronous glob search.
## glob.sync(pattern, [options])
* `pattern` {String} Pattern to be matched
* `options` {Object}
* return: {Array<String>} filenames found matching the pattern
Perform a synchronous glob search.
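A small sketch; the `ignore` option used here is documented in the Options list below:
```javascript
var glob = require("glob")
// synchronous search; returns the matching filenames directly
var files = glob.sync("**/*.js", { ignore: "node_modules/**" })
console.log(files)
```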
## Class: glob.Glob
Create a Glob object by instantiating the `glob.Glob` class.
```javascript
var Glob = require("glob").Glob
var mg = new Glob(pattern, options, cb)
```
It's an EventEmitter, and starts walking the filesystem to find matches
immediately.
### new glob.Glob(pattern, [options], [cb])
* `pattern` {String} pattern to search for
* `options` {Object}
* `cb` {Function} Called when an error occurs, or matches are found
* `err` {Error | null}
* `matches` {Array<String>} filenames found matching the pattern
Note that if the `sync` flag is set in the options, then matches will
be immediately available on the `g.found` member.
### Properties
* `minimatch` The minimatch object that the glob uses.
* `options` The options object passed in.
* `aborted` Boolean which is set to true when calling `abort()`. There
is no way at this time to continue a glob search after aborting, but
you can re-use the statCache to avoid having to duplicate syscalls.
* `statCache` Collection of all the stat results the glob search
performed.
* `cache` Convenience object. Each field has the following possible
values:
* `false` - Path does not exist
* `true` - Path exists
* `'DIR'` - Path exists, and is a directory
* `'FILE'` - Path exists, and is not a directory
* `[file, entries, ...]` - Path exists, is a directory, and the
array value is the results of `fs.readdir`
* `statCache` Cache of `fs.stat` results, to prevent statting the same
path multiple times.
* `symlinks` A record of which paths are symbolic links, which is
relevant in resolving `**` patterns.
* `realpathCache` An optional object which is passed to `fs.realpath`
to minimize unnecessary syscalls. It is stored on the instantiated
Glob object, and may be re-used.
### Events
* `end` When the matching is finished, this is emitted with all the
matches found. If the `nonull` option is set, and no match was found,
then the `matches` list contains the original pattern. The matches
are sorted, unless the `nosort` flag is set.
* `match` Every time a match is found, this is emitted with the matched filename.
* `error` Emitted when an unexpected error is encountered, or whenever
any fs error occurs if `options.strict` is set.
* `abort` When `abort()` is called, this event is raised.
### Methods
* `pause` Temporarily stop the search
* `resume` Resume the search
* `abort` Stop the search forever
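As a sketch of the events and methods listed above (the pattern and the cut-off of 100 matches are arbitrary):
```javascript
var Glob = require("glob").Glob
var g = new Glob("**/*.js")
var seen = 0
g.on("match", function (file) {
  console.log("found", file)
  if (++seen >= 100) g.abort()  // stop walking early; no "end" event fires after abort
})
g.on("end", function (files) {
  console.log("done:", files.length, "matches")
})
g.on("abort", function () {
  console.log("search aborted")
})
```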
### Options
All the options that can be passed to Minimatch can also be passed to
Glob to change pattern matching behavior. Also, some have been added,
or have glob-specific ramifications.
All options are false by default, unless otherwise noted.
All options are added to the Glob object, as well.
If you are running many `glob` operations, you can pass a Glob object
as the `options` argument to a subsequent operation to shortcut some
`stat` and `readdir` calls. At the very least, you may pass in shared
`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that
parallel glob operations will be sped up by sharing information about
the filesystem.
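For instance, a minimal sketch of that shortcut, passing a finished Glob object as the options for a second, related search (both patterns are arbitrary):
```javascript
var glob = require("glob")
var g = new glob.Glob("lib/**/*.js", function (er, jsFiles) {
  if (er) throw er
  // reuse g's cache, statCache, symlinks and realpathCache for the next walk
  glob("lib/**/*.json", g, function (er, jsonFiles) {
    if (er) throw er
    console.log(jsFiles.length, "js files,", jsonFiles.length, "json files")
  })
})
```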
* `cwd` The current working directory in which to search. Defaults
to `process.cwd()`.
* `root` The place where patterns starting with `/` will be mounted
onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix
systems, and `C:\` or some such on Windows.)
* `dot` Include `.dot` files in normal matches and `globstar` matches.
Note that an explicit dot in a portion of the pattern will always
match dot files.
* `nomount` By default, a pattern starting with a forward-slash will be
"mounted" onto the root setting, so that a valid filesystem path is
returned. Set this flag to disable that behavior.
* `mark` Add a `/` character to directory matches. Note that this
requires additional stat calls.
* `nosort` Don't sort the results.
* `stat` Set to true to stat *all* results. This reduces performance
somewhat, and is completely unnecessary, unless `readdir` is presumed
to be an untrustworthy indicator of file existence.
* `silent` When an unusual error is encountered when attempting to
read a directory, a warning will be printed to stderr. Set the
`silent` option to true to suppress these warnings.
* `strict` When an unusual error is encountered when attempting to
read a directory, the process will just continue on in search of
other matches. Set the `strict` option to raise an error in these
cases.
* `cache` See `cache` property above. Pass in a previously generated
cache object to save some fs calls.
* `statCache` A cache of results of filesystem information, to prevent
unnecessary stat calls. While it should not normally be necessary
to set this, you may pass the statCache from one glob() call to the
options object of another, if you know that the filesystem will not
change between calls. (See "Race Conditions" below.)
* `symlinks` A cache of known symbolic links. You may pass in a
previously generated `symlinks` object to save `lstat` calls when
resolving `**` matches.
* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead.
* `nounique` In some cases, brace-expanded patterns can result in the
same file showing up multiple times in the result set. By default,
this implementation prevents duplicates in the result set. Set this
flag to disable that behavior.
* `nonull` Set to never return an empty set, instead returning a set
containing the pattern itself. This is the default in glob(3).
* `debug` Set to enable debug logging in minimatch and glob.
* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets.
* `noglobstar` Do not match `**` against multiple filenames. (I.e., treat it as a normal `*` instead.)
* `noext` Do not match `+(a|b)` "extglob" patterns.
* `nocase` Perform a case-insensitive match. Note: on
case-insensitive filesystems, non-magic patterns will match by
default, since `stat` and `readdir` will not raise errors.
* `matchBase` Perform a basename-only match if the pattern does not
contain any slash characters. That is, `*.js` would be treated as
equivalent to `**/*.js`, matching all js files in all directories.
* `nonegate` Suppress `negate` behavior. (See below.)
* `nocomment` Suppress `comment` behavior. (See below.)
* `nonull` Return the pattern when no matches are found.
* `nodir` Do not match directories, only files. (Note: to match
*only* directories, simply put a `/` at the end of the pattern.)
* `ignore` Add a pattern or an array of patterns to exclude matches.
* `follow` Follow symlinked directories when expanding `**` patterns.
Note that this can result in a lot of duplicate references in the
presence of cyclic links.
* `realpath` Set to true to call `fs.realpath` on all of the results.
In the case of a symlink that cannot be resolved, the full absolute
path to the matched entry is returned (though it will usually be a
broken symlink).
## Comparisons to other fnmatch/glob implementations
While strict compliance with the existing standards is a worthwhile
goal, some discrepancies exist between node-glob and other
implementations, and are intentional.
If the pattern starts with a `!` character, then it is negated. Set the
`nonegate` flag to suppress this behavior, and treat leading `!`
characters normally. This is perhaps relevant if you wish to start the
pattern with a negative extglob pattern like `!(a|B)`. Multiple `!`
characters at the start of a pattern will negate the pattern multiple
times.
If a pattern starts with `#`, then it is treated as a comment, and
will not match anything. Use `\#` to match a literal `#` at the
start of a line, or set the `nocomment` flag to suppress this behavior.
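A short sketch (the `#build` directory name is hypothetical):
```javascript
var glob = require("glob")
glob.sync("#build/*")                       // []  a leading "#" makes the whole pattern a comment
glob.sync("\\#build/*")                     // matches inside a literal "#build" directory, if present
glob.sync("#build/*", { nocomment: true })  // "#" is treated as an ordinary character
```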
The double-star character `**` is supported by default, unless the
`noglobstar` flag is set. This is supported in the manner of bsdglob
and bash 4.3, where `**` only has special significance if it is the only
thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
`a/**b` will not.
Note that symlinked directories are not crawled as part of a `**`,
though their contents may match against subsequent portions of the
pattern. This prevents infinite loops and duplicates and the like.
If an escaped pattern has no matches, and the `nonull` flag is set,
then glob returns the pattern as-provided, rather than
interpreting the character escapes. For example,
`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
`"*a?"`. This is akin to setting the `nullglob` option in bash, except
that it does not resolve escaped pattern characters.
If brace expansion is not disabled, then it is performed before any
other interpretation of the glob pattern. Thus, a pattern like
`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
checked for validity. Since those two are valid, matching proceeds.
## Windows
**Please only use forward-slashes in glob expressions.**
Though Windows uses either `/` or `\` as its path separator, only `/`
characters are used by this glob implementation. You must use
forward-slashes **only** in glob expressions. Back-slashes will always
be interpreted as escape characters, not path separators.
Results from absolute patterns such as `/foo/*` are mounted onto the
root setting using `path.join`. On Windows, this will by default result
in `/foo/*` matching `C:\foo\bar.txt`.
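For instance, a minimal sketch of normalizing separators before globbing (the `src\**\*.js` string is just a hypothetical Windows-style pattern):
```javascript
var glob = require("glob")
// back-slashes would be read as escapes, so convert them to forward-slashes first
var pattern = "src\\**\\*.js".replace(/\\/g, "/")  // -> "src/**/*.js"
glob(pattern, function (er, files) {
  if (er) throw er
  // results use forward-slashes, e.g. "src/app/index.js"
})
```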
## Race Conditions
Glob searching, by its very nature, is susceptible to race conditions,
since it relies on directory walking and such.
As a result, it is possible that a file that exists when glob looks for
it may have been deleted or modified by the time it returns the result.
As part of its internal implementation, this program caches all stat
and readdir calls that it makes, in order to cut down on system
overhead. However, this also makes it even more susceptible to races,
especially if the cache or statCache objects are reused between glob
calls.
Users are thus advised not to use a glob result as a guarantee of
filesystem state in the face of rapid changes. For the vast majority
of operations, this is never a problem.
## Contributing
Any change to behavior (including bugfixes) must come with a test.
Patches that fail tests or reduce performance will be rejected.
```
# to run tests
npm test
# to re-generate test fixtures
npm run test-regen
# to benchmark against bash/zsh
npm run bench
# to profile javascript
npm run prof
```

237
node_modules/gulp/node_modules/glob/common.js generated vendored Normal file
View File

@@ -0,0 +1,237 @@
exports.alphasort = alphasort
exports.alphasorti = alphasorti
exports.isAbsolute = process.platform === "win32" ? absWin : absUnix
exports.setopts = setopts
exports.ownProp = ownProp
exports.makeAbs = makeAbs
exports.finish = finish
exports.mark = mark
exports.isIgnored = isIgnored
exports.childrenIgnored = childrenIgnored
function ownProp (obj, field) {
return Object.prototype.hasOwnProperty.call(obj, field)
}
var path = require("path")
var minimatch = require("minimatch")
var Minimatch = minimatch.Minimatch
function absWin (p) {
if (absUnix(p)) return true
// pull off the device/UNC bit from a windows path.
// from node's lib/path.js
var splitDeviceRe =
/^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/
var result = splitDeviceRe.exec(p)
var device = result[1] || ''
var isUnc = device && device.charAt(1) !== ':'
var isAbsolute = !!result[2] || isUnc // UNC paths are always absolute
return isAbsolute
}
function absUnix (p) {
return p.charAt(0) === "/" || p === ""
}
function alphasorti (a, b) {
return a.toLowerCase().localeCompare(b.toLowerCase())
}
function alphasort (a, b) {
return a.localeCompare(b)
}
function setupIgnores (self, options) {
self.ignore = options.ignore || []
if (!Array.isArray(self.ignore))
self.ignore = [self.ignore]
if (self.ignore.length) {
self.ignore = self.ignore.map(ignoreMap)
}
}
function ignoreMap (pattern) {
var gmatcher = null
if (pattern.slice(-3) === '/**') {
var gpattern = pattern.replace(/(\/\*\*)+$/, '')
gmatcher = new Minimatch(gpattern, { nonegate: true })
}
return {
matcher: new Minimatch(pattern, { nonegate: true }),
gmatcher: gmatcher
}
}
function setopts (self, pattern, options) {
if (!options)
options = {}
// base-matching: just use globstar for that.
if (options.matchBase && -1 === pattern.indexOf("/")) {
if (options.noglobstar) {
throw new Error("base matching requires globstar")
}
pattern = "**/" + pattern
}
self.pattern = pattern
self.strict = options.strict !== false
self.realpath = !!options.realpath
self.realpathCache = options.realpathCache || Object.create(null)
self.follow = !!options.follow
self.dot = !!options.dot
self.mark = !!options.mark
self.nodir = !!options.nodir
if (self.nodir)
self.mark = true
self.sync = !!options.sync
self.nounique = !!options.nounique
self.nonull = !!options.nonull
self.nosort = !!options.nosort
self.nocase = !!options.nocase
self.stat = !!options.stat
self.noprocess = !!options.noprocess
self.maxLength = options.maxLength || Infinity
self.cache = options.cache || Object.create(null)
self.statCache = options.statCache || Object.create(null)
self.symlinks = options.symlinks || Object.create(null)
setupIgnores(self, options)
self.changedCwd = false
var cwd = process.cwd()
if (!ownProp(options, "cwd"))
self.cwd = cwd
else {
self.cwd = options.cwd
self.changedCwd = path.resolve(options.cwd) !== cwd
}
self.root = options.root || path.resolve(self.cwd, "/")
self.root = path.resolve(self.root)
if (process.platform === "win32")
self.root = self.root.replace(/\\/g, "/")
self.nomount = !!options.nomount
self.minimatch = new Minimatch(pattern, options)
self.options = self.minimatch.options
}
function finish (self) {
var nou = self.nounique
var all = nou ? [] : Object.create(null)
for (var i = 0, l = self.matches.length; i < l; i ++) {
var matches = self.matches[i]
if (!matches || Object.keys(matches).length === 0) {
if (self.nonull) {
// do like the shell, and spit out the literal glob
var literal = self.minimatch.globSet[i]
if (nou)
all.push(literal)
else
all[literal] = true
}
} else {
// had matches
var m = Object.keys(matches)
if (nou)
all.push.apply(all, m)
else
m.forEach(function (m) {
all[m] = true
})
}
}
if (!nou)
all = Object.keys(all)
if (!self.nosort)
all = all.sort(self.nocase ? alphasorti : alphasort)
// at *some* point we statted all of these
if (self.mark) {
for (var i = 0; i < all.length; i++) {
all[i] = self._mark(all[i])
}
if (self.nodir) {
all = all.filter(function (e) {
return !(/\/$/.test(e))
})
}
}
if (self.ignore.length)
all = all.filter(function(m) {
return !isIgnored(self, m)
})
self.found = all
}
function mark (self, p) {
var abs = makeAbs(self, p)
var c = self.cache[abs]
var m = p
if (c) {
var isDir = c === 'DIR' || Array.isArray(c)
var slash = p.slice(-1) === '/'
if (isDir && !slash)
m += '/'
else if (!isDir && slash)
m = m.slice(0, -1)
if (m !== p) {
var mabs = makeAbs(self, m)
self.statCache[mabs] = self.statCache[abs]
self.cache[mabs] = self.cache[abs]
}
}
return m
}
// lotta situps...
function makeAbs (self, f) {
var abs = f
if (f.charAt(0) === '/') {
abs = path.join(self.root, f)
} else if (exports.isAbsolute(f)) {
abs = f
} else if (self.changedCwd) {
abs = path.resolve(self.cwd, f)
} else if (self.realpath) {
abs = path.resolve(f)
}
return abs
}
// Return true if the pattern ends with a globstar '**', for the accompanying parent directory.
// E.g. if node_modules/** is the pattern, add 'node_modules' to the ignore list along with its contents
function isIgnored (self, path) {
if (!self.ignore.length)
return false
return self.ignore.some(function(item) {
return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
})
}
function childrenIgnored (self, path) {
if (!self.ignore.length)
return false
return self.ignore.some(function(item) {
return !!(item.gmatcher && item.gmatcher.match(path))
})
}

740
node_modules/gulp/node_modules/glob/glob.js generated vendored Normal file
View File

@@ -0,0 +1,740 @@
// Approach:
//
// 1. Get the minimatch set
// 2. For each pattern in the set, PROCESS(pattern, false)
// 3. Store matches per-set, then uniq them
//
// PROCESS(pattern, inGlobStar)
// Get the first [n] items from pattern that are all strings
// Join these together. This is PREFIX.
// If there is no more remaining, then stat(PREFIX) and
// add to matches if it succeeds. END.
//
// If inGlobStar and PREFIX is symlink and points to dir
// set ENTRIES = []
// else readdir(PREFIX) as ENTRIES
// If fail, END
//
// with ENTRIES
// If pattern[n] is GLOBSTAR
// // handle the case where the globstar match is empty
// // by pruning it out, and testing the resulting pattern
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
// // handle other cases.
// for ENTRY in ENTRIES (not dotfiles)
// // attach globstar + tail onto the entry
// // Mark that this entry is a globstar match
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
//
// else // not globstar
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
// Test ENTRY against pattern[n]
// If fails, continue
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
//
// Caveat:
// Cache all stats and readdirs results to minimize syscall. Since all
// we ever care about is existence and directory-ness, we can just keep
// `true` for files, and [children,...] for directories, or `false` for
// things that don't exist.
module.exports = glob
var fs = require('fs')
var minimatch = require('minimatch')
var Minimatch = minimatch.Minimatch
var inherits = require('inherits')
var EE = require('events').EventEmitter
var path = require('path')
var assert = require('assert')
var globSync = require('./sync.js')
var common = require('./common.js')
var alphasort = common.alphasort
var alphasorti = common.alphasorti
var isAbsolute = common.isAbsolute
var setopts = common.setopts
var ownProp = common.ownProp
var inflight = require('inflight')
var util = require('util')
var childrenIgnored = common.childrenIgnored
var once = require('once')
function glob (pattern, options, cb) {
if (typeof options === 'function') cb = options, options = {}
if (!options) options = {}
if (options.sync) {
if (cb)
throw new TypeError('callback provided to sync glob')
return globSync(pattern, options)
}
return new Glob(pattern, options, cb)
}
glob.sync = globSync
var GlobSync = glob.GlobSync = globSync.GlobSync
// old api surface
glob.glob = glob
glob.hasMagic = function (pattern, options_) {
var options = util._extend({}, options_)
options.noprocess = true
var g = new Glob(pattern, options)
var set = g.minimatch.set
if (set.length > 1)
return true
for (var j = 0; j < set[0].length; j++) {
if (typeof set[0][j] !== 'string')
return true
}
return false
}
glob.Glob = Glob
inherits(Glob, EE)
function Glob (pattern, options, cb) {
if (typeof options === 'function') {
cb = options
options = null
}
if (options && options.sync) {
if (cb)
throw new TypeError('callback provided to sync glob')
return new GlobSync(pattern, options)
}
if (!(this instanceof Glob))
return new Glob(pattern, options, cb)
setopts(this, pattern, options)
this._didRealPath = false
// process each pattern in the minimatch set
var n = this.minimatch.set.length
// The matches are stored as {<filename>: true,...} so that
// duplicates are automagically pruned.
// Later, we do an Object.keys() on these.
// Keep them as a list so we can fill in when nonull is set.
this.matches = new Array(n)
if (typeof cb === 'function') {
cb = once(cb)
this.on('error', cb)
this.on('end', function (matches) {
cb(null, matches)
})
}
var self = this
var n = this.minimatch.set.length
this._processing = 0
this.matches = new Array(n)
this._emitQueue = []
this._processQueue = []
this.paused = false
if (this.noprocess)
return this
if (n === 0)
return done()
for (var i = 0; i < n; i ++) {
this._process(this.minimatch.set[i], i, false, done)
}
function done () {
--self._processing
if (self._processing <= 0)
self._finish()
}
}
Glob.prototype._finish = function () {
assert(this instanceof Glob)
if (this.aborted)
return
if (this.realpath && !this._didRealpath)
return this._realpath()
common.finish(this)
this.emit('end', this.found)
}
Glob.prototype._realpath = function () {
if (this._didRealpath)
return
this._didRealpath = true
var n = this.matches.length
if (n === 0)
return this._finish()
var self = this
for (var i = 0; i < this.matches.length; i++)
this._realpathSet(i, next)
function next () {
if (--n === 0)
self._finish()
}
}
Glob.prototype._realpathSet = function (index, cb) {
var matchset = this.matches[index]
if (!matchset)
return cb()
var found = Object.keys(matchset)
var self = this
var n = found.length
if (n === 0)
return cb()
var set = this.matches[index] = Object.create(null)
found.forEach(function (p, i) {
// If there's a problem with the stat, then it means that
// one or more of the links in the realpath couldn't be
// resolved. just return the abs value in that case.
p = self._makeAbs(p)
fs.realpath(p, self.realpathCache, function (er, real) {
if (!er)
set[real] = true
else if (er.syscall === 'stat')
set[p] = true
else
self.emit('error', er) // srsly wtf right here
if (--n === 0) {
self.matches[index] = set
cb()
}
})
})
}
Glob.prototype._mark = function (p) {
return common.mark(this, p)
}
Glob.prototype._makeAbs = function (f) {
return common.makeAbs(this, f)
}
Glob.prototype.abort = function () {
this.aborted = true
this.emit('abort')
}
Glob.prototype.pause = function () {
if (!this.paused) {
this.paused = true
this.emit('pause')
}
}
Glob.prototype.resume = function () {
if (this.paused) {
this.emit('resume')
this.paused = false
if (this._emitQueue.length) {
var eq = this._emitQueue.slice(0)
this._emitQueue.length = 0
for (var i = 0; i < eq.length; i ++) {
var e = eq[i]
this._emitMatch(e[0], e[1])
}
}
if (this._processQueue.length) {
var pq = this._processQueue.slice(0)
this._processQueue.length = 0
for (var i = 0; i < pq.length; i ++) {
var p = pq[i]
this._processing--
this._process(p[0], p[1], p[2], p[3])
}
}
}
}
Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
assert(this instanceof Glob)
assert(typeof cb === 'function')
if (this.aborted)
return
this._processing++
if (this.paused) {
this._processQueue.push([pattern, index, inGlobStar, cb])
return
}
//console.error('PROCESS %d', this._processing, pattern)
// Get the first [n] parts of pattern that are all strings.
var n = 0
while (typeof pattern[n] === 'string') {
n ++
}
// now n is the index of the first one that is *not* a string.
// see if there's anything else
var prefix
switch (n) {
// if not, then this is rather simple
case pattern.length:
this._processSimple(pattern.join('/'), index, cb)
return
case 0:
// pattern *starts* with some non-trivial item.
// going to readdir(cwd), but not include the prefix in matches.
prefix = null
break
default:
// pattern has some string bits in the front.
// whatever it starts with, whether that's 'absolute' like /foo/bar,
// or 'relative' like '../baz'
prefix = pattern.slice(0, n).join('/')
break
}
var remain = pattern.slice(n)
// get the list of entries.
var read
if (prefix === null)
read = '.'
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
if (!prefix || !isAbsolute(prefix))
prefix = '/' + prefix
read = prefix
} else
read = prefix
var abs = this._makeAbs(read)
//if ignored, skip _processing
if (childrenIgnored(this, read))
return cb()
var isGlobStar = remain[0] === minimatch.GLOBSTAR
if (isGlobStar)
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
else
this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
}
Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
var self = this
this._readdir(abs, inGlobStar, function (er, entries) {
return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
})
}
Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
// if the abs isn't a dir, then nothing can match!
if (!entries)
return cb()
// It will only match dot entries if it starts with a dot, or if
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
var pn = remain[0]
var negate = !!this.minimatch.negate
var rawGlob = pn._glob
var dotOk = this.dot || rawGlob.charAt(0) === '.'
var matchedEntries = []
for (var i = 0; i < entries.length; i++) {
var e = entries[i]
if (e.charAt(0) !== '.' || dotOk) {
var m
if (negate && !prefix) {
m = !e.match(pn)
} else {
m = e.match(pn)
}
if (m)
matchedEntries.push(e)
}
}
//console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
var len = matchedEntries.length
// If there are no matched entries, then nothing matches.
if (len === 0)
return cb()
// if this is the last remaining pattern bit, then no need for
// an additional stat *unless* the user has specified mark or
// stat explicitly. We know they exist, since readdir returned
// them.
if (remain.length === 1 && !this.mark && !this.stat) {
if (!this.matches[index])
this.matches[index] = Object.create(null)
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i]
if (prefix) {
if (prefix !== '/')
e = prefix + '/' + e
else
e = prefix + e
}
if (e.charAt(0) === '/' && !this.nomount) {
e = path.join(this.root, e)
}
this._emitMatch(index, e)
}
// This was the last one, and no stats were needed
return cb()
}
// now test all matched entries as stand-ins for that part
// of the pattern.
remain.shift()
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i]
var newPattern
if (prefix) {
if (prefix !== '/')
e = prefix + '/' + e
else
e = prefix + e
}
this._process([e].concat(remain), index, inGlobStar, cb)
}
cb()
}
Glob.prototype._emitMatch = function (index, e) {
if (this.aborted)
return
if (this.matches[index][e])
return
if (this.paused) {
this._emitQueue.push([index, e])
return
}
var abs = this._makeAbs(e)
if (this.nodir) {
var c = this.cache[abs]
if (c === 'DIR' || Array.isArray(c))
return
}
if (this.mark)
e = this._mark(e)
this.matches[index][e] = true
var st = this.statCache[abs]
if (st)
this.emit('stat', e, st)
this.emit('match', e)
}
Glob.prototype._readdirInGlobStar = function (abs, cb) {
if (this.aborted)
return
// follow all symlinked directories forever
// just proceed as if this is a non-globstar situation
if (this.follow)
return this._readdir(abs, false, cb)
var lstatkey = 'lstat\0' + abs
var self = this
var lstatcb = inflight(lstatkey, lstatcb_)
if (lstatcb)
fs.lstat(abs, lstatcb)
function lstatcb_ (er, lstat) {
if (er)
return cb()
var isSym = lstat.isSymbolicLink()
self.symlinks[abs] = isSym
// If it's not a symlink or a dir, then it's definitely a regular file.
// don't bother doing a readdir in that case.
if (!isSym && !lstat.isDirectory()) {
self.cache[abs] = 'FILE'
cb()
} else
self._readdir(abs, false, cb)
}
}
Glob.prototype._readdir = function (abs, inGlobStar, cb) {
if (this.aborted)
return
cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
if (!cb)
return
//console.error('RD %j %j', +inGlobStar, abs)
if (inGlobStar && !ownProp(this.symlinks, abs))
return this._readdirInGlobStar(abs, cb)
if (ownProp(this.cache, abs)) {
var c = this.cache[abs]
if (!c || c === 'FILE')
return cb()
if (Array.isArray(c))
return cb(null, c)
}
var self = this
fs.readdir(abs, readdirCb(this, abs, cb))
}
function readdirCb (self, abs, cb) {
return function (er, entries) {
if (er)
self._readdirError(abs, er, cb)
else
self._readdirEntries(abs, entries, cb)
}
}
Glob.prototype._readdirEntries = function (abs, entries, cb) {
if (this.aborted)
return
// if we haven't asked to stat everything, then just
// assume that everything in there exists, so we can avoid
// having to stat it a second time.
if (!this.mark && !this.stat) {
for (var i = 0; i < entries.length; i ++) {
var e = entries[i]
if (abs === '/')
e = abs + e
else
e = abs + '/' + e
this.cache[e] = true
}
}
this.cache[abs] = entries
return cb(null, entries)
}
Glob.prototype._readdirError = function (f, er, cb) {
if (this.aborted)
return
// handle errors, and cache the information
switch (er.code) {
case 'ENOTDIR': // totally normal. means it *does* exist.
this.cache[this._makeAbs(f)] = 'FILE'
break
case 'ENOENT': // not terribly unusual
case 'ELOOP':
case 'ENAMETOOLONG':
case 'UNKNOWN':
this.cache[this._makeAbs(f)] = false
break
default: // some unusual error. Treat as failure.
this.cache[this._makeAbs(f)] = false
if (this.strict) return this.emit('error', er)
if (!this.silent) console.error('glob error', er)
break
}
return cb()
}
Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
var self = this
this._readdir(abs, inGlobStar, function (er, entries) {
self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
})
}
Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
//console.error('pgs2', prefix, remain[0], entries)
// no entries means not a dir, so it can never have matches
// foo.txt/** doesn't match foo.txt
if (!entries)
return cb()
// test without the globstar, and with every child both below
// and replacing the globstar.
var remainWithoutGlobStar = remain.slice(1)
var gspref = prefix ? [ prefix ] : []
var noGlobStar = gspref.concat(remainWithoutGlobStar)
// the noGlobStar pattern exits the inGlobStar state
this._process(noGlobStar, index, false, cb)
var isSym = this.symlinks[abs]
var len = entries.length
// If it's a symlink, and we're in a globstar, then stop
if (isSym && inGlobStar)
return cb()
for (var i = 0; i < len; i++) {
var e = entries[i]
if (e.charAt(0) === '.' && !this.dot)
continue
// these two cases enter the inGlobStar state
var instead = gspref.concat(entries[i], remainWithoutGlobStar)
this._process(instead, index, true, cb)
var below = gspref.concat(entries[i], remain)
this._process(below, index, true, cb)
}
cb()
}
Glob.prototype._processSimple = function (prefix, index, cb) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var self = this
this._stat(prefix, function (er, exists) {
self._processSimple2(prefix, index, er, exists, cb)
})
}
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
//console.error('ps2', prefix, exists)
if (!this.matches[index])
this.matches[index] = Object.create(null)
// If it doesn't exist, then just mark the lack of results
if (!exists)
return cb()
if (prefix && isAbsolute(prefix) && !this.nomount) {
var trail = /[\/\\]$/.test(prefix)
if (prefix.charAt(0) === '/') {
prefix = path.join(this.root, prefix)
} else {
prefix = path.resolve(this.root, prefix)
if (trail)
prefix += '/'
}
}
if (process.platform === 'win32')
prefix = prefix.replace(/\\/g, '/')
// Mark this as a match
this._emitMatch(index, prefix)
cb()
}
// Returns either 'DIR', 'FILE', or false
Glob.prototype._stat = function (f, cb) {
var abs = this._makeAbs(f)
var needDir = f.slice(-1) === '/'
if (f.length > this.maxLength)
return cb()
if (!this.stat && ownProp(this.cache, abs)) {
var c = this.cache[abs]
if (Array.isArray(c))
c = 'DIR'
// It exists, but maybe not how we need it
if (!needDir || c === 'DIR')
return cb(null, c)
if (needDir && c === 'FILE')
return cb()
// otherwise we have to stat, because maybe c=true
// if we know it exists, but not what it is.
}
var exists
var stat = this.statCache[abs]
if (stat !== undefined) {
if (stat === false)
return cb(null, stat)
else {
var type = stat.isDirectory() ? 'DIR' : 'FILE'
if (needDir && type === 'FILE')
return cb()
else
return cb(null, type, stat)
}
}
var self = this
var statcb = inflight('stat\0' + abs, lstatcb_)
if (statcb)
fs.lstat(abs, statcb)
function lstatcb_ (er, lstat) {
if (lstat && lstat.isSymbolicLink()) {
// If it's a symlink, then treat it as the target, unless
// the target does not exist, then treat it as a file.
return fs.stat(abs, function (er, stat) {
if (er)
self._stat2(f, abs, null, lstat, cb)
else
self._stat2(f, abs, er, stat, cb)
})
} else {
self._stat2(f, abs, er, lstat, cb)
}
}
}
Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
if (er) {
this.statCache[abs] = false
return cb()
}
var needDir = f.slice(-1) === '/'
this.statCache[abs] = stat
if (abs.slice(-1) === '/' && !stat.isDirectory())
return cb(null, false, stat)
var c = stat.isDirectory() ? 'DIR' : 'FILE'
this.cache[abs] = this.cache[abs] || c
if (needDir && c !== 'DIR')
return cb()
return cb(null, c, stat)
}

74
node_modules/gulp/node_modules/glob/package.json generated vendored Normal file
View File

@@ -0,0 +1,74 @@
{
"_from": "glob@^4.3.1",
"_id": "glob@4.5.3",
"_inBundle": false,
"_integrity": "sha1-xstz0yJsHv7wTePFbQEvAzd+4V8=",
"_location": "/gulp/glob",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "glob@^4.3.1",
"name": "glob",
"escapedName": "glob",
"rawSpec": "^4.3.1",
"saveSpec": null,
"fetchSpec": "^4.3.1"
},
"_requiredBy": [
"/gulp/glob-stream"
],
"_resolved": "https://registry.npmjs.org/glob/-/glob-4.5.3.tgz",
"_shasum": "c6cb73d3226c1efef04de3c56d012f03377ee15f",
"_spec": "glob@^4.3.1",
"_where": "/Applications/XAMPP/xamppfiles/htdocs/wordpress/t-latehome/wp-content/plugins/opal-estate-pro/node_modules/gulp/node_modules/glob-stream",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/isaacs/node-glob/issues"
},
"bundleDependencies": false,
"dependencies": {
"inflight": "^1.0.4",
"inherits": "2",
"minimatch": "^2.0.1",
"once": "^1.3.0"
},
"deprecated": false,
"description": "a little globber",
"devDependencies": {
"mkdirp": "0",
"rimraf": "^2.2.8",
"tap": "^0.5.0",
"tick": "0.0.6"
},
"engines": {
"node": "*"
},
"files": [
"glob.js",
"sync.js",
"common.js"
],
"homepage": "https://github.com/isaacs/node-glob#readme",
"license": "ISC",
"main": "glob.js",
"name": "glob",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-glob.git"
},
"scripts": {
"bench": "bash benchmark.sh",
"benchclean": "bash benchclean.sh",
"prepublish": "npm run benchclean",
"prof": "bash prof.sh && cat profile.txt",
"profclean": "rm -f v8.log profile.txt",
"test": "npm run profclean && tap test/*.js",
"test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js"
},
"version": "4.5.3"
}

457
node_modules/gulp/node_modules/glob/sync.js generated vendored Normal file
View File

@@ -0,0 +1,457 @@
module.exports = globSync
globSync.GlobSync = GlobSync
var fs = require('fs')
var minimatch = require('minimatch')
var Minimatch = minimatch.Minimatch
var Glob = require('./glob.js').Glob
var util = require('util')
var path = require('path')
var assert = require('assert')
var common = require('./common.js')
var alphasort = common.alphasort
var alphasorti = common.alphasorti
var isAbsolute = common.isAbsolute
var setopts = common.setopts
var ownProp = common.ownProp
var childrenIgnored = common.childrenIgnored
function globSync (pattern, options) {
if (typeof options === 'function' || arguments.length === 3)
throw new TypeError('callback provided to sync glob\n'+
'See: https://github.com/isaacs/node-glob/issues/167')
return new GlobSync(pattern, options).found
}
function GlobSync (pattern, options) {
if (!pattern)
throw new Error('must provide pattern')
if (typeof options === 'function' || arguments.length === 3)
throw new TypeError('callback provided to sync glob\n'+
'See: https://github.com/isaacs/node-glob/issues/167')
if (!(this instanceof GlobSync))
return new GlobSync(pattern, options)
setopts(this, pattern, options)
if (this.noprocess)
return this
var n = this.minimatch.set.length
this.matches = new Array(n)
for (var i = 0; i < n; i ++) {
this._process(this.minimatch.set[i], i, false)
}
this._finish()
}
GlobSync.prototype._finish = function () {
assert(this instanceof GlobSync)
if (this.realpath) {
var self = this
this.matches.forEach(function (matchset, index) {
var set = self.matches[index] = Object.create(null)
for (var p in matchset) {
try {
p = self._makeAbs(p)
var real = fs.realpathSync(p, this.realpathCache)
set[real] = true
} catch (er) {
if (er.syscall === 'stat')
set[self._makeAbs(p)] = true
else
throw er
}
}
})
}
common.finish(this)
}
GlobSync.prototype._process = function (pattern, index, inGlobStar) {
assert(this instanceof GlobSync)
// Get the first [n] parts of pattern that are all strings.
var n = 0
while (typeof pattern[n] === 'string') {
n ++
}
// now n is the index of the first one that is *not* a string.
// See if there's anything else
var prefix
switch (n) {
// if not, then this is rather simple
case pattern.length:
this._processSimple(pattern.join('/'), index)
return
case 0:
// pattern *starts* with some non-trivial item.
// going to readdir(cwd), but not include the prefix in matches.
prefix = null
break
default:
// pattern has some string bits in the front.
// whatever it starts with, whether that's 'absolute' like /foo/bar,
// or 'relative' like '../baz'
prefix = pattern.slice(0, n).join('/')
break
}
var remain = pattern.slice(n)
// get the list of entries.
var read
if (prefix === null)
read = '.'
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
if (!prefix || !isAbsolute(prefix))
prefix = '/' + prefix
read = prefix
} else
read = prefix
var abs = this._makeAbs(read)
//if ignored, skip processing
if (childrenIgnored(this, read))
return
var isGlobStar = remain[0] === minimatch.GLOBSTAR
if (isGlobStar)
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
else
this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
}
GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
var entries = this._readdir(abs, inGlobStar)
// if the abs isn't a dir, then nothing can match!
if (!entries)
return
// It will only match dot entries if it starts with a dot, or if
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
var pn = remain[0]
var negate = !!this.minimatch.negate
var rawGlob = pn._glob
var dotOk = this.dot || rawGlob.charAt(0) === '.'
var matchedEntries = []
for (var i = 0; i < entries.length; i++) {
var e = entries[i]
if (e.charAt(0) !== '.' || dotOk) {
var m
if (negate && !prefix) {
m = !e.match(pn)
} else {
m = e.match(pn)
}
if (m)
matchedEntries.push(e)
}
}
var len = matchedEntries.length
// If there are no matched entries, then nothing matches.
if (len === 0)
return
// if this is the last remaining pattern bit, then no need for
// an additional stat *unless* the user has specified mark or
// stat explicitly. We know they exist, since readdir returned
// them.
if (remain.length === 1 && !this.mark && !this.stat) {
if (!this.matches[index])
this.matches[index] = Object.create(null)
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i]
if (prefix) {
if (prefix.slice(-1) !== '/')
e = prefix + '/' + e
else
e = prefix + e
}
if (e.charAt(0) === '/' && !this.nomount) {
e = path.join(this.root, e)
}
this.matches[index][e] = true
}
// This was the last one, and no stats were needed
return
}
// now test all matched entries as stand-ins for that part
// of the pattern.
remain.shift()
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i]
var newPattern
if (prefix)
newPattern = [prefix, e]
else
newPattern = [e]
this._process(newPattern.concat(remain), index, inGlobStar)
}
}
GlobSync.prototype._emitMatch = function (index, e) {
var abs = this._makeAbs(e)
if (this.mark)
e = this._mark(e)
if (this.matches[index][e])
return
if (this.nodir) {
var c = this.cache[this._makeAbs(e)]
if (c === 'DIR' || Array.isArray(c))
return
}
this.matches[index][e] = true
if (this.stat)
this._stat(e)
}
GlobSync.prototype._readdirInGlobStar = function (abs) {
// follow all symlinked directories forever
// just proceed as if this is a non-globstar situation
if (this.follow)
return this._readdir(abs, false)
var entries
var lstat
var stat
try {
lstat = fs.lstatSync(abs)
} catch (er) {
// lstat failed, doesn't exist
return null
}
var isSym = lstat.isSymbolicLink()
this.symlinks[abs] = isSym
// If it's not a symlink or a dir, then it's definitely a regular file.
// don't bother doing a readdir in that case.
if (!isSym && !lstat.isDirectory())
this.cache[abs] = 'FILE'
else
entries = this._readdir(abs, false)
return entries
}
GlobSync.prototype._readdir = function (abs, inGlobStar) {
var entries
if (inGlobStar && !ownProp(this.symlinks, abs))
return this._readdirInGlobStar(abs)
if (ownProp(this.cache, abs)) {
var c = this.cache[abs]
if (!c || c === 'FILE')
return null
if (Array.isArray(c))
return c
}
try {
return this._readdirEntries(abs, fs.readdirSync(abs))
} catch (er) {
this._readdirError(abs, er)
return null
}
}
GlobSync.prototype._readdirEntries = function (abs, entries) {
// if we haven't asked to stat everything, then just
// assume that everything in there exists, so we can avoid
// having to stat it a second time.
if (!this.mark && !this.stat) {
for (var i = 0; i < entries.length; i ++) {
var e = entries[i]
if (abs === '/')
e = abs + e
else
e = abs + '/' + e
this.cache[e] = true
}
}
this.cache[abs] = entries
// mark and cache dir-ness
return entries
}
GlobSync.prototype._readdirError = function (f, er) {
// handle errors, and cache the information
switch (er.code) {
case 'ENOTDIR': // totally normal. means it *does* exist.
this.cache[this._makeAbs(f)] = 'FILE'
break
case 'ENOENT': // not terribly unusual
case 'ELOOP':
case 'ENAMETOOLONG':
case 'UNKNOWN':
this.cache[this._makeAbs(f)] = false
break
default: // some unusual error. Treat as failure.
this.cache[this._makeAbs(f)] = false
if (this.strict) throw er
if (!this.silent) console.error('glob error', er)
break
}
}
GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
var entries = this._readdir(abs, inGlobStar)
// no entries means not a dir, so it can never have matches
// foo.txt/** doesn't match foo.txt
if (!entries)
return
// test without the globstar, and with every child both below
// and replacing the globstar.
var remainWithoutGlobStar = remain.slice(1)
var gspref = prefix ? [ prefix ] : []
var noGlobStar = gspref.concat(remainWithoutGlobStar)
// the noGlobStar pattern exits the inGlobStar state
this._process(noGlobStar, index, false)
var len = entries.length
var isSym = this.symlinks[abs]
// If it's a symlink, and we're in a globstar, then stop
if (isSym && inGlobStar)
return
for (var i = 0; i < len; i++) {
var e = entries[i]
if (e.charAt(0) === '.' && !this.dot)
continue
// these two cases enter the inGlobStar state
var instead = gspref.concat(entries[i], remainWithoutGlobStar)
this._process(instead, index, true)
var below = gspref.concat(entries[i], remain)
this._process(below, index, true)
}
}
GlobSync.prototype._processSimple = function (prefix, index) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var exists = this._stat(prefix)
if (!this.matches[index])
this.matches[index] = Object.create(null)
// If it doesn't exist, then just mark the lack of results
if (!exists)
return
if (prefix && isAbsolute(prefix) && !this.nomount) {
var trail = /[\/\\]$/.test(prefix)
if (prefix.charAt(0) === '/') {
prefix = path.join(this.root, prefix)
} else {
prefix = path.resolve(this.root, prefix)
if (trail)
prefix += '/'
}
}
if (process.platform === 'win32')
prefix = prefix.replace(/\\/g, '/')
// Mark this as a match
this.matches[index][prefix] = true
}
// Returns either 'DIR', 'FILE', or false
GlobSync.prototype._stat = function (f) {
var abs = this._makeAbs(f)
var needDir = f.slice(-1) === '/'
if (f.length > this.maxLength)
return false
if (!this.stat && ownProp(this.cache, abs)) {
var c = this.cache[abs]
if (Array.isArray(c))
c = 'DIR'
// It exists, but maybe not how we need it
if (!needDir || c === 'DIR')
return c
if (needDir && c === 'FILE')
return false
// otherwise we have to stat, because maybe c=true
// if we know it exists, but not what it is.
}
var exists
var stat = this.statCache[abs]
if (!stat) {
var lstat
try {
lstat = fs.lstatSync(abs)
} catch (er) {
return false
}
if (lstat.isSymbolicLink()) {
try {
stat = fs.statSync(abs)
} catch (er) {
stat = lstat
}
} else {
stat = lstat
}
}
this.statCache[abs] = stat
var c = stat.isDirectory() ? 'DIR' : 'FILE'
this.cache[abs] = this.cache[abs] || c
if (needDir && c !== 'DIR')
return false
return c
}
GlobSync.prototype._mark = function (p) {
return common.mark(this, p)
}
GlobSync.prototype._makeAbs = function (f) {
return common.makeAbs(this, f)
}

View File

@@ -0,0 +1,7 @@
sudo: false
language: node_js
node_js:
- '0.10'
- '0.12'
- '4'
- '6'

View File

@@ -0,0 +1,51 @@
commit 4374b79aed648738d2458ce027cbe2b372bf7b3a
Author: isaacs <i@izs.me>
Date: Wed Jun 26 12:12:17 2019 -0700
Add support for stat options
Fix #158
diff --git a/polyfills.js b/polyfills.js
index b964ed0..bc9759b 100644
--- a/polyfills.js
+++ b/polyfills.js
@@ -273,17 +273,23 @@ function patch (fs) {
}
+ // stat options added in v10.5.0
+ var supportsStatOptions = /^v[1-9][1-9]|^v10\.[5-9]/.test(process.version)
function statFix (orig) {
if (!orig) return orig
// Older versions of Node erroneously returned signed integers for
// uid + gid.
- return function (target, cb) {
- return orig.call(fs, target, function (er, stats) {
- if (!stats) return cb.apply(this, arguments)
+ return function (target, options, cb) {
+ if (typeof options === 'function')
+ cb = options, options = null
+ function callback (er, stats) {
+ if (!stats) return cb && cb.apply(this, arguments)
if (stats.uid < 0) stats.uid += 0x100000000
if (stats.gid < 0) stats.gid += 0x100000000
if (cb) cb.apply(this, arguments)
- })
+ }
+ return supportsStatOptions ? orig.call(fs, target, options || {}, callback)
+ : orig.call(fs, target, callback)
}
}
@@ -291,8 +297,8 @@ function patch (fs) {
if (!orig) return orig
// Older versions of Node erroneously returned signed integers for
// uid + gid.
- return function (target) {
- var stats = orig.call(fs, target)
+ return function (target, options) {
+ var stats = orig.call(fs, target, options)
if (stats.uid < 0) stats.uid += 0x100000000
if (stats.gid < 0) stats.gid += 0x100000000
return stats;

15
node_modules/gulp/node_modules/graceful-fs/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,15 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

36
node_modules/gulp/node_modules/graceful-fs/README.md generated vendored Normal file
View File

@@ -0,0 +1,36 @@
# graceful-fs
graceful-fs functions as a drop-in replacement for the fs module,
making various improvements.
The improvements are meant to normalize behavior across different
platforms and environments, and to make filesystem access more
resilient to errors.
## Improvements over [fs module](http://api.nodejs.org/fs.html)
graceful-fs:
* Queues up `open` and `readdir` calls, and retries them once
something closes if there is an EMFILE error from too many file
descriptors.
* fixes `lchmod` for Node versions prior to 0.6.2.
* implements `fs.lutimes` if possible. Otherwise it becomes a noop.
* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
`lchown` if the user isn't root.
* makes `lchmod` and `lchown` become noops, if not available.
* retries reading a file if `read` results in EAGAIN error.
On Windows, it retries renaming a file for up to one second if an `EACCESS`
or `EPERM` error occurs, likely because antivirus software has locked
the directory.
## USAGE
```javascript
// use just like fs
var fs = require('graceful-fs')
// now go and do stuff with it...
fs.readFileSync('some-file-or-whatever')
```
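As a rough sketch of the EMFILE queueing described above (the file name and the count are arbitrary):
```javascript
var fs = require('graceful-fs')
// open far more files concurrently than the process has descriptors for;
// plain fs would start failing with EMFILE, while graceful-fs queues the
// excess open() calls and retries them as other descriptors are closed
for (var i = 0; i < 10000; i++) {
  fs.open('some-file-or-whatever', 'r', function (er, fd) {
    if (er) throw er
    fs.close(fd, function () {})
  })
}
```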

5
node_modules/gulp/node_modules/graceful-fs/foo.sh generated vendored Executable file
View File

@@ -0,0 +1,5 @@
#!/bin/bash
# set -e
false
echo 'ok'

1
node_modules/gulp/node_modules/graceful-fs/fs.js generated vendored Normal file
View File

@@ -0,0 +1 @@
module.exports = require('natives').require('fs', ['stream'])

View File

@@ -0,0 +1,158 @@
// Monkey-patching the fs module.
// It's ugly, but there is simply no other way to do this.
var fs = module.exports = require('./fs.js')
var assert = require('assert')
// fix up some busted stuff, mostly on windows and old nodes
require('./polyfills.js')
var util = require('util')
function noop () {}
var debug = noop
if (util.debuglog)
debug = util.debuglog('gfs')
else if (/\bgfs\b/i.test(process.env.NODE_DEBUG || ''))
debug = function() {
var m = util.format.apply(util, arguments)
m = 'GFS: ' + m.split(/\n/).join('\nGFS: ')
console.error(m)
}
if (/\bgfs\b/i.test(process.env.NODE_DEBUG || '')) {
process.on('exit', function() {
debug('fds', fds)
debug(queue)
assert.equal(queue.length, 0)
})
}
var originalOpen = fs.open
fs.open = open
function open(path, flags, mode, cb) {
if (typeof mode === "function") cb = mode, mode = null
if (typeof cb !== "function") cb = noop
new OpenReq(path, flags, mode, cb)
}
function OpenReq(path, flags, mode, cb) {
this.path = path
this.flags = flags
this.mode = mode
this.cb = cb
Req.call(this)
}
util.inherits(OpenReq, Req)
OpenReq.prototype.process = function() {
originalOpen.call(fs, this.path, this.flags, this.mode, this.done)
}
var fds = {}
OpenReq.prototype.done = function(er, fd) {
debug('open done', er, fd)
if (fd)
fds['fd' + fd] = this.path
Req.prototype.done.call(this, er, fd)
}
var originalReaddir = fs.readdir
fs.readdir = readdir
function readdir(path, cb) {
if (typeof cb !== "function") cb = noop
new ReaddirReq(path, cb)
}
function ReaddirReq(path, cb) {
this.path = path
this.cb = cb
Req.call(this)
}
util.inherits(ReaddirReq, Req)
ReaddirReq.prototype.process = function() {
originalReaddir.call(fs, this.path, this.done)
}
ReaddirReq.prototype.done = function(er, files) {
if (files && files.sort)
files = files.sort()
Req.prototype.done.call(this, er, files)
onclose()
}
var originalClose = fs.close
fs.close = close
function close (fd, cb) {
debug('close', fd)
if (typeof cb !== "function") cb = noop
delete fds['fd' + fd]
originalClose.call(fs, fd, function(er) {
onclose()
cb(er)
})
}
var originalCloseSync = fs.closeSync
fs.closeSync = closeSync
function closeSync (fd) {
try {
return originalCloseSync(fd)
} finally {
onclose()
}
}
// Req class
function Req () {
// start processing
this.done = this.done.bind(this)
this.failures = 0
this.process()
}
Req.prototype.done = function (er, result) {
var tryAgain = false
if (er) {
var code = er.code
var tryAgain = code === "EMFILE" || code === "ENFILE"
if (process.platform === "win32")
tryAgain = tryAgain || code === "OK"
}
if (tryAgain) {
this.failures ++
enqueue(this)
} else {
var cb = this.cb
cb(er, result)
}
}
var queue = []
function enqueue(req) {
queue.push(req)
debug('enqueue %d %s', queue.length, req.constructor.name, req)
}
function onclose() {
var req = queue.shift()
if (req) {
debug('process', req.constructor.name, req)
req.process()
}
}

0
node_modules/gulp/node_modules/graceful-fs/heap.txt generated vendored Normal file
View File

View File

@@ -0,0 +1,81 @@
{
"_from": "graceful-fs@^3.0.0",
"_id": "graceful-fs@3.0.12",
"_inBundle": false,
"_integrity": "sha512-J55gaCS4iTTJfTXIxSVw3EMQckcqkpdRv3IR7gu6sq0+tbC363Zx6KH/SEwXASK9JRbhyZmVjJEVJIOxYsB3Qg==",
"_location": "/gulp/graceful-fs",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "graceful-fs@^3.0.0",
"name": "graceful-fs",
"escapedName": "graceful-fs",
"rawSpec": "^3.0.0",
"saveSpec": null,
"fetchSpec": "^3.0.0"
},
"_requiredBy": [
"/gulp/vinyl-fs"
],
"_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-3.0.12.tgz",
"_shasum": "0034947ce9ed695ec8ab0b854bc919e82b1ffaef",
"_spec": "graceful-fs@^3.0.0",
"_where": "/Applications/XAMPP/xamppfiles/htdocs/wordpress/t-latehome/wp-content/plugins/opal-estate-pro/node_modules/gulp/node_modules/vinyl-fs",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me"
},
"bugs": {
"url": "https://github.com/isaacs/node-graceful-fs/issues"
},
"bundleDependencies": false,
"dependencies": {
"natives": "^1.1.3"
},
"deprecated": false,
"description": "A drop-in replacement for fs, making various improvements.",
"devDependencies": {
"mkdirp": "^0.5.0",
"rimraf": "^2.2.8",
"tap": "^1.2.0"
},
"directories": {
"test": "test"
},
"engines": {
"node": ">=0.4.0"
},
"homepage": "https://github.com/isaacs/node-graceful-fs#readme",
"keywords": [
"fs",
"module",
"reading",
"retry",
"retries",
"queue",
"error",
"errors",
"handling",
"EMFILE",
"EAGAIN",
"EINVAL",
"EPERM",
"EACCESS"
],
"license": "ISC",
"main": "graceful-fs.js",
"name": "graceful-fs",
"publishConfig": {
"tag": "old"
},
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-graceful-fs.git"
},
"scripts": {
"test": "tap test/*.js"
},
"version": "3.0.12"
}

255
node_modules/gulp/node_modules/graceful-fs/polyfills.js generated vendored Normal file
View File

@@ -0,0 +1,255 @@
var fs = require('./fs.js')
var constants = require('constants')
var origCwd = process.cwd
var cwd = null
process.cwd = function() {
if (!cwd)
cwd = origCwd.call(process)
return cwd
}
var chdir = process.chdir
process.chdir = function(d) {
cwd = null
chdir.call(process, d)
}
// (re-)implement some things that are known busted or missing.
// lchmod, broken prior to 0.6.2
// back-port the fix here.
if (constants.hasOwnProperty('O_SYMLINK') &&
process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
fs.lchmod = function (path, mode, callback) {
callback = callback || noop
fs.open( path
, constants.O_WRONLY | constants.O_SYMLINK
, mode
, function (err, fd) {
if (err) {
callback(err)
return
}
// prefer to return the chmod error, if one occurs,
// but still try to close, and report closing errors if they occur.
fs.fchmod(fd, mode, function (err) {
fs.close(fd, function(err2) {
callback(err || err2)
})
})
})
}
fs.lchmodSync = function (path, mode) {
var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
// prefer to return the chmod error, if one occurs,
// but still try to close, and report closing errors if they occur.
var err, err2
try {
var ret = fs.fchmodSync(fd, mode)
} catch (er) {
err = er
}
try {
fs.closeSync(fd)
} catch (er) {
err2 = er
}
if (err || err2) throw (err || err2)
return ret
}
}
// lutimes implementation, or no-op
if (!fs.lutimes) {
if (constants.hasOwnProperty("O_SYMLINK")) {
fs.lutimes = function (path, at, mt, cb) {
fs.open(path, constants.O_SYMLINK, function (er, fd) {
cb = cb || noop
if (er) return cb(er)
fs.futimes(fd, at, mt, function (er) {
fs.close(fd, function (er2) {
return cb(er || er2)
})
})
})
}
fs.lutimesSync = function (path, at, mt) {
var fd = fs.openSync(path, constants.O_SYMLINK)
, err
, err2
, ret
try {
var ret = fs.futimesSync(fd, at, mt)
} catch (er) {
err = er
}
try {
fs.closeSync(fd)
} catch (er) {
err2 = er
}
if (err || err2) throw (err || err2)
return ret
}
} else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
// maybe utimensat will be bound soonish?
fs.lutimes = function (path, at, mt, cb) {
fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
}
fs.lutimesSync = function (path, at, mt) {
return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
}
} else {
fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
fs.lutimesSync = function () {}
}
}
// https://github.com/isaacs/node-graceful-fs/issues/4
// Chown should not fail on einval or eperm if non-root.
// It should not fail on enosys ever, as this just indicates
// that a fs doesn't support the intended operation.
fs.chown = chownFix(fs.chown)
fs.fchown = chownFix(fs.fchown)
fs.lchown = chownFix(fs.lchown)
fs.chmod = chownFix(fs.chmod)
fs.fchmod = chownFix(fs.fchmod)
fs.lchmod = chownFix(fs.lchmod)
fs.chownSync = chownFixSync(fs.chownSync)
fs.fchownSync = chownFixSync(fs.fchownSync)
fs.lchownSync = chownFixSync(fs.lchownSync)
fs.chmodSync = chownFix(fs.chmodSync)
fs.fchmodSync = chownFix(fs.fchmodSync)
fs.lchmodSync = chownFix(fs.lchmodSync)
function chownFix (orig) {
if (!orig) return orig
return function (target, uid, gid, cb) {
return orig.call(fs, target, uid, gid, function (er, res) {
if (chownErOk(er)) er = null
cb(er, res)
})
}
}
function chownFixSync (orig) {
if (!orig) return orig
return function (target, uid, gid) {
try {
return orig.call(fs, target, uid, gid)
} catch (er) {
if (!chownErOk(er)) throw er
}
}
}
// ENOSYS means that the fs doesn't support the op. Just ignore
// that, because it doesn't matter.
//
// if there's no getuid, or if getuid() is something other
// than 0, and the error is EINVAL or EPERM, then just ignore
// it.
//
// This specific case is a silent failure in cp, install, tar,
// and most other unix tools that manage permissions.
//
// When running as root, or if other types of errors are
// encountered, then it's strict.
function chownErOk (er) {
if (!er)
return true
if (er.code === "ENOSYS")
return true
var nonroot = !process.getuid || process.getuid() !== 0
if (nonroot) {
if (er.code === "EINVAL" || er.code === "EPERM")
return true
}
return false
}
// if lchmod/lchown do not exist, then make them no-ops
if (!fs.lchmod) {
fs.lchmod = function (path, mode, cb) {
process.nextTick(cb)
}
fs.lchmodSync = function () {}
}
if (!fs.lchown) {
fs.lchown = function (path, uid, gid, cb) {
process.nextTick(cb)
}
fs.lchownSync = function () {}
}
// on Windows, A/V software can lock the directory, causing this
// to fail with an EACCES or EPERM if the directory contains newly
// created files. Try again on failure, for up to 1 second.
if (process.platform === "win32") {
var rename_ = fs.rename
fs.rename = function rename (from, to, cb) {
var start = Date.now()
rename_(from, to, function CB (er) {
if (er
&& (er.code === "EACCES" || er.code === "EPERM")
&& Date.now() - start < 1000) {
return rename_(from, to, CB)
}
if(cb) cb(er)
})
}
}
// if read() returns EAGAIN, then just try it again.
var read = fs.read
fs.read = function (fd, buffer, offset, length, position, callback_) {
var callback
if (callback_ && typeof callback_ === 'function') {
var eagCounter = 0
callback = function (er, _, __) {
if (er && er.code === 'EAGAIN' && eagCounter < 10) {
eagCounter ++
return read.call(fs, fd, buffer, offset, length, position, callback)
}
callback_.apply(this, arguments)
}
}
return read.call(fs, fd, buffer, offset, length, position, callback)
}
var readSync = fs.readSync
fs.readSync = function (fd, buffer, offset, length, position) {
var eagCounter = 0
while (true) {
try {
return readSync.call(fs, fd, buffer, offset, length, position)
} catch (er) {
if (er.code === 'EAGAIN' && eagCounter < 10) {
eagCounter ++
continue
}
throw er
}
}
}

View File

@@ -0,0 +1,69 @@
var test = require('tap').test
var fs = require('../')
test('open lots of stuff', function (t) {
// Get around EBADF from libuv by making sure that stderr is opened
// Otherwise Darwin will refuse to give us a FD for stderr!
process.stderr.write('')
// How many parallel open()'s to do
var n = 1024
var opens = 0
var fds = []
var going = true
var closing = false
var doneCalled = 0
for (var i = 0; i < n; i++) {
go()
}
function go() {
opens++
fs.open(__filename, 'r', function (er, fd) {
if (er) throw er
fds.push(fd)
if (going) go()
})
}
// should hit ulimit pretty fast
setTimeout(function () {
going = false
t.equal(opens - fds.length, n)
done()
}, 100)
function done () {
if (closing) return
doneCalled++
if (fds.length === 0) {
console.error('done called %d times', doneCalled)
// First because of the timeout
// Then to close the fd's opened afterwards
// Then this time, to complete.
// Might take multiple passes, depending on CPU speed
// and ulimit, but at least 3 in every case.
t.ok(doneCalled >= 2)
return t.end()
}
closing = true
setTimeout(function () {
// console.error('do closing again')
closing = false
done()
}, 100)
// console.error('closing time')
var closes = fds.slice(0)
fds.length = 0
closes.forEach(function (fd) {
fs.close(fd, function (er) {
if (er) throw er
})
})
}
})

View File

@@ -0,0 +1,39 @@
var test = require('tap').test
var fs = require('../graceful-fs.js')
test('graceful fs is monkeypatched fs', function (t) {
t.equal(fs, require('../fs.js'))
t.end()
})
test('open an existing file works', function (t) {
var fd = fs.openSync(__filename, 'r')
fs.closeSync(fd)
fs.open(__filename, 'r', function (er, fd) {
if (er) throw er
fs.close(fd, function (er) {
if (er) throw er
t.pass('works')
t.end()
})
})
})
test('open a non-existing file throws', function (t) {
var er
try {
var fd = fs.openSync('this file does not exist', 'r')
} catch (x) {
er = x
}
t.ok(er, 'should throw')
t.notOk(fd, 'should not get an fd')
t.equal(er.code, 'ENOENT')
fs.open('neither does this file', 'r', function (er, fd) {
t.ok(er, 'should throw')
t.notOk(fd, 'should not get an fd')
t.equal(er.code, 'ENOENT')
t.end()
})
})

View File

@@ -0,0 +1,20 @@
var test = require("tap").test
var fs = require("../fs.js")
var readdir = fs.readdir
fs.readdir = function(path, cb) {
process.nextTick(function() {
cb(null, ["b", "z", "a"])
})
}
var g = require("../")
test("readdir reorder", function (t) {
g.readdir("whatevers", function (er, files) {
if (er)
throw er
t.same(files, [ "a", "b", "z" ])
t.end()
})
})

View File

@@ -0,0 +1,47 @@
var fs = require('../');
var rimraf = require('rimraf');
var mkdirp = require('mkdirp');
var test = require('tap').test;
var p = require('path').resolve(__dirname, 'files');
process.chdir(__dirname)
// Make sure to reserve the stderr fd
process.stderr.write('');
var num = 4097;
var paths = new Array(num);
test('make files', function (t) {
rimraf.sync(p);
mkdirp.sync(p);
for (var i = 0; i < num; ++i) {
paths[i] = 'files/file-' + i;
fs.writeFileSync(paths[i], 'content');
}
t.end();
})
test('read files', function (t) {
// now read them
var done = 0;
for (var i = 0; i < num; ++i) {
fs.readFile(paths[i], function(err, data) {
if (err)
throw err;
++done;
if (done === num) {
t.pass('success');
t.end()
}
});
}
});
test('cleanup', function (t) {
rimraf.sync(p);
t.end();
});

54
node_modules/gulp/node_modules/isarray/README.md generated vendored Normal file
View File

@@ -0,0 +1,54 @@
# isarray
`Array#isArray` for older browsers.
## Usage
```js
var isArray = require('isarray');
console.log(isArray([])); // => true
console.log(isArray({})); // => false
```
## Installation
With [npm](http://npmjs.org) do
```bash
$ npm install isarray
```
Then bundle for the browser with
[browserify](https://github.com/substack/browserify).
With [component](http://component.io) do
```bash
$ component install juliangruber/isarray
```
## License
(MIT)
Copyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

209
node_modules/gulp/node_modules/isarray/build/build.js generated vendored Normal file
View File

@@ -0,0 +1,209 @@
/**
* Require the given path.
*
* @param {String} path
* @return {Object} exports
* @api public
*/
function require(path, parent, orig) {
var resolved = require.resolve(path);
// lookup failed
if (null == resolved) {
orig = orig || path;
parent = parent || 'root';
var err = new Error('Failed to require "' + orig + '" from "' + parent + '"');
err.path = orig;
err.parent = parent;
err.require = true;
throw err;
}
var module = require.modules[resolved];
// perform real require()
// by invoking the module's
// registered function
if (!module.exports) {
module.exports = {};
module.client = module.component = true;
module.call(this, module.exports, require.relative(resolved), module);
}
return module.exports;
}
/**
* Registered modules.
*/
require.modules = {};
/**
* Registered aliases.
*/
require.aliases = {};
/**
* Resolve `path`.
*
* Lookup:
*
* - PATH/index.js
* - PATH.js
* - PATH
*
* @param {String} path
* @return {String} path or null
* @api private
*/
require.resolve = function(path) {
if (path.charAt(0) === '/') path = path.slice(1);
var index = path + '/index.js';
var paths = [
path,
path + '.js',
path + '.json',
path + '/index.js',
path + '/index.json'
];
for (var i = 0; i < paths.length; i++) {
var path = paths[i];
if (require.modules.hasOwnProperty(path)) return path;
}
if (require.aliases.hasOwnProperty(index)) {
return require.aliases[index];
}
};
/**
* Normalize `path` relative to the current path.
*
* @param {String} curr
* @param {String} path
* @return {String}
* @api private
*/
require.normalize = function(curr, path) {
var segs = [];
if ('.' != path.charAt(0)) return path;
curr = curr.split('/');
path = path.split('/');
for (var i = 0; i < path.length; ++i) {
if ('..' == path[i]) {
curr.pop();
} else if ('.' != path[i] && '' != path[i]) {
segs.push(path[i]);
}
}
return curr.concat(segs).join('/');
};
/**
* Register module at `path` with callback `definition`.
*
* @param {String} path
* @param {Function} definition
* @api private
*/
require.register = function(path, definition) {
require.modules[path] = definition;
};
/**
* Alias a module definition.
*
* @param {String} from
* @param {String} to
* @api private
*/
require.alias = function(from, to) {
if (!require.modules.hasOwnProperty(from)) {
throw new Error('Failed to alias "' + from + '", it does not exist');
}
require.aliases[to] = from;
};
/**
* Return a require function relative to the `parent` path.
*
* @param {String} parent
* @return {Function}
* @api private
*/
require.relative = function(parent) {
var p = require.normalize(parent, '..');
/**
* lastIndexOf helper.
*/
function lastIndexOf(arr, obj) {
var i = arr.length;
while (i--) {
if (arr[i] === obj) return i;
}
return -1;
}
/**
* The relative require() itself.
*/
function localRequire(path) {
var resolved = localRequire.resolve(path);
return require(resolved, parent, path);
}
/**
* Resolve relative to the parent.
*/
localRequire.resolve = function(path) {
var c = path.charAt(0);
if ('/' == c) return path.slice(1);
if ('.' == c) return require.normalize(p, path);
// resolve deps by returning
// the dep in the nearest "deps"
// directory
var segs = parent.split('/');
var i = lastIndexOf(segs, 'deps') + 1;
if (!i) i = 0;
path = segs.slice(0, i + 1).join('/') + '/deps/' + path;
return path;
};
/**
* Check if module is defined at `path`.
*/
localRequire.exists = function(path) {
return require.modules.hasOwnProperty(localRequire.resolve(path));
};
return localRequire;
};
require.register("isarray/index.js", function(exports, require, module){
module.exports = Array.isArray || function (arr) {
return Object.prototype.toString.call(arr) == '[object Array]';
};
});
require.alias("isarray/index.js", "isarray/index.js");

19
node_modules/gulp/node_modules/isarray/component.json generated vendored Normal file
View File

@@ -0,0 +1,19 @@
{
"name" : "isarray",
"description" : "Array#isArray for older browsers",
"version" : "0.0.1",
"repository" : "juliangruber/isarray",
"homepage": "https://github.com/juliangruber/isarray",
"main" : "index.js",
"scripts" : [
"index.js"
],
"dependencies" : {},
"keywords": ["browser","isarray","array"],
"author": {
"name": "Julian Gruber",
"email": "mail@juliangruber.com",
"url": "http://juliangruber.com"
},
"license": "MIT"
}

3
node_modules/gulp/node_modules/isarray/index.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
module.exports = Array.isArray || function (arr) {
return Object.prototype.toString.call(arr) == '[object Array]';
};

57
node_modules/gulp/node_modules/isarray/package.json generated vendored Normal file
View File

@@ -0,0 +1,57 @@
{
"_from": "isarray@0.0.1",
"_id": "isarray@0.0.1",
"_inBundle": false,
"_integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=",
"_location": "/gulp/isarray",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "isarray@0.0.1",
"name": "isarray",
"escapedName": "isarray",
"rawSpec": "0.0.1",
"saveSpec": null,
"fetchSpec": "0.0.1"
},
"_requiredBy": [
"/gulp/readable-stream"
],
"_resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
"_shasum": "8a18acfca9a8f4177e09abfc6038939b05d1eedf",
"_spec": "isarray@0.0.1",
"_where": "/Applications/XAMPP/xamppfiles/htdocs/wordpress/t-latehome/wp-content/plugins/opal-estate-pro/node_modules/gulp/node_modules/readable-stream",
"author": {
"name": "Julian Gruber",
"email": "mail@juliangruber.com",
"url": "http://juliangruber.com"
},
"bugs": {
"url": "https://github.com/juliangruber/isarray/issues"
},
"bundleDependencies": false,
"dependencies": {},
"deprecated": false,
"description": "Array#isArray for older browsers",
"devDependencies": {
"tap": "*"
},
"homepage": "https://github.com/juliangruber/isarray",
"keywords": [
"browser",
"isarray",
"array"
],
"license": "MIT",
"main": "index.js",
"name": "isarray",
"repository": {
"type": "git",
"url": "git://github.com/juliangruber/isarray.git"
},
"scripts": {
"test": "tap test/*.js"
},
"version": "0.0.1"
}

15
node_modules/gulp/node_modules/minimatch/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,15 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

216
node_modules/gulp/node_modules/minimatch/README.md generated vendored Normal file
View File

@@ -0,0 +1,216 @@
# minimatch
A minimal matching utility.
[![Build Status](https://secure.travis-ci.org/isaacs/minimatch.png)](http://travis-ci.org/isaacs/minimatch)
This is the matching library used internally by npm.
It works by converting glob expressions into JavaScript `RegExp`
objects.
## Usage
```javascript
var minimatch = require("minimatch")
minimatch("bar.foo", "*.foo") // true!
minimatch("bar.foo", "*.bar") // false!
minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy!
```
## Features
Supports these glob features:
* Brace Expansion
* Extended glob matching
* "Globstar" `**` matching
See:
* `man sh`
* `man bash`
* `man 3 fnmatch`
* `man 5 gitignore`
## Minimatch Class
Create a minimatch object by instantiating the `minimatch.Minimatch` class.
```javascript
var Minimatch = require("minimatch").Minimatch
var mm = new Minimatch(pattern, options)
```
### Properties
* `pattern` The original pattern the minimatch object represents.
* `options` The options supplied to the constructor.
* `set` A 2-dimensional array of regexp or string expressions.
Each row in the
array corresponds to a brace-expanded pattern. Each item in the row
corresponds to a single path-part. For example, the pattern
`{a,b/c}/d` would expand to a set of patterns like:
[ [ a, d ]
, [ b, c, d ] ]
If a portion of the pattern doesn't have any "magic" in it
(that is, it's something like `"foo"` rather than `fo*o?`), then it
will be left as a string rather than converted to a regular
expression.
* `regexp` Created by the `makeRe` method. A single regular expression
expressing the entire pattern. This is useful in cases where you wish
to use the pattern somewhat like `fnmatch(3)` with `FNM_PATHNAME` enabled.
* `negate` True if the pattern is negated.
* `comment` True if the pattern is a comment.
* `empty` True if the pattern is `""`.
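To make the `set` shape concrete, here is a small sketch (output shown approximately):
```javascript
var Minimatch = require("minimatch").Minimatch
var mm = new Minimatch("{a,b/c}/d")
console.log(mm.set)
// => [ [ 'a', 'd' ], [ 'b', 'c', 'd' ] ]
// plain strings, because no part of this pattern contains glob "magic"
```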
### Methods
* `makeRe` Generate the `regexp` member if necessary, and return it.
Will return `false` if the pattern is invalid.
* `match(fname)` Return true if the filename matches the pattern, or
false otherwise.
* `matchOne(fileArray, patternArray, partial)` Take a `/`-split
filename, and match it against a single row in the `regExpSet`. This
method is mainly for internal use, but is exposed so that it can be
used by a glob-walker that needs to avoid excessive filesystem calls.
All other methods are internal, and will be called as necessary.
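A quick sketch of the public methods in use:
```javascript
var Minimatch = require("minimatch").Minimatch
var mm = new Minimatch("*.js")
mm.match("index.js")     // true
mm.match("index.coffee") // false
mm.makeRe()              // a RegExp for the whole pattern, or false if it is invalid
```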
## Functions
The top-level exported function has a `cache` property, which is an LRU
cache set to store 100 items. So, calling these methods repeatedly
with the same pattern and options will use the same Minimatch object,
saving the cost of parsing it multiple times.
### minimatch(path, pattern, options)
Main export. Tests a path against the pattern using the options.
```javascript
var isJS = minimatch(file, "*.js", { matchBase: true })
```
### minimatch.filter(pattern, options)
Returns a function that tests its
supplied argument, suitable for use with `Array.filter`. Example:
```javascript
var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true}))
```
### minimatch.match(list, pattern, options)
Match against the list of
files, in the style of fnmatch or glob. If nothing is matched, and
options.nonull is set, then return a list containing the pattern itself.
```javascript
var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})
```
### minimatch.makeRe(pattern, options)
Make a regular expression object from the pattern.
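For example (a minimal sketch):
```javascript
var minimatch = require("minimatch")
var re = minimatch.makeRe("*.js")
re.test("index.js")  // true
re.test("lib/x.js")  // false: the regexp must match the entire path
```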
## Options
All options are `false` by default.
### debug
Dump a ton of stuff to stderr.
### nobrace
Do not expand `{a,b}` and `{1..3}` brace sets.
### noglobstar
Disable `**` matching against multiple folder names.
### dot
Allow patterns to match filenames starting with a period, even if
the pattern does not explicitly have a period in that spot.
Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`
is set.
### noext
Disable "extglob" style patterns like `+(a|b)`.
### nocase
Perform a case-insensitive match.
### nonull
When a match is not found by `minimatch.match`, return a list containing
the pattern itself if this option is set. When not set, an empty list
is returned if there are no matches.
### matchBase
If set, then patterns without slashes will be matched
against the basename of the path if it contains slashes. For example,
`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
### nocomment
Suppress the behavior of treating `#` at the start of a pattern as a
comment.
### nonegate
Suppress the behavior of treating a leading `!` character as negation.
### flipNegate
Returns from negate expressions the same as if they were not negated.
(Ie, true on a hit, false on a miss.)
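A few of these options in action (a small sketch; the results follow from the descriptions above):
```javascript
var minimatch = require("minimatch")
minimatch("a/.d/b", "a/**/b")                         // false: dotfiles are skipped by default
minimatch("a/.d/b", "a/**/b", { dot: true })          // true
minimatch("README.MD", "*.md", { nocase: true })      // true
minimatch("/xyz/123/acb", "a?b", { matchBase: true }) // true: matched against the basename
```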
## Comparisons to other fnmatch/glob implementations
While strict compliance with the existing standards is a worthwhile
goal, some discrepancies exist between minimatch and other
implementations, and are intentional.
If the pattern starts with a `!` character, then it is negated. Set the
`nonegate` flag to suppress this behavior, and treat leading `!`
characters normally. This is perhaps relevant if you wish to start the
pattern with a negative extglob pattern like `!(a|B)`. Multiple `!`
characters at the start of a pattern will negate the pattern multiple
times.
If a pattern starts with `#`, then it is treated as a comment, and
will not match anything. Use `\#` to match a literal `#` at the
start of a line, or set the `nocomment` flag to suppress this behavior.
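For example, negation, comments, and escaping behave like this (a small sketch):
```javascript
var minimatch = require("minimatch")
minimatch("bar.foo", "!*.foo")      // false: the pattern is negated
minimatch("bar.baz", "!*.foo")      // true
minimatch("anything", "#comment")   // false: a leading # makes the pattern a comment
minimatch("#comment", "\\#comment") // true: the escaped # matches literally
```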
The double-star character `**` is supported by default, unless the
`noglobstar` flag is set. This is supported in the manner of bsdglob
and bash 4.1, where `**` only has special significance if it is the only
thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
`a/**b` will not.
If an escaped pattern has no matches, and the `nonull` flag is set,
then minimatch.match returns the pattern as-provided, rather than
interpreting the character escapes. For example,
`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
`"*a?"`. This is akin to setting the `nullglob` option in bash, except
that it does not resolve escaped pattern characters.
If brace expansion is not disabled, then it is performed before any
other interpretation of the glob pattern. Thus, a pattern like
`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
checked for validity. Since those two are valid, matching proceeds.

1159
node_modules/gulp/node_modules/minimatch/browser.js generated vendored Normal file

File diff suppressed because it is too large

912
node_modules/gulp/node_modules/minimatch/minimatch.js generated vendored Normal file
View File

@@ -0,0 +1,912 @@
module.exports = minimatch
minimatch.Minimatch = Minimatch
var path = { sep: '/' }
try {
path = require('path')
} catch (er) {}
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
var expand = require('brace-expansion')
// any single thing other than /
// don't need to escape / when using new RegExp()
var qmark = '[^/]'
// * => any number of characters
var star = qmark + '*?'
// ** when dots are allowed. Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
// not a ^ or / followed by a dot,
// followed by anything, any number of times.
var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
// characters that need to be escaped in RegExp.
var reSpecials = charSet('().*{}+?[]^$\\!')
// "abc" -> { a:true, b:true, c:true }
function charSet (s) {
return s.split('').reduce(function (set, c) {
set[c] = true
return set
}, {})
}
// normalizes slashes.
var slashSplit = /\/+/
minimatch.filter = filter
function filter (pattern, options) {
options = options || {}
return function (p, i, list) {
return minimatch(p, pattern, options)
}
}
function ext (a, b) {
a = a || {}
b = b || {}
var t = {}
Object.keys(b).forEach(function (k) {
t[k] = b[k]
})
Object.keys(a).forEach(function (k) {
t[k] = a[k]
})
return t
}
minimatch.defaults = function (def) {
if (!def || !Object.keys(def).length) return minimatch
var orig = minimatch
var m = function minimatch (p, pattern, options) {
return orig.minimatch(p, pattern, ext(def, options))
}
m.Minimatch = function Minimatch (pattern, options) {
return new orig.Minimatch(pattern, ext(def, options))
}
return m
}
Minimatch.defaults = function (def) {
if (!def || !Object.keys(def).length) return Minimatch
return minimatch.defaults(def).Minimatch
}
function minimatch (p, pattern, options) {
if (typeof pattern !== 'string') {
throw new TypeError('glob pattern string required')
}
if (!options) options = {}
// shortcut: comments match nothing.
if (!options.nocomment && pattern.charAt(0) === '#') {
return false
}
// "" only matches ""
if (pattern.trim() === '') return p === ''
return new Minimatch(pattern, options).match(p)
}
function Minimatch (pattern, options) {
if (!(this instanceof Minimatch)) {
return new Minimatch(pattern, options)
}
if (typeof pattern !== 'string') {
throw new TypeError('glob pattern string required')
}
if (!options) options = {}
pattern = pattern.trim()
// windows support: need to use /, not \
if (path.sep !== '/') {
pattern = pattern.split(path.sep).join('/')
}
this.options = options
this.set = []
this.pattern = pattern
this.regexp = null
this.negate = false
this.comment = false
this.empty = false
// make the set of regexps etc.
this.make()
}
Minimatch.prototype.debug = function () {}
Minimatch.prototype.make = make
function make () {
// don't do it more than once.
if (this._made) return
var pattern = this.pattern
var options = this.options
// empty patterns and comments match nothing.
if (!options.nocomment && pattern.charAt(0) === '#') {
this.comment = true
return
}
if (!pattern) {
this.empty = true
return
}
// step 1: figure out negation, etc.
this.parseNegate()
// step 2: expand braces
var set = this.globSet = this.braceExpand()
if (options.debug) this.debug = console.error
this.debug(this.pattern, set)
// step 3: now we have a set, so turn each one into a series of path-portion
// matching patterns.
// These will be regexps, except in the case of "**", which is
// set to the GLOBSTAR object for globstar behavior,
// and will not contain any / characters
set = this.globParts = set.map(function (s) {
return s.split(slashSplit)
})
this.debug(this.pattern, set)
// glob --> regexps
set = set.map(function (s, si, set) {
return s.map(this.parse, this)
}, this)
this.debug(this.pattern, set)
// filter out everything that didn't compile properly.
set = set.filter(function (s) {
return s.indexOf(false) === -1
})
this.debug(this.pattern, set)
this.set = set
}
Minimatch.prototype.parseNegate = parseNegate
function parseNegate () {
var pattern = this.pattern
var negate = false
var options = this.options
var negateOffset = 0
if (options.nonegate) return
for (var i = 0, l = pattern.length
; i < l && pattern.charAt(i) === '!'
; i++) {
negate = !negate
negateOffset++
}
if (negateOffset) this.pattern = pattern.substr(negateOffset)
this.negate = negate
}
// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
minimatch.braceExpand = function (pattern, options) {
return braceExpand(pattern, options)
}
Minimatch.prototype.braceExpand = braceExpand
function braceExpand (pattern, options) {
if (!options) {
if (this instanceof Minimatch) {
options = this.options
} else {
options = {}
}
}
pattern = typeof pattern === 'undefined'
? this.pattern : pattern
if (typeof pattern === 'undefined') {
throw new Error('undefined pattern')
}
if (options.nobrace ||
!pattern.match(/\{.*\}/)) {
// shortcut. no need to expand.
return [pattern]
}
return expand(pattern)
}
// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
// pattern, split on '/', and then turned into a regular expression.
// A regexp is made at the end which joins each array with an
// escaped /, and another full one which joins each regexp with |.
//
// Following the lead of Bash 4.1, note that "**" only has special meaning
// when it is the *only* thing in a path portion. Otherwise, any series
// of * is equivalent to a single *. Globstar behavior is enabled by
// default, and can be disabled by setting options.noglobstar.
Minimatch.prototype.parse = parse
var SUBPARSE = {}
function parse (pattern, isSub) {
var options = this.options
// shortcuts
if (!options.noglobstar && pattern === '**') return GLOBSTAR
if (pattern === '') return ''
var re = ''
var hasMagic = !!options.nocase
var escaping = false
// ? => one single character
var patternListStack = []
var negativeLists = []
var plType
var stateChar
var inClass = false
var reClassStart = -1
var classStart = -1
// . and .. never match anything that doesn't start with .,
// even when options.dot is set.
var patternStart = pattern.charAt(0) === '.' ? '' // anything
// not (start or / followed by . or .. followed by / or end)
: options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
: '(?!\\.)'
var self = this
function clearStateChar () {
if (stateChar) {
// we had some state-tracking character
// that wasn't consumed by this pass.
switch (stateChar) {
case '*':
re += star
hasMagic = true
break
case '?':
re += qmark
hasMagic = true
break
default:
re += '\\' + stateChar
break
}
self.debug('clearStateChar %j %j', stateChar, re)
stateChar = false
}
}
for (var i = 0, len = pattern.length, c
; (i < len) && (c = pattern.charAt(i))
; i++) {
this.debug('%s\t%s %s %j', pattern, i, re, c)
// skip over any that are escaped.
if (escaping && reSpecials[c]) {
re += '\\' + c
escaping = false
continue
}
switch (c) {
case '/':
// completely not allowed, even escaped.
// Should already be path-split by now.
return false
case '\\':
clearStateChar()
escaping = true
continue
// the various stateChar values
// for the "extglob" stuff.
case '?':
case '*':
case '+':
case '@':
case '!':
this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)
// all of those are literals inside a class, except that
// the glob [!a] means [^a] in regexp
if (inClass) {
this.debug(' in class')
if (c === '!' && i === classStart + 1) c = '^'
re += c
continue
}
// if we already have a stateChar, then it means
// that there was something like ** or +? in there.
// Handle the stateChar, then proceed with this one.
self.debug('call clearStateChar %j', stateChar)
clearStateChar()
stateChar = c
// if extglob is disabled, then +(asdf|foo) isn't a thing.
// just clear the statechar *now*, rather than even diving into
// the patternList stuff.
if (options.noext) clearStateChar()
continue
case '(':
if (inClass) {
re += '('
continue
}
if (!stateChar) {
re += '\\('
continue
}
plType = stateChar
patternListStack.push({
type: plType,
start: i - 1,
reStart: re.length
})
// negation is (?:(?!js)[^/]*)
re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
this.debug('plType %j %j', stateChar, re)
stateChar = false
continue
case ')':
if (inClass || !patternListStack.length) {
re += '\\)'
continue
}
clearStateChar()
hasMagic = true
re += ')'
var pl = patternListStack.pop()
plType = pl.type
// negation is (?:(?!js)[^/]*)
// The others are (?:<pattern>)<type>
switch (plType) {
case '!':
negativeLists.push(pl)
re += ')[^/]*?)'
pl.reEnd = re.length
break
case '?':
case '+':
case '*':
re += plType
break
case '@': break // the default anyway
}
continue
case '|':
if (inClass || !patternListStack.length || escaping) {
re += '\\|'
escaping = false
continue
}
clearStateChar()
re += '|'
continue
// these are mostly the same in regexp and glob
case '[':
// swallow any state-tracking char before the [
clearStateChar()
if (inClass) {
re += '\\' + c
continue
}
inClass = true
classStart = i
reClassStart = re.length
re += c
continue
case ']':
// a right bracket shall lose its special
// meaning and represent itself in
// a bracket expression if it occurs
// first in the list. -- POSIX.2 2.8.3.2
if (i === classStart + 1 || !inClass) {
re += '\\' + c
escaping = false
continue
}
// handle the case where we left a class open.
// "[z-a]" is valid, equivalent to "\[z-a\]"
if (inClass) {
// split where the last [ was, make sure we don't have
// an invalid re. if so, re-walk the contents of the
// would-be class to re-translate any characters that
// were passed through as-is
// TODO: It would probably be faster to determine this
// without a try/catch and a new RegExp, but it's tricky
// to do safely. For now, this is safe and works.
var cs = pattern.substring(classStart + 1, i)
try {
RegExp('[' + cs + ']')
} catch (er) {
// not a valid class!
var sp = this.parse(cs, SUBPARSE)
re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
hasMagic = hasMagic || sp[1]
inClass = false
continue
}
}
// finish up the class.
hasMagic = true
inClass = false
re += c
continue
default:
// swallow any state char that wasn't consumed
clearStateChar()
if (escaping) {
// no need
escaping = false
} else if (reSpecials[c]
&& !(c === '^' && inClass)) {
re += '\\'
}
re += c
} // switch
} // for
// handle the case where we left a class open.
// "[abc" is valid, equivalent to "\[abc"
if (inClass) {
// split where the last [ was, and escape it
// this is a huge pita. We now have to re-walk
// the contents of the would-be class to re-translate
// any characters that were passed through as-is
cs = pattern.substr(classStart + 1)
sp = this.parse(cs, SUBPARSE)
re = re.substr(0, reClassStart) + '\\[' + sp[0]
hasMagic = hasMagic || sp[1]
}
// handle the case where we had a +( thing at the *end*
// of the pattern.
// each pattern list stack adds 3 chars, and we need to go through
// and escape any | chars that were passed through as-is for the regexp.
// Go through and escape them, taking care not to double-escape any
// | chars that were already escaped.
for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
var tail = re.slice(pl.reStart + 3)
// maybe some even number of \, then maybe 1 \, followed by a |
tail = tail.replace(/((?:\\{2})*)(\\?)\|/g, function (_, $1, $2) {
if (!$2) {
// the | isn't already escaped, so escape it.
$2 = '\\'
}
// need to escape all those slashes *again*, without escaping the
// one that we need for escaping the | character. As it works out,
// escaping an even number of slashes can be done by simply repeating
// it exactly after itself. That's why this trick works.
//
// I am sorry that you have to see this.
return $1 + $1 + $2 + '|'
})
this.debug('tail=%j\n %s', tail, tail)
var t = pl.type === '*' ? star
: pl.type === '?' ? qmark
: '\\' + pl.type
hasMagic = true
re = re.slice(0, pl.reStart) + t + '\\(' + tail
}
// handle trailing things that only matter at the very end.
clearStateChar()
if (escaping) {
// trailing \\
re += '\\\\'
}
// only need to apply the nodot start if the re starts with
// something that could conceivably capture a dot
var addPatternStart = false
switch (re.charAt(0)) {
case '.':
case '[':
case '(': addPatternStart = true
}
// Hack to work around lack of negative lookbehind in JS
// A pattern like: *.!(x).!(y|z) needs to ensure that a name
// like 'a.xyz.yz' doesn't match. So, the first negative
// lookahead, has to look ALL the way ahead, to the end of
// the pattern.
for (var n = negativeLists.length - 1; n > -1; n--) {
var nl = negativeLists[n]
var nlBefore = re.slice(0, nl.reStart)
var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)
var nlAfter = re.slice(nl.reEnd)
nlLast += nlAfter
// Handle nested stuff like *(*.js|!(*.json)), where open parens
// mean that we should *not* include the ) in the bit that is considered
// "after" the negated section.
var openParensBefore = nlBefore.split('(').length - 1
var cleanAfter = nlAfter
for (i = 0; i < openParensBefore; i++) {
cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
}
nlAfter = cleanAfter
var dollar = ''
if (nlAfter === '' && isSub !== SUBPARSE) {
dollar = '$'
}
var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
re = newRe
}
// if the re is not "" at this point, then we need to make sure
// it doesn't match against an empty path part.
// Otherwise a/* will match a/, which it should not.
if (re !== '' && hasMagic) {
re = '(?=.)' + re
}
if (addPatternStart) {
re = patternStart + re
}
// parsing just a piece of a larger pattern.
if (isSub === SUBPARSE) {
return [re, hasMagic]
}
// skip the regexp for non-magical patterns
// unescape anything in it, though, so that it'll be
// an exact match against a file etc.
if (!hasMagic) {
return globUnescape(pattern)
}
var flags = options.nocase ? 'i' : ''
var regExp = new RegExp('^' + re + '$', flags)
regExp._glob = pattern
regExp._src = re
return regExp
}
minimatch.makeRe = function (pattern, options) {
return new Minimatch(pattern, options || {}).makeRe()
}
Minimatch.prototype.makeRe = makeRe
function makeRe () {
if (this.regexp || this.regexp === false) return this.regexp
// at this point, this.set is a 2d array of partial
// pattern strings, or "**".
//
// It's better to use .match(). This function shouldn't
// be used, really, but it's pretty convenient sometimes,
// when you just want to work with a regex.
var set = this.set
if (!set.length) {
this.regexp = false
return this.regexp
}
var options = this.options
var twoStar = options.noglobstar ? star
: options.dot ? twoStarDot
: twoStarNoDot
var flags = options.nocase ? 'i' : ''
var re = set.map(function (pattern) {
return pattern.map(function (p) {
return (p === GLOBSTAR) ? twoStar
: (typeof p === 'string') ? regExpEscape(p)
: p._src
}).join('\\\/')
}).join('|')
// must match entire pattern
// ending in a * or ** will make it less strict.
re = '^(?:' + re + ')$'
// can match anything, as long as it's not this.
if (this.negate) re = '^(?!' + re + ').*$'
try {
this.regexp = new RegExp(re, flags)
} catch (ex) {
this.regexp = false
}
return this.regexp
}
minimatch.match = function (list, pattern, options) {
options = options || {}
var mm = new Minimatch(pattern, options)
list = list.filter(function (f) {
return mm.match(f)
})
if (mm.options.nonull && !list.length) {
list.push(pattern)
}
return list
}
Minimatch.prototype.match = match
function match (f, partial) {
this.debug('match', f, this.pattern)
// short-circuit in the case of busted things.
// comments, etc.
if (this.comment) return false
if (this.empty) return f === ''
if (f === '/' && partial) return true
var options = this.options
// windows: need to use /, not \
if (path.sep !== '/') {
f = f.split(path.sep).join('/')
}
// treat the test path as a set of pathparts.
f = f.split(slashSplit)
this.debug(this.pattern, 'split', f)
// just ONE of the pattern sets in this.set needs to match
// in order for it to be valid. If negating, then just one
// match means that we have failed.
// Either way, return on the first hit.
var set = this.set
this.debug(this.pattern, 'set', set)
// Find the basename of the path by looking for the last non-empty segment
var filename
var i
for (i = f.length - 1; i >= 0; i--) {
filename = f[i]
if (filename) break
}
for (i = 0; i < set.length; i++) {
var pattern = set[i]
var file = f
if (options.matchBase && pattern.length === 1) {
file = [filename]
}
var hit = this.matchOne(file, pattern, partial)
if (hit) {
if (options.flipNegate) return true
return !this.negate
}
}
// didn't get any hits. this is success if it's a negative
// pattern, failure otherwise.
if (options.flipNegate) return false
return this.negate
}
// set partial to true to test if, for example,
// "/a/b" matches the start of "/*/b/*/d"
// Partial means, if you run out of file before you run
// out of pattern, then that's fine, as long as all
// the parts match.
Minimatch.prototype.matchOne = function (file, pattern, partial) {
var options = this.options
this.debug('matchOne',
{ 'this': this, file: file, pattern: pattern })
this.debug('matchOne', file.length, pattern.length)
for (var fi = 0,
pi = 0,
fl = file.length,
pl = pattern.length
; (fi < fl) && (pi < pl)
; fi++, pi++) {
this.debug('matchOne loop')
var p = pattern[pi]
var f = file[fi]
this.debug(pattern, p, f)
// should be impossible.
// some invalid regexp stuff in the set.
if (p === false) return false
if (p === GLOBSTAR) {
this.debug('GLOBSTAR', [pattern, p, f])
// "**"
// a/**/b/**/c would match the following:
// a/b/x/y/z/c
// a/x/y/z/b/c
// a/b/x/b/x/c
// a/b/c
// To do this, take the rest of the pattern after
// the **, and see if it would match the file remainder.
// If so, return success.
// If not, the ** "swallows" a segment, and try again.
// This is recursively awful.
//
// a/**/b/**/c matching a/b/x/y/z/c
// - a matches a
// - doublestar
// - matchOne(b/x/y/z/c, b/**/c)
// - b matches b
// - doublestar
// - matchOne(x/y/z/c, c) -> no
// - matchOne(y/z/c, c) -> no
// - matchOne(z/c, c) -> no
// - matchOne(c, c) yes, hit
var fr = fi
var pr = pi + 1
if (pr === pl) {
this.debug('** at the end')
// a ** at the end will just swallow the rest.
// We have found a match.
// however, it will not swallow /.x, unless
// options.dot is set.
// . and .. are *never* matched by **, for explosively
// exponential reasons.
for (; fi < fl; fi++) {
if (file[fi] === '.' || file[fi] === '..' ||
(!options.dot && file[fi].charAt(0) === '.')) return false
}
return true
}
// ok, let's see if we can swallow whatever we can.
while (fr < fl) {
var swallowee = file[fr]
this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)
// XXX remove this slice. Just pass the start index.
if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
this.debug('globstar found match!', fr, fl, swallowee)
// found a match.
return true
} else {
// can't swallow "." or ".." ever.
// can only swallow ".foo" when explicitly asked.
if (swallowee === '.' || swallowee === '..' ||
(!options.dot && swallowee.charAt(0) === '.')) {
this.debug('dot detected!', file, fr, pattern, pr)
break
}
// ** swallows a segment, and continue.
this.debug('globstar swallow a segment, and continue')
fr++
}
}
// no match was found.
// However, in partial mode, we can't say this is necessarily over.
// If there's more *pattern* left, then
if (partial) {
// ran out of file
this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
if (fr === fl) return true
}
return false
}
// something other than **
// non-magic patterns just have to match exactly
// patterns with magic have been turned into regexps.
var hit
if (typeof p === 'string') {
if (options.nocase) {
hit = f.toLowerCase() === p.toLowerCase()
} else {
hit = f === p
}
this.debug('string match', p, f, hit)
} else {
hit = f.match(p)
this.debug('pattern match', p, f, hit)
}
if (!hit) return false
}
// Note: ending in / means that we'll get a final ""
// at the end of the pattern. This can only match a
// corresponding "" at the end of the file.
// If the file ends in /, then it can only match a
// a pattern that ends in /, unless the pattern just
// doesn't have any more for it. But, a/b/ should *not*
// match "a/b/*", even though "" matches against the
// [^/]*? pattern, except in partial mode, where it might
// simply not be reached yet.
// However, a/b/ should still satisfy a/*
// now either we fell off the end of the pattern, or we're done.
if (fi === fl && pi === pl) {
// ran out of pattern and filename at the same time.
// an exact hit!
return true
} else if (fi === fl) {
// ran out of file, but still had pattern left.
// this is ok if we're doing the match as part of
// a glob fs traversal.
return partial
} else if (pi === pl) {
// ran out of pattern, still have file left.
// this is only acceptable if we're on the very last
// empty segment of a file with a trailing slash.
// a/* should match a/b/
var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
return emptyFileEnd
}
// should be unreachable.
throw new Error('wtf?')
}
// replace stuff like \* with *
function globUnescape (s) {
return s.replace(/\\(.)/g, '$1')
}
function regExpEscape (s) {
return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
}

66
node_modules/gulp/node_modules/minimatch/package.json generated vendored Normal file
View File

@@ -0,0 +1,66 @@
{
"_from": "minimatch@^2.0.1",
"_id": "minimatch@2.0.10",
"_inBundle": false,
"_integrity": "sha1-jQh8OcazjAAbl/ynzm0OHoCvusc=",
"_location": "/gulp/minimatch",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "minimatch@^2.0.1",
"name": "minimatch",
"escapedName": "minimatch",
"rawSpec": "^2.0.1",
"saveSpec": null,
"fetchSpec": "^2.0.1"
},
"_requiredBy": [
"/gulp/glob",
"/gulp/glob-stream"
],
"_resolved": "https://registry.npmjs.org/minimatch/-/minimatch-2.0.10.tgz",
"_shasum": "8d087c39c6b38c001b97fca7ce6d0e1e80afbac7",
"_spec": "minimatch@^2.0.1",
"_where": "/Applications/XAMPP/xamppfiles/htdocs/wordpress/t-latehome/wp-content/plugins/opal-estate-pro/node_modules/gulp/node_modules/glob-stream",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me"
},
"bugs": {
"url": "https://github.com/isaacs/minimatch/issues"
},
"bundleDependencies": false,
"dependencies": {
"brace-expansion": "^1.0.0"
},
"deprecated": "Please update to minimatch 3.0.2 or higher to avoid a RegExp DoS issue",
"description": "a glob matcher in javascript",
"devDependencies": {
"browserify": "^9.0.3",
"standard": "^3.7.2",
"tap": "^1.2.0"
},
"engines": {
"node": "*"
},
"files": [
"minimatch.js",
"browser.js"
],
"homepage": "https://github.com/isaacs/minimatch#readme",
"license": "ISC",
"main": "minimatch.js",
"name": "minimatch",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/minimatch.git"
},
"scripts": {
"posttest": "standard minimatch.js test/*.js",
"prepublish": "browserify -o browser.js -e minimatch.js -s minimatch --bare",
"test": "tap test/*.js"
},
"version": "2.0.10"
}

8
node_modules/gulp/node_modules/minimist/.travis.yml generated vendored Normal file
View File

@@ -0,0 +1,8 @@
language: node_js
node_js:
- "0.8"
- "0.10"
- "0.12"
- "iojs"
before_install:
- npm install -g npm@~1.4.6

18
node_modules/gulp/node_modules/minimist/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,18 @@
This software is released under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,2 @@
var argv = require('../')(process.argv.slice(2));
console.dir(argv);

236
node_modules/gulp/node_modules/minimist/index.js generated vendored Normal file
View File

@@ -0,0 +1,236 @@
module.exports = function (args, opts) {
if (!opts) opts = {};
var flags = { bools : {}, strings : {}, unknownFn: null };
if (typeof opts['unknown'] === 'function') {
flags.unknownFn = opts['unknown'];
}
if (typeof opts['boolean'] === 'boolean' && opts['boolean']) {
flags.allBools = true;
} else {
[].concat(opts['boolean']).filter(Boolean).forEach(function (key) {
flags.bools[key] = true;
});
}
var aliases = {};
Object.keys(opts.alias || {}).forEach(function (key) {
aliases[key] = [].concat(opts.alias[key]);
aliases[key].forEach(function (x) {
aliases[x] = [key].concat(aliases[key].filter(function (y) {
return x !== y;
}));
});
});
[].concat(opts.string).filter(Boolean).forEach(function (key) {
flags.strings[key] = true;
if (aliases[key]) {
flags.strings[aliases[key]] = true;
}
});
var defaults = opts['default'] || {};
var argv = { _ : [] };
Object.keys(flags.bools).forEach(function (key) {
setArg(key, defaults[key] === undefined ? false : defaults[key]);
});
var notFlags = [];
if (args.indexOf('--') !== -1) {
notFlags = args.slice(args.indexOf('--')+1);
args = args.slice(0, args.indexOf('--'));
}
function argDefined(key, arg) {
return (flags.allBools && /^--[^=]+$/.test(arg)) ||
flags.strings[key] || flags.bools[key] || aliases[key];
}
function setArg (key, val, arg) {
if (arg && flags.unknownFn && !argDefined(key, arg)) {
if (flags.unknownFn(arg) === false) return;
}
var value = !flags.strings[key] && isNumber(val)
? Number(val) : val
;
setKey(argv, key.split('.'), value);
(aliases[key] || []).forEach(function (x) {
setKey(argv, x.split('.'), value);
});
}
function setKey (obj, keys, value) {
var o = obj;
keys.slice(0,-1).forEach(function (key) {
if (o[key] === undefined) o[key] = {};
o = o[key];
});
var key = keys[keys.length - 1];
if (o[key] === undefined || flags.bools[key] || typeof o[key] === 'boolean') {
o[key] = value;
}
else if (Array.isArray(o[key])) {
o[key].push(value);
}
else {
o[key] = [ o[key], value ];
}
}
function aliasIsBoolean(key) {
return aliases[key].some(function (x) {
return flags.bools[x];
});
}
for (var i = 0; i < args.length; i++) {
var arg = args[i];
if (/^--.+=/.test(arg)) {
// Using [\s\S] instead of . because js doesn't support the
// 'dotall' regex modifier. See:
// http://stackoverflow.com/a/1068308/13216
var m = arg.match(/^--([^=]+)=([\s\S]*)$/);
var key = m[1];
var value = m[2];
if (flags.bools[key]) {
value = value !== 'false';
}
setArg(key, value, arg);
}
else if (/^--no-.+/.test(arg)) {
var key = arg.match(/^--no-(.+)/)[1];
setArg(key, false, arg);
}
else if (/^--.+/.test(arg)) {
var key = arg.match(/^--(.+)/)[1];
var next = args[i + 1];
if (next !== undefined && !/^-/.test(next)
&& !flags.bools[key]
&& !flags.allBools
&& (aliases[key] ? !aliasIsBoolean(key) : true)) {
setArg(key, next, arg);
i++;
}
else if (/^(true|false)$/.test(next)) {
setArg(key, next === 'true', arg);
i++;
}
else {
setArg(key, flags.strings[key] ? '' : true, arg);
}
}
else if (/^-[^-]+/.test(arg)) {
var letters = arg.slice(1,-1).split('');
var broken = false;
for (var j = 0; j < letters.length; j++) {
var next = arg.slice(j+2);
if (next === '-') {
setArg(letters[j], next, arg)
continue;
}
if (/[A-Za-z]/.test(letters[j]) && /=/.test(next)) {
setArg(letters[j], next.split('=')[1], arg);
broken = true;
break;
}
if (/[A-Za-z]/.test(letters[j])
&& /-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) {
setArg(letters[j], next, arg);
broken = true;
break;
}
if (letters[j+1] && letters[j+1].match(/\W/)) {
setArg(letters[j], arg.slice(j+2), arg);
broken = true;
break;
}
else {
setArg(letters[j], flags.strings[letters[j]] ? '' : true, arg);
}
}
var key = arg.slice(-1)[0];
if (!broken && key !== '-') {
if (args[i+1] && !/^(-|--)[^-]/.test(args[i+1])
&& !flags.bools[key]
&& (aliases[key] ? !aliasIsBoolean(key) : true)) {
setArg(key, args[i+1], arg);
i++;
}
else if (args[i+1] && /true|false/.test(args[i+1])) {
setArg(key, args[i+1] === 'true', arg);
i++;
}
else {
setArg(key, flags.strings[key] ? '' : true, arg);
}
}
}
else {
if (!flags.unknownFn || flags.unknownFn(arg) !== false) {
argv._.push(
flags.strings['_'] || !isNumber(arg) ? arg : Number(arg)
);
}
if (opts.stopEarly) {
argv._.push.apply(argv._, args.slice(i + 1));
break;
}
}
}
Object.keys(defaults).forEach(function (key) {
if (!hasKey(argv, key.split('.'))) {
setKey(argv, key.split('.'), defaults[key]);
(aliases[key] || []).forEach(function (x) {
setKey(argv, x.split('.'), defaults[key]);
});
}
});
if (opts['--']) {
argv['--'] = new Array();
notFlags.forEach(function(key) {
argv['--'].push(key);
});
}
else {
notFlags.forEach(function(key) {
argv._.push(key);
});
}
return argv;
};
function hasKey (obj, keys) {
var o = obj;
keys.slice(0,-1).forEach(function (key) {
o = (o[key] || {});
});
var key = keys[keys.length - 1];
return key in o;
}
function isNumber (x) {
if (typeof x === 'number') return true;
if (/^0x[0-9a-f]+$/i.test(x)) return true;
return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x);
}

73
node_modules/gulp/node_modules/minimist/package.json generated vendored Normal file
View File

@@ -0,0 +1,73 @@
{
"_from": "minimist@^1.1.0",
"_id": "minimist@1.2.0",
"_inBundle": false,
"_integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=",
"_location": "/gulp/minimist",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "minimist@^1.1.0",
"name": "minimist",
"escapedName": "minimist",
"rawSpec": "^1.1.0",
"saveSpec": null,
"fetchSpec": "^1.1.0"
},
"_requiredBy": [
"/gulp"
],
"_resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
"_shasum": "a35008b20f41383eec1fb914f4cd5df79a264284",
"_spec": "minimist@^1.1.0",
"_where": "/Applications/XAMPP/xamppfiles/htdocs/wordpress/t-latehome/wp-content/plugins/opal-estate-pro/node_modules/gulp",
"author": {
"name": "James Halliday",
"email": "mail@substack.net",
"url": "http://substack.net"
},
"bugs": {
"url": "https://github.com/substack/minimist/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "parse argument options",
"devDependencies": {
"covert": "^1.0.0",
"tap": "~0.4.0",
"tape": "^3.5.0"
},
"homepage": "https://github.com/substack/minimist",
"keywords": [
"argv",
"getopt",
"parser",
"optimist"
],
"license": "MIT",
"main": "index.js",
"name": "minimist",
"repository": {
"type": "git",
"url": "git://github.com/substack/minimist.git"
},
"scripts": {
"coverage": "covert test/*.js",
"test": "tap test/*.js"
},
"testling": {
"files": "test/*.js",
"browsers": [
"ie/6..latest",
"ff/5",
"firefox/latest",
"chrome/10",
"chrome/latest",
"safari/5.1",
"safari/latest",
"opera/12"
]
},
"version": "1.2.0"
}

View File

@@ -0,0 +1,91 @@
# minimist
parse argument options
This module is the guts of optimist's argument parser without all the
fanciful decoration.
[![browser support](https://ci.testling.com/substack/minimist.png)](http://ci.testling.com/substack/minimist)
[![build status](https://secure.travis-ci.org/substack/minimist.png)](http://travis-ci.org/substack/minimist)
# example
``` js
var argv = require('minimist')(process.argv.slice(2));
console.dir(argv);
```
```
$ node example/parse.js -a beep -b boop
{ _: [], a: 'beep', b: 'boop' }
```
```
$ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz
{ _: [ 'foo', 'bar', 'baz' ],
x: 3,
y: 4,
n: 5,
a: true,
b: true,
c: true,
beep: 'boop' }
```
# methods
``` js
var parseArgs = require('minimist')
```
## var argv = parseArgs(args, opts={})
Return an argument object `argv` populated with the array arguments from `args`.
`argv._` contains all the arguments that didn't have an option associated with
them.
Numeric-looking arguments will be returned as numbers unless `opts.string` or
`opts.boolean` is set for that argument name.
Any arguments after `'--'` will not be parsed and will end up in `argv._`.
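For instance (a small sketch; output shown approximately):
``` js
var argv = require('minimist')(['--port', '8080', '--', '--not-a-flag']);
// { _: [ '--not-a-flag' ], port: 8080 }
```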
options can be:
* `opts.string` - a string or array of strings of argument names to always treat as
strings
* `opts.boolean` - a boolean, string or array of strings to always treat as
booleans. If `true`, all double-hyphenated arguments without equals signs are
treated as booleans (e.g. affects `--foo`, not `-f` or `--foo=bar`)
* `opts.alias` - an object mapping string names to strings or arrays of string
argument names to use as aliases
* `opts.default` - an object mapping string argument names to default values
* `opts.stopEarly` - when true, populate `argv._` with everything after the
first non-option
* `opts['--']` - when true, populate `argv._` with everything before the `--`
and `argv['--']` with everything after the `--`. Here's an example:
```
> require('./')('one two three -- four five --six'.split(' '), { '--': true })
{ _: [ 'one', 'two', 'three' ],
  '--': [ 'four', 'five', '--six' ] }
```
Note that with `opts['--']` set, parsing for arguments still stops after the
`--`.
* `opts.unknown` - a function which is invoked with a command line parameter not
defined in the `opts` configuration object. If the function returns `false`, the
unknown option is not added to `argv`.
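To make the options above concrete, here is a short sketch that is not part of the upstream README; the flag names (`-p`/`--port`, `--host`, `--verbose`) are invented purely for illustration:
``` js
var parseArgs = require('minimist');

// Hypothetical flags, chosen only to illustrate the options above.
var argv = parseArgs(['-p', '8080', '--host', 'example.com', '--verbose'], {
  alias: { p: 'port' },            // -p and --port populate both keys
  string: [ 'host' ],              // keep host a string even if it looks numeric
  boolean: [ 'verbose' ],          // --verbose is a flag and never consumes a value
  default: { host: 'localhost' }   // used only when --host is absent
});
// argv._ === [], argv.p === argv.port === 8080,
// argv.host === 'example.com', argv.verbose === true
```
Here `-p 8080` comes back as the number `8080` because `p` is not listed under `opts.string`, while `host` stays a string.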
# install
With [npm](https://npmjs.org) do:
```
npm install minimist
```
# license
MIT

View File

@@ -0,0 +1,32 @@
var parse = require('../');
var test = require('tape');
test('flag boolean true (default all --args to boolean)', function (t) {
var argv = parse(['moo', '--honk', 'cow'], {
boolean: true
});
t.deepEqual(argv, {
honk: true,
_: ['moo', 'cow']
});
t.deepEqual(typeof argv.honk, 'boolean');
t.end();
});
test('flag boolean true only affects double hyphen arguments without equals signs', function (t) {
var argv = parse(['moo', '--honk', 'cow', '-p', '55', '--tacos=good'], {
boolean: true
});
t.deepEqual(argv, {
honk: true,
tacos: 'good',
p: 55,
_: ['moo', 'cow']
});
t.deepEqual(typeof argv.honk, 'boolean');
t.end();
});

166
node_modules/gulp/node_modules/minimist/test/bool.js generated vendored Normal file
View File

@@ -0,0 +1,166 @@
var parse = require('../');
var test = require('tape');
test('flag boolean default false', function (t) {
var argv = parse(['moo'], {
boolean: ['t', 'verbose'],
default: { verbose: false, t: false }
});
t.deepEqual(argv, {
verbose: false,
t: false,
_: ['moo']
});
t.deepEqual(typeof argv.verbose, 'boolean');
t.deepEqual(typeof argv.t, 'boolean');
t.end();
});
test('boolean groups', function (t) {
var argv = parse([ '-x', '-z', 'one', 'two', 'three' ], {
boolean: ['x','y','z']
});
t.deepEqual(argv, {
x : true,
y : false,
z : true,
_ : [ 'one', 'two', 'three' ]
});
t.deepEqual(typeof argv.x, 'boolean');
t.deepEqual(typeof argv.y, 'boolean');
t.deepEqual(typeof argv.z, 'boolean');
t.end();
});
test('boolean and alias with chainable api', function (t) {
var aliased = [ '-h', 'derp' ];
var regular = [ '--herp', 'derp' ];
var opts = {
herp: { alias: 'h', boolean: true }
};
var aliasedArgv = parse(aliased, {
boolean: 'herp',
alias: { h: 'herp' }
});
var propertyArgv = parse(regular, {
boolean: 'herp',
alias: { h: 'herp' }
});
var expected = {
herp: true,
h: true,
'_': [ 'derp' ]
};
t.same(aliasedArgv, expected);
t.same(propertyArgv, expected);
t.end();
});
test('boolean and alias with options hash', function (t) {
var aliased = [ '-h', 'derp' ];
var regular = [ '--herp', 'derp' ];
var opts = {
alias: { 'h': 'herp' },
boolean: 'herp'
};
var aliasedArgv = parse(aliased, opts);
var propertyArgv = parse(regular, opts);
var expected = {
herp: true,
h: true,
'_': [ 'derp' ]
};
t.same(aliasedArgv, expected);
t.same(propertyArgv, expected);
t.end();
});
test('boolean and alias array with options hash', function (t) {
var aliased = [ '-h', 'derp' ];
var regular = [ '--herp', 'derp' ];
var alt = [ '--harp', 'derp' ];
var opts = {
alias: { 'h': ['herp', 'harp'] },
boolean: 'h'
};
var aliasedArgv = parse(aliased, opts);
var propertyArgv = parse(regular, opts);
var altPropertyArgv = parse(alt, opts);
var expected = {
harp: true,
herp: true,
h: true,
'_': [ 'derp' ]
};
t.same(aliasedArgv, expected);
t.same(propertyArgv, expected);
t.same(altPropertyArgv, expected);
t.end();
});
test('boolean and alias using explicit true', function (t) {
var aliased = [ '-h', 'true' ];
var regular = [ '--herp', 'true' ];
var opts = {
alias: { h: 'herp' },
boolean: 'h'
};
var aliasedArgv = parse(aliased, opts);
var propertyArgv = parse(regular, opts);
var expected = {
herp: true,
h: true,
'_': [ ]
};
t.same(aliasedArgv, expected);
t.same(propertyArgv, expected);
t.end();
});
// regression, see https://github.com/substack/node-optimist/issues/71
test('boolean and --x=true', function(t) {
var parsed = parse(['--boool', '--other=true'], {
boolean: 'boool'
});
t.same(parsed.boool, true);
t.same(parsed.other, 'true');
parsed = parse(['--boool', '--other=false'], {
boolean: 'boool'
});
t.same(parsed.boool, true);
t.same(parsed.other, 'false');
t.end();
});
test('boolean --boool=true', function (t) {
var parsed = parse(['--boool=true'], {
default: {
boool: false
},
boolean: ['boool']
});
t.same(parsed.boool, true);
t.end();
});
test('boolean --boool=false', function (t) {
var parsed = parse(['--boool=false'], {
default: {
boool: true
},
boolean: ['boool']
});
t.same(parsed.boool, false);
t.end();
});

31
node_modules/gulp/node_modules/minimist/test/dash.js generated vendored Normal file
View File

@@ -0,0 +1,31 @@
var parse = require('../');
var test = require('tape');
test('-', function (t) {
t.plan(5);
t.deepEqual(parse([ '-n', '-' ]), { n: '-', _: [] });
t.deepEqual(parse([ '-' ]), { _: [ '-' ] });
t.deepEqual(parse([ '-f-' ]), { f: '-', _: [] });
t.deepEqual(
parse([ '-b', '-' ], { boolean: 'b' }),
{ b: true, _: [ '-' ] }
);
t.deepEqual(
parse([ '-s', '-' ], { string: 's' }),
{ s: '-', _: [] }
);
});
test('-a -- b', function (t) {
t.plan(3);
t.deepEqual(parse([ '-a', '--', 'b' ]), { a: true, _: [ 'b' ] });
t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] });
t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] });
});
test('move arguments after the -- into their own `--` array', function(t) {
t.plan(1);
t.deepEqual(
parse([ '--name', 'John', 'before', '--', 'after' ], { '--': true }),
{ name: 'John', _: [ 'before' ], '--': [ 'after' ] });
});

View File

@@ -0,0 +1,35 @@
var test = require('tape');
var parse = require('../');
test('boolean default true', function (t) {
var argv = parse([], {
boolean: 'sometrue',
default: { sometrue: true }
});
t.equal(argv.sometrue, true);
t.end();
});
test('boolean default false', function (t) {
var argv = parse([], {
boolean: 'somefalse',
default: { somefalse: false }
});
t.equal(argv.somefalse, false);
t.end();
});
test('boolean default to null', function (t) {
var argv = parse([], {
boolean: 'maybe',
default: { maybe: null }
});
t.equal(argv.maybe, null);
var argv = parse(['--maybe'], {
boolean: 'maybe',
default: { maybe: null }
});
t.equal(argv.maybe, true);
t.end();
})

22
node_modules/gulp/node_modules/minimist/test/dotted.js generated vendored Normal file
View File

@@ -0,0 +1,22 @@
var parse = require('../');
var test = require('tape');
test('dotted alias', function (t) {
var argv = parse(['--a.b', '22'], {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}});
t.equal(argv.a.b, 22);
t.equal(argv.aa.bb, 22);
t.end();
});
test('dotted default', function (t) {
var argv = parse('', {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}});
t.equal(argv.a.b, 11);
t.equal(argv.aa.bb, 11);
t.end();
});
test('dotted default with no alias', function (t) {
var argv = parse('', {default: {'a.b': 11}});
t.equal(argv.a.b, 11);
t.end();
});

View File

@@ -0,0 +1,16 @@
var parse = require('../');
var test = require('tape');
test('short -k=v' , function (t) {
t.plan(1);
var argv = parse([ '-b=123' ]);
t.deepEqual(argv, { b: 123, _: [] });
});
test('multi short -k=v' , function (t) {
t.plan(1);
var argv = parse([ '-a=whatever', '-b=robots' ]);
t.deepEqual(argv, { a: 'whatever', b: 'robots', _: [] });
});

31
node_modules/gulp/node_modules/minimist/test/long.js generated vendored Normal file
View File

@@ -0,0 +1,31 @@
var test = require('tape');
var parse = require('../');
test('long opts', function (t) {
t.deepEqual(
parse([ '--bool' ]),
{ bool : true, _ : [] },
'long boolean'
);
t.deepEqual(
parse([ '--pow', 'xixxle' ]),
{ pow : 'xixxle', _ : [] },
'long capture sp'
);
t.deepEqual(
parse([ '--pow=xixxle' ]),
{ pow : 'xixxle', _ : [] },
'long capture eq'
);
t.deepEqual(
parse([ '--host', 'localhost', '--port', '555' ]),
{ host : 'localhost', port : 555, _ : [] },
'long captures sp'
);
t.deepEqual(
parse([ '--host=localhost', '--port=555' ]),
{ host : 'localhost', port : 555, _ : [] },
'long captures eq'
);
t.end();
});

36
node_modules/gulp/node_modules/minimist/test/num.js generated vendored Normal file
View File

@@ -0,0 +1,36 @@
var parse = require('../');
var test = require('tape');
test('nums', function (t) {
var argv = parse([
'-x', '1234',
'-y', '5.67',
'-z', '1e7',
'-w', '10f',
'--hex', '0xdeadbeef',
'789'
]);
t.deepEqual(argv, {
x : 1234,
y : 5.67,
z : 1e7,
w : '10f',
hex : 0xdeadbeef,
_ : [ 789 ]
});
t.deepEqual(typeof argv.x, 'number');
t.deepEqual(typeof argv.y, 'number');
t.deepEqual(typeof argv.z, 'number');
t.deepEqual(typeof argv.w, 'string');
t.deepEqual(typeof argv.hex, 'number');
t.deepEqual(typeof argv._[0], 'number');
t.end();
});
test('already a number', function (t) {
var argv = parse([ '-x', 1234, 789 ]);
t.deepEqual(argv, { x : 1234, _ : [ 789 ] });
t.deepEqual(typeof argv.x, 'number');
t.deepEqual(typeof argv._[0], 'number');
t.end();
});

197
node_modules/gulp/node_modules/minimist/test/parse.js generated vendored Normal file
View File

@@ -0,0 +1,197 @@
var parse = require('../');
var test = require('tape');
test('parse args', function (t) {
t.deepEqual(
parse([ '--no-moo' ]),
{ moo : false, _ : [] },
'no'
);
t.deepEqual(
parse([ '-v', 'a', '-v', 'b', '-v', 'c' ]),
{ v : ['a','b','c'], _ : [] },
'multi'
);
t.end();
});
test('comprehensive', function (t) {
t.deepEqual(
parse([
'--name=meowmers', 'bare', '-cats', 'woo',
'-h', 'awesome', '--multi=quux',
'--key', 'value',
'-b', '--bool', '--no-meep', '--multi=baz',
'--', '--not-a-flag', 'eek'
]),
{
c : true,
a : true,
t : true,
s : 'woo',
h : 'awesome',
b : true,
bool : true,
key : 'value',
multi : [ 'quux', 'baz' ],
meep : false,
name : 'meowmers',
_ : [ 'bare', '--not-a-flag', 'eek' ]
}
);
t.end();
});
test('flag boolean', function (t) {
var argv = parse([ '-t', 'moo' ], { boolean: 't' });
t.deepEqual(argv, { t : true, _ : [ 'moo' ] });
t.deepEqual(typeof argv.t, 'boolean');
t.end();
});
test('flag boolean value', function (t) {
var argv = parse(['--verbose', 'false', 'moo', '-t', 'true'], {
boolean: [ 't', 'verbose' ],
default: { verbose: true }
});
t.deepEqual(argv, {
verbose: false,
t: true,
_: ['moo']
});
t.deepEqual(typeof argv.verbose, 'boolean');
t.deepEqual(typeof argv.t, 'boolean');
t.end();
});
test('newlines in params' , function (t) {
var args = parse([ '-s', "X\nX" ])
t.deepEqual(args, { _ : [], s : "X\nX" });
// reproduce in bash:
// VALUE="new
// line"
// node program.js --s="$VALUE"
args = parse([ "--s=X\nX" ])
t.deepEqual(args, { _ : [], s : "X\nX" });
t.end();
});
test('strings' , function (t) {
var s = parse([ '-s', '0001234' ], { string: 's' }).s;
t.equal(s, '0001234');
t.equal(typeof s, 'string');
var x = parse([ '-x', '56' ], { string: 'x' }).x;
t.equal(x, '56');
t.equal(typeof x, 'string');
t.end();
});
test('stringArgs', function (t) {
var s = parse([ ' ', ' ' ], { string: '_' })._;
t.same(s.length, 2);
t.same(typeof s[0], 'string');
t.same(s[0], ' ');
t.same(typeof s[1], 'string');
t.same(s[1], ' ');
t.end();
});
test('empty strings', function(t) {
var s = parse([ '-s' ], { string: 's' }).s;
t.equal(s, '');
t.equal(typeof s, 'string');
var str = parse([ '--str' ], { string: 'str' }).str;
t.equal(str, '');
t.equal(typeof str, 'string');
var letters = parse([ '-art' ], {
string: [ 'a', 't' ]
});
t.equal(letters.a, '');
t.equal(letters.r, true);
t.equal(letters.t, '');
t.end();
});
test('string and alias', function(t) {
var x = parse([ '--str', '000123' ], {
string: 's',
alias: { s: 'str' }
});
t.equal(x.str, '000123');
t.equal(typeof x.str, 'string');
t.equal(x.s, '000123');
t.equal(typeof x.s, 'string');
var y = parse([ '-s', '000123' ], {
string: 'str',
alias: { str: 's' }
});
t.equal(y.str, '000123');
t.equal(typeof y.str, 'string');
t.equal(y.s, '000123');
t.equal(typeof y.s, 'string');
t.end();
});
test('slashBreak', function (t) {
t.same(
parse([ '-I/foo/bar/baz' ]),
{ I : '/foo/bar/baz', _ : [] }
);
t.same(
parse([ '-xyz/foo/bar/baz' ]),
{ x : true, y : true, z : '/foo/bar/baz', _ : [] }
);
t.end();
});
test('alias', function (t) {
var argv = parse([ '-f', '11', '--zoom', '55' ], {
alias: { z: 'zoom' }
});
t.equal(argv.zoom, 55);
t.equal(argv.z, argv.zoom);
t.equal(argv.f, 11);
t.end();
});
test('multiAlias', function (t) {
var argv = parse([ '-f', '11', '--zoom', '55' ], {
alias: { z: [ 'zm', 'zoom' ] }
});
t.equal(argv.zoom, 55);
t.equal(argv.z, argv.zoom);
t.equal(argv.z, argv.zm);
t.equal(argv.f, 11);
t.end();
});
test('nested dotted objects', function (t) {
var argv = parse([
'--foo.bar', '3', '--foo.baz', '4',
'--foo.quux.quibble', '5', '--foo.quux.o_O',
'--beep.boop'
]);
t.same(argv.foo, {
bar : 3,
baz : 4,
quux : {
quibble : 5,
o_O : true
}
});
t.same(argv.beep, { boop : true });
t.end();
});

View File

@@ -0,0 +1,9 @@
var parse = require('../');
var test = require('tape');
test('parse with modifier functions' , function (t) {
t.plan(1);
var argv = parse([ '-b', '123' ], { boolean: 'b' });
t.deepEqual(argv, { b: true, _: [123] });
});

67
node_modules/gulp/node_modules/minimist/test/short.js generated vendored Normal file
View File

@@ -0,0 +1,67 @@
var parse = require('../');
var test = require('tape');
test('numeric short args', function (t) {
t.plan(2);
t.deepEqual(parse([ '-n123' ]), { n: 123, _: [] });
t.deepEqual(
parse([ '-123', '456' ]),
{ 1: true, 2: true, 3: 456, _: [] }
);
});
test('short', function (t) {
t.deepEqual(
parse([ '-b' ]),
{ b : true, _ : [] },
'short boolean'
);
t.deepEqual(
parse([ 'foo', 'bar', 'baz' ]),
{ _ : [ 'foo', 'bar', 'baz' ] },
'bare'
);
t.deepEqual(
parse([ '-cats' ]),
{ c : true, a : true, t : true, s : true, _ : [] },
'group'
);
t.deepEqual(
parse([ '-cats', 'meow' ]),
{ c : true, a : true, t : true, s : 'meow', _ : [] },
'short group next'
);
t.deepEqual(
parse([ '-h', 'localhost' ]),
{ h : 'localhost', _ : [] },
'short capture'
);
t.deepEqual(
parse([ '-h', 'localhost', '-p', '555' ]),
{ h : 'localhost', p : 555, _ : [] },
'short captures'
);
t.end();
});
test('mixed short bool and capture', function (t) {
t.same(
parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]),
{
f : true, p : 555, h : 'localhost',
_ : [ 'script.js' ]
}
);
t.end();
});
test('short and long', function (t) {
t.deepEqual(
parse([ '-h', 'localhost', '-fp', '555', 'script.js' ]),
{
f : true, p : 555, h : 'localhost',
_ : [ 'script.js' ]
}
);
t.end();
});

View File

@@ -0,0 +1,15 @@
var parse = require('../');
var test = require('tape');
test('stops parsing on the first non-option when stopEarly is set', function (t) {
var argv = parse(['--aaa', 'bbb', 'ccc', '--ddd'], {
stopEarly: true
});
t.deepEqual(argv, {
aaa: 'bbb',
_: ['ccc', '--ddd']
});
t.end();
});

102
node_modules/gulp/node_modules/minimist/test/unknown.js generated vendored Normal file
View File

@@ -0,0 +1,102 @@
var parse = require('../');
var test = require('tape');
test('boolean and alias is not unknown', function (t) {
var unknown = [];
function unknownFn(arg) {
unknown.push(arg);
return false;
}
var aliased = [ '-h', 'true', '--derp', 'true' ];
var regular = [ '--herp', 'true', '-d', 'true' ];
var opts = {
alias: { h: 'herp' },
boolean: 'h',
unknown: unknownFn
};
var aliasedArgv = parse(aliased, opts);
var propertyArgv = parse(regular, opts);
t.same(unknown, ['--derp', '-d']);
t.end();
});
test('flag boolean true any double hyphen argument is not unknown', function (t) {
var unknown = [];
function unknownFn(arg) {
unknown.push(arg);
return false;
}
var argv = parse(['--honk', '--tacos=good', 'cow', '-p', '55'], {
boolean: true,
unknown: unknownFn
});
t.same(unknown, ['--tacos=good', 'cow', '-p']);
t.same(argv, {
honk: true,
_: []
});
t.end();
});
test('string and alias is not unknown', function (t) {
var unknown = [];
function unknownFn(arg) {
unknown.push(arg);
return false;
}
var aliased = [ '-h', 'hello', '--derp', 'goodbye' ];
var regular = [ '--herp', 'hello', '-d', 'moon' ];
var opts = {
alias: { h: 'herp' },
string: 'h',
unknown: unknownFn
};
var aliasedArgv = parse(aliased, opts);
var propertyArgv = parse(regular, opts);
t.same(unknown, ['--derp', '-d']);
t.end();
});
test('default and alias is not unknown', function (t) {
var unknown = [];
function unknownFn(arg) {
unknown.push(arg);
return false;
}
var aliased = [ '-h', 'hello' ];
var regular = [ '--herp', 'hello' ];
var opts = {
default: { 'h': 'bar' },
alias: { 'h': 'herp' },
unknown: unknownFn
};
var aliasedArgv = parse(aliased, opts);
var propertyArgv = parse(regular, opts);
t.same(unknown, []);
t.end();
unknownFn(); // exercise fn for 100% coverage
});
test('value following -- is not unknown', function (t) {
var unknown = [];
function unknownFn(arg) {
unknown.push(arg);
return false;
}
var aliased = [ '--bad', '--', 'good', 'arg' ];
var opts = {
'--': true,
unknown: unknownFn
};
var argv = parse(aliased, opts);
t.same(unknown, ['--bad']);
t.same(argv, {
'--': ['good', 'arg'],
'_': []
})
t.end();
});

View File

@@ -0,0 +1,8 @@
var parse = require('../');
var test = require('tape');
test('whitespace should be whitespace' , function (t) {
t.plan(1);
var x = parse([ '-x', '\t' ]).x;
t.equal(x, '\t');
});

View File

@@ -0,0 +1,16 @@
lib-cov
*.seed
*.log
*.csv
*.dat
*.out
*.pid
*.gz
coverage
pids
logs
results
node_modules
npm-debug.log

View File

@@ -0,0 +1,4 @@
language: node_js
node_js:
- "0.11"
- "0.10"

View File

@@ -0,0 +1,20 @@
The MIT License (MIT)
Copyright (c) 2014 Artem Medeusheyev
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,65 @@
# ordered-read-streams [![NPM version](https://badge.fury.io/js/ordered-read-streams.png)](http://badge.fury.io/js/ordered-read-streams) [![Build Status](https://travis-ci.org/armed/ordered-read-streams.png?branch=master)](https://travis-ci.org/armed/ordered-read-streams)
Combines an array of streams into one read stream in strict order.
## Installation
`npm install ordered-read-streams`
## Overview
`ordered-read-streams` handles all data/errors from the input streams in parallel, but emits data/errors in the strict order in which the streams were passed to the constructor. This is an `objectMode = true` stream.
## Example
```js
var through = require('through2');
var Ordered = require('ordered-read-streams');
var s1 = through.obj(function (data, enc, next) {
var self = this;
setTimeout(function () {
self.push(data);
next();
}, 200)
});
var s2 = through.obj(function (data, enc, next) {
var self = this;
setTimeout(function () {
self.push(data);
next();
}, 30)
});
var s3 = through.obj(function (data, enc, next) {
var self = this;
setTimeout(function () {
self.push(data);
next();
}, 100)
});
var streams = new Ordered([s1, s2, s3]);
streams.on('data', function (data) {
console.log(data);
})
s1.write('stream 1');
s1.end();
s2.write('stream 2');
s2.end();
s3.write('stream 3');
s3.end();
```
Output will be:
```
stream 1
stream 2
stream 3
```
## Licence
MIT

View File

@@ -0,0 +1,87 @@
var Readable = require('stream').Readable;
var util = require('util');
// Wire an input stream into the ordered set. `streams` is the internal queue of
// streams that still have data to emit; only the head stream may push directly,
// everything else buffers until the streams ahead of it have ended.
function addStream(streams, stream)
{
  if(!stream.readable) throw new Error('All input streams must be readable');
  if(this._readableState.ended) throw new Error('Adding streams after ended');

  var self = this;

  stream._buffer = [];

  stream.on('data', function(chunk)
  {
    // The head-of-queue stream flows straight through; later streams buffer.
    if(this === streams[0])
      self.push(chunk);
    else
      this._buffer.push(chunk);
  });

  stream.on('end', function()
  {
    // Flush every finished stream at the front of the queue, in order.
    for(var stream = streams[0];
        stream && stream._readableState.ended;
        stream = streams[0])
    {
      while(stream._buffer.length)
        self.push(stream._buffer.shift());

      streams.shift();
    }

    // All inputs are done: end the combined stream.
    if(!streams.length) self.push(null);
  });

  stream.on('error', this.emit.bind(this, 'error'));

  streams.push(stream);
}
function OrderedStreams(streams, options) {
if (!(this instanceof(OrderedStreams))) {
return new OrderedStreams(streams, options);
}
streams = streams || [];
options = options || {};
options.objectMode = true;
Readable.call(this, options);
if(!Array.isArray(streams)) streams = [streams];
if(!streams.length) return this.push(null); // no streams, close
var addStream_bind = addStream.bind(this, []);
this.concat = function()
{
Array.prototype.forEach.call(arguments, function(item)
{
if(Array.isArray(item))
item.forEach(addStream_bind);
else
addStream_bind(item);
});
};
this.concat(streams);
}
util.inherits(OrderedStreams, Readable);
OrderedStreams.prototype._read = function () {};
module.exports = OrderedStreams;

View File

@@ -0,0 +1,54 @@
{
"_from": "ordered-read-streams@^0.1.0",
"_id": "ordered-read-streams@0.1.0",
"_inBundle": false,
"_integrity": "sha1-/VZamvjrRHO6abbtijQ1LLVS8SY=",
"_location": "/gulp/ordered-read-streams",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "ordered-read-streams@^0.1.0",
"name": "ordered-read-streams",
"escapedName": "ordered-read-streams",
"rawSpec": "^0.1.0",
"saveSpec": null,
"fetchSpec": "^0.1.0"
},
"_requiredBy": [
"/gulp/glob-stream"
],
"_resolved": "https://registry.npmjs.org/ordered-read-streams/-/ordered-read-streams-0.1.0.tgz",
"_shasum": "fd565a9af8eb4473ba69b6ed8a34352cb552f126",
"_spec": "ordered-read-streams@^0.1.0",
"_where": "/Applications/XAMPP/xamppfiles/htdocs/wordpress/t-latehome/wp-content/plugins/opal-estate-pro/node_modules/gulp/node_modules/glob-stream",
"author": {
"name": "Artem Medeusheyev",
"email": "artem.medeusheyev@gmail.com"
},
"bugs": {
"url": "https://github.com/armed/ordered-read-streams/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "Combines array of streams into one read stream in strict order",
"devDependencies": {
"jshint": "~2.4.1",
"mocha": "~1.17.0",
"pre-commit": "0.0.4",
"should": "~3.0.1",
"through2": "~0.4.0"
},
"homepage": "https://github.com/armed/ordered-read-streams#readme",
"license": "MIT",
"main": "index.js",
"name": "ordered-read-streams",
"repository": {
"type": "git",
"url": "git://github.com/armed/ordered-read-streams.git"
},
"scripts": {
"test": "jshint *.js test/*.js && mocha -R spec"
},
"version": "0.1.0"
}

View File

@@ -0,0 +1,160 @@
var should = require('should');
var through = require('through2');
var OrderedStreams = require('../');
describe('ordered-read-streams', function () {
it('should end if no streams are given', function (done) {
var streams = OrderedStreams();
streams.on('data', function () {
done('error');
});
streams.on('end', done);
});
it('should throw error if one or more streams are not readable', function (done) {
var writable = { readable: false };
try {
new OrderedStreams(writable);
} catch (e) {
e.message.should.equal('All input streams must be readable');
done();
}
});
it('should emit data from all streams', function(done) {
var s1 = through.obj(function (data, enc, next) {
this.push(data);
next();
});
var s2 = through.obj(function (data, enc, next) {
this.push(data);
next();
});
var s3 = through.obj(function (data, enc, next) {
this.push(data);
next();
});
var streams = new OrderedStreams([s1, s2, s3]);
var results = [];
streams.on('data', function (data) {
results.push(data);
});
streams.on('end', function () {
results.length.should.be.exactly(3);
results[0].should.equal('stream 1');
results[1].should.equal('stream 2');
results[2].should.equal('stream 3');
done();
});
s1.write('stream 1');
s1.end();
s2.write('stream 2');
s2.end();
s3.write('stream 3');
s3.end();
});
it('should emit all data event from each stream', function (done) {
var s = through.obj(function (data, enc, next) {
this.push(data);
next();
});
var streams = new OrderedStreams(s);
var results = [];
streams.on('data', function (data) {
results.push(data);
});
streams.on('end', function () {
results.length.should.be.exactly(3);
done();
});
s.write('data1');
s.write('data2');
s.write('data3');
s.end();
});
it('should preserve streams order', function(done) {
var s1 = through.obj(function (data, enc, next) {
var self = this;
setTimeout(function () {
self.push(data);
next();
}, 200);
});
var s2 = through.obj(function (data, enc, next) {
var self = this;
setTimeout(function () {
self.push(data);
next();
}, 30);
});
var s3 = through.obj(function (data, enc, next) {
var self = this;
setTimeout(function () {
self.push(data);
next();
}, 100);
});
var streams = new OrderedStreams([s1, s2, s3]);
var results = [];
streams.on('data', function (data) {
results.push(data);
});
streams.on('end', function () {
results.length.should.be.exactly(3);
results[0].should.equal('stream 1');
results[1].should.equal('stream 2');
results[2].should.equal('stream 3');
done();
});
s1.write('stream 1');
s1.end();
s2.write('stream 2');
s2.end();
s3.write('stream 3');
s3.end();
});
it('should emit stream errors downstream', function (done) {
var s = through.obj(function (data, enc, next) {
this.emit('error', new Error('stahp!'));
next();
});
var s2 = through.obj(function (data, enc, next) {
this.push(data);
next();
});
var errMsg;
var streamData;
var streams = new OrderedStreams([s, s2]);
streams.on('data', function (data) {
streamData = data;
});
streams.on('error', function (err) {
errMsg = err.message;
});
streams.on('end', function () {
errMsg.should.equal('stahp!');
streamData.should.equal('Im ok!');
done();
});
s.write('go');
s.end();
s2.write('Im ok!');
s2.end();
});
});

View File

@@ -0,0 +1,5 @@
build/
test/
examples/
fs.js
zlib.js

18
node_modules/gulp/node_modules/readable-stream/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,18 @@
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.

View File

@@ -0,0 +1,15 @@
# readable-stream
***Node-core streams for userland***
[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/)
[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/)
This package is a mirror of the Streams2 and Streams3 implementations in Node-core.
If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries, are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core.
**readable-stream** comes in two major versions, v1.0.x and v1.1.x. The former tracks the Streams2 implementation in Node 0.10, including bug-fixes and minor improvements as they are added. The latter tracks Streams3 as it develops in Node 0.11; we will likely see a v1.2.x branch for Node 0.12.
**readable-stream** uses proper patch-level versioning, so if you pin to `"~1.0.0"` you'll get the latest Node 0.10 Streams2 implementation, including any fixes and minor non-breaking improvements. The patch-level versions of 1.0.x and 1.1.x should mirror the patch-level versions of Node-core releases. You should prefer the **1.0.x** releases for now and, when you're ready to start using Streams3, pin to `"~1.1.0"`.
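As an illustration that is not part of the upstream README, a consumer pinned to the Streams2 line might require `readable-stream` in place of the core module like this; the pushed data is made up:
``` js
// package.json excerpt (assumption: pinning to the Streams2 line):
//   "dependencies": { "readable-stream": "~1.0.0" }

var Readable = require('readable-stream').Readable;

var r = new Readable();
r._read = function () {};          // no-op; data is pushed manually below
r.on('data', function (chunk) {
  console.log(String(chunk));      // -> "hello world"
});
r.push('hello world');
r.push(null);                      // signal end of stream
```
Swapping back to the core module is then just a matter of changing the `require`, which is the point of mirroring Node-core's stream implementation.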

View File

@@ -0,0 +1 @@
module.exports = require("./lib/_stream_duplex.js")

View File

@@ -0,0 +1,89 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
module.exports = Duplex;
/*<replacement>*/
var objectKeys = Object.keys || function (obj) {
var keys = [];
for (var key in obj) keys.push(key);
return keys;
}
/*</replacement>*/
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
var Readable = require('./_stream_readable');
var Writable = require('./_stream_writable');
util.inherits(Duplex, Readable);
forEach(objectKeys(Writable.prototype), function(method) {
if (!Duplex.prototype[method])
Duplex.prototype[method] = Writable.prototype[method];
});
function Duplex(options) {
if (!(this instanceof Duplex))
return new Duplex(options);
Readable.call(this, options);
Writable.call(this, options);
if (options && options.readable === false)
this.readable = false;
if (options && options.writable === false)
this.writable = false;
this.allowHalfOpen = true;
if (options && options.allowHalfOpen === false)
this.allowHalfOpen = false;
this.once('end', onend);
}
// the no-half-open enforcer
function onend() {
// if we allow half-open state, or if the writable side ended,
// then we're ok.
if (this.allowHalfOpen || this._writableState.ended)
return;
// no more data can be written.
// But allow more writes to happen in this tick.
process.nextTick(this.end.bind(this));
}
function forEach (xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
}

View File

@@ -0,0 +1,46 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
module.exports = PassThrough;
var Transform = require('./_stream_transform');
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
util.inherits(PassThrough, Transform);
function PassThrough(options) {
if (!(this instanceof PassThrough))
return new PassThrough(options);
Transform.call(this, options);
}
PassThrough.prototype._transform = function(chunk, encoding, cb) {
cb(null, chunk);
};

View File

@@ -0,0 +1,982 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
module.exports = Readable;
/*<replacement>*/
var isArray = require('isarray');
/*</replacement>*/
/*<replacement>*/
var Buffer = require('buffer').Buffer;
/*</replacement>*/
Readable.ReadableState = ReadableState;
var EE = require('events').EventEmitter;
/*<replacement>*/
if (!EE.listenerCount) EE.listenerCount = function(emitter, type) {
return emitter.listeners(type).length;
};
/*</replacement>*/
var Stream = require('stream');
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
var StringDecoder;
util.inherits(Readable, Stream);
function ReadableState(options, stream) {
options = options || {};
// the point at which it stops calling _read() to fill the buffer
// Note: 0 is a valid value, means "don't call _read preemptively ever"
var hwm = options.highWaterMark;
this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
// cast to ints.
this.highWaterMark = ~~this.highWaterMark;
this.buffer = [];
this.length = 0;
this.pipes = null;
this.pipesCount = 0;
this.flowing = false;
this.ended = false;
this.endEmitted = false;
this.reading = false;
// In streams that never have any data, and do push(null) right away,
// the consumer can miss the 'end' event if they do some I/O before
// consuming the stream. So, we don't emit('end') until some reading
// happens.
this.calledRead = false;
// a flag to be able to tell if the onwrite cb is called immediately,
// or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
// whenever we return null, then we set a flag to say
// that we're awaiting a 'readable' event emission.
this.needReadable = false;
this.emittedReadable = false;
this.readableListening = false;
// object stream flag. Used to make read(n) ignore n and to
// make all the buffer merging and length checks go away
this.objectMode = !!options.objectMode;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
this.defaultEncoding = options.defaultEncoding || 'utf8';
// when piping, we only care about 'readable' events that happen
// after read()ing all the bytes and not getting any pushback.
this.ranOut = false;
// the number of writers that are awaiting a drain event in .pipe()s
this.awaitDrain = 0;
// if true, a maybeReadMore has been scheduled
this.readingMore = false;
this.decoder = null;
this.encoding = null;
if (options.encoding) {
if (!StringDecoder)
StringDecoder = require('string_decoder/').StringDecoder;
this.decoder = new StringDecoder(options.encoding);
this.encoding = options.encoding;
}
}
function Readable(options) {
if (!(this instanceof Readable))
return new Readable(options);
this._readableState = new ReadableState(options, this);
// legacy
this.readable = true;
Stream.call(this);
}
// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
Readable.prototype.push = function(chunk, encoding) {
var state = this._readableState;
if (typeof chunk === 'string' && !state.objectMode) {
encoding = encoding || state.defaultEncoding;
if (encoding !== state.encoding) {
chunk = new Buffer(chunk, encoding);
encoding = '';
}
}
return readableAddChunk(this, state, chunk, encoding, false);
};
// Unshift should *always* be something directly out of read()
Readable.prototype.unshift = function(chunk) {
var state = this._readableState;
return readableAddChunk(this, state, chunk, '', true);
};
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
var er = chunkInvalid(state, chunk);
if (er) {
stream.emit('error', er);
} else if (chunk === null || chunk === undefined) {
state.reading = false;
if (!state.ended)
onEofChunk(stream, state);
} else if (state.objectMode || chunk && chunk.length > 0) {
if (state.ended && !addToFront) {
var e = new Error('stream.push() after EOF');
stream.emit('error', e);
} else if (state.endEmitted && addToFront) {
var e = new Error('stream.unshift() after end event');
stream.emit('error', e);
} else {
if (state.decoder && !addToFront && !encoding)
chunk = state.decoder.write(chunk);
// update the buffer info.
state.length += state.objectMode ? 1 : chunk.length;
if (addToFront) {
state.buffer.unshift(chunk);
} else {
state.reading = false;
state.buffer.push(chunk);
}
if (state.needReadable)
emitReadable(stream);
maybeReadMore(stream, state);
}
} else if (!addToFront) {
state.reading = false;
}
return needMoreData(state);
}
// if it's past the high water mark, we can push in some more.
// Also, if we have no data yet, we can stand some
// more bytes. This is to work around cases where hwm=0,
// such as the repl. Also, if the push() triggered a
// readable event, and the user called read(largeNumber) such that
// needReadable was set, then we ought to push more, so that another
// 'readable' event will be triggered.
function needMoreData(state) {
return !state.ended &&
(state.needReadable ||
state.length < state.highWaterMark ||
state.length === 0);
}
// backwards compatibility.
Readable.prototype.setEncoding = function(enc) {
if (!StringDecoder)
StringDecoder = require('string_decoder/').StringDecoder;
this._readableState.decoder = new StringDecoder(enc);
this._readableState.encoding = enc;
};
// Don't raise the hwm > 128MB
var MAX_HWM = 0x800000;
function roundUpToNextPowerOf2(n) {
if (n >= MAX_HWM) {
n = MAX_HWM;
} else {
// Get the next highest power of 2
n--;
for (var p = 1; p < 32; p <<= 1) n |= n >> p;
n++;
}
return n;
}
function howMuchToRead(n, state) {
if (state.length === 0 && state.ended)
return 0;
if (state.objectMode)
return n === 0 ? 0 : 1;
if (n === null || isNaN(n)) {
// only flow one buffer at a time
if (state.flowing && state.buffer.length)
return state.buffer[0].length;
else
return state.length;
}
if (n <= 0)
return 0;
// If we're asking for more than the target buffer level,
// then raise the water mark. Bump up to the next highest
// power of 2, to prevent increasing it excessively in tiny
// amounts.
if (n > state.highWaterMark)
state.highWaterMark = roundUpToNextPowerOf2(n);
// don't have that much. return null, unless we've ended.
if (n > state.length) {
if (!state.ended) {
state.needReadable = true;
return 0;
} else
return state.length;
}
return n;
}
// you can override either this method, or the async _read(n) below.
Readable.prototype.read = function(n) {
var state = this._readableState;
state.calledRead = true;
var nOrig = n;
var ret;
if (typeof n !== 'number' || n > 0)
state.emittedReadable = false;
// if we're doing read(0) to trigger a readable event, but we
// already have a bunch of data in the buffer, then just trigger
// the 'readable' event and move on.
if (n === 0 &&
state.needReadable &&
(state.length >= state.highWaterMark || state.ended)) {
emitReadable(this);
return null;
}
n = howMuchToRead(n, state);
// if we've ended, and we're now clear, then finish it up.
if (n === 0 && state.ended) {
ret = null;
// In cases where the decoder did not receive enough data
// to produce a full chunk, then immediately received an
// EOF, state.buffer will contain [<Buffer >, <Buffer 00 ...>].
// howMuchToRead will see this and coerce the amount to
// read to zero (because it's looking at the length of the
// first <Buffer > in state.buffer), and we'll end up here.
//
// This can only happen via state.decoder -- no other venue
// exists for pushing a zero-length chunk into state.buffer
// and triggering this behavior. In this case, we return our
// remaining data and end the stream, if appropriate.
if (state.length > 0 && state.decoder) {
ret = fromList(n, state);
state.length -= ret.length;
}
if (state.length === 0)
endReadable(this);
return ret;
}
// All the actual chunk generation logic needs to be
// *below* the call to _read. The reason is that in certain
// synthetic stream cases, such as passthrough streams, _read
// may be a completely synchronous operation which may change
// the state of the read buffer, providing enough data when
// before there was *not* enough.
//
// So, the steps are:
// 1. Figure out what the state of things will be after we do
// a read from the buffer.
//
// 2. If that resulting state will trigger a _read, then call _read.
// Note that this may be asynchronous, or synchronous. Yes, it is
// deeply ugly to write APIs this way, but that still doesn't mean
// that the Readable class should behave improperly, as streams are
// designed to be sync/async agnostic.
// Take note if the _read call is sync or async (ie, if the read call
// has returned yet), so that we know whether or not it's safe to emit
// 'readable' etc.
//
// 3. Actually pull the requested chunks out of the buffer and return.
// if we need a readable event, then we need to do some reading.
var doRead = state.needReadable;
// if we currently have less than the highWaterMark, then also read some
if (state.length - n <= state.highWaterMark)
doRead = true;
// however, if we've ended, then there's no point, and if we're already
// reading, then it's unnecessary.
if (state.ended || state.reading)
doRead = false;
if (doRead) {
state.reading = true;
state.sync = true;
// if the length is currently zero, then we *need* a readable event.
if (state.length === 0)
state.needReadable = true;
// call internal read method
this._read(state.highWaterMark);
state.sync = false;
}
// If _read called its callback synchronously, then `reading`
// will be false, and we need to re-evaluate how much data we
// can return to the user.
if (doRead && !state.reading)
n = howMuchToRead(nOrig, state);
if (n > 0)
ret = fromList(n, state);
else
ret = null;
if (ret === null) {
state.needReadable = true;
n = 0;
}
state.length -= n;
// If we have nothing in the buffer, then we want to know
// as soon as we *do* get something into the buffer.
if (state.length === 0 && !state.ended)
state.needReadable = true;
// If we happened to read() exactly the remaining amount in the
// buffer, and the EOF has been seen at this point, then make sure
// that we emit 'end' on the very next tick.
if (state.ended && !state.endEmitted && state.length === 0)
endReadable(this);
return ret;
};
function chunkInvalid(state, chunk) {
var er = null;
if (!Buffer.isBuffer(chunk) &&
'string' !== typeof chunk &&
chunk !== null &&
chunk !== undefined &&
!state.objectMode) {
er = new TypeError('Invalid non-string/buffer chunk');
}
return er;
}
function onEofChunk(stream, state) {
if (state.decoder && !state.ended) {
var chunk = state.decoder.end();
if (chunk && chunk.length) {
state.buffer.push(chunk);
state.length += state.objectMode ? 1 : chunk.length;
}
}
state.ended = true;
// if we've ended and we have some data left, then emit
// 'readable' now to make sure it gets picked up.
if (state.length > 0)
emitReadable(stream);
else
endReadable(stream);
}
// Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow. This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
function emitReadable(stream) {
var state = stream._readableState;
state.needReadable = false;
if (state.emittedReadable)
return;
state.emittedReadable = true;
if (state.sync)
process.nextTick(function() {
emitReadable_(stream);
});
else
emitReadable_(stream);
}
function emitReadable_(stream) {
stream.emit('readable');
}
// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
function maybeReadMore(stream, state) {
if (!state.readingMore) {
state.readingMore = true;
process.nextTick(function() {
maybeReadMore_(stream, state);
});
}
}
function maybeReadMore_(stream, state) {
var len = state.length;
while (!state.reading && !state.flowing && !state.ended &&
state.length < state.highWaterMark) {
stream.read(0);
if (len === state.length)
// didn't get any data, stop spinning.
break;
else
len = state.length;
}
state.readingMore = false;
}
// abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable.prototype._read = function(n) {
this.emit('error', new Error('not implemented'));
};
Readable.prototype.pipe = function(dest, pipeOpts) {
var src = this;
var state = this._readableState;
switch (state.pipesCount) {
case 0:
state.pipes = dest;
break;
case 1:
state.pipes = [state.pipes, dest];
break;
default:
state.pipes.push(dest);
break;
}
state.pipesCount += 1;
var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
dest !== process.stdout &&
dest !== process.stderr;
var endFn = doEnd ? onend : cleanup;
if (state.endEmitted)
process.nextTick(endFn);
else
src.once('end', endFn);
dest.on('unpipe', onunpipe);
function onunpipe(readable) {
if (readable !== src) return;
cleanup();
}
function onend() {
dest.end();
}
// when the dest drains, it reduces the awaitDrain counter
// on the source. This would be more elegant with a .once()
// handler in flow(), but adding and removing repeatedly is
// too slow.
var ondrain = pipeOnDrain(src);
dest.on('drain', ondrain);
function cleanup() {
// cleanup event handlers once the pipe is broken
dest.removeListener('close', onclose);
dest.removeListener('finish', onfinish);
dest.removeListener('drain', ondrain);
dest.removeListener('error', onerror);
dest.removeListener('unpipe', onunpipe);
src.removeListener('end', onend);
src.removeListener('end', cleanup);
// if the reader is waiting for a drain event from this
// specific writer, then it would cause it to never start
// flowing again.
// So, if this is awaiting a drain, then we just call it now.
// If we don't know, then assume that we are waiting for one.
if (!dest._writableState || dest._writableState.needDrain)
ondrain();
}
// if the dest has an error, then stop piping into it.
// however, don't suppress the throwing behavior for this.
function onerror(er) {
unpipe();
dest.removeListener('error', onerror);
if (EE.listenerCount(dest, 'error') === 0)
dest.emit('error', er);
}
// This is a brutally ugly hack to make sure that our error handler
// is attached before any userland ones. NEVER DO THIS.
if (!dest._events || !dest._events.error)
dest.on('error', onerror);
else if (isArray(dest._events.error))
dest._events.error.unshift(onerror);
else
dest._events.error = [onerror, dest._events.error];
// Both close and finish should trigger unpipe, but only once.
function onclose() {
dest.removeListener('finish', onfinish);
unpipe();
}
dest.once('close', onclose);
function onfinish() {
dest.removeListener('close', onclose);
unpipe();
}
dest.once('finish', onfinish);
function unpipe() {
src.unpipe(dest);
}
// tell the dest that it's being piped to
dest.emit('pipe', src);
// start the flow if it hasn't been started already.
if (!state.flowing) {
// the handler that waits for readable events after all
// the data gets sucked out in flow.
// This would be easier to follow with a .once() handler
// in flow(), but that is too slow.
this.on('readable', pipeOnReadable);
state.flowing = true;
process.nextTick(function() {
flow(src);
});
}
return dest;
};
function pipeOnDrain(src) {
return function() {
var dest = this;
var state = src._readableState;
state.awaitDrain--;
if (state.awaitDrain === 0)
flow(src);
};
}
function flow(src) {
var state = src._readableState;
var chunk;
state.awaitDrain = 0;
function write(dest, i, list) {
var written = dest.write(chunk);
if (false === written) {
state.awaitDrain++;
}
}
while (state.pipesCount && null !== (chunk = src.read())) {
if (state.pipesCount === 1)
write(state.pipes, 0, null);
else
forEach(state.pipes, write);
src.emit('data', chunk);
// if anyone needs a drain, then we have to wait for that.
if (state.awaitDrain > 0)
return;
}
// if every destination was unpiped, either before entering this
// function, or in the while loop, then stop flowing.
//
// NB: This is a pretty rare edge case.
if (state.pipesCount === 0) {
state.flowing = false;
// if there were data event listeners added, then switch to old mode.
if (EE.listenerCount(src, 'data') > 0)
emitDataEvents(src);
return;
}
// at this point, no one needed a drain, so we just ran out of data
// on the next readable event, start it over again.
state.ranOut = true;
}
function pipeOnReadable() {
if (this._readableState.ranOut) {
this._readableState.ranOut = false;
flow(this);
}
}
Readable.prototype.unpipe = function(dest) {
var state = this._readableState;
// if we're not piping anywhere, then do nothing.
if (state.pipesCount === 0)
return this;
// just one destination. most common case.
if (state.pipesCount === 1) {
// passed in one, but it's not the right one.
if (dest && dest !== state.pipes)
return this;
if (!dest)
dest = state.pipes;
// got a match.
state.pipes = null;
state.pipesCount = 0;
this.removeListener('readable', pipeOnReadable);
state.flowing = false;
if (dest)
dest.emit('unpipe', this);
return this;
}
// slow case. multiple pipe destinations.
if (!dest) {
// remove all.
var dests = state.pipes;
var len = state.pipesCount;
state.pipes = null;
state.pipesCount = 0;
this.removeListener('readable', pipeOnReadable);
state.flowing = false;
for (var i = 0; i < len; i++)
dests[i].emit('unpipe', this);
return this;
}
// try to find the right one.
var i = indexOf(state.pipes, dest);
if (i === -1)
return this;
state.pipes.splice(i, 1);
state.pipesCount -= 1;
if (state.pipesCount === 1)
state.pipes = state.pipes[0];
dest.emit('unpipe', this);
return this;
};
// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable.prototype.on = function(ev, fn) {
var res = Stream.prototype.on.call(this, ev, fn);
if (ev === 'data' && !this._readableState.flowing)
emitDataEvents(this);
if (ev === 'readable' && this.readable) {
var state = this._readableState;
if (!state.readableListening) {
state.readableListening = true;
state.emittedReadable = false;
state.needReadable = true;
if (!state.reading) {
this.read(0);
} else if (state.length) {
emitReadable(this, state);
}
}
}
return res;
};
Readable.prototype.addListener = Readable.prototype.on;
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable.prototype.resume = function() {
emitDataEvents(this);
this.read(0);
this.emit('resume');
};
Readable.prototype.pause = function() {
emitDataEvents(this, true);
this.emit('pause');
};
function emitDataEvents(stream, startPaused) {
var state = stream._readableState;
if (state.flowing) {
// https://github.com/isaacs/readable-stream/issues/16
throw new Error('Cannot switch to old mode now.');
}
var paused = startPaused || false;
var readable = false;
// convert to an old-style stream.
stream.readable = true;
stream.pipe = Stream.prototype.pipe;
stream.on = stream.addListener = Stream.prototype.on;
stream.on('readable', function() {
readable = true;
var c;
while (!paused && (null !== (c = stream.read())))
stream.emit('data', c);
if (c === null) {
readable = false;
stream._readableState.needReadable = true;
}
});
stream.pause = function() {
paused = true;
this.emit('pause');
};
stream.resume = function() {
paused = false;
if (readable)
process.nextTick(function() {
stream.emit('readable');
});
else
this.read(0);
this.emit('resume');
};
// now make it start, just in case it hadn't already.
stream.emit('readable');
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
Readable.prototype.wrap = function(stream) {
var state = this._readableState;
var paused = false;
var self = this;
stream.on('end', function() {
if (state.decoder && !state.ended) {
var chunk = state.decoder.end();
if (chunk && chunk.length)
self.push(chunk);
}
self.push(null);
});
stream.on('data', function(chunk) {
if (state.decoder)
chunk = state.decoder.write(chunk);
// don't skip over falsy values in objectMode
//if (state.objectMode && util.isNullOrUndefined(chunk))
if (state.objectMode && (chunk === null || chunk === undefined))
return;
else if (!state.objectMode && (!chunk || !chunk.length))
return;
var ret = self.push(chunk);
if (!ret) {
paused = true;
stream.pause();
}
});
// proxy all the other methods.
// important when wrapping filters and duplexes.
for (var i in stream) {
if (typeof stream[i] === 'function' &&
typeof this[i] === 'undefined') {
this[i] = function(method) { return function() {
return stream[method].apply(stream, arguments);
}}(i);
}
}
// proxy certain important events.
var events = ['error', 'close', 'destroy', 'pause', 'resume'];
forEach(events, function(ev) {
stream.on(ev, self.emit.bind(self, ev));
});
// when we try to consume some more bytes, simply unpause the
// underlying stream.
self._read = function(n) {
if (paused) {
paused = false;
stream.resume();
}
};
return self;
};
// exposed for testing purposes only.
Readable._fromList = fromList;
// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
function fromList(n, state) {
var list = state.buffer;
var length = state.length;
var stringMode = !!state.decoder;
var objectMode = !!state.objectMode;
var ret;
// nothing in the list, definitely empty.
if (list.length === 0)
return null;
if (length === 0)
ret = null;
else if (objectMode)
ret = list.shift();
else if (!n || n >= length) {
// read it all, truncate the array.
if (stringMode)
ret = list.join('');
else
ret = Buffer.concat(list, length);
list.length = 0;
} else {
// read just some of it.
if (n < list[0].length) {
// just take a part of the first list item.
// slice is the same for buffers and strings.
var buf = list[0];
ret = buf.slice(0, n);
list[0] = buf.slice(n);
} else if (n === list[0].length) {
// first list is a perfect match
ret = list.shift();
} else {
// complex case.
// we have enough to cover it, but it spans past the first buffer.
if (stringMode)
ret = '';
else
ret = new Buffer(n);
var c = 0;
for (var i = 0, l = list.length; i < l && c < n; i++) {
var buf = list[0];
var cpy = Math.min(n - c, buf.length);
if (stringMode)
ret += buf.slice(0, cpy);
else
buf.copy(ret, c, 0, cpy);
if (cpy < buf.length)
list[0] = buf.slice(cpy);
else
list.shift();
c += cpy;
}
}
}
return ret;
}
function endReadable(stream) {
var state = stream._readableState;
// If we get here before consuming all the bytes, then that is a
// bug in node. Should never happen.
if (state.length > 0)
throw new Error('endReadable called on non-empty stream');
if (!state.endEmitted && state.calledRead) {
state.ended = true;
process.nextTick(function() {
// Check that we didn't get one last unshift.
if (!state.endEmitted && state.length === 0) {
state.endEmitted = true;
stream.readable = false;
stream.emit('end');
}
});
}
}
function forEach (xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
}
function indexOf (xs, x) {
for (var i = 0, l = xs.length; i < l; i++) {
if (xs[i] === x) return i;
}
return -1;
}

View File

@@ -0,0 +1,210 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there are a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
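// Illustrative sketch (not part of readable-stream): the typical way to
// drive a Transform is to pipe through it, so the destination's demand is
// what pulls written chunks through _transform(). The parameter names
// below are hypothetical.
function pipeThrough(source, transform, destination) {
  // back-pressure flows from destination to transform to source.
  return source.pipe(transform).pipe(destination);
}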
module.exports = Transform;
var Duplex = require('./_stream_duplex');
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
util.inherits(Transform, Duplex);
function TransformState(options, stream) {
this.afterTransform = function(er, data) {
return afterTransform(stream, er, data);
};
this.needTransform = false;
this.transforming = false;
this.writecb = null;
this.writechunk = null;
}
function afterTransform(stream, er, data) {
var ts = stream._transformState;
ts.transforming = false;
var cb = ts.writecb;
if (!cb)
return stream.emit('error', new Error('no writecb in Transform class'));
ts.writechunk = null;
ts.writecb = null;
if (data !== null && data !== undefined)
stream.push(data);
if (cb)
cb(er);
var rs = stream._readableState;
rs.reading = false;
if (rs.needReadable || rs.length < rs.highWaterMark) {
stream._read(rs.highWaterMark);
}
}
function Transform(options) {
if (!(this instanceof Transform))
return new Transform(options);
Duplex.call(this, options);
var ts = this._transformState = new TransformState(options, this);
// when the writable side finishes, then flush out anything remaining.
var stream = this;
// start out asking for a readable event once data is transformed.
this._readableState.needReadable = true;
// we have implemented the _read method, and done the other things
// that Readable wants before the first _read call, so unset the
// sync guard flag.
this._readableState.sync = false;
this.once('finish', function() {
if ('function' === typeof this._flush)
this._flush(function(er) {
done(stream, er);
});
else
done(stream);
});
}
Transform.prototype.push = function(chunk, encoding) {
this._transformState.needTransform = false;
return Duplex.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function(chunk, encoding, cb) {
throw new Error('not implemented');
};
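// Illustrative sketch (not part of readable-stream): a minimal Transform
// subclass that upper-cases text chunks. `UpperCase` is a hypothetical
// name; `Transform` and `util` are the ones defined above in this file.
function UpperCase(options) {
  if (!(this instanceof UpperCase))
    return new UpperCase(options);
  Transform.call(this, options);
}
util.inherits(UpperCase, Transform);
UpperCase.prototype._transform = function(chunk, encoding, cb) {
  // push transformed output to the readable side, then signal that this
  // chunk has been fully consumed.
  this.push(chunk.toString().toUpperCase());
  cb();
};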
Transform.prototype._write = function(chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
if (ts.needTransform ||
rs.needReadable ||
rs.length < rs.highWaterMark)
this._read(rs.highWaterMark);
}
};
// Doesn't matter what the args are here.
// _transform does all the work.
// Getting here means that the readable side wants more data.
Transform.prototype._read = function(n) {
var ts = this._transformState;
if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
ts.transforming = true;
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
} else {
// mark that we need a transform, so that any data that comes in
// will get processed, now that we've asked for it.
ts.needTransform = true;
}
};
function done(stream, er) {
if (er)
return stream.emit('error', er);
// if there's nothing in the write buffer, then that means
// that nothing more will ever be provided
var ws = stream._writableState;
var rs = stream._readableState;
var ts = stream._transformState;
if (ws.length)
throw new Error('calling transform done when ws.length != 0');
if (ts.transforming)
throw new Error('calling transform done when still transforming');
return stream.push(null);
}

View File

@@ -0,0 +1,386 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// A bit simpler than readable streams.
// Implement an async ._write(chunk, cb), and it'll handle all
// the drain event emission and buffering.
module.exports = Writable;
/*<replacement>*/
var Buffer = require('buffer').Buffer;
/*</replacement>*/
Writable.WritableState = WritableState;
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
var Stream = require('stream');
util.inherits(Writable, Stream);
function WriteReq(chunk, encoding, cb) {
this.chunk = chunk;
this.encoding = encoding;
this.callback = cb;
}
function WritableState(options, stream) {
options = options || {};
// the point at which write() starts returning false
// Note: 0 is a valid value, means that we always return false if
// the entire buffer is not flushed immediately on write()
var hwm = options.highWaterMark;
this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
// object stream flag to indicate whether or not this stream
// contains buffers or objects.
this.objectMode = !!options.objectMode;
// cast to ints.
this.highWaterMark = ~~this.highWaterMark;
this.needDrain = false;
// at the start of calling end()
this.ending = false;
// when end() has been called, and returned
this.ended = false;
// when 'finish' is emitted
this.finished = false;
// should we decode strings into buffers before passing to _write?
// this is here so that some node-core streams can optimize string
// handling at a lower level.
var noDecode = options.decodeStrings === false;
this.decodeStrings = !noDecode;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
this.defaultEncoding = options.defaultEncoding || 'utf8';
// not an actual buffer we keep track of, but a measurement
// of how much we're waiting to get pushed to some underlying
// socket or file.
this.length = 0;
// a flag to see when we're in the middle of a write.
this.writing = false;
// a flag to be able to tell if the onwrite cb is called immediately,
// or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
// a flag to know if we're processing previously buffered items, which
// may call the _write() callback in the same tick, so that we don't
// end up in an overlapped onwrite situation.
this.bufferProcessing = false;
// the callback that's passed to _write(chunk,cb)
this.onwrite = function(er) {
onwrite(stream, er);
};
// the callback that the user supplies to write(chunk,encoding,cb)
this.writecb = null;
// the amount that is being written when _write is called.
this.writelen = 0;
this.buffer = [];
// True if the error was already emitted and should not be thrown again
this.errorEmitted = false;
}
function Writable(options) {
var Duplex = require('./_stream_duplex');
// Writable ctor is applied to Duplexes, though they're not
// instanceof Writable; they're instanceof Readable.
if (!(this instanceof Writable) && !(this instanceof Duplex))
return new Writable(options);
this._writableState = new WritableState(options, this);
// legacy.
this.writable = true;
Stream.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
Writable.prototype.pipe = function() {
this.emit('error', new Error('Cannot pipe. Not readable.'));
};
function writeAfterEnd(stream, state, cb) {
var er = new Error('write after end');
// TODO: defer error events consistently everywhere, not just the cb
stream.emit('error', er);
process.nextTick(function() {
cb(er);
});
}
// If we get something that is not a buffer, string, null, or undefined,
// and we're not in objectMode, then that's an error.
// Otherwise stream chunks are all considered to be of length=1, and the
// watermarks determine how many objects to keep in the buffer, rather than
// how many bytes or characters.
function validChunk(stream, state, chunk, cb) {
var valid = true;
if (!Buffer.isBuffer(chunk) &&
'string' !== typeof chunk &&
chunk !== null &&
chunk !== undefined &&
!state.objectMode) {
var er = new TypeError('Invalid non-string/buffer chunk');
stream.emit('error', er);
process.nextTick(function() {
cb(er);
});
valid = false;
}
return valid;
}
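// Illustrative sketch (not part of readable-stream): in objectMode every
// chunk counts as length 1, so highWaterMark is a number of objects, not
// bytes. `demoObjectMode` and its locals are hypothetical names.
function demoObjectMode() {
  var objWriter = new Writable({ objectMode: true, highWaterMark: 2 });
  objWriter._write = function(obj, encoding, cb) { process.nextTick(cb); };
  objWriter.write({ n: 1 });         // true: one object buffered
  return objWriter.write({ n: 2 });  // false: watermark of 2 reached
}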
Writable.prototype.write = function(chunk, encoding, cb) {
var state = this._writableState;
var ret = false;
if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (Buffer.isBuffer(chunk))
encoding = 'buffer';
else if (!encoding)
encoding = state.defaultEncoding;
if (typeof cb !== 'function')
cb = function() {};
if (state.ended)
writeAfterEnd(this, state, cb);
else if (validChunk(this, state, chunk, cb))
ret = writeOrBuffer(this, state, chunk, encoding, cb);
return ret;
};
function decodeChunk(state, chunk, encoding) {
if (!state.objectMode &&
state.decodeStrings !== false &&
typeof chunk === 'string') {
chunk = new Buffer(chunk, encoding);
}
return chunk;
}
// if we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, chunk, encoding, cb) {
chunk = decodeChunk(state, chunk, encoding);
if (Buffer.isBuffer(chunk))
encoding = 'buffer';
var len = state.objectMode ? 1 : chunk.length;
state.length += len;
var ret = state.length < state.highWaterMark;
// we must ensure that previous needDrain will not be reset to false.
if (!ret)
state.needDrain = true;
if (state.writing)
state.buffer.push(new WriteReq(chunk, encoding, cb));
else
doWrite(stream, state, len, chunk, encoding, cb);
return ret;
}
function doWrite(stream, state, len, chunk, encoding, cb) {
state.writelen = len;
state.writecb = cb;
state.writing = true;
state.sync = true;
stream._write(chunk, encoding, state.onwrite);
state.sync = false;
}
function onwriteError(stream, state, sync, er, cb) {
if (sync)
process.nextTick(function() {
cb(er);
});
else
cb(er);
stream._writableState.errorEmitted = true;
stream.emit('error', er);
}
function onwriteStateUpdate(state) {
state.writing = false;
state.writecb = null;
state.length -= state.writelen;
state.writelen = 0;
}
function onwrite(stream, er) {
var state = stream._writableState;
var sync = state.sync;
var cb = state.writecb;
onwriteStateUpdate(state);
if (er)
onwriteError(stream, state, sync, er, cb);
else {
// Check if we're actually ready to finish, but don't emit yet
var finished = needFinish(stream, state);
if (!finished && !state.bufferProcessing && state.buffer.length)
clearBuffer(stream, state);
if (sync) {
process.nextTick(function() {
afterWrite(stream, state, finished, cb);
});
} else {
afterWrite(stream, state, finished, cb);
}
}
}
function afterWrite(stream, state, finished, cb) {
if (!finished)
onwriteDrain(stream, state);
cb();
if (finished)
finishMaybe(stream, state);
}
// Must force callback to be called on nextTick, so that we don't
// emit 'drain' before the write() consumer gets the 'false' return
// value, and has a chance to attach a 'drain' listener.
function onwriteDrain(stream, state) {
if (state.length === 0 && state.needDrain) {
state.needDrain = false;
stream.emit('drain');
}
}
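// Illustrative sketch (not part of readable-stream): the consumer-side
// pattern that the nextTick rule above protects. When write() returns
// false, stop writing and continue only after 'drain'. `writeMany` and
// its parameters are hypothetical names.
function writeMany(writable, chunks, done) {
  var i = 0;
  function next() {
    while (i < chunks.length) {
      // write() returns false once the buffered length passes the
      // high-water mark; resume only after 'drain' fires.
      if (!writable.write(chunks[i++])) {
        writable.once('drain', next);
        return;
      }
    }
    done();
  }
  next();
}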
// if there's something in the buffer waiting, then process it
function clearBuffer(stream, state) {
state.bufferProcessing = true;
for (var c = 0; c < state.buffer.length; c++) {
var entry = state.buffer[c];
var chunk = entry.chunk;
var encoding = entry.encoding;
var cb = entry.callback;
var len = state.objectMode ? 1 : chunk.length;
doWrite(stream, state, len, chunk, encoding, cb);
// if we didn't call the onwrite immediately, then
// it means that we need to wait until it does.
// also, that means that the chunk and cb are currently
// being processed, so move the buffer counter past them.
if (state.writing) {
c++;
break;
}
}
state.bufferProcessing = false;
if (c < state.buffer.length)
state.buffer = state.buffer.slice(c);
else
state.buffer.length = 0;
}
Writable.prototype._write = function(chunk, encoding, cb) {
cb(new Error('not implemented'));
};
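// Illustrative sketch (not part of readable-stream): the smallest useful
// Writable subclass. Supplying _write() is all that is required; this
// module handles the buffering and 'drain' emission. `NullWriter` is a
// hypothetical name.
function NullWriter(options) {
  if (!(this instanceof NullWriter))
    return new NullWriter(options);
  Writable.call(this, options);
}
require('util').inherits(NullWriter, Writable);
NullWriter.prototype._write = function(chunk, encoding, cb) {
  // discard the chunk and report completion asynchronously.
  process.nextTick(cb);
};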
Writable.prototype.end = function(chunk, encoding, cb) {
var state = this._writableState;
if (typeof chunk === 'function') {
cb = chunk;
chunk = null;
encoding = null;
} else if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (typeof chunk !== 'undefined' && chunk !== null)
this.write(chunk, encoding);
// ignore unnecessary end() calls.
if (!state.ending && !state.finished)
endWritable(this, state, cb);
};
function needFinish(stream, state) {
return (state.ending &&
state.length === 0 &&
!state.finished &&
!state.writing);
}
function finishMaybe(stream, state) {
var need = needFinish(stream, state);
if (need) {
state.finished = true;
stream.emit('finish');
}
return need;
}
function endWritable(stream, state, cb) {
state.ending = true;
finishMaybe(stream, state);
if (cb) {
if (state.finished)
process.nextTick(cb);
else
stream.once('finish', cb);
}
state.ended = true;
}

View File

@@ -0,0 +1,65 @@
{
"_from": "readable-stream@>=1.0.33-1 <1.1.0-0",
"_id": "readable-stream@1.0.34",
"_inBundle": false,
"_integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=",
"_location": "/gulp/readable-stream",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "readable-stream@>=1.0.33-1 <1.1.0-0",
"name": "readable-stream",
"escapedName": "readable-stream",
"rawSpec": ">=1.0.33-1 <1.1.0-0",
"saveSpec": null,
"fetchSpec": ">=1.0.33-1 <1.1.0-0"
},
"_requiredBy": [
"/gulp/through2"
],
"_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz",
"_shasum": "125820e34bc842d2f2aaafafe4c2916ee32c157c",
"_spec": "readable-stream@>=1.0.33-1 <1.1.0-0",
"_where": "/Applications/XAMPP/xamppfiles/htdocs/wordpress/t-latehome/wp-content/plugins/opal-estate-pro/node_modules/gulp/node_modules/through2",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"browser": {
"util": false
},
"bugs": {
"url": "https://github.com/isaacs/readable-stream/issues"
},
"bundleDependencies": false,
"dependencies": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.1",
"isarray": "0.0.1",
"string_decoder": "~0.10.x"
},
"deprecated": false,
"description": "Streams2, a user-land copy of the stream library from Node.js v0.10.x",
"devDependencies": {
"tap": "~0.2.6"
},
"homepage": "https://github.com/isaacs/readable-stream#readme",
"keywords": [
"readable",
"stream",
"pipe"
],
"license": "MIT",
"main": "readable.js",
"name": "readable-stream",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/readable-stream.git"
},
"scripts": {
"test": "tap test/simple/*.js"
},
"version": "1.0.34"
}

View File

@@ -0,0 +1 @@
module.exports = require("./lib/_stream_passthrough.js")

View File

@@ -0,0 +1,11 @@
var Stream = require('stream'); // hack to fix a circular dependency issue when used with browserify
exports = module.exports = require('./lib/_stream_readable.js');
exports.Stream = Stream;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');
if (!process.browser && process.env.READABLE_STREAM === 'disable') {
module.exports = require('stream');
}
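// Illustrative consumer usage (not part of this file): the individual
// classes hang off the main export, e.g.
//   var Readable  = require('readable-stream').Readable;
//   var Transform = require('readable-stream').Transform;
// Setting the environment variable READABLE_STREAM=disable (outside the
// browser) makes this package hand back Node's built-in stream module.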

Some files were not shown because too many files have changed in this diff.