1.4.1: Directory Structure Creation

parent 25d622ec58
commit bed4240acb

360 changed files with 111598 additions and 0 deletions
.gitignore (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
# Built/minified assets
assets/**/*.min.css
assets/**/*.min.js

# Distribution directory
assets/dist/

# Temporary files
*.tmp
*.bak

# OS-specific files
.DS_Store
Thumbs.db
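These ignore rules assume a build step that writes minified copies next to the sources and a distributable bundle under assets/dist/. Below is a minimal sketch of such a step using the csso and uglifyjs wrappers vendored later in this commit; the asset file names are placeholders, not paths taken from the repository.

```sh
# Hypothetical build step; theater.css and player.js are placeholder names.
# The outputs land on the ignored *.min.* patterns and in assets/dist/.
./node_modules/.bin/csso assets/css/theater.css --output assets/css/theater.min.css
./node_modules/.bin/uglifyjs assets/js/player.js --compress --mangle --output assets/js/player.min.js
mkdir -p assets/dist
cp assets/css/*.min.css assets/js/*.min.js assets/dist/
```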
node_modules/.bin/csso (generated, vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*|*MINGW*|*MSYS*)
        if command -v cygpath > /dev/null 2>&1; then
            basedir=`cygpath -w "$basedir"`
        fi
    ;;
esac

if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../csso-cli/bin/csso" "$@"
else
  exec node "$basedir/../csso-cli/bin/csso" "$@"
fi
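The wrapper above prefers a node binary sitting next to it (rewriting the path for Cygwin/MSYS shells) and otherwise falls back to whatever `node` is on PATH, then hands every argument to csso-cli's real entry point. It is usually reached indirectly, since npm puts node_modules/.bin on PATH for run-scripts; a small sketch, with a placeholder stylesheet name:

```sh
# Call the shim directly...
./node_modules/.bin/csso --version

# ...or let npx / an npm run-script resolve it from node_modules/.bin
npx csso assets/css/site.css --output assets/css/site.min.css
```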
node_modules/.bin/csso.cmd (generated, vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0

IF EXIST "%dp0%\node.exe" (
  SET "_prog=%dp0%\node.exe"
) ELSE (
  SET "_prog=node"
  SET PATHEXT=%PATHEXT:;.JS;=;%
)

endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\csso-cli\bin\csso" %*
node_modules/.bin/csso.ps1 (generated, vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent

$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
  # Fix case when both the Windows and Linux builds of Node
  # are installed in the same directory
  $exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "$basedir/node$exe" "$basedir/../csso-cli/bin/csso" $args
  } else {
    & "$basedir/node$exe" "$basedir/../csso-cli/bin/csso" $args
  }
  $ret=$LASTEXITCODE
} else {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "node$exe" "$basedir/../csso-cli/bin/csso" $args
  } else {
    & "node$exe" "$basedir/../csso-cli/bin/csso" $args
  }
  $ret=$LASTEXITCODE
}
exit $ret
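All three launchers (sh, .cmd, .ps1) delegate to the same csso-cli/bin/csso script; the PowerShell one additionally forwards `$input` when it detects pipeline input. The POSIX shim gets the same effect for free because `exec` keeps stdin attached, and csso-cli reads from stdin when no input file is given. A sketch under those assumptions, with a placeholder file name:

```sh
# Pipe CSS through the minifier instead of naming an input file
cat assets/css/style.css | ./node_modules/.bin/csso > assets/css/style.min.css
```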
node_modules/.bin/uglifyjs (generated, vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")

case `uname` in
    *CYGWIN*|*MINGW*|*MSYS*)
        if command -v cygpath > /dev/null 2>&1; then
            basedir=`cygpath -w "$basedir"`
        fi
    ;;
esac

if [ -x "$basedir/node" ]; then
  exec "$basedir/node" "$basedir/../uglify-js/bin/uglifyjs" "$@"
else
  exec node "$basedir/../uglify-js/bin/uglifyjs" "$@"
fi
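This is the same launcher template as the csso shim, pointed at uglify-js/bin/uglifyjs instead. For reference, a typical call through it; the script names are placeholders, while `--compress`, `--mangle`, and `--output` are standard uglify-js flags:

```sh
# Concatenate and minify several inputs into one ignored bundle
./node_modules/.bin/uglifyjs assets/js/player.js assets/js/overlay.js \
  --compress --mangle \
  --output assets/js/app.min.js
```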
node_modules/.bin/uglifyjs.cmd (generated, vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0

IF EXIST "%dp0%\node.exe" (
  SET "_prog=%dp0%\node.exe"
) ELSE (
  SET "_prog=node"
  SET PATHEXT=%PATHEXT:;.JS;=;%
)

endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\uglify-js\bin\uglifyjs" %*
node_modules/.bin/uglifyjs.ps1 (generated, vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent

$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
  # Fix case when both the Windows and Linux builds of Node
  # are installed in the same directory
  $exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "$basedir/node$exe" "$basedir/../uglify-js/bin/uglifyjs" $args
  } else {
    & "$basedir/node$exe" "$basedir/../uglify-js/bin/uglifyjs" $args
  }
  $ret=$LASTEXITCODE
} else {
  # Support pipeline input
  if ($MyInvocation.ExpectingInput) {
    $input | & "node$exe" "$basedir/../uglify-js/bin/uglifyjs" $args
  } else {
    & "node$exe" "$basedir/../uglify-js/bin/uglifyjs" $args
  }
  $ret=$LASTEXITCODE
}
exit $ret
node_modules/.package-lock.json (generated, vendored, new file, 382 lines)
@@ -0,0 +1,382 @@
{
|
||||
"name": "dodgers-stream-theater",
|
||||
"version": "1.4.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"node_modules/ansi-regex": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
|
||||
"integrity": "sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/ansi-styles": {
|
||||
"version": "2.2.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz",
|
||||
"integrity": "sha512-kmCevFghRiWM7HB5zTPULl4r9bVFSWjz62MhqizDGUrq2NWuNMQyuv4tHHoKJHs69M/MF64lEcHdYIocrdWQYA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/anymatch": {
|
||||
"version": "3.1.3",
|
||||
"resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
|
||||
"integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"normalize-path": "^3.0.0",
|
||||
"picomatch": "^2.0.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
}
|
||||
},
|
||||
"node_modules/binary-extensions": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
|
||||
"integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/braces": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
|
||||
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"fill-range": "^7.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/chalk": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz",
|
||||
"integrity": "sha512-U3lRVLMSlsCfjqYPbLyVv11M9CPW4I728d6TCKMAOJueEeB9/8o+eSsMnxPJD+Q+K909sdESg7C+tIkoH6on1A==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ansi-styles": "^2.2.1",
|
||||
"escape-string-regexp": "^1.0.2",
|
||||
"has-ansi": "^2.0.0",
|
||||
"strip-ansi": "^3.0.0",
|
||||
"supports-color": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/chokidar": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz",
|
||||
"integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"anymatch": "~3.1.2",
|
||||
"braces": "~3.0.2",
|
||||
"glob-parent": "~5.1.2",
|
||||
"is-binary-path": "~2.1.0",
|
||||
"is-glob": "~4.0.1",
|
||||
"normalize-path": "~3.0.0",
|
||||
"readdirp": "~3.6.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8.10.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://paulmillr.com/funding/"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"fsevents": "~2.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/clap": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/clap/-/clap-1.2.3.tgz",
|
||||
"integrity": "sha512-4CoL/A3hf90V3VIEjeuhSvlGFEHKzOz+Wfc2IVZc+FaUgU0ZQafJTP49fvnULipOPcAfqhyI2duwQyns6xqjYA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"chalk": "^1.1.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/css-tree": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz",
|
||||
"integrity": "sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"mdn-data": "2.0.14",
|
||||
"source-map": "^0.6.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/csso": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz",
|
||||
"integrity": "sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"css-tree": "^1.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/csso-cli": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/csso-cli/-/csso-cli-3.0.0.tgz",
|
||||
"integrity": "sha512-j0ytVZ8qHXcFnR3HOvfH8RWOeHtlqwt97mYe32AnJ2yG6noqV0Jsn8OwOQq8wpdavkHr8JOx6drRtY2HoKLjqw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"chokidar": "^3.0.0",
|
||||
"clap": "^1.0.9",
|
||||
"csso": "^4.0.1",
|
||||
"source-map": "^0.6.1"
|
||||
},
|
||||
"bin": {
|
||||
"csso": "bin/csso"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/escape-string-regexp": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
|
||||
"integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/fill-range": {
|
||||
"version": "7.1.1",
|
||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
|
||||
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"to-regex-range": "^5.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/fsevents": {
|
||||
"version": "2.3.3",
|
||||
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
|
||||
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"ideallyInert": true,
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/glob-parent": {
|
||||
"version": "5.1.2",
|
||||
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
|
||||
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"is-glob": "^4.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/has-ansi": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz",
|
||||
"integrity": "sha512-C8vBJ8DwUCx19vhm7urhTuUsr4/IyP6l4VzNQDv+ryHQObW3TTTp9yB68WpYgRe2bbaGuZ/se74IqFeVnMnLZg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ansi-regex": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-binary-path": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
|
||||
"integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"binary-extensions": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/is-extglob": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
|
||||
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-glob": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
|
||||
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"is-extglob": "^2.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/is-number": {
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
|
||||
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.12.0"
|
||||
}
|
||||
},
|
||||
"node_modules/mdn-data": {
|
||||
"version": "2.0.14",
|
||||
"resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz",
|
||||
"integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==",
|
||||
"dev": true,
|
||||
"license": "CC0-1.0"
|
||||
},
|
||||
"node_modules/normalize-path": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
|
||||
"integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/picomatch": {
|
||||
"version": "2.3.1",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
|
||||
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=8.6"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/jonschlinkert"
|
||||
}
|
||||
},
|
||||
"node_modules/readdirp": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
|
||||
"integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"picomatch": "^2.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/source-map": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
|
||||
"dev": true,
|
||||
"license": "BSD-3-Clause",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/strip-ansi": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz",
|
||||
"integrity": "sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ansi-regex": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/supports-color": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz",
|
||||
"integrity": "sha512-KKNVtd6pCYgPIKU4cp2733HWYCpplQhddZLBUryaAHou723x+FRzQ5Df824Fj+IyyuiQTRoub4SnIFfIcrp70g==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/to-regex-range": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
|
||||
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"is-number": "^7.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/uglify-js": {
|
||||
"version": "3.19.3",
|
||||
"resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz",
|
||||
"integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==",
|
||||
"dev": true,
|
||||
"license": "BSD-2-Clause",
|
||||
"bin": {
|
||||
"uglifyjs": "bin/uglifyjs"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.8.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
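node_modules/.package-lock.json is npm's hidden lockfile (npm 7 and later): a snapshot of the tree actually installed under node_modules, regenerated on install and used to skip work when it already matches the project's top-level package-lock.json. Assuming a standard npm setup, the tree recorded above can be reproduced or inspected like this:

```sh
# Recreate exactly this dependency tree from the committed lockfile
npm ci

# Show the two dev tools the tree is built around
npm ls csso-cli uglify-js
```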
node_modules/ansi-regex/index.js (generated, vendored, new file, 4 lines)
@@ -0,0 +1,4 @@
'use strict';
module.exports = function () {
  return /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nqry=><]/g;
};
node_modules/ansi-regex/license (generated, vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
node_modules/ansi-regex/package.json (generated, vendored, new file, 64 lines)
@@ -0,0 +1,64 @@
{
|
||||
"name": "ansi-regex",
|
||||
"version": "2.1.1",
|
||||
"description": "Regular expression for matching ANSI escape codes",
|
||||
"license": "MIT",
|
||||
"repository": "chalk/ansi-regex",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "sindresorhus.com"
|
||||
},
|
||||
"maintainers": [
|
||||
"Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)",
|
||||
"Joshua Appelman <jappelman@xebia.com> (jbnicolai.com)",
|
||||
"JD Ballard <i.am.qix@gmail.com> (github.com/qix-)"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava --verbose",
|
||||
"view-supported": "node fixtures/view-codes.js"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"ansi",
|
||||
"styles",
|
||||
"color",
|
||||
"colour",
|
||||
"colors",
|
||||
"terminal",
|
||||
"console",
|
||||
"cli",
|
||||
"string",
|
||||
"tty",
|
||||
"escape",
|
||||
"formatting",
|
||||
"rgb",
|
||||
"256",
|
||||
"shell",
|
||||
"xterm",
|
||||
"command-line",
|
||||
"text",
|
||||
"regex",
|
||||
"regexp",
|
||||
"re",
|
||||
"match",
|
||||
"test",
|
||||
"find",
|
||||
"pattern"
|
||||
],
|
||||
"devDependencies": {
|
||||
"ava": "0.17.0",
|
||||
"xo": "0.16.0"
|
||||
},
|
||||
"xo": {
|
||||
"rules": {
|
||||
"guard-for-in": 0,
|
||||
"no-loop-func": 0
|
||||
}
|
||||
}
|
||||
}
|
||||
node_modules/ansi-regex/readme.md (generated, vendored, new file, 39 lines)
@@ -0,0 +1,39 @@
# ansi-regex [](https://travis-ci.org/chalk/ansi-regex)
|
||||
|
||||
> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install --save ansi-regex
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const ansiRegex = require('ansi-regex');
|
||||
|
||||
ansiRegex().test('\u001b[4mcake\u001b[0m');
|
||||
//=> true
|
||||
|
||||
ansiRegex().test('cake');
|
||||
//=> false
|
||||
|
||||
'\u001b[4mcake\u001b[0m'.match(ansiRegex());
|
||||
//=> ['\u001b[4m', '\u001b[0m']
|
||||
```
|
||||
|
||||
## FAQ
|
||||
|
||||
### Why do you test for codes not in the ECMA 48 standard?
|
||||
|
||||
Some of the codes we run as a test are codes that we acquired finding various lists of non-standard or manufacturer specific codes. If I recall correctly, we test for both standard and non-standard codes, as most of them follow the same or similar format and can be safely matched in strings without the risk of removing actual string content. There are a few non-standard control codes that do not follow the traditional format (i.e. they end in numbers) thus forcing us to exclude them from the test because we cannot reliably match them.
|
||||
|
||||
On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Somewhere else you see a similar 'anarchy' of codes is in the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out.
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
||||
node_modules/ansi-styles/index.js (generated, vendored, new file, 65 lines)
@@ -0,0 +1,65 @@
'use strict';
|
||||
|
||||
function assembleStyles () {
|
||||
var styles = {
|
||||
modifiers: {
|
||||
reset: [0, 0],
|
||||
bold: [1, 22], // 21 isn't widely supported and 22 does the same thing
|
||||
dim: [2, 22],
|
||||
italic: [3, 23],
|
||||
underline: [4, 24],
|
||||
inverse: [7, 27],
|
||||
hidden: [8, 28],
|
||||
strikethrough: [9, 29]
|
||||
},
|
||||
colors: {
|
||||
black: [30, 39],
|
||||
red: [31, 39],
|
||||
green: [32, 39],
|
||||
yellow: [33, 39],
|
||||
blue: [34, 39],
|
||||
magenta: [35, 39],
|
||||
cyan: [36, 39],
|
||||
white: [37, 39],
|
||||
gray: [90, 39]
|
||||
},
|
||||
bgColors: {
|
||||
bgBlack: [40, 49],
|
||||
bgRed: [41, 49],
|
||||
bgGreen: [42, 49],
|
||||
bgYellow: [43, 49],
|
||||
bgBlue: [44, 49],
|
||||
bgMagenta: [45, 49],
|
||||
bgCyan: [46, 49],
|
||||
bgWhite: [47, 49]
|
||||
}
|
||||
};
|
||||
|
||||
// fix humans
|
||||
styles.colors.grey = styles.colors.gray;
|
||||
|
||||
Object.keys(styles).forEach(function (groupName) {
|
||||
var group = styles[groupName];
|
||||
|
||||
Object.keys(group).forEach(function (styleName) {
|
||||
var style = group[styleName];
|
||||
|
||||
styles[styleName] = group[styleName] = {
|
||||
open: '\u001b[' + style[0] + 'm',
|
||||
close: '\u001b[' + style[1] + 'm'
|
||||
};
|
||||
});
|
||||
|
||||
Object.defineProperty(styles, groupName, {
|
||||
value: group,
|
||||
enumerable: false
|
||||
});
|
||||
});
|
||||
|
||||
return styles;
|
||||
}
|
||||
|
||||
Object.defineProperty(module, 'exports', {
|
||||
enumerable: true,
|
||||
get: assembleStyles
|
||||
});
|
||||
node_modules/ansi-styles/license (generated, vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
node_modules/ansi-styles/package.json (generated, vendored, new file, 50 lines)
@@ -0,0 +1,50 @@
{
|
||||
"name": "ansi-styles",
|
||||
"version": "2.2.1",
|
||||
"description": "ANSI escape codes for styling strings in the terminal",
|
||||
"license": "MIT",
|
||||
"repository": "chalk/ansi-styles",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "sindresorhus.com"
|
||||
},
|
||||
"maintainers": [
|
||||
"Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)",
|
||||
"Joshua Appelman <jappelman@xebia.com> (jbnicolai.com)"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"ansi",
|
||||
"styles",
|
||||
"color",
|
||||
"colour",
|
||||
"colors",
|
||||
"terminal",
|
||||
"console",
|
||||
"cli",
|
||||
"string",
|
||||
"tty",
|
||||
"escape",
|
||||
"formatting",
|
||||
"rgb",
|
||||
"256",
|
||||
"shell",
|
||||
"xterm",
|
||||
"log",
|
||||
"logging",
|
||||
"command-line",
|
||||
"text"
|
||||
],
|
||||
"devDependencies": {
|
||||
"mocha": "*"
|
||||
}
|
||||
}
|
||||
node_modules/ansi-styles/readme.md (generated, vendored, new file, 86 lines)
@@ -0,0 +1,86 @@
# ansi-styles [](https://travis-ci.org/chalk/ansi-styles)
|
||||
|
||||
> [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal
|
||||
|
||||
You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings.
|
||||
|
||||

|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install --save ansi-styles
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
var ansi = require('ansi-styles');
|
||||
|
||||
console.log(ansi.green.open + 'Hello world!' + ansi.green.close);
|
||||
```
|
||||
|
||||
|
||||
## API
|
||||
|
||||
Each style has an `open` and `close` property.
|
||||
|
||||
|
||||
## Styles
|
||||
|
||||
### Modifiers
|
||||
|
||||
- `reset`
|
||||
- `bold`
|
||||
- `dim`
|
||||
- `italic` *(not widely supported)*
|
||||
- `underline`
|
||||
- `inverse`
|
||||
- `hidden`
|
||||
- `strikethrough` *(not widely supported)*
|
||||
|
||||
### Colors
|
||||
|
||||
- `black`
|
||||
- `red`
|
||||
- `green`
|
||||
- `yellow`
|
||||
- `blue`
|
||||
- `magenta`
|
||||
- `cyan`
|
||||
- `white`
|
||||
- `gray`
|
||||
|
||||
### Background colors
|
||||
|
||||
- `bgBlack`
|
||||
- `bgRed`
|
||||
- `bgGreen`
|
||||
- `bgYellow`
|
||||
- `bgBlue`
|
||||
- `bgMagenta`
|
||||
- `bgCyan`
|
||||
- `bgWhite`
|
||||
|
||||
|
||||
## Advanced usage
|
||||
|
||||
By default you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module.
|
||||
|
||||
- `ansi.modifiers`
|
||||
- `ansi.colors`
|
||||
- `ansi.bgColors`
|
||||
|
||||
|
||||
###### Example
|
||||
|
||||
```js
|
||||
console.log(ansi.colors.green.open);
|
||||
```
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
||||
node_modules/anymatch/LICENSE (generated, vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
The ISC License
|
||||
|
||||
Copyright (c) 2019 Elan Shanker, Paul Miller (https://paulmillr.com)
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
node_modules/anymatch/README.md (generated, vendored, new file, 87 lines)
@@ -0,0 +1,87 @@
anymatch [](https://travis-ci.org/micromatch/anymatch) [](https://coveralls.io/r/micromatch/anymatch?branch=master)
|
||||
======
|
||||
Javascript module to match a string against a regular expression, glob, string,
|
||||
or function that takes the string as an argument and returns a truthy or falsy
|
||||
value. The matcher can also be an array of any or all of these. Useful for
|
||||
allowing a very flexible user-defined config to define things like file paths.
|
||||
|
||||
__Note: This module has Bash-parity, please be aware that Windows-style backslashes are not supported as separators. See https://github.com/micromatch/micromatch#backslashes for more information.__
|
||||
|
||||
|
||||
Usage
|
||||
-----
|
||||
```sh
|
||||
npm install anymatch
|
||||
```
|
||||
|
||||
#### anymatch(matchers, testString, [returnIndex], [options])
|
||||
* __matchers__: (_Array|String|RegExp|Function_)
|
||||
String to be directly matched, string with glob patterns, regular expression
|
||||
test, function that takes the testString as an argument and returns a truthy
|
||||
value if it should be matched, or an array of any number and mix of these types.
|
||||
* __testString__: (_String|Array_) The string to test against the matchers. If
|
||||
passed as an array, the first element of the array will be used as the
|
||||
`testString` for non-function matchers, while the entire array will be applied
|
||||
as the arguments for function matchers.
|
||||
* __options__: (_Object_ [optional]_) Any of the [picomatch](https://github.com/micromatch/picomatch#options) options.
|
||||
* __returnIndex__: (_Boolean [optional]_) If true, return the array index of
|
||||
the first matcher that the testString matched, or -1 if no match, instead of a
|
||||
boolean result.
|
||||
|
||||
```js
|
||||
const anymatch = require('anymatch');
|
||||
|
||||
const matchers = [ 'path/to/file.js', 'path/anyjs/**/*.js', /foo.js$/, string => string.includes('bar') && string.length > 10 ] ;
|
||||
|
||||
anymatch(matchers, 'path/to/file.js'); // true
|
||||
anymatch(matchers, 'path/anyjs/baz.js'); // true
|
||||
anymatch(matchers, 'path/to/foo.js'); // true
|
||||
anymatch(matchers, 'path/to/bar.js'); // true
|
||||
anymatch(matchers, 'bar.js'); // false
|
||||
|
||||
// returnIndex = true
|
||||
anymatch(matchers, 'foo.js', {returnIndex: true}); // 2
|
||||
anymatch(matchers, 'path/anyjs/foo.js', {returnIndex: true}); // 1
|
||||
|
||||
// any picomatc
|
||||
|
||||
// using globs to match directories and their children
|
||||
anymatch('node_modules', 'node_modules'); // true
|
||||
anymatch('node_modules', 'node_modules/somelib/index.js'); // false
|
||||
anymatch('node_modules/**', 'node_modules/somelib/index.js'); // true
|
||||
anymatch('node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // false
|
||||
anymatch('**/node_modules/**', '/absolute/path/to/node_modules/somelib/index.js'); // true
|
||||
|
||||
const matcher = anymatch(matchers);
|
||||
['foo.js', 'bar.js'].filter(matcher); // [ 'foo.js' ]
|
||||
|
||||
|
||||
```
|
||||
|
||||
#### anymatch(matchers)
|
||||
You can also pass in only your matcher(s) to get a curried function that has
|
||||
already been bound to the provided matching criteria. This can be used as an
|
||||
`Array#filter` callback.
|
||||
|
||||
```js
|
||||
var matcher = anymatch(matchers);
|
||||
|
||||
matcher('path/to/file.js'); // true
|
||||
matcher('path/anyjs/baz.js', true); // 1
|
||||
|
||||
['foo.js', 'bar.js'].filter(matcher); // ['foo.js']
|
||||
```
|
||||
|
||||
Changelog
|
||||
----------
|
||||
[See release notes page on GitHub](https://github.com/micromatch/anymatch/releases)
|
||||
|
||||
- **v3.0:** Removed `startIndex` and `endIndex` arguments. Node 8.x-only.
|
||||
- **v2.0:** [micromatch](https://github.com/jonschlinkert/micromatch) moves away from minimatch-parity and inline with Bash. This includes handling backslashes differently (see https://github.com/micromatch/micromatch#backslashes for more information).
|
||||
- **v1.2:** anymatch uses [micromatch](https://github.com/jonschlinkert/micromatch)
|
||||
for glob pattern matching. Issues with glob pattern matching should be
|
||||
reported directly to the [micromatch issue tracker](https://github.com/jonschlinkert/micromatch/issues).
|
||||
|
||||
License
|
||||
-------
|
||||
[ISC](https://raw.github.com/micromatch/anymatch/master/LICENSE)
|
||||
node_modules/anymatch/index.d.ts (generated, vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
type AnymatchFn = (testString: string) => boolean;
type AnymatchPattern = string|RegExp|AnymatchFn;
type AnymatchMatcher = AnymatchPattern|AnymatchPattern[]
type AnymatchTester = {
  (testString: string|any[], returnIndex: true): number;
  (testString: string|any[]): boolean;
}

type PicomatchOptions = {dot: boolean};

declare const anymatch: {
  (matchers: AnymatchMatcher): AnymatchTester;
  (matchers: AnymatchMatcher, testString: null, returnIndex: true | PicomatchOptions): AnymatchTester;
  (matchers: AnymatchMatcher, testString: string|any[], returnIndex: true | PicomatchOptions): number;
  (matchers: AnymatchMatcher, testString: string|any[]): boolean;
}

export {AnymatchMatcher as Matcher}
export {AnymatchTester as Tester}
export default anymatch
node_modules/anymatch/index.js (generated, vendored, new file, 104 lines)
@@ -0,0 +1,104 @@
'use strict';
|
||||
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
|
||||
const picomatch = require('picomatch');
|
||||
const normalizePath = require('normalize-path');
|
||||
|
||||
/**
|
||||
* @typedef {(testString: string) => boolean} AnymatchFn
|
||||
* @typedef {string|RegExp|AnymatchFn} AnymatchPattern
|
||||
* @typedef {AnymatchPattern|AnymatchPattern[]} AnymatchMatcher
|
||||
*/
|
||||
const BANG = '!';
|
||||
const DEFAULT_OPTIONS = {returnIndex: false};
|
||||
const arrify = (item) => Array.isArray(item) ? item : [item];
|
||||
|
||||
/**
|
||||
* @param {AnymatchPattern} matcher
|
||||
* @param {object} options
|
||||
* @returns {AnymatchFn}
|
||||
*/
|
||||
const createPattern = (matcher, options) => {
|
||||
if (typeof matcher === 'function') {
|
||||
return matcher;
|
||||
}
|
||||
if (typeof matcher === 'string') {
|
||||
const glob = picomatch(matcher, options);
|
||||
return (string) => matcher === string || glob(string);
|
||||
}
|
||||
if (matcher instanceof RegExp) {
|
||||
return (string) => matcher.test(string);
|
||||
}
|
||||
return (string) => false;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Array<Function>} patterns
|
||||
* @param {Array<Function>} negPatterns
|
||||
* @param {String|Array} args
|
||||
* @param {Boolean} returnIndex
|
||||
* @returns {boolean|number}
|
||||
*/
|
||||
const matchPatterns = (patterns, negPatterns, args, returnIndex) => {
|
||||
const isList = Array.isArray(args);
|
||||
const _path = isList ? args[0] : args;
|
||||
if (!isList && typeof _path !== 'string') {
|
||||
throw new TypeError('anymatch: second argument must be a string: got ' +
|
||||
Object.prototype.toString.call(_path))
|
||||
}
|
||||
const path = normalizePath(_path, false);
|
||||
|
||||
for (let index = 0; index < negPatterns.length; index++) {
|
||||
const nglob = negPatterns[index];
|
||||
if (nglob(path)) {
|
||||
return returnIndex ? -1 : false;
|
||||
}
|
||||
}
|
||||
|
||||
const applied = isList && [path].concat(args.slice(1));
|
||||
for (let index = 0; index < patterns.length; index++) {
|
||||
const pattern = patterns[index];
|
||||
if (isList ? pattern(...applied) : pattern(path)) {
|
||||
return returnIndex ? index : true;
|
||||
}
|
||||
}
|
||||
|
||||
return returnIndex ? -1 : false;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {AnymatchMatcher} matchers
|
||||
* @param {Array|string} testString
|
||||
* @param {object} options
|
||||
* @returns {boolean|number|Function}
|
||||
*/
|
||||
const anymatch = (matchers, testString, options = DEFAULT_OPTIONS) => {
|
||||
if (matchers == null) {
|
||||
throw new TypeError('anymatch: specify first argument');
|
||||
}
|
||||
const opts = typeof options === 'boolean' ? {returnIndex: options} : options;
|
||||
const returnIndex = opts.returnIndex || false;
|
||||
|
||||
// Early cache for matchers.
|
||||
const mtchers = arrify(matchers);
|
||||
const negatedGlobs = mtchers
|
||||
.filter(item => typeof item === 'string' && item.charAt(0) === BANG)
|
||||
.map(item => item.slice(1))
|
||||
.map(item => picomatch(item, opts));
|
||||
const patterns = mtchers
|
||||
.filter(item => typeof item !== 'string' || (typeof item === 'string' && item.charAt(0) !== BANG))
|
||||
.map(matcher => createPattern(matcher, opts));
|
||||
|
||||
if (testString == null) {
|
||||
return (testString, ri = false) => {
|
||||
const returnIndex = typeof ri === 'boolean' ? ri : false;
|
||||
return matchPatterns(patterns, negatedGlobs, testString, returnIndex);
|
||||
}
|
||||
}
|
||||
|
||||
return matchPatterns(patterns, negatedGlobs, testString, returnIndex);
|
||||
};
|
||||
|
||||
anymatch.default = anymatch;
|
||||
module.exports = anymatch;
|
||||
node_modules/anymatch/package.json (generated, vendored, new file, 48 lines)
@@ -0,0 +1,48 @@
{
|
||||
"name": "anymatch",
|
||||
"version": "3.1.3",
|
||||
"description": "Matches strings against configurable strings, globs, regular expressions, and/or functions",
|
||||
"files": [
|
||||
"index.js",
|
||||
"index.d.ts"
|
||||
],
|
||||
"dependencies": {
|
||||
"normalize-path": "^3.0.0",
|
||||
"picomatch": "^2.0.4"
|
||||
},
|
||||
"author": {
|
||||
"name": "Elan Shanker",
|
||||
"url": "https://github.com/es128"
|
||||
},
|
||||
"license": "ISC",
|
||||
"homepage": "https://github.com/micromatch/anymatch",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/micromatch/anymatch"
|
||||
},
|
||||
"keywords": [
|
||||
"match",
|
||||
"any",
|
||||
"string",
|
||||
"file",
|
||||
"fs",
|
||||
"list",
|
||||
"glob",
|
||||
"regex",
|
||||
"regexp",
|
||||
"regular",
|
||||
"expression",
|
||||
"function"
|
||||
],
|
||||
"scripts": {
|
||||
"test": "nyc mocha",
|
||||
"mocha": "mocha"
|
||||
},
|
||||
"devDependencies": {
|
||||
"mocha": "^6.1.3",
|
||||
"nyc": "^14.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
}
|
||||
}
|
||||
node_modules/binary-extensions/binary-extensions.json (generated, vendored, new file, 263 lines)
@@ -0,0 +1,263 @@
[
|
||||
"3dm",
|
||||
"3ds",
|
||||
"3g2",
|
||||
"3gp",
|
||||
"7z",
|
||||
"a",
|
||||
"aac",
|
||||
"adp",
|
||||
"afdesign",
|
||||
"afphoto",
|
||||
"afpub",
|
||||
"ai",
|
||||
"aif",
|
||||
"aiff",
|
||||
"alz",
|
||||
"ape",
|
||||
"apk",
|
||||
"appimage",
|
||||
"ar",
|
||||
"arj",
|
||||
"asf",
|
||||
"au",
|
||||
"avi",
|
||||
"bak",
|
||||
"baml",
|
||||
"bh",
|
||||
"bin",
|
||||
"bk",
|
||||
"bmp",
|
||||
"btif",
|
||||
"bz2",
|
||||
"bzip2",
|
||||
"cab",
|
||||
"caf",
|
||||
"cgm",
|
||||
"class",
|
||||
"cmx",
|
||||
"cpio",
|
||||
"cr2",
|
||||
"cur",
|
||||
"dat",
|
||||
"dcm",
|
||||
"deb",
|
||||
"dex",
|
||||
"djvu",
|
||||
"dll",
|
||||
"dmg",
|
||||
"dng",
|
||||
"doc",
|
||||
"docm",
|
||||
"docx",
|
||||
"dot",
|
||||
"dotm",
|
||||
"dra",
|
||||
"DS_Store",
|
||||
"dsk",
|
||||
"dts",
|
||||
"dtshd",
|
||||
"dvb",
|
||||
"dwg",
|
||||
"dxf",
|
||||
"ecelp4800",
|
||||
"ecelp7470",
|
||||
"ecelp9600",
|
||||
"egg",
|
||||
"eol",
|
||||
"eot",
|
||||
"epub",
|
||||
"exe",
|
||||
"f4v",
|
||||
"fbs",
|
||||
"fh",
|
||||
"fla",
|
||||
"flac",
|
||||
"flatpak",
|
||||
"fli",
|
||||
"flv",
|
||||
"fpx",
|
||||
"fst",
|
||||
"fvt",
|
||||
"g3",
|
||||
"gh",
|
||||
"gif",
|
||||
"graffle",
|
||||
"gz",
|
||||
"gzip",
|
||||
"h261",
|
||||
"h263",
|
||||
"h264",
|
||||
"icns",
|
||||
"ico",
|
||||
"ief",
|
||||
"img",
|
||||
"ipa",
|
||||
"iso",
|
||||
"jar",
|
||||
"jpeg",
|
||||
"jpg",
|
||||
"jpgv",
|
||||
"jpm",
|
||||
"jxr",
|
||||
"key",
|
||||
"ktx",
|
||||
"lha",
|
||||
"lib",
|
||||
"lvp",
|
||||
"lz",
|
||||
"lzh",
|
||||
"lzma",
|
||||
"lzo",
|
||||
"m3u",
|
||||
"m4a",
|
||||
"m4v",
|
||||
"mar",
|
||||
"mdi",
|
||||
"mht",
|
||||
"mid",
|
||||
"midi",
|
||||
"mj2",
|
||||
"mka",
|
||||
"mkv",
|
||||
"mmr",
|
||||
"mng",
|
||||
"mobi",
|
||||
"mov",
|
||||
"movie",
|
||||
"mp3",
|
||||
"mp4",
|
||||
"mp4a",
|
||||
"mpeg",
|
||||
"mpg",
|
||||
"mpga",
|
||||
"mxu",
|
||||
"nef",
|
||||
"npx",
|
||||
"numbers",
|
||||
"nupkg",
|
||||
"o",
|
||||
"odp",
|
||||
"ods",
|
||||
"odt",
|
||||
"oga",
|
||||
"ogg",
|
||||
"ogv",
|
||||
"otf",
|
||||
"ott",
|
||||
"pages",
|
||||
"pbm",
|
||||
"pcx",
|
||||
"pdb",
|
||||
"pdf",
|
||||
"pea",
|
||||
"pgm",
|
||||
"pic",
|
||||
"png",
|
||||
"pnm",
|
||||
"pot",
|
||||
"potm",
|
||||
"potx",
|
||||
"ppa",
|
||||
"ppam",
|
||||
"ppm",
|
||||
"pps",
|
||||
"ppsm",
|
||||
"ppsx",
|
||||
"ppt",
|
||||
"pptm",
|
||||
"pptx",
|
||||
"psd",
|
||||
"pya",
|
||||
"pyc",
|
||||
"pyo",
|
||||
"pyv",
|
||||
"qt",
|
||||
"rar",
|
||||
"ras",
|
||||
"raw",
|
||||
"resources",
|
||||
"rgb",
|
||||
"rip",
|
||||
"rlc",
|
||||
"rmf",
|
||||
"rmvb",
|
||||
"rpm",
|
||||
"rtf",
|
||||
"rz",
|
||||
"s3m",
|
||||
"s7z",
|
||||
"scpt",
|
||||
"sgi",
|
||||
"shar",
|
||||
"snap",
|
||||
"sil",
|
||||
"sketch",
|
||||
"slk",
|
||||
"smv",
|
||||
"snk",
|
||||
"so",
|
||||
"stl",
|
||||
"suo",
|
||||
"sub",
|
||||
"swf",
|
||||
"tar",
|
||||
"tbz",
|
||||
"tbz2",
|
||||
"tga",
|
||||
"tgz",
|
||||
"thmx",
|
||||
"tif",
|
||||
"tiff",
|
||||
"tlz",
|
||||
"ttc",
|
||||
"ttf",
|
||||
"txz",
|
||||
"udf",
|
||||
"uvh",
|
||||
"uvi",
|
||||
"uvm",
|
||||
"uvp",
|
||||
"uvs",
|
||||
"uvu",
|
||||
"viv",
|
||||
"vob",
|
||||
"war",
|
||||
"wav",
|
||||
"wax",
|
||||
"wbmp",
|
||||
"wdp",
|
||||
"weba",
|
||||
"webm",
|
||||
"webp",
|
||||
"whl",
|
||||
"wim",
|
||||
"wm",
|
||||
"wma",
|
||||
"wmv",
|
||||
"wmx",
|
||||
"woff",
|
||||
"woff2",
|
||||
"wrm",
|
||||
"wvx",
|
||||
"xbm",
|
||||
"xif",
|
||||
"xla",
|
||||
"xlam",
|
||||
"xls",
|
||||
"xlsb",
|
||||
"xlsm",
|
||||
"xlsx",
|
||||
"xlt",
|
||||
"xltm",
|
||||
"xltx",
|
||||
"xm",
|
||||
"xmind",
|
||||
"xpi",
|
||||
"xpm",
|
||||
"xwd",
|
||||
"xz",
|
||||
"z",
|
||||
"zip",
|
||||
"zipx"
|
||||
]
|
||||
node_modules/binary-extensions/binary-extensions.json.d.ts (generated, vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
declare const binaryExtensionsJson: readonly string[];

export = binaryExtensionsJson;
node_modules/binary-extensions/index.d.ts (generated, vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
/**
List of binary file extensions.

@example
```
import binaryExtensions = require('binary-extensions');

console.log(binaryExtensions);
//=> ['3ds', '3g2', …]
```
*/
declare const binaryExtensions: readonly string[];

export = binaryExtensions;
node_modules/binary-extensions/index.js (generated, vendored, new file, 1 line)
@@ -0,0 +1 @@
module.exports = require('./binary-extensions.json');
node_modules/binary-extensions/license (generated, vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
MIT License
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
|
||||
Copyright (c) Paul Miller (https://paulmillr.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
node_modules/binary-extensions/package.json (generated, vendored, new file, 40 lines)
@@ -0,0 +1,40 @@
{
|
||||
"name": "binary-extensions",
|
||||
"version": "2.3.0",
|
||||
"description": "List of binary file extensions",
|
||||
"license": "MIT",
|
||||
"repository": "sindresorhus/binary-extensions",
|
||||
"funding": "https://github.com/sponsors/sindresorhus",
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "https://sindresorhus.com"
|
||||
},
|
||||
"sideEffects": false,
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && ava && tsd"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"index.d.ts",
|
||||
"binary-extensions.json",
|
||||
"binary-extensions.json.d.ts"
|
||||
],
|
||||
"keywords": [
|
||||
"binary",
|
||||
"extensions",
|
||||
"extension",
|
||||
"file",
|
||||
"json",
|
||||
"list",
|
||||
"array"
|
||||
],
|
||||
"devDependencies": {
|
||||
"ava": "^1.4.1",
|
||||
"tsd": "^0.7.2",
|
||||
"xo": "^0.24.0"
|
||||
}
|
||||
}
|
||||
node_modules/binary-extensions/readme.md (generated, vendored, new file, 25 lines)
@@ -0,0 +1,25 @@
# binary-extensions
|
||||
|
||||
> List of binary file extensions
|
||||
|
||||
The list is just a [JSON file](binary-extensions.json) and can be used anywhere.
|
||||
|
||||
## Install
|
||||
|
||||
```sh
|
||||
npm install binary-extensions
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
const binaryExtensions = require('binary-extensions');
|
||||
|
||||
console.log(binaryExtensions);
|
||||
//=> ['3ds', '3g2', …]
|
||||
```
|
||||
|
||||
## Related
|
||||
|
||||
- [is-binary-path](https://github.com/sindresorhus/is-binary-path) - Check if a filepath is a binary file
|
||||
- [text-extensions](https://github.com/sindresorhus/text-extensions) - List of text file extensions
|
||||
node_modules/braces/LICENSE (generated, vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014-present, Jon Schlinkert.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
node_modules/braces/README.md (generated, vendored, new file, 586 lines)
@@ -0,0 +1,586 @@
# braces [](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [](https://www.npmjs.com/package/braces) [](https://npmjs.org/package/braces) [](https://npmjs.org/package/braces) [](https://travis-ci.org/micromatch/braces)
|
||||
|
||||
> Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.
|
||||
|
||||
Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support.
|
||||
|
||||
## Install
|
||||
|
||||
Install with [npm](https://www.npmjs.com/):
|
||||
|
||||
```sh
|
||||
$ npm install --save braces
|
||||
```
|
||||
|
||||
## v3.0.0 Released!!
|
||||
|
||||
See the [changelog](CHANGELOG.md) for details.
|
||||
|
||||
## Why use braces?
|
||||
|
||||
Brace patterns make globs more powerful by adding the ability to match specific ranges and sequences of characters.
|
||||
|
||||
- **Accurate** - complete support for the [Bash 4.3 Brace Expansion](www.gnu.org/software/bash/) specification (passes all of the Bash braces tests)
|
||||
- **[fast and performant](#benchmarks)** - Starts fast, runs fast and [scales well](#performance) as patterns increase in complexity.
|
||||
- **Organized code base** - The parser and compiler are easy to maintain and update when edge cases crop up.
|
||||
- **Well-tested** - Thousands of test assertions, and passes all of the Bash, minimatch, and [brace-expansion](https://github.com/juliangruber/brace-expansion) unit tests (as of the date this was written).
|
||||
- **Safer** - You shouldn't have to worry about users defining aggressive or malicious brace patterns that can break your application. Braces takes measures to prevent malicious regex that can be used for DDoS attacks (see [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)).
|
||||
- [Supports lists](#lists) - (aka "sets") `a/{b,c}/d` => `['a/b/d', 'a/c/d']`
|
||||
- [Supports sequences](#sequences) - (aka "ranges") `{01..03}` => `['01', '02', '03']`
|
||||
- [Supports steps](#steps) - (aka "increments") `{2..10..2}` => `['2', '4', '6', '8', '10']`
|
||||
- [Supports escaping](#escaping) - To prevent evaluation of special characters.
|
||||
|
||||
## Usage
|
||||
|
||||
The main export is a function that takes one or more brace `patterns` and `options`.
|
||||
|
||||
```js
|
||||
const braces = require('braces');
|
||||
// braces(patterns[, options]);
|
||||
|
||||
console.log(braces(['{01..05}', '{a..e}']));
|
||||
//=> ['(0[1-5])', '([a-e])']
|
||||
|
||||
console.log(braces(['{01..05}', '{a..e}'], { expand: true }));
|
||||
//=> ['01', '02', '03', '04', '05', 'a', 'b', 'c', 'd', 'e']
|
||||
```
|
||||
|
||||
### Brace Expansion vs. Compilation
|
||||
|
||||
By default, brace patterns are compiled into strings that are optimized for creating regular expressions and matching.
|
||||
|
||||
**Compiled**
|
||||
|
||||
```js
|
||||
console.log(braces('a/{x,y,z}/b'));
|
||||
//=> ['a/(x|y|z)/b']
|
||||
console.log(braces(['a/{01..20}/b', 'a/{1..5}/b']));
|
||||
//=> [ 'a/(0[1-9]|1[0-9]|20)/b', 'a/([1-5])/b' ]
|
||||
```
|
||||
|
||||
**Expanded**
|
||||
|
||||
Enable brace expansion by setting the `expand` option to true, or by using [braces.expand()](#expand) (returns an array similar to what you'd expect from Bash, or `echo {1..5}`, or [minimatch](https://github.com/isaacs/minimatch)):
|
||||
|
||||
```js
|
||||
console.log(braces('a/{x,y,z}/b', { expand: true }));
|
||||
//=> ['a/x/b', 'a/y/b', 'a/z/b']
|
||||
|
||||
console.log(braces.expand('{01..10}'));
|
||||
//=> ['01','02','03','04','05','06','07','08','09','10']
|
||||
```
|
||||
|
||||
### Lists
|
||||
|
||||
Expand lists (like Bash "sets"):
|
||||
|
||||
```js
|
||||
console.log(braces('a/{foo,bar,baz}/*.js'));
|
||||
//=> ['a/(foo|bar|baz)/*.js']
|
||||
|
||||
console.log(braces.expand('a/{foo,bar,baz}/*.js'));
|
||||
//=> ['a/foo/*.js', 'a/bar/*.js', 'a/baz/*.js']
|
||||
```
|
||||
|
||||
### Sequences
|
||||
|
||||
Expand ranges of characters (like Bash "sequences"):
|
||||
|
||||
```js
|
||||
console.log(braces.expand('{1..3}')); // ['1', '2', '3']
|
||||
console.log(braces.expand('a/{1..3}/b')); // ['a/1/b', 'a/2/b', 'a/3/b']
|
||||
console.log(braces('{a..c}', { expand: true })); // ['a', 'b', 'c']
|
||||
console.log(braces('foo/{a..c}', { expand: true })); // ['foo/a', 'foo/b', 'foo/c']
|
||||
|
||||
// supports zero-padded ranges
|
||||
console.log(braces('a/{01..03}/b')); //=> ['a/(0[1-3])/b']
|
||||
console.log(braces('a/{001..300}/b')); //=> ['a/(0{2}[1-9]|0[1-9][0-9]|[12][0-9]{2}|300)/b']
|
||||
```
|
||||
|
||||
See [fill-range](https://github.com/jonschlinkert/fill-range) for all available range-expansion options.
|
||||
|
||||
### Stepped ranges
|
||||
|
||||
Steps, or increments, may be used with ranges:
|
||||
|
||||
```js
|
||||
console.log(braces.expand('{2..10..2}'));
|
||||
//=> ['2', '4', '6', '8', '10']
|
||||
|
||||
console.log(braces('{2..10..2}'));
|
||||
//=> ['(2|4|6|8|10)']
|
||||
```
|
||||
|
||||
When a pattern is optimized (the default behavior), sequences are passed to [to-regex-range](https://github.com/jonschlinkert/to-regex-range) for expansion.
|
||||
|
||||
### Nesting
|
||||
|
||||
Brace patterns may be nested. The results of each expanded string are not sorted, and left to right order is preserved.
|
||||
|
||||
**"Expanded" braces**
|
||||
|
||||
```js
|
||||
console.log(braces.expand('a{b,c,/{x,y}}/e'));
|
||||
//=> ['ab/e', 'ac/e', 'a/x/e', 'a/y/e']
|
||||
|
||||
console.log(braces.expand('a/{x,{1..5},y}/c'));
|
||||
//=> ['a/x/c', 'a/1/c', 'a/2/c', 'a/3/c', 'a/4/c', 'a/5/c', 'a/y/c']
|
||||
```
|
||||
|
||||
**"Optimized" braces**
|
||||
|
||||
```js
|
||||
console.log(braces('a{b,c,/{x,y}}/e'));
|
||||
//=> ['a(b|c|/(x|y))/e']
|
||||
|
||||
console.log(braces('a/{x,{1..5},y}/c'));
|
||||
//=> ['a/(x|([1-5])|y)/c']
|
||||
```
|
||||
|
||||
### Escaping
|
||||
|
||||
**Escaping braces**
|
||||
|
||||
A brace pattern will not be expanded or evaluated if _either the opening or closing brace is escaped_:
|
||||
|
||||
```js
|
||||
console.log(braces.expand('a\\{d,c,b}e'));
|
||||
//=> ['a{d,c,b}e']
|
||||
|
||||
console.log(braces.expand('a{d,c,b\\}e'));
|
||||
//=> ['a{d,c,b}e']
|
||||
```
|
||||
|
||||
**Escaping commas**
|
||||
|
||||
Commas inside braces may also be escaped:
|
||||
|
||||
```js
|
||||
console.log(braces.expand('a{b\\,c}d'));
|
||||
//=> ['a{b,c}d']
|
||||
|
||||
console.log(braces.expand('a{d\\,c,b}e'));
|
||||
//=> ['ad,ce', 'abe']
|
||||
```
|
||||
|
||||
**Single items**
|
||||
|
||||
Following Bash conventions, a brace pattern is also not expanded when it contains a single character:
|
||||
|
||||
```js
|
||||
console.log(braces.expand('a{b}c'));
|
||||
//=> ['a{b}c']
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
### options.maxLength
|
||||
|
||||
**Type**: `Number`
|
||||
|
||||
**Default**: `10000`
|
||||
|
||||
**Description**: Limit the length of the input string. Useful when the input string is generated or user-supplied.
|
||||
|
||||
```js
|
||||
console.log(braces('a/{b,c}/d', { maxLength: 3 })); //=> throws an error
|
||||
```
|
||||
|
||||
### options.expand
|
||||
|
||||
**Type**: `Boolean`
|
||||
|
||||
**Default**: `undefined`
|
||||
|
||||
**Description**: Generate an "expanded" brace pattern (alternatively you can use the `braces.expand()` method, which does the same thing).
|
||||
|
||||
```js
|
||||
console.log(braces('a/{b,c}/d', { expand: true }));
|
||||
//=> [ 'a/b/d', 'a/c/d' ]
|
||||
```
|
||||
|
||||
### options.nodupes
|
||||
|
||||
**Type**: `Boolean`
|
||||
|
||||
**Default**: `undefined`
|
||||
|
||||
**Description**: Remove duplicates from the returned array.
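Duplicates can only appear in expanded output, so this option is typically combined with `expand: true`. A minimal sketch (the pattern below is just an assumed example):

```js
console.log(braces('a/{b,b,c}', { expand: true }));
//=> expected: ['a/b', 'a/b', 'a/c']

console.log(braces('a/{b,b,c}', { expand: true, nodupes: true }));
//=> expected: ['a/b', 'a/c']
```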
|
||||
|
||||
### options.rangeLimit
|
||||
|
||||
**Type**: `Number`
|
||||
|
||||
**Default**: `1000`
|
||||
|
||||
**Description**: To prevent malicious patterns from being passed by users, an error is thrown when `braces.expand()` is used (or `options.expand` is true) and the expanded range would exceed the `rangeLimit`.
|
||||
|
||||
You can customize `options.rangeLimit` or set it to `Infinity` to disable this altogether.
|
||||
|
||||
**Examples**
|
||||
|
||||
```js
|
||||
// pattern exceeds the "rangeLimit", so it throws a RangeError instead of expanding
console.log(braces.expand('{1..10000}')); //=> throws an error
|
||||
|
||||
// pattern does not exceed "rangeLimit", so it's expanded as usual
|
||||
console.log(braces.expand('{1..100}'));
|
||||
//=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '100']
|
||||
```
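For trusted input you can raise the limit, or disable it entirely, on a per-call basis. A minimal sketch (the pattern `{1..5000}` is just an assumed example):

```js
// with the default rangeLimit of 1000 this pattern would throw a RangeError;
// passing rangeLimit: Infinity (or a larger number) lets it expand
const results = braces.expand('{1..5000}', { rangeLimit: Infinity });
console.log(results.length); //=> expected: 5000
```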
|
||||
|
||||
### options.transform
|
||||
|
||||
**Type**: `Function`
|
||||
|
||||
**Default**: `undefined`
|
||||
|
||||
**Description**: Customize range expansion.
|
||||
|
||||
**Example: Transforming non-numeric values**
|
||||
|
||||
```js
|
||||
const alpha = braces.expand('x/{a..e}/y', {
|
||||
transform(value, index) {
|
||||
// When non-numeric values are passed, "value" is a character code.
|
||||
return 'foo/' + String.fromCharCode(value) + '-' + index;
|
||||
},
|
||||
});
|
||||
console.log(alpha);
|
||||
//=> [ 'x/foo/a-0/y', 'x/foo/b-1/y', 'x/foo/c-2/y', 'x/foo/d-3/y', 'x/foo/e-4/y' ]
|
||||
```
|
||||
|
||||
**Example: Transforming numeric values**
|
||||
|
||||
```js
|
||||
const numeric = braces.expand('{1..5}', {
|
||||
transform(value) {
|
||||
// when numeric values are passed, "value" is a number
|
||||
return 'foo/' + value * 2;
|
||||
},
|
||||
});
|
||||
console.log(numeric);
|
||||
//=> [ 'foo/2', 'foo/4', 'foo/6', 'foo/8', 'foo/10' ]
|
||||
```
|
||||
|
||||
### options.quantifiers
|
||||
|
||||
**Type**: `Boolean`
|
||||
|
||||
**Default**: `undefined`
|
||||
|
||||
**Description**: In regular expressions, quantifiers can be used to specify how many times a token can be repeated. For example, `a{1,3}` will match the letter `a` one to three times.
|
||||
|
||||
Unfortunately, regex quantifiers happen to share the same syntax as [Bash lists](#lists).
|
||||
|
||||
The `quantifiers` option tells braces to detect when [regex quantifiers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#quantifiers) are defined in the given pattern, and not to try to expand them as lists.
|
||||
|
||||
**Examples**
|
||||
|
||||
```js
|
||||
const braces = require('braces');
|
||||
console.log(braces('a/b{1,3}/{x,y,z}'));
|
||||
//=> [ 'a/b(1|3)/(x|y|z)' ]
|
||||
console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true }));
|
||||
//=> [ 'a/b{1,3}/(x|y|z)' ]
|
||||
console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true, expand: true }));
|
||||
//=> [ 'a/b{1,3}/x', 'a/b{1,3}/y', 'a/b{1,3}/z' ]
|
||||
```
|
||||
|
||||
### options.keepEscaping
|
||||
|
||||
**Type**: `Boolean`
|
||||
|
||||
**Default**: `undefined`
|
||||
|
||||
**Description**: Do not strip backslashes that were used for escaping from the result.
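By default the escaping backslash is stripped from the result. A minimal sketch of the expected difference (the pattern is an assumed example):

```js
console.log(braces.expand('a/\\{b,c}/d'));
//=> expected: ['a/{b,c}/d']

console.log(braces.expand('a/\\{b,c}/d', { keepEscaping: true }));
//=> expected: ['a/\\{b,c}/d']
```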
|
||||
|
||||
## What is "brace expansion"?
|
||||
|
||||
Brace expansion is a type of parameter expansion that was made popular by unix shells for generating lists of strings, as well as regex-like matching when used alongside wildcards (globs).
|
||||
|
||||
In addition to "expansion", braces are also used for matching. In other words:
|
||||
|
||||
- [brace expansion](#brace-expansion) is for generating new lists
|
||||
- [brace matching](#brace-matching) is for filtering existing lists
|
||||
|
||||
<details>
|
||||
<summary><strong>More about brace expansion</strong> (click to expand)</summary>
|
||||
|
||||
There are two main types of brace expansion:
|
||||
|
||||
1. **lists**: which are defined using comma-separated values inside curly braces: `{a,b,c}`
|
||||
2. **sequences**: which are defined using a starting value and an ending value, separated by two dots: `a{1..3}b`. Optionally, a third argument may be passed to define a "step" or increment to use: `a{1..100..10}b`. These are also sometimes referred to as "ranges".
|
||||
|
||||
Here are some example brace patterns to illustrate how they work:
|
||||
|
||||
**Sets**
|
||||
|
||||
```
|
||||
{a,b,c} => a b c
|
||||
{a,b,c}{1,2} => a1 a2 b1 b2 c1 c2
|
||||
```
|
||||
|
||||
**Sequences**
|
||||
|
||||
```
|
||||
{1..9} => 1 2 3 4 5 6 7 8 9
|
||||
{4..-4} => 4 3 2 1 0 -1 -2 -3 -4
|
||||
{1..20..3} => 1 4 7 10 13 16 19
|
||||
{a..j} => a b c d e f g h i j
|
||||
{j..a} => j i h g f e d c b a
|
||||
{a..z..3} => a d g j m p s v y
|
||||
```
|
||||
|
||||
**Combination**
|
||||
|
||||
Sets and sequences can be mixed together or used along with any other strings.
|
||||
|
||||
```
|
||||
{a,b,c}{1..3} => a1 a2 a3 b1 b2 b3 c1 c2 c3
|
||||
foo/{a,b,c}/bar => foo/a/bar foo/b/bar foo/c/bar
|
||||
```
|
||||
|
||||
The fact that braces can be "expanded" from relatively simple patterns makes them ideal for quickly generating test fixtures, file paths, and similar use cases.
|
||||
|
||||
## Brace matching
|
||||
|
||||
In addition to _expansion_, brace patterns are also useful for performing regular-expression-like matching.
|
||||
|
||||
For example, the pattern `foo/{1..3}/bar` would match any of the following strings:
|
||||
|
||||
```
|
||||
foo/1/bar
|
||||
foo/2/bar
|
||||
foo/3/bar
|
||||
```
|
||||
|
||||
But not:
|
||||
|
||||
```
|
||||
baz/1/qux
|
||||
baz/2/qux
|
||||
baz/3/qux
|
||||
```
|
||||
|
||||
Braces can also be combined with [glob patterns](https://github.com/jonschlinkert/micromatch) to perform more advanced wildcard matching. For example, the pattern `*/{1..3}/*` would match any of the following strings:
|
||||
|
||||
```
|
||||
foo/1/bar
|
||||
foo/2/bar
|
||||
foo/3/bar
|
||||
baz/1/qux
|
||||
baz/2/qux
|
||||
baz/3/qux
|
||||
```
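As a rough sketch of how such matching is usually wired up (this assumes the separate micromatch package, which is not documented in this readme):

```js
const micromatch = require('micromatch');

// filter a list of strings with a brace pattern combined with globs
console.log(micromatch(['foo/1/bar', 'foo/9/bar', 'baz/2/qux'], '*/{1..3}/*'));
//=> expected: ['foo/1/bar', 'baz/2/qux']

// or test a single string
console.log(micromatch.isMatch('foo/2/bar', 'foo/{1..3}/bar'));
//=> expected: true
```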
|
||||
|
||||
## Brace matching pitfalls
|
||||
|
||||
Although brace patterns offer a user-friendly way of matching ranges or sets of strings, there are also some major disadvantages and potential risks you should be aware of.
|
||||
|
||||
### tldr
|
||||
|
||||
**"brace bombs"**
|
||||
|
||||
- brace expansion can eat up a huge amount of processing resources
|
||||
- as brace patterns increase _linearly in size_, the system resources required to expand the pattern increase exponentially
|
||||
- users can accidentally (or intentionally) exhaust your system's resources resulting in the equivalent of a DoS attack (bonus: no programming knowledge is required!)
|
||||
|
||||
For a more detailed explanation with examples, see the [geometric complexity](#geometric-complexity) section.
|
||||
|
||||
### The solution
|
||||
|
||||
Jump to the [performance section](#performance) to see how Braces solves this problem in comparison to other libraries.
|
||||
|
||||
### Geometric complexity
|
||||
|
||||
At minimum, brace patterns with sets limited to two elements have quadratic or `O(n^2)` complexity. But the complexity of the algorithm increases exponentially as the number of sets, _and elements per set_, increases, which is `O(n^c)`.
|
||||
|
||||
For example, the following sets demonstrate quadratic (`O(n^2)`) complexity:
|
||||
|
||||
```
|
||||
{1,2}{3,4} => (2X2) => 13 14 23 24
|
||||
{1,2}{3,4}{5,6} => (2X2X2) => 135 136 145 146 235 236 245 246
|
||||
```
|
||||
|
||||
But add an element to a set, and we get an n-fold Cartesian product with `O(n^c)` complexity:
|
||||
|
||||
```
|
||||
{1,2,3}{4,5,6}{7,8,9} => (3X3X3) => 147 148 149 157 158 159 167 168 169 247 248
|
||||
249 257 258 259 267 268 269 347 348 349 357
|
||||
358 359 367 368 369
|
||||
```
|
||||
|
||||
Now, imagine how this complexity grows when each set is itself a large range:
|
||||
|
||||
```
|
||||
{1..100}{1..100} => (100X100) => 10,000 elements (38.4 kB)
|
||||
{1..100}{1..100}{1..100} => (100X100X100) => 1,000,000 elements (5.76 MB)
|
||||
```
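To see why this blows up, note that the number of results is simply the product of the set sizes; a quick illustrative sketch (not part of the braces API):

```js
// number of strings a pattern of N sets expands to = product of the set sizes
const countExpansions = sizes => sizes.reduce((total, size) => total * size, 1);

console.log(countExpansions([2, 2, 2]));       //=> 8
console.log(countExpansions([3, 3, 3]));       //=> 27
console.log(countExpansions([100, 100, 100])); //=> 1000000
```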
|
||||
|
||||
Although these examples are clearly contrived, they demonstrate how brace patterns can quickly grow out of control.
|
||||
|
||||
**More information**
|
||||
|
||||
Interested in learning more about brace expansion?
|
||||
|
||||
- [linuxjournal/bash-brace-expansion](http://www.linuxjournal.com/content/bash-brace-expansion)
|
||||
- [rosettacode/Brace_expansion](https://rosettacode.org/wiki/Brace_expansion)
|
||||
- [cartesian product](https://en.wikipedia.org/wiki/Cartesian_product)
|
||||
|
||||
</details>
|
||||
|
||||
## Performance
|
||||
|
||||
Braces is not only screaming fast, it's also more accurate than other brace expansion libraries.
|
||||
|
||||
### Better algorithms
|
||||
|
||||
Fortunately there is a solution to the ["brace bomb" problem](#brace-matching-pitfalls): _don't expand brace patterns into an array when they're used for matching_.
|
||||
|
||||
Instead, convert the pattern into an optimized regular expression. This is easier said than done, and braces is the only library that does this currently.
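For example, here is a minimal sketch of matching against the compiled (non-expanded) output; the anchoring with `^`/`$` and the exact pattern are assumptions for illustration:

```js
const braces = require('braces');

// compile the brace pattern into a regex source string instead of expanding it
const [source] = braces('foo/{1..3}/bar'); //=> 'foo/([1-3])/bar'
const regex = new RegExp(`^${source}$`);

console.log(regex.test('foo/2/bar')); //=> true
console.log(regex.test('foo/9/bar')); //=> false
```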
|
||||
|
||||
**The proof is in the numbers**
|
||||
|
||||
Minimatch gets exponentially slower as patterns increase in complexity; braces does not. The following results were generated using `braces()` and `minimatch.braceExpand()`, respectively.
|
||||
|
||||
| **Pattern** | **braces** | **[minimatch][]** |
|
||||
| --------------------------- | ------------------- | ---------------------------- |
|
||||
| `{1..9007199254740991}`[^1] | `298 B` (5ms 459μs) | N/A (freezes) |
|
||||
| `{1..1000000000000000}` | `41 B` (1ms 15μs) | N/A (freezes) |
|
||||
| `{1..100000000000000}` | `40 B` (890μs) | N/A (freezes) |
|
||||
| `{1..10000000000000}` | `39 B` (2ms 49μs) | N/A (freezes) |
|
||||
| `{1..1000000000000}` | `38 B` (608μs) | N/A (freezes) |
|
||||
| `{1..100000000000}` | `37 B` (397μs) | N/A (freezes) |
|
||||
| `{1..10000000000}` | `35 B` (983μs) | N/A (freezes) |
|
||||
| `{1..1000000000}` | `34 B` (798μs) | N/A (freezes) |
|
||||
| `{1..100000000}` | `33 B` (733μs) | N/A (freezes) |
|
||||
| `{1..10000000}` | `32 B` (5ms 632μs) | `78.89 MB` (16s 388ms 569μs) |
|
||||
| `{1..1000000}` | `31 B` (1ms 381μs) | `6.89 MB` (1s 496ms 887μs) |
|
||||
| `{1..100000}` | `30 B` (950μs) | `588.89 kB` (146ms 921μs) |
|
||||
| `{1..10000}` | `29 B` (1ms 114μs) | `48.89 kB` (14ms 187μs) |
|
||||
| `{1..1000}` | `28 B` (760μs) | `3.89 kB` (1ms 453μs) |
|
||||
| `{1..100}` | `22 B` (345μs) | `291 B` (196μs) |
|
||||
| `{1..10}` | `10 B` (533μs) | `20 B` (37μs) |
|
||||
| `{1..3}` | `7 B` (190μs) | `5 B` (27μs) |
|
||||
|
||||
### Faster algorithms
|
||||
|
||||
When you need expansion, braces is still much faster.
|
||||
|
||||
_(the following results were generated using `braces.expand()` and `minimatch.braceExpand()`, respectively)_
|
||||
|
||||
| **Pattern** | **braces** | **[minimatch][]** |
|
||||
| --------------- | --------------------------- | ---------------------------- |
|
||||
| `{1..10000000}` | `78.89 MB` (2s 698ms 642μs) | `78.89 MB` (18s 601ms 974μs) |
|
||||
| `{1..1000000}` | `6.89 MB` (458ms 576μs) | `6.89 MB` (1s 491ms 621μs) |
|
||||
| `{1..100000}` | `588.89 kB` (20ms 728μs) | `588.89 kB` (156ms 919μs) |
|
||||
| `{1..10000}` | `48.89 kB` (2ms 202μs) | `48.89 kB` (13ms 641μs) |
|
||||
| `{1..1000}` | `3.89 kB` (1ms 796μs) | `3.89 kB` (1ms 958μs) |
|
||||
| `{1..100}` | `291 B` (424μs) | `291 B` (211μs) |
|
||||
| `{1..10}` | `20 B` (487μs) | `20 B` (72μs) |
|
||||
| `{1..3}` | `5 B` (166μs) | `5 B` (27μs) |
|
||||
|
||||
If you'd like to run these comparisons yourself, see [test/support/generate.js](test/support/generate.js).
|
||||
|
||||
## Benchmarks
|
||||
|
||||
### Running benchmarks
|
||||
|
||||
Install dev dependencies:
|
||||
|
||||
```bash
|
||||
npm install && npm run benchmark
|
||||
```
|
||||
|
||||
### Latest results
|
||||
|
||||
Braces is more accurate, without sacrificing performance.
|
||||
|
||||
```bash
|
||||
● expand - range (expanded)
|
||||
braces x 53,167 ops/sec ±0.12% (102 runs sampled)
|
||||
minimatch x 11,378 ops/sec ±0.10% (102 runs sampled)
|
||||
● expand - range (optimized for regex)
|
||||
braces x 373,442 ops/sec ±0.04% (100 runs sampled)
|
||||
minimatch x 3,262 ops/sec ±0.18% (100 runs sampled)
|
||||
● expand - nested ranges (expanded)
|
||||
braces x 33,921 ops/sec ±0.09% (99 runs sampled)
|
||||
minimatch x 10,855 ops/sec ±0.28% (100 runs sampled)
|
||||
● expand - nested ranges (optimized for regex)
|
||||
braces x 287,479 ops/sec ±0.52% (98 runs sampled)
|
||||
minimatch x 3,219 ops/sec ±0.28% (101 runs sampled)
|
||||
● expand - set (expanded)
|
||||
braces x 238,243 ops/sec ±0.19% (97 runs sampled)
|
||||
minimatch x 538,268 ops/sec ±0.31% (96 runs sampled)
|
||||
● expand - set (optimized for regex)
|
||||
braces x 321,844 ops/sec ±0.10% (97 runs sampled)
|
||||
minimatch x 140,600 ops/sec ±0.15% (100 runs sampled)
|
||||
● expand - nested sets (expanded)
|
||||
braces x 165,371 ops/sec ±0.42% (96 runs sampled)
|
||||
minimatch x 337,720 ops/sec ±0.28% (100 runs sampled)
|
||||
● expand - nested sets (optimized for regex)
|
||||
braces x 242,948 ops/sec ±0.12% (99 runs sampled)
|
||||
minimatch x 87,403 ops/sec ±0.79% (96 runs sampled)
|
||||
```
|
||||
|
||||
## About
|
||||
|
||||
<details>
|
||||
<summary><strong>Contributing</strong></summary>
|
||||
|
||||
Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Running Tests</strong></summary>
|
||||
|
||||
Running and reviewing unit tests is a great way to get familiar with a library and its API. You can install dependencies and run tests with the following command:
|
||||
|
||||
```sh
|
||||
$ npm install && npm test
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Building docs</strong></summary>
|
||||
|
||||
_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
|
||||
|
||||
To generate the readme, run the following command:
|
||||
|
||||
```sh
|
||||
$ npm install -g verbose/verb#dev verb-generate-readme && verb
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Contributors
|
||||
|
||||
| **Commits** | **Contributor** |
|
||||
| ----------- | ------------------------------------------------------------- |
|
||||
| 197 | [jonschlinkert](https://github.com/jonschlinkert) |
|
||||
| 4 | [doowb](https://github.com/doowb) |
|
||||
| 1 | [es128](https://github.com/es128) |
|
||||
| 1 | [eush77](https://github.com/eush77) |
|
||||
| 1 | [hemanth](https://github.com/hemanth) |
|
||||
| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) |
|
||||
|
||||
### Author
|
||||
|
||||
**Jon Schlinkert**
|
||||
|
||||
- [GitHub Profile](https://github.com/jonschlinkert)
|
||||
- [Twitter Profile](https://twitter.com/jonschlinkert)
|
||||
- [LinkedIn Profile](https://linkedin.com/in/jonschlinkert)
|
||||
|
||||
### License
|
||||
|
||||
Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert).
|
||||
Released under the [MIT License](LICENSE).
|
||||
|
||||
---
|
||||
|
||||
_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._
|
||||
170
node_modules/braces/index.js
generated
vendored
Normal file
@@ -0,0 +1,170 @@
'use strict';
|
||||
|
||||
const stringify = require('./lib/stringify');
|
||||
const compile = require('./lib/compile');
|
||||
const expand = require('./lib/expand');
|
||||
const parse = require('./lib/parse');
|
||||
|
||||
/**
|
||||
* Expand the given pattern or create a regex-compatible string.
|
||||
*
|
||||
* ```js
|
||||
* const braces = require('braces');
|
||||
 * console.log(braces('{a,b,c}')); //=> ['(a|b|c)']
 * console.log(braces('{a,b,c}', { expand: true })); //=> ['a', 'b', 'c']
|
||||
* ```
|
||||
* @param {String} `str`
|
||||
* @param {Object} `options`
|
||||
* @return {String}
|
||||
* @api public
|
||||
*/
|
||||
|
||||
const braces = (input, options = {}) => {
|
||||
let output = [];
|
||||
|
||||
if (Array.isArray(input)) {
|
||||
for (const pattern of input) {
|
||||
const result = braces.create(pattern, options);
|
||||
if (Array.isArray(result)) {
|
||||
output.push(...result);
|
||||
} else {
|
||||
output.push(result);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
output = [].concat(braces.create(input, options));
|
||||
}
|
||||
|
||||
if (options && options.expand === true && options.nodupes === true) {
|
||||
output = [...new Set(output)];
|
||||
}
|
||||
return output;
|
||||
};
|
||||
|
||||
/**
|
||||
* Parse the given `str` with the given `options`.
|
||||
*
|
||||
* ```js
|
||||
* // braces.parse(pattern, [, options]);
|
||||
* const ast = braces.parse('a/{b,c}/d');
|
||||
* console.log(ast);
|
||||
* ```
|
||||
* @param {String} pattern Brace pattern to parse
|
||||
* @param {Object} options
|
||||
* @return {Object} Returns an AST
|
||||
* @api public
|
||||
*/
|
||||
|
||||
braces.parse = (input, options = {}) => parse(input, options);
|
||||
|
||||
/**
|
||||
* Creates a braces string from an AST, or an AST node.
|
||||
*
|
||||
* ```js
|
||||
* const braces = require('braces');
|
||||
* let ast = braces.parse('foo/{a,b}/bar');
|
||||
* console.log(stringify(ast.nodes[2])); //=> '{a,b}'
|
||||
* ```
|
||||
* @param {String} `input` Brace pattern or AST.
|
||||
* @param {Object} `options`
|
||||
* @return {Array} Returns an array of expanded values.
|
||||
* @api public
|
||||
*/
|
||||
|
||||
braces.stringify = (input, options = {}) => {
|
||||
if (typeof input === 'string') {
|
||||
return stringify(braces.parse(input, options), options);
|
||||
}
|
||||
return stringify(input, options);
|
||||
};
|
||||
|
||||
/**
|
||||
* Compiles a brace pattern into a regex-compatible, optimized string.
|
||||
* This method is called by the main [braces](#braces) function by default.
|
||||
*
|
||||
* ```js
|
||||
* const braces = require('braces');
|
||||
* console.log(braces.compile('a/{b,c}/d'));
|
||||
* //=> ['a/(b|c)/d']
|
||||
* ```
|
||||
* @param {String} `input` Brace pattern or AST.
|
||||
* @param {Object} `options`
|
||||
* @return {Array} Returns an array of expanded values.
|
||||
* @api public
|
||||
*/
|
||||
|
||||
braces.compile = (input, options = {}) => {
|
||||
if (typeof input === 'string') {
|
||||
input = braces.parse(input, options);
|
||||
}
|
||||
return compile(input, options);
|
||||
};
|
||||
|
||||
/**
|
||||
* Expands a brace pattern into an array. This method is called by the
|
||||
* main [braces](#braces) function when `options.expand` is true. Before
|
||||
 * using this method it's recommended that you read the [performance notes](#performance)
|
||||
* and advantages of using [.compile](#compile) instead.
|
||||
*
|
||||
* ```js
|
||||
* const braces = require('braces');
|
||||
* console.log(braces.expand('a/{b,c}/d'));
|
||||
* //=> ['a/b/d', 'a/c/d'];
|
||||
* ```
|
||||
* @param {String} `pattern` Brace pattern
|
||||
* @param {Object} `options`
|
||||
* @return {Array} Returns an array of expanded values.
|
||||
* @api public
|
||||
*/
|
||||
|
||||
braces.expand = (input, options = {}) => {
|
||||
if (typeof input === 'string') {
|
||||
input = braces.parse(input, options);
|
||||
}
|
||||
|
||||
let result = expand(input, options);
|
||||
|
||||
// filter out empty strings if specified
|
||||
if (options.noempty === true) {
|
||||
result = result.filter(Boolean);
|
||||
}
|
||||
|
||||
// filter out duplicates if specified
|
||||
if (options.nodupes === true) {
|
||||
result = [...new Set(result)];
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Processes a brace pattern and returns either an expanded array
|
||||
 * (if `options.expand` is true) or a highly optimized regex-compatible string.
|
||||
* This method is called by the main [braces](#braces) function.
|
||||
*
|
||||
* ```js
|
||||
* const braces = require('braces');
|
||||
* console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))
|
||||
* //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'
|
||||
* ```
|
||||
* @param {String} `pattern` Brace pattern
|
||||
* @param {Object} `options`
|
||||
* @return {Array} Returns an array of expanded values.
|
||||
* @api public
|
||||
*/
|
||||
|
||||
braces.create = (input, options = {}) => {
|
||||
if (input === '' || input.length < 3) {
|
||||
return [input];
|
||||
}
|
||||
|
||||
return options.expand !== true
|
||||
? braces.compile(input, options)
|
||||
: braces.expand(input, options);
|
||||
};
|
||||
|
||||
/**
|
||||
* Expose "braces"
|
||||
*/
|
||||
|
||||
module.exports = braces;
|
||||
60
node_modules/braces/lib/compile.js
generated
vendored
Normal file
@@ -0,0 +1,60 @@
'use strict';
|
||||
|
||||
const fill = require('fill-range');
|
||||
const utils = require('./utils');
|
||||
|
||||
const compile = (ast, options = {}) => {
|
||||
const walk = (node, parent = {}) => {
|
||||
const invalidBlock = utils.isInvalidBrace(parent);
|
||||
const invalidNode = node.invalid === true && options.escapeInvalid === true;
|
||||
const invalid = invalidBlock === true || invalidNode === true;
|
||||
const prefix = options.escapeInvalid === true ? '\\' : '';
|
||||
let output = '';
|
||||
|
||||
if (node.isOpen === true) {
|
||||
return prefix + node.value;
|
||||
}
|
||||
|
||||
if (node.isClose === true) {
|
||||
return prefix + node.value;
|
||||
}
|
||||
|
||||
if (node.type === 'open') {
|
||||
return invalid ? prefix + node.value : '(';
|
||||
}
|
||||
|
||||
if (node.type === 'close') {
|
||||
return invalid ? prefix + node.value : ')';
|
||||
}
|
||||
|
||||
if (node.type === 'comma') {
|
||||
return node.prev.type === 'comma' ? '' : invalid ? node.value : '|';
|
||||
}
|
||||
|
||||
if (node.value) {
|
||||
return node.value;
|
||||
}
|
||||
|
||||
if (node.nodes && node.ranges > 0) {
|
||||
const args = utils.reduce(node.nodes);
|
||||
const range = fill(...args, { ...options, wrap: false, toRegex: true, strictZeros: true });
|
||||
|
||||
if (range.length !== 0) {
|
||||
return args.length > 1 && range.length > 1 ? `(${range})` : range;
|
||||
}
|
||||
}
|
||||
|
||||
if (node.nodes) {
|
||||
for (const child of node.nodes) {
|
||||
output += walk(child, node);
|
||||
}
|
||||
}
|
||||
|
||||
return output;
|
||||
};
|
||||
|
||||
return walk(ast);
|
||||
};
|
||||
|
||||
module.exports = compile;
|
||||
57
node_modules/braces/lib/constants.js
generated
vendored
Normal file
@@ -0,0 +1,57 @@
'use strict';
|
||||
|
||||
module.exports = {
|
||||
MAX_LENGTH: 10000,
|
||||
|
||||
// Digits
|
||||
CHAR_0: '0', /* 0 */
|
||||
CHAR_9: '9', /* 9 */
|
||||
|
||||
// Alphabet chars.
|
||||
CHAR_UPPERCASE_A: 'A', /* A */
|
||||
CHAR_LOWERCASE_A: 'a', /* a */
|
||||
CHAR_UPPERCASE_Z: 'Z', /* Z */
|
||||
CHAR_LOWERCASE_Z: 'z', /* z */
|
||||
|
||||
CHAR_LEFT_PARENTHESES: '(', /* ( */
|
||||
CHAR_RIGHT_PARENTHESES: ')', /* ) */
|
||||
|
||||
CHAR_ASTERISK: '*', /* * */
|
||||
|
||||
// Non-alphabetic chars.
|
||||
CHAR_AMPERSAND: '&', /* & */
|
||||
CHAR_AT: '@', /* @ */
|
||||
CHAR_BACKSLASH: '\\', /* \ */
|
||||
CHAR_BACKTICK: '`', /* ` */
|
||||
CHAR_CARRIAGE_RETURN: '\r', /* \r */
|
||||
CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */
|
||||
CHAR_COLON: ':', /* : */
|
||||
CHAR_COMMA: ',', /* , */
|
||||
  CHAR_DOLLAR: '$', /* $ */
|
||||
CHAR_DOT: '.', /* . */
|
||||
CHAR_DOUBLE_QUOTE: '"', /* " */
|
||||
CHAR_EQUAL: '=', /* = */
|
||||
CHAR_EXCLAMATION_MARK: '!', /* ! */
|
||||
CHAR_FORM_FEED: '\f', /* \f */
|
||||
CHAR_FORWARD_SLASH: '/', /* / */
|
||||
CHAR_HASH: '#', /* # */
|
||||
CHAR_HYPHEN_MINUS: '-', /* - */
|
||||
CHAR_LEFT_ANGLE_BRACKET: '<', /* < */
|
||||
CHAR_LEFT_CURLY_BRACE: '{', /* { */
|
||||
CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */
|
||||
CHAR_LINE_FEED: '\n', /* \n */
|
||||
CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */
|
||||
CHAR_PERCENT: '%', /* % */
|
||||
CHAR_PLUS: '+', /* + */
|
||||
CHAR_QUESTION_MARK: '?', /* ? */
|
||||
CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */
|
||||
CHAR_RIGHT_CURLY_BRACE: '}', /* } */
|
||||
CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */
|
||||
CHAR_SEMICOLON: ';', /* ; */
|
||||
CHAR_SINGLE_QUOTE: '\'', /* ' */
|
||||
CHAR_SPACE: ' ', /* */
|
||||
CHAR_TAB: '\t', /* \t */
|
||||
CHAR_UNDERSCORE: '_', /* _ */
|
||||
CHAR_VERTICAL_LINE: '|', /* | */
|
||||
CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */
|
||||
};
|
||||
113
node_modules/braces/lib/expand.js
generated
vendored
Normal file
@@ -0,0 +1,113 @@
'use strict';
|
||||
|
||||
const fill = require('fill-range');
|
||||
const stringify = require('./stringify');
|
||||
const utils = require('./utils');
|
||||
|
||||
const append = (queue = '', stash = '', enclose = false) => {
|
||||
const result = [];
|
||||
|
||||
queue = [].concat(queue);
|
||||
stash = [].concat(stash);
|
||||
|
||||
if (!stash.length) return queue;
|
||||
if (!queue.length) {
|
||||
return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash;
|
||||
}
|
||||
|
||||
for (const item of queue) {
|
||||
if (Array.isArray(item)) {
|
||||
for (const value of item) {
|
||||
result.push(append(value, stash, enclose));
|
||||
}
|
||||
} else {
|
||||
for (let ele of stash) {
|
||||
if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;
|
||||
result.push(Array.isArray(ele) ? append(item, ele, enclose) : item + ele);
|
||||
}
|
||||
}
|
||||
}
|
||||
return utils.flatten(result);
|
||||
};
|
||||
|
||||
const expand = (ast, options = {}) => {
|
||||
const rangeLimit = options.rangeLimit === undefined ? 1000 : options.rangeLimit;
|
||||
|
||||
const walk = (node, parent = {}) => {
|
||||
node.queue = [];
|
||||
|
||||
let p = parent;
|
||||
let q = parent.queue;
|
||||
|
||||
while (p.type !== 'brace' && p.type !== 'root' && p.parent) {
|
||||
p = p.parent;
|
||||
q = p.queue;
|
||||
}
|
||||
|
||||
if (node.invalid || node.dollar) {
|
||||
q.push(append(q.pop(), stringify(node, options)));
|
||||
return;
|
||||
}
|
||||
|
||||
if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {
|
||||
q.push(append(q.pop(), ['{}']));
|
||||
return;
|
||||
}
|
||||
|
||||
if (node.nodes && node.ranges > 0) {
|
||||
const args = utils.reduce(node.nodes);
|
||||
|
||||
if (utils.exceedsLimit(...args, options.step, rangeLimit)) {
|
||||
throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');
|
||||
}
|
||||
|
||||
let range = fill(...args, options);
|
||||
if (range.length === 0) {
|
||||
range = stringify(node, options);
|
||||
}
|
||||
|
||||
q.push(append(q.pop(), range));
|
||||
node.nodes = [];
|
||||
return;
|
||||
}
|
||||
|
||||
const enclose = utils.encloseBrace(node);
|
||||
let queue = node.queue;
|
||||
let block = node;
|
||||
|
||||
while (block.type !== 'brace' && block.type !== 'root' && block.parent) {
|
||||
block = block.parent;
|
||||
queue = block.queue;
|
||||
}
|
||||
|
||||
for (let i = 0; i < node.nodes.length; i++) {
|
||||
const child = node.nodes[i];
|
||||
|
||||
if (child.type === 'comma' && node.type === 'brace') {
|
||||
if (i === 1) queue.push('');
|
||||
queue.push('');
|
||||
continue;
|
||||
}
|
||||
|
||||
if (child.type === 'close') {
|
||||
q.push(append(q.pop(), queue, enclose));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (child.value && child.type !== 'open') {
|
||||
queue.push(append(queue.pop(), child.value));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (child.nodes) {
|
||||
walk(child, node);
|
||||
}
|
||||
}
|
||||
|
||||
return queue;
|
||||
};
|
||||
|
||||
return utils.flatten(walk(ast));
|
||||
};
|
||||
|
||||
module.exports = expand;
|
||||
331
node_modules/braces/lib/parse.js
generated
vendored
Normal file
@@ -0,0 +1,331 @@
'use strict';
|
||||
|
||||
const stringify = require('./stringify');
|
||||
|
||||
/**
|
||||
* Constants
|
||||
*/
|
||||
|
||||
const {
|
||||
MAX_LENGTH,
|
||||
CHAR_BACKSLASH, /* \ */
|
||||
CHAR_BACKTICK, /* ` */
|
||||
CHAR_COMMA, /* , */
|
||||
CHAR_DOT, /* . */
|
||||
CHAR_LEFT_PARENTHESES, /* ( */
|
||||
CHAR_RIGHT_PARENTHESES, /* ) */
|
||||
CHAR_LEFT_CURLY_BRACE, /* { */
|
||||
CHAR_RIGHT_CURLY_BRACE, /* } */
|
||||
CHAR_LEFT_SQUARE_BRACKET, /* [ */
|
||||
CHAR_RIGHT_SQUARE_BRACKET, /* ] */
|
||||
CHAR_DOUBLE_QUOTE, /* " */
|
||||
CHAR_SINGLE_QUOTE, /* ' */
|
||||
CHAR_NO_BREAK_SPACE,
|
||||
CHAR_ZERO_WIDTH_NOBREAK_SPACE
|
||||
} = require('./constants');
|
||||
|
||||
/**
|
||||
* parse
|
||||
*/
|
||||
|
||||
const parse = (input, options = {}) => {
|
||||
if (typeof input !== 'string') {
|
||||
throw new TypeError('Expected a string');
|
||||
}
|
||||
|
||||
const opts = options || {};
|
||||
const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
|
||||
if (input.length > max) {
|
||||
throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`);
|
||||
}
|
||||
|
||||
const ast = { type: 'root', input, nodes: [] };
|
||||
const stack = [ast];
|
||||
let block = ast;
|
||||
let prev = ast;
|
||||
let brackets = 0;
|
||||
const length = input.length;
|
||||
let index = 0;
|
||||
let depth = 0;
|
||||
let value;
|
||||
|
||||
/**
|
||||
* Helpers
|
||||
*/
|
||||
|
||||
const advance = () => input[index++];
|
||||
const push = node => {
|
||||
if (node.type === 'text' && prev.type === 'dot') {
|
||||
prev.type = 'text';
|
||||
}
|
||||
|
||||
if (prev && prev.type === 'text' && node.type === 'text') {
|
||||
prev.value += node.value;
|
||||
return;
|
||||
}
|
||||
|
||||
block.nodes.push(node);
|
||||
node.parent = block;
|
||||
node.prev = prev;
|
||||
prev = node;
|
||||
return node;
|
||||
};
|
||||
|
||||
push({ type: 'bos' });
|
||||
|
||||
while (index < length) {
|
||||
block = stack[stack.length - 1];
|
||||
value = advance();
|
||||
|
||||
/**
|
||||
* Invalid chars
|
||||
*/
|
||||
|
||||
if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Escaped chars
|
||||
*/
|
||||
|
||||
if (value === CHAR_BACKSLASH) {
|
||||
push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Right square bracket (literal): ']'
|
||||
*/
|
||||
|
||||
if (value === CHAR_RIGHT_SQUARE_BRACKET) {
|
||||
push({ type: 'text', value: '\\' + value });
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Left square bracket: '['
|
||||
*/
|
||||
|
||||
if (value === CHAR_LEFT_SQUARE_BRACKET) {
|
||||
brackets++;
|
||||
|
||||
let next;
|
||||
|
||||
while (index < length && (next = advance())) {
|
||||
value += next;
|
||||
|
||||
if (next === CHAR_LEFT_SQUARE_BRACKET) {
|
||||
brackets++;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (next === CHAR_BACKSLASH) {
|
||||
value += advance();
|
||||
continue;
|
||||
}
|
||||
|
||||
if (next === CHAR_RIGHT_SQUARE_BRACKET) {
|
||||
brackets--;
|
||||
|
||||
if (brackets === 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
push({ type: 'text', value });
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parentheses
|
||||
*/
|
||||
|
||||
if (value === CHAR_LEFT_PARENTHESES) {
|
||||
block = push({ type: 'paren', nodes: [] });
|
||||
stack.push(block);
|
||||
push({ type: 'text', value });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (value === CHAR_RIGHT_PARENTHESES) {
|
||||
if (block.type !== 'paren') {
|
||||
push({ type: 'text', value });
|
||||
continue;
|
||||
}
|
||||
block = stack.pop();
|
||||
push({ type: 'text', value });
|
||||
block = stack[stack.length - 1];
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Quotes: '|"|`
|
||||
*/
|
||||
|
||||
if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {
|
||||
const open = value;
|
||||
let next;
|
||||
|
||||
if (options.keepQuotes !== true) {
|
||||
value = '';
|
||||
}
|
||||
|
||||
while (index < length && (next = advance())) {
|
||||
if (next === CHAR_BACKSLASH) {
|
||||
value += next + advance();
|
||||
continue;
|
||||
}
|
||||
|
||||
if (next === open) {
|
||||
if (options.keepQuotes === true) value += next;
|
||||
break;
|
||||
}
|
||||
|
||||
value += next;
|
||||
}
|
||||
|
||||
push({ type: 'text', value });
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Left curly brace: '{'
|
||||
*/
|
||||
|
||||
if (value === CHAR_LEFT_CURLY_BRACE) {
|
||||
depth++;
|
||||
|
||||
const dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;
|
||||
const brace = {
|
||||
type: 'brace',
|
||||
open: true,
|
||||
close: false,
|
||||
dollar,
|
||||
depth,
|
||||
commas: 0,
|
||||
ranges: 0,
|
||||
nodes: []
|
||||
};
|
||||
|
||||
block = push(brace);
|
||||
stack.push(block);
|
||||
push({ type: 'open', value });
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Right curly brace: '}'
|
||||
*/
|
||||
|
||||
if (value === CHAR_RIGHT_CURLY_BRACE) {
|
||||
if (block.type !== 'brace') {
|
||||
push({ type: 'text', value });
|
||||
continue;
|
||||
}
|
||||
|
||||
const type = 'close';
|
||||
block = stack.pop();
|
||||
block.close = true;
|
||||
|
||||
push({ type, value });
|
||||
depth--;
|
||||
|
||||
block = stack[stack.length - 1];
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Comma: ','
|
||||
*/
|
||||
|
||||
if (value === CHAR_COMMA && depth > 0) {
|
||||
if (block.ranges > 0) {
|
||||
block.ranges = 0;
|
||||
const open = block.nodes.shift();
|
||||
block.nodes = [open, { type: 'text', value: stringify(block) }];
|
||||
}
|
||||
|
||||
push({ type: 'comma', value });
|
||||
block.commas++;
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Dot: '.'
|
||||
*/
|
||||
|
||||
if (value === CHAR_DOT && depth > 0 && block.commas === 0) {
|
||||
const siblings = block.nodes;
|
||||
|
||||
if (depth === 0 || siblings.length === 0) {
|
||||
push({ type: 'text', value });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (prev.type === 'dot') {
|
||||
block.range = [];
|
||||
prev.value += value;
|
||||
prev.type = 'range';
|
||||
|
||||
if (block.nodes.length !== 3 && block.nodes.length !== 5) {
|
||||
block.invalid = true;
|
||||
block.ranges = 0;
|
||||
prev.type = 'text';
|
||||
continue;
|
||||
}
|
||||
|
||||
block.ranges++;
|
||||
block.args = [];
|
||||
continue;
|
||||
}
|
||||
|
||||
if (prev.type === 'range') {
|
||||
siblings.pop();
|
||||
|
||||
const before = siblings[siblings.length - 1];
|
||||
before.value += prev.value + value;
|
||||
prev = before;
|
||||
block.ranges--;
|
||||
continue;
|
||||
}
|
||||
|
||||
push({ type: 'dot', value });
|
||||
continue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Text
|
||||
*/
|
||||
|
||||
push({ type: 'text', value });
|
||||
}
|
||||
|
||||
// Mark imbalanced braces and brackets as invalid
|
||||
do {
|
||||
block = stack.pop();
|
||||
|
||||
if (block.type !== 'root') {
|
||||
block.nodes.forEach(node => {
|
||||
if (!node.nodes) {
|
||||
if (node.type === 'open') node.isOpen = true;
|
||||
if (node.type === 'close') node.isClose = true;
|
||||
if (!node.nodes) node.type = 'text';
|
||||
node.invalid = true;
|
||||
}
|
||||
});
|
||||
|
||||
// get the location of the block on parent.nodes (block's siblings)
|
||||
const parent = stack[stack.length - 1];
|
||||
const index = parent.nodes.indexOf(block);
|
||||
      // replace the (invalid) block with its nodes
|
||||
parent.nodes.splice(index, 1, ...block.nodes);
|
||||
}
|
||||
} while (stack.length > 0);
|
||||
|
||||
push({ type: 'eos' });
|
||||
return ast;
|
||||
};
|
||||
|
||||
module.exports = parse;
|
||||
32
node_modules/braces/lib/stringify.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
'use strict';
|
||||
|
||||
const utils = require('./utils');
|
||||
|
||||
module.exports = (ast, options = {}) => {
|
||||
const stringify = (node, parent = {}) => {
|
||||
const invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent);
|
||||
const invalidNode = node.invalid === true && options.escapeInvalid === true;
|
||||
let output = '';
|
||||
|
||||
if (node.value) {
|
||||
if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) {
|
||||
return '\\' + node.value;
|
||||
}
|
||||
return node.value;
|
||||
}
|
||||
|
||||
if (node.nodes) {
|
||||
for (const child of node.nodes) {
|
||||
output += stringify(child);
|
||||
}
|
||||
}
|
||||
return output;
|
||||
};
|
||||
|
||||
return stringify(ast);
|
||||
};
|
||||
|
||||
122
node_modules/braces/lib/utils.js
generated
vendored
Normal file
@@ -0,0 +1,122 @@
'use strict';
|
||||
|
||||
exports.isInteger = num => {
|
||||
if (typeof num === 'number') {
|
||||
return Number.isInteger(num);
|
||||
}
|
||||
if (typeof num === 'string' && num.trim() !== '') {
|
||||
return Number.isInteger(Number(num));
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
/**
|
||||
* Find a node of the given type
|
||||
*/
|
||||
|
||||
exports.find = (node, type) => node.nodes.find(node => node.type === type);
|
||||
|
||||
/**
|
||||
 * Returns true if expanding the given range would exceed the limit
|
||||
*/
|
||||
|
||||
exports.exceedsLimit = (min, max, step = 1, limit) => {
|
||||
if (limit === false) return false;
|
||||
if (!exports.isInteger(min) || !exports.isInteger(max)) return false;
|
||||
return ((Number(max) - Number(min)) / Number(step)) >= limit;
|
||||
};
|
||||
|
||||
/**
|
||||
* Escape the given node with '\\' before node.value
|
||||
*/
|
||||
|
||||
exports.escapeNode = (block, n = 0, type) => {
|
||||
const node = block.nodes[n];
|
||||
if (!node) return;
|
||||
|
||||
if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {
|
||||
if (node.escaped !== true) {
|
||||
node.value = '\\' + node.value;
|
||||
node.escaped = true;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns true if the given brace node should be enclosed in literal braces
|
||||
*/
|
||||
|
||||
exports.encloseBrace = node => {
|
||||
if (node.type !== 'brace') return false;
|
||||
if ((node.commas >> 0 + node.ranges >> 0) === 0) {
|
||||
node.invalid = true;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns true if a brace node is invalid.
|
||||
*/
|
||||
|
||||
exports.isInvalidBrace = block => {
|
||||
if (block.type !== 'brace') return false;
|
||||
if (block.invalid === true || block.dollar) return true;
|
||||
if ((block.commas >> 0 + block.ranges >> 0) === 0) {
|
||||
block.invalid = true;
|
||||
return true;
|
||||
}
|
||||
if (block.open !== true || block.close !== true) {
|
||||
block.invalid = true;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns true if a node is an open or close node
|
||||
*/
|
||||
|
||||
exports.isOpenOrClose = node => {
|
||||
if (node.type === 'open' || node.type === 'close') {
|
||||
return true;
|
||||
}
|
||||
return node.open === true || node.close === true;
|
||||
};
|
||||
|
||||
/**
|
||||
* Reduce an array of text nodes.
|
||||
*/
|
||||
|
||||
exports.reduce = nodes => nodes.reduce((acc, node) => {
|
||||
if (node.type === 'text') acc.push(node.value);
|
||||
if (node.type === 'range') node.type = 'text';
|
||||
return acc;
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Flatten an array
|
||||
*/
|
||||
|
||||
exports.flatten = (...args) => {
|
||||
const result = [];
|
||||
|
||||
const flat = arr => {
|
||||
for (let i = 0; i < arr.length; i++) {
|
||||
const ele = arr[i];
|
||||
|
||||
if (Array.isArray(ele)) {
|
||||
flat(ele);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (ele !== undefined) {
|
||||
result.push(ele);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
flat(args);
|
||||
return result;
|
||||
};
|
||||
77
node_modules/braces/package.json
generated
vendored
Normal file
@@ -0,0 +1,77 @@
{
|
||||
"name": "braces",
|
||||
"description": "Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.",
|
||||
"version": "3.0.3",
|
||||
"homepage": "https://github.com/micromatch/braces",
|
||||
"author": "Jon Schlinkert (https://github.com/jonschlinkert)",
|
||||
"contributors": [
|
||||
"Brian Woodward (https://twitter.com/doowb)",
|
||||
"Elan Shanker (https://github.com/es128)",
|
||||
"Eugene Sharygin (https://github.com/eush77)",
|
||||
"hemanth.hm (http://h3manth.com)",
|
||||
"Jon Schlinkert (http://twitter.com/jonschlinkert)"
|
||||
],
|
||||
"repository": "micromatch/braces",
|
||||
"bugs": {
|
||||
"url": "https://github.com/micromatch/braces/issues"
|
||||
},
|
||||
"license": "MIT",
|
||||
"files": [
|
||||
"index.js",
|
||||
"lib"
|
||||
],
|
||||
"main": "index.js",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha",
|
||||
"benchmark": "node benchmark"
|
||||
},
|
||||
"dependencies": {
|
||||
"fill-range": "^7.1.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"ansi-colors": "^3.2.4",
|
||||
"bash-path": "^2.0.1",
|
||||
"gulp-format-md": "^2.0.0",
|
||||
"mocha": "^6.1.1"
|
||||
},
|
||||
"keywords": [
|
||||
"alpha",
|
||||
"alphabetical",
|
||||
"bash",
|
||||
"brace",
|
||||
"braces",
|
||||
"expand",
|
||||
"expansion",
|
||||
"filepath",
|
||||
"fill",
|
||||
"fs",
|
||||
"glob",
|
||||
"globbing",
|
||||
"letter",
|
||||
"match",
|
||||
"matches",
|
||||
"matching",
|
||||
"number",
|
||||
"numerical",
|
||||
"path",
|
||||
"range",
|
||||
"ranges",
|
||||
"sh"
|
||||
],
|
||||
"verb": {
|
||||
"toc": false,
|
||||
"layout": "default",
|
||||
"tasks": [
|
||||
"readme"
|
||||
],
|
||||
"lint": {
|
||||
"reflinks": true
|
||||
},
|
||||
"plugins": [
|
||||
"gulp-format-md"
|
||||
]
|
||||
}
|
||||
}
|
||||
116
node_modules/chalk/index.js
generated
vendored
Normal file
@@ -0,0 +1,116 @@
'use strict';
|
||||
var escapeStringRegexp = require('escape-string-regexp');
|
||||
var ansiStyles = require('ansi-styles');
|
||||
var stripAnsi = require('strip-ansi');
|
||||
var hasAnsi = require('has-ansi');
|
||||
var supportsColor = require('supports-color');
|
||||
var defineProps = Object.defineProperties;
|
||||
var isSimpleWindowsTerm = process.platform === 'win32' && !/^xterm/i.test(process.env.TERM);
|
||||
|
||||
function Chalk(options) {
|
||||
// detect mode if not set manually
|
||||
this.enabled = !options || options.enabled === undefined ? supportsColor : options.enabled;
|
||||
}
|
||||
|
||||
// use bright blue on Windows as the normal blue color is illegible
|
||||
if (isSimpleWindowsTerm) {
|
||||
ansiStyles.blue.open = '\u001b[94m';
|
||||
}
|
||||
|
||||
var styles = (function () {
|
||||
var ret = {};
|
||||
|
||||
Object.keys(ansiStyles).forEach(function (key) {
|
||||
ansiStyles[key].closeRe = new RegExp(escapeStringRegexp(ansiStyles[key].close), 'g');
|
||||
|
||||
ret[key] = {
|
||||
get: function () {
|
||||
return build.call(this, this._styles.concat(key));
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
return ret;
|
||||
})();
|
||||
|
||||
var proto = defineProps(function chalk() {}, styles);
|
||||
|
||||
function build(_styles) {
|
||||
var builder = function () {
|
||||
return applyStyle.apply(builder, arguments);
|
||||
};
|
||||
|
||||
builder._styles = _styles;
|
||||
builder.enabled = this.enabled;
|
||||
// __proto__ is used because we must return a function, but there is
|
||||
// no way to create a function with a different prototype.
|
||||
/* eslint-disable no-proto */
|
||||
builder.__proto__ = proto;
|
||||
|
||||
return builder;
|
||||
}
|
||||
|
||||
function applyStyle() {
|
||||
	// support varargs, but simply cast to string in case there's only one arg
|
||||
var args = arguments;
|
||||
var argsLen = args.length;
|
||||
var str = argsLen !== 0 && String(arguments[0]);
|
||||
|
||||
if (argsLen > 1) {
|
||||
// don't slice `arguments`, it prevents v8 optimizations
|
||||
for (var a = 1; a < argsLen; a++) {
|
||||
str += ' ' + args[a];
|
||||
}
|
||||
}
|
||||
|
||||
if (!this.enabled || !str) {
|
||||
return str;
|
||||
}
|
||||
|
||||
var nestedStyles = this._styles;
|
||||
var i = nestedStyles.length;
|
||||
|
||||
// Turns out that on Windows dimmed gray text becomes invisible in cmd.exe,
|
||||
// see https://github.com/chalk/chalk/issues/58
|
||||
// If we're on Windows and we're dealing with a gray color, temporarily make 'dim' a noop.
|
||||
var originalDim = ansiStyles.dim.open;
|
||||
if (isSimpleWindowsTerm && (nestedStyles.indexOf('gray') !== -1 || nestedStyles.indexOf('grey') !== -1)) {
|
||||
ansiStyles.dim.open = '';
|
||||
}
|
||||
|
||||
while (i--) {
|
||||
var code = ansiStyles[nestedStyles[i]];
|
||||
|
||||
// Replace any instances already present with a re-opening code
|
||||
// otherwise only the part of the string until said closing code
|
||||
// will be colored, and the rest will simply be 'plain'.
|
||||
str = code.open + str.replace(code.closeRe, code.open) + code.close;
|
||||
}
|
||||
|
||||
// Reset the original 'dim' if we changed it to work around the Windows dimmed gray issue.
|
||||
ansiStyles.dim.open = originalDim;
|
||||
|
||||
return str;
|
||||
}
|
||||
|
||||
function init() {
|
||||
var ret = {};
|
||||
|
||||
Object.keys(styles).forEach(function (name) {
|
||||
ret[name] = {
|
||||
get: function () {
|
||||
return build.call(this, [name]);
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
defineProps(Chalk.prototype, init());
|
||||
|
||||
module.exports = new Chalk();
|
||||
module.exports.styles = ansiStyles;
|
||||
module.exports.hasColor = hasAnsi;
|
||||
module.exports.stripColor = stripAnsi;
|
||||
module.exports.supportsColor = supportsColor;
|
||||
21
node_modules/chalk/license
generated
vendored
Normal file
@@ -0,0 +1,21 @@
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
70
node_modules/chalk/package.json
generated
vendored
Normal file
@@ -0,0 +1,70 @@
{
|
||||
"name": "chalk",
|
||||
"version": "1.1.3",
|
||||
"description": "Terminal string styling done right. Much color.",
|
||||
"license": "MIT",
|
||||
"repository": "chalk/chalk",
|
||||
"maintainers": [
|
||||
"Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)",
|
||||
"Joshua Appelman <jappelman@xebia.com> (jbnicolai.com)",
|
||||
"JD Ballard <i.am.qix@gmail.com> (github.com/qix-)"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "xo && mocha",
|
||||
"bench": "matcha benchmark.js",
|
||||
"coverage": "nyc npm test && nyc report",
|
||||
"coveralls": "nyc npm test && nyc report --reporter=text-lcov | coveralls"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"color",
|
||||
"colour",
|
||||
"colors",
|
||||
"terminal",
|
||||
"console",
|
||||
"cli",
|
||||
"string",
|
||||
"str",
|
||||
"ansi",
|
||||
"style",
|
||||
"styles",
|
||||
"tty",
|
||||
"formatting",
|
||||
"rgb",
|
||||
"256",
|
||||
"shell",
|
||||
"xterm",
|
||||
"log",
|
||||
"logging",
|
||||
"command-line",
|
||||
"text"
|
||||
],
|
||||
"dependencies": {
|
||||
"ansi-styles": "^2.2.1",
|
||||
"escape-string-regexp": "^1.0.2",
|
||||
"has-ansi": "^2.0.0",
|
||||
"strip-ansi": "^3.0.0",
|
||||
"supports-color": "^2.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"coveralls": "^2.11.2",
|
||||
"matcha": "^0.6.0",
|
||||
"mocha": "*",
|
||||
"nyc": "^3.0.0",
|
||||
"require-uncached": "^1.0.2",
|
||||
"resolve-from": "^1.0.0",
|
||||
"semver": "^4.3.3",
|
||||
"xo": "*"
|
||||
},
|
||||
"xo": {
|
||||
"envs": [
|
||||
"node",
|
||||
"mocha"
|
||||
]
|
||||
}
|
||||
}
|
||||
213
node_modules/chalk/readme.md
generated
vendored
Normal file
@@ -0,0 +1,213 @@
<h1 align="center">
|
||||
<br>
|
||||
<br>
|
||||
<img width="360" src="https://cdn.rawgit.com/chalk/chalk/19935d6484811c5e468817f846b7b3d417d7bf4a/logo.svg" alt="chalk">
|
||||
<br>
|
||||
<br>
|
||||
<br>
|
||||
</h1>
|
||||
|
||||
> Terminal string styling done right
|
||||
|
||||
[](https://travis-ci.org/chalk/chalk)
|
||||
[](https://coveralls.io/r/chalk/chalk?branch=master)
|
||||
[](https://www.youtube.com/watch?v=9auOCbH5Ns4)
|
||||
|
||||
|
||||
[colors.js](https://github.com/Marak/colors.js) used to be the most popular string styling module, but it has serious deficiencies like extending `String.prototype` which causes all kinds of [problems](https://github.com/yeoman/yo/issues/68). Although there are other ones, they either do too much or not enough.
|
||||
|
||||
**Chalk is a clean and focused alternative.**
|
||||
|
||||

|
||||
|
||||
|
||||
## Why
|
||||
|
||||
- Highly performant
|
||||
- Doesn't extend `String.prototype`
|
||||
- Expressive API
|
||||
- Ability to nest styles
|
||||
- Clean and focused
|
||||
- Auto-detects color support
|
||||
- Actively maintained
|
||||
- [Used by ~4500 modules](https://www.npmjs.com/browse/depended/chalk) as of July 15, 2015
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```
|
||||
$ npm install --save chalk
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
Chalk comes with an easy to use composable API where you just chain and nest the styles you want.
|
||||
|
||||
```js
|
||||
var chalk = require('chalk');
|
||||
|
||||
// style a string
|
||||
chalk.blue('Hello world!');
|
||||
|
||||
// combine styled and normal strings
|
||||
chalk.blue('Hello') + 'World' + chalk.red('!');
|
||||
|
||||
// compose multiple styles using the chainable API
|
||||
chalk.blue.bgRed.bold('Hello world!');
|
||||
|
||||
// pass in multiple arguments
|
||||
chalk.blue('Hello', 'World!', 'Foo', 'bar', 'biz', 'baz');
|
||||
|
||||
// nest styles
|
||||
chalk.red('Hello', chalk.underline.bgBlue('world') + '!');
|
||||
|
||||
// nest styles of the same type even (color, underline, background)
|
||||
chalk.green(
|
||||
'I am a green line ' +
|
||||
chalk.blue.underline.bold('with a blue substring') +
|
||||
' that becomes green again!'
|
||||
);
|
||||
```
|
||||
|
||||
Easily define your own themes.
|
||||
|
||||
```js
|
||||
var chalk = require('chalk');
|
||||
var error = chalk.bold.red;
|
||||
console.log(error('Error!'));
|
||||
```
|
||||
|
||||
Take advantage of console.log [string substitution](http://nodejs.org/docs/latest/api/console.html#console_console_log_data).
|
||||
|
||||
```js
|
||||
var name = 'Sindre';
|
||||
console.log(chalk.green('Hello %s'), name);
|
||||
//=> Hello Sindre
|
||||
```
|
||||
|
||||
|
||||
## API
|
||||
|
||||
### chalk.`<style>[.<style>...](string, [string...])`
|
||||
|
||||
Example: `chalk.red.bold.underline('Hello', 'world');`
|
||||
|
||||
Chain [styles](#styles) and call the last one as a method with a string argument. Order doesn't matter, and later styles take precedence in case of a conflict. This simply means that `Chalk.red.yellow.green` is equivalent to `Chalk.green`.
|
||||
|
||||
Multiple arguments will be separated by space.
|
||||
|
||||
### chalk.enabled
|
||||
|
||||
Color support is automatically detected, but you can override it by setting the `enabled` property. You should however only do this in your own code as it applies globally to all chalk consumers.
|
||||
|
||||
If you need to change this in a reusable module, create a new instance:
|
||||
|
||||
```js
|
||||
var ctx = new chalk.constructor({enabled: false});
|
||||
```
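The new instance only affects strings styled through it; other chalk consumers keep their own detection. A minimal sketch:

```js
var chalk = require('chalk');
var ctx = new chalk.constructor({enabled: false});

// Styled through the disabled instance: plain text, no escape codes.
console.log(ctx.red('not red'));

// The shared instance is unaffected and still auto-detects color support.
console.log(chalk.red('red if supported'));
```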
|
||||
|
||||
### chalk.supportsColor
|
||||
|
||||
Detect whether the terminal [supports color](https://github.com/chalk/supports-color). Used internally and handled for you, but exposed for convenience.
|
||||
|
||||
Can be overridden by the user with the flags `--color` and `--no-color`. For situations where using `--color` is not possible, add an environment variable `FORCE_COLOR` with any value to force color. Trumps `--no-color`.
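A rough sketch of branching on it before emitting decorated output:

```js
var chalk = require('chalk');

if (chalk.supportsColor) {
  console.log(chalk.green.bold('build passed'));
} else {
  console.log('build passed');
}
```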
|
||||
|
||||
### chalk.styles
|
||||
|
||||
Exposes the styles as [ANSI escape codes](https://github.com/chalk/ansi-styles).
|
||||
|
||||
Generally not useful, but you might need just the `.open` or `.close` escape code if you're mixing externally styled strings with your own.
|
||||
|
||||
```js
|
||||
var chalk = require('chalk');
|
||||
|
||||
console.log(chalk.styles.red);
|
||||
//=> {open: '\u001b[31m', close: '\u001b[39m'}
|
||||
|
||||
console.log(chalk.styles.red.open + 'Hello' + chalk.styles.red.close);
|
||||
```
|
||||
|
||||
### chalk.hasColor(string)
|
||||
|
||||
Check whether a string [has color](https://github.com/chalk/has-ansi).
|
||||
|
||||
### chalk.stripColor(string)
|
||||
|
||||
[Strip color](https://github.com/chalk/strip-ansi) from a string.
|
||||
|
||||
Can be useful in combination with `.supportsColor` to strip color on externally styled text when it's not supported.
|
||||
|
||||
Example:
|
||||
|
||||
```js
|
||||
var chalk = require('chalk');
|
||||
var styledString = getText();
|
||||
|
||||
if (!chalk.supportsColor) {
|
||||
styledString = chalk.stripColor(styledString);
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Styles
|
||||
|
||||
### Modifiers
|
||||
|
||||
- `reset`
|
||||
- `bold`
|
||||
- `dim`
|
||||
- `italic` *(not widely supported)*
|
||||
- `underline`
|
||||
- `inverse`
|
||||
- `hidden`
|
||||
- `strikethrough` *(not widely supported)*
|
||||
|
||||
### Colors
|
||||
|
||||
- `black`
|
||||
- `red`
|
||||
- `green`
|
||||
- `yellow`
|
||||
- `blue` *(on Windows the bright version is used as normal blue is illegible)*
|
||||
- `magenta`
|
||||
- `cyan`
|
||||
- `white`
|
||||
- `gray`
|
||||
|
||||
### Background colors
|
||||
|
||||
- `bgBlack`
|
||||
- `bgRed`
|
||||
- `bgGreen`
|
||||
- `bgYellow`
|
||||
- `bgBlue`
|
||||
- `bgMagenta`
|
||||
- `bgCyan`
|
||||
- `bgWhite`
|
||||
|
||||
|
||||
## 256-colors
|
||||
|
||||
Chalk does not support anything other than the base eight colors, which guarantees it will work on all terminals and systems. Some terminals, specifically `xterm` compliant ones, will support the full range of 8-bit colors. For this the lower level [ansi-256-colors](https://github.com/jbnicolai/ansi-256-colors) package can be used.
|
||||
|
||||
|
||||
## Windows
|
||||
|
||||
If you're on Windows, do yourself a favor and use [`cmder`](http://bliker.github.io/cmder/) instead of `cmd.exe`.
|
||||
|
||||
|
||||
## Related
|
||||
|
||||
- [chalk-cli](https://github.com/chalk/chalk-cli) - CLI for this module
|
||||
- [ansi-styles](https://github.com/chalk/ansi-styles/) - ANSI escape codes for styling strings in the terminal
|
||||
- [supports-color](https://github.com/chalk/supports-color/) - Detect whether a terminal supports color
|
||||
- [strip-ansi](https://github.com/chalk/strip-ansi) - Strip ANSI escape codes
|
||||
- [has-ansi](https://github.com/chalk/has-ansi) - Check if a string has ANSI escape codes
|
||||
- [ansi-regex](https://github.com/chalk/ansi-regex) - Regular expression for matching ANSI escape codes
|
||||
- [wrap-ansi](https://github.com/chalk/wrap-ansi) - Wordwrap a string with ANSI escape codes
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
||||
21
node_modules/chokidar/LICENSE
generated
vendored
Normal file
|
|
@@ -0,0 +1,21 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2012-2019 Paul Miller (https://paulmillr.com), Elan Shanker
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the “Software”), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
308
node_modules/chokidar/README.md
generated
vendored
Normal file
|
|
@@ -0,0 +1,308 @@
|
|||
# Chokidar [](https://github.com/paulmillr/chokidar) [](https://github.com/paulmillr/chokidar)
|
||||
|
||||
> Minimal and efficient cross-platform file watching library
|
||||
|
||||
[](https://www.npmjs.com/package/chokidar)
|
||||
|
||||
## Why?
|
||||
|
||||
Node.js `fs.watch`:
|
||||
|
||||
* Doesn't report filenames on MacOS.
|
||||
* Doesn't report events at all when using editors like Sublime on MacOS.
|
||||
* Often reports events twice.
|
||||
* Emits most changes as `rename`.
|
||||
* Does not provide an easy way to recursively watch file trees.
|
||||
* Does not support recursive watching on Linux.
|
||||
|
||||
Node.js `fs.watchFile`:
|
||||
|
||||
* Almost as bad at event handling.
|
||||
* Also does not provide any recursive watching.
|
||||
* Results in high CPU utilization.
|
||||
|
||||
Chokidar resolves these problems.
|
||||
|
||||
Initially made for **[Brunch](https://brunch.io/)** (an ultra-swift web app build tool), it is now used in
|
||||
[Microsoft's Visual Studio Code](https://github.com/microsoft/vscode),
|
||||
[gulp](https://github.com/gulpjs/gulp/),
|
||||
[karma](https://karma-runner.github.io/),
|
||||
[PM2](https://github.com/Unitech/PM2),
|
||||
[browserify](http://browserify.org/),
|
||||
[webpack](https://webpack.github.io/),
|
||||
[BrowserSync](https://www.browsersync.io/),
|
||||
and [many others](https://www.npmjs.com/browse/depended/chokidar).
|
||||
It has proven itself in production environments.
|
||||
|
||||
Version 3 is out! Check out our blog post about it: [Chokidar 3: How to save 32TB of traffic every week](https://paulmillr.com/posts/chokidar-3-save-32tb-of-traffic/)
|
||||
|
||||
## How?
|
||||
|
||||
Chokidar does still rely on the Node.js core `fs` module, but when using
|
||||
`fs.watch` and `fs.watchFile` for watching, it normalizes the events it
|
||||
receives, often checking for truth by getting file stats and/or dir contents.
|
||||
|
||||
On MacOS, chokidar by default uses a native extension exposing the Darwin
|
||||
`FSEvents` API. This provides very efficient recursive watching compared with
|
||||
implementations like `kqueue` available on most \*nix platforms. Chokidar still
|
||||
does have to do some work to normalize the events received that way as well.
|
||||
|
||||
On most other platforms, the `fs.watch`-based implementation is the default, which
|
||||
avoids polling and keeps CPU usage down. Be advised that chokidar will initiate
|
||||
watchers recursively for everything within scope of the paths that have been
|
||||
specified, so be judicious about not wasting system resources by watching much
|
||||
more than needed.
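As a sketch of keeping that scope tight (the options used here are described in the API section below), one might watch only a source tree and skip dependency folders:

```javascript
const chokidar = require('chokidar');

// Watch only the source tree, ignore node_modules, and cap traversal depth
// so watchers aren't spawned for files that will never matter.
const watcher = chokidar.watch('src', {
  ignored: /node_modules/,
  depth: 4
});

watcher.on('change', path => console.log(`${path} changed`));
```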
|
||||
|
||||
## Getting started
|
||||
|
||||
Install with npm:
|
||||
|
||||
```sh
|
||||
npm install chokidar
|
||||
```
|
||||
|
||||
Then `require` and use it in your code:
|
||||
|
||||
```javascript
|
||||
const chokidar = require('chokidar');
|
||||
|
||||
// One-liner for current directory
|
||||
chokidar.watch('.').on('all', (event, path) => {
|
||||
console.log(event, path);
|
||||
});
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
```javascript
|
||||
// Example of a more typical implementation structure
|
||||
|
||||
// Initialize watcher.
|
||||
const watcher = chokidar.watch('file, dir, glob, or array', {
|
||||
ignored: /(^|[\/\\])\../, // ignore dotfiles
|
||||
persistent: true
|
||||
});
|
||||
|
||||
// Something to use when events are received.
|
||||
const log = console.log.bind(console);
|
||||
// Add event listeners.
|
||||
watcher
|
||||
.on('add', path => log(`File ${path} has been added`))
|
||||
.on('change', path => log(`File ${path} has been changed`))
|
||||
.on('unlink', path => log(`File ${path} has been removed`));
|
||||
|
||||
// More possible events.
|
||||
watcher
|
||||
.on('addDir', path => log(`Directory ${path} has been added`))
|
||||
.on('unlinkDir', path => log(`Directory ${path} has been removed`))
|
||||
.on('error', error => log(`Watcher error: ${error}`))
|
||||
.on('ready', () => log('Initial scan complete. Ready for changes'))
|
||||
.on('raw', (event, path, details) => { // internal
|
||||
log('Raw event info:', event, path, details);
|
||||
});
|
||||
|
||||
// 'add', 'addDir' and 'change' events also receive stat() results as second
|
||||
// argument when available: https://nodejs.org/api/fs.html#fs_class_fs_stats
|
||||
watcher.on('change', (path, stats) => {
|
||||
if (stats) console.log(`File ${path} changed size to ${stats.size}`);
|
||||
});
|
||||
|
||||
// Watch new files.
|
||||
watcher.add('new-file');
|
||||
watcher.add(['new-file-2', 'new-file-3', '**/other-file*']);
|
||||
|
||||
// Get list of actual paths being watched on the filesystem
|
||||
var watchedPaths = watcher.getWatched();
|
||||
|
||||
// Un-watch some files.
|
||||
await watcher.unwatch('new-file*');
|
||||
|
||||
// Stop watching.
|
||||
// The method is async!
|
||||
watcher.close().then(() => console.log('closed'));
|
||||
|
||||
// Full list of options. See below for descriptions.
|
||||
// Do not use this example!
|
||||
chokidar.watch('file', {
|
||||
persistent: true,
|
||||
|
||||
ignored: '*.txt',
|
||||
ignoreInitial: false,
|
||||
followSymlinks: true,
|
||||
cwd: '.',
|
||||
disableGlobbing: false,
|
||||
|
||||
usePolling: false,
|
||||
interval: 100,
|
||||
binaryInterval: 300,
|
||||
alwaysStat: false,
|
||||
depth: 99,
|
||||
awaitWriteFinish: {
|
||||
stabilityThreshold: 2000,
|
||||
pollInterval: 100
|
||||
},
|
||||
|
||||
ignorePermissionErrors: false,
|
||||
atomic: true // or a custom 'atomicity delay', in milliseconds (default 100)
|
||||
});
|
||||
|
||||
```
|
||||
|
||||
`chokidar.watch(paths, [options])`
|
||||
|
||||
* `paths` (string or array of strings). Paths to files, dirs to be watched
|
||||
recursively, or glob patterns.
|
||||
- Note: globs must not contain windows separators (`\`),
|
||||
because that's how they work by the standard —
|
||||
you'll need to replace them with forward slashes (`/`); see the sketch after this list.
|
||||
- Note 2: for additional glob documentation, check out low-level
|
||||
library: [picomatch](https://github.com/micromatch/picomatch).
|
||||
* `options` (object) Options object as defined below:
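Before the option sections, a quick sketch of normalizing Windows separators as the note above requires (the pattern itself is made up for illustration):

```javascript
const chokidar = require('chokidar');

// Globs must use forward slashes, so normalize a Windows-style pattern first.
const pattern = 'src\\styles\\**\\*.css'.replace(/\\/g, '/'); // => 'src/styles/**/*.css'

chokidar.watch(pattern).on('change', path => console.log(`${path} changed`));
```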
|
||||
|
||||
#### Persistence
|
||||
|
||||
* `persistent` (default: `true`). Indicates whether the process
|
||||
should continue to run as long as files are being watched. If set to
|
||||
`false` when using `fsevents` to watch, no more events will be emitted
|
||||
after `ready`, even if the process continues to run.
|
||||
|
||||
#### Path filtering
|
||||
|
||||
* `ignored` ([anymatch](https://github.com/es128/anymatch)-compatible definition)
|
||||
Defines files/paths to be ignored. The whole relative or absolute path is
|
||||
tested, not just filename. If a function with two arguments is provided, it
|
||||
gets called twice per path - once with a single argument (the path), second
|
||||
time with two arguments (the path and the
|
||||
[`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)
|
||||
object of that path); see the sketch after this list.
|
||||
* `ignoreInitial` (default: `false`). If set to `false` then `add`/`addDir` events are also emitted for matching paths while
|
||||
instantiating the watching as chokidar discovers these file paths (before the `ready` event).
|
||||
* `followSymlinks` (default: `true`). When `false`, only the
|
||||
symlinks themselves will be watched for changes instead of following
|
||||
the link references and bubbling events through the link's path.
|
||||
* `cwd` (no default). The base directory from which watch `paths` are to be
|
||||
derived. Paths emitted with events will be relative to this.
|
||||
* `disableGlobbing` (default: `false`). If set to `true` then the strings passed to `.watch()` and `.add()` are treated as
|
||||
literal path names, even if they look like globs.
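As a sketch of the function form of `ignored` described above, here is a matcher that decides on the path alone on the first call and uses the stats on the second; the 1 MB threshold is an arbitrary example:

```javascript
const chokidar = require('chokidar');

const watcher = chokidar.watch('.', {
  ignored: (path, stats) =>
    // First call: stats is undefined, decide on the path alone.
    path.includes('node_modules') ||
    // Second call: stats is available, skip anything over ~1 MB.
    (stats !== undefined && stats.isFile() && stats.size > 1024 * 1024)
});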
|
||||
|
||||
#### Performance
|
||||
|
||||
* `usePolling` (default: `false`).
|
||||
Whether to use fs.watchFile (backed by polling), or fs.watch. If polling
|
||||
leads to high CPU utilization, consider setting this to `false`. It is
|
||||
typically necessary to **set this to `true` to successfully watch files over
|
||||
a network**, and it may be necessary to successfully watch files in other
|
||||
non-standard situations. Setting to `true` explicitly on MacOS overrides the
|
||||
`useFsEvents` default. You may also set the CHOKIDAR_USEPOLLING env variable
|
||||
to true (1) or false (0) in order to override this option.
|
||||
* _Polling-specific settings_ (effective when `usePolling: true`)
|
||||
* `interval` (default: `100`). Interval of file system polling, in milliseconds. You may also
|
||||
set the CHOKIDAR_INTERVAL env variable to override this option.
|
||||
* `binaryInterval` (default: `300`). Interval of file system
|
||||
polling for binary files.
|
||||
([see list of binary extensions](https://github.com/sindresorhus/binary-extensions/blob/master/binary-extensions.json))
|
||||
* `useFsEvents` (default: `true` on MacOS). Whether to use the
|
||||
`fsevents` watching interface if available. When set to `true` explicitly
|
||||
and `fsevents` is available, this supersedes the `usePolling` setting. When
|
||||
set to `false` on MacOS, `usePolling: true` becomes the default.
|
||||
* `alwaysStat` (default: `false`). If relying upon the
|
||||
[`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)
|
||||
object that may get passed with `add`, `addDir`, and `change` events, set
|
||||
this to `true` to ensure it is provided even in cases where it wasn't
|
||||
already available from the underlying watch events.
|
||||
* `depth` (default: `undefined`). If set, limits how many levels of
|
||||
subdirectories will be traversed.
|
||||
* `awaitWriteFinish` (default: `false`).
|
||||
By default, the `add` event will fire when a file first appears on disk, before
|
||||
the entire file has been written. Furthermore, in some cases some `change`
|
||||
events will be emitted while the file is being written. In some cases,
|
||||
especially when watching large files, there will be a need to wait for the
|
||||
write operation to finish before responding to a file creation or modification.
|
||||
Setting `awaitWriteFinish` to `true` (or a truthy value) will poll file size,
|
||||
holding its `add` and `change` events until the size does not change for a
|
||||
configurable amount of time. The appropriate duration setting is heavily
|
||||
dependent on the OS and hardware. For accurate detection this parameter should
|
||||
be relatively high, making file watching much less responsive.
|
||||
Use with caution.
|
||||
* *`options.awaitWriteFinish` can be set to an object in order to adjust
|
||||
timing params:*
|
||||
* `awaitWriteFinish.stabilityThreshold` (default: 2000). Amount of time in
|
||||
milliseconds for a file size to remain constant before emitting its event.
|
||||
* `awaitWriteFinish.pollInterval` (default: 100). File size polling interval, in milliseconds.
|
||||
|
||||
#### Errors
|
||||
|
||||
* `ignorePermissionErrors` (default: `false`). Indicates whether to watch files
|
||||
that don't have read permissions if possible. If watching fails due to `EPERM`
|
||||
or `EACCES` with this set to `true`, the errors will be suppressed silently.
|
||||
* `atomic` (default: `true` if `useFsEvents` and `usePolling` are `false`).
|
||||
Automatically filters out artifacts that occur when using editors that use
|
||||
"atomic writes" instead of writing directly to the source file. If a file is
|
||||
re-added within 100 ms of being deleted, Chokidar emits a `change` event
|
||||
rather than `unlink` then `add`. If the default of 100 ms does not work well
|
||||
for you, you can override it by setting `atomic` to a custom value, in
|
||||
milliseconds.
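A minimal sketch of overriding that delay (250 ms is an arbitrary choice):

```javascript
const chokidar = require('chokidar');

// Treat a delete-then-recreate within 250 ms as a single 'change' event.
chokidar.watch('dist', { atomic: 250 });
```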
|
||||
|
||||
### Methods & Events
|
||||
|
||||
`chokidar.watch()` produces an instance of `FSWatcher`. Methods of `FSWatcher`:
|
||||
|
||||
* `.add(path / paths)`: Add files, directories, or glob patterns for tracking.
|
||||
Takes an array of strings or just one string.
|
||||
* `.on(event, callback)`: Listen for an FS event.
|
||||
Available events: `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `ready`,
|
||||
`raw`, `error`.
|
||||
Additionally `all` is available which gets emitted with the underlying event
|
||||
name and path for every event other than `ready`, `raw`, and `error`. `raw` is internal, use it carefully.
|
||||
* `.unwatch(path / paths)`: Stop watching files, directories, or glob patterns.
|
||||
Takes an array of strings or just one string.
|
||||
* `.close()`: **async** Removes all listeners from watched files. Asynchronous, returns Promise. Use with `await` to ensure bugs don't happen.
|
||||
* `.getWatched()`: Returns an object representing all the paths on the file
|
||||
system being watched by this `FSWatcher` instance. The object's keys are all the
|
||||
directories (using absolute paths unless the `cwd` option was used), and the
|
||||
values are arrays of the names of the items contained in each directory.
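As an illustrative sketch, the shape `getWatched()` returns for a watcher started with `cwd: '.'` might look like this (the directory names and output are hypothetical):

```javascript
const chokidar = require('chokidar');

const watcher = chokidar.watch('src', { cwd: '.' });

watcher.on('ready', () => {
  // Hypothetical output for a small project; keys are directories
  // (relative, because `cwd` was given), values are their entries.
  // { '.': ['src'], 'src': ['index.js', 'lib'], 'src/lib': ['constants.js'] }
  console.log(watcher.getWatched());
});
```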
|
||||
|
||||
## CLI
|
||||
|
||||
If you need a CLI interface for your file watching, check out
|
||||
[chokidar-cli](https://github.com/open-cli-tools/chokidar-cli), allowing you to
|
||||
execute a command on each change, or get a stdio stream of change events.
|
||||
|
||||
## Install Troubleshooting
|
||||
|
||||
* `npm WARN optional dep failed, continuing fsevents@n.n.n`
|
||||
* This message is a normal part of how `npm` handles optional dependencies and is
|
||||
not indicative of a problem. Even if accompanied by other related error messages,
|
||||
Chokidar should function properly.
|
||||
|
||||
* `TypeError: fsevents is not a constructor`
|
||||
* Update chokidar by doing `rm -rf node_modules package-lock.json yarn.lock && npm install`, or update your dependency that uses chokidar.
|
||||
|
||||
* Chokidar is producing an `ENOSPC` error on Linux, like this:
|
||||
* `bash: cannot set terminal process group (-1): Inappropriate ioctl for device bash: no job control in this shell`
|
||||
`Error: watch /home/ ENOSPC`
|
||||
* This means Chokidar ran out of file handles and you'll need to increase their count by executing the following command in Terminal:
|
||||
`echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p`
|
||||
|
||||
## Changelog
|
||||
|
||||
For a more detailed changelog, see [`full_changelog.md`](.github/full_changelog.md).
|
||||
- **v3.5 (Jan 6, 2021):** Support for ARM Macs with Apple Silicon. Fixes for deleted symlinks.
|
||||
- **v3.4 (Apr 26, 2020):** Support for directory-based symlinks. Fixes for macOS file replacement.
|
||||
- **v3.3 (Nov 2, 2019):** `FSWatcher#close()` method became async. That fixes IO race conditions related to close method.
|
||||
- **v3.2 (Oct 1, 2019):** Improve Linux RAM usage by 50%. Race condition fixes. Windows glob fixes. Improve stability by using tight range of dependency versions.
|
||||
- **v3.1 (Sep 16, 2019):** dotfiles are no longer filtered out by default. Use `ignored` option if needed. Improve initial Linux scan time by 50%.
|
||||
- **v3 (Apr 30, 2019):** massive CPU & RAM consumption improvements; reduces deps / package size by a factor of 17x and bumps Node.js requirement to v8.16 and higher.
|
||||
- **v2 (Dec 29, 2017):** Globs are now posix-style-only; without windows support. Tons of bugfixes.
|
||||
- **v1 (Apr 7, 2015):** Glob support, symlink support, tons of bugfixes. Node 0.8+ is supported
|
||||
- **v0.1 (Apr 20, 2012):** Initial release, extracted from [Brunch](https://github.com/brunch/brunch/blob/9847a065aea300da99bd0753f90354cde9de1261/src/helpers.coffee#L66)
|
||||
|
||||
## Also
|
||||
|
||||
Why was chokidar named this way? What's the meaning behind it?
|
||||
|
||||
> Chowkidar is a transliteration of a Hindi word meaning 'watchman, gatekeeper', चौकीदार. It ultimately comes from Sanskrit _चतुष्क_ (crossway, quadrangle, consisting of four). The word is also used in other languages, such as Urdu (چوکیدار), and is widely used in Pakistan and India.
|
||||
|
||||
## License
|
||||
|
||||
MIT (c) Paul Miller (<https://paulmillr.com>), see [LICENSE](LICENSE) file.
|
||||
973
node_modules/chokidar/index.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,973 @@
|
|||
'use strict';
|
||||
|
||||
const { EventEmitter } = require('events');
|
||||
const fs = require('fs');
|
||||
const sysPath = require('path');
|
||||
const { promisify } = require('util');
|
||||
const readdirp = require('readdirp');
|
||||
const anymatch = require('anymatch').default;
|
||||
const globParent = require('glob-parent');
|
||||
const isGlob = require('is-glob');
|
||||
const braces = require('braces');
|
||||
const normalizePath = require('normalize-path');
|
||||
|
||||
const NodeFsHandler = require('./lib/nodefs-handler');
|
||||
const FsEventsHandler = require('./lib/fsevents-handler');
|
||||
const {
|
||||
EV_ALL,
|
||||
EV_READY,
|
||||
EV_ADD,
|
||||
EV_CHANGE,
|
||||
EV_UNLINK,
|
||||
EV_ADD_DIR,
|
||||
EV_UNLINK_DIR,
|
||||
EV_RAW,
|
||||
EV_ERROR,
|
||||
|
||||
STR_CLOSE,
|
||||
STR_END,
|
||||
|
||||
BACK_SLASH_RE,
|
||||
DOUBLE_SLASH_RE,
|
||||
SLASH_OR_BACK_SLASH_RE,
|
||||
DOT_RE,
|
||||
REPLACER_RE,
|
||||
|
||||
SLASH,
|
||||
SLASH_SLASH,
|
||||
BRACE_START,
|
||||
BANG,
|
||||
ONE_DOT,
|
||||
TWO_DOTS,
|
||||
GLOBSTAR,
|
||||
SLASH_GLOBSTAR,
|
||||
ANYMATCH_OPTS,
|
||||
STRING_TYPE,
|
||||
FUNCTION_TYPE,
|
||||
EMPTY_STR,
|
||||
EMPTY_FN,
|
||||
|
||||
isWindows,
|
||||
isMacos,
|
||||
isIBMi
|
||||
} = require('./lib/constants');
|
||||
|
||||
const stat = promisify(fs.stat);
|
||||
const readdir = promisify(fs.readdir);
|
||||
|
||||
/**
|
||||
* @typedef {String} Path
|
||||
* @typedef {'all'|'add'|'addDir'|'change'|'unlink'|'unlinkDir'|'raw'|'error'|'ready'} EventName
|
||||
* @typedef {'readdir'|'watch'|'add'|'remove'|'change'} ThrottleType
|
||||
*/
|
||||
|
||||
/**
|
||||
*
|
||||
* @typedef {Object} WatchHelpers
|
||||
* @property {Boolean} followSymlinks
|
||||
* @property {'stat'|'lstat'} statMethod
|
||||
* @property {Path} path
|
||||
* @property {Path} watchPath
|
||||
* @property {Function} entryPath
|
||||
* @property {Boolean} hasGlob
|
||||
* @property {Object} globFilter
|
||||
* @property {Function} filterPath
|
||||
* @property {Function} filterDir
|
||||
*/
|
||||
|
||||
const arrify = (value = []) => Array.isArray(value) ? value : [value];
|
||||
const flatten = (list, result = []) => {
|
||||
list.forEach(item => {
|
||||
if (Array.isArray(item)) {
|
||||
flatten(item, result);
|
||||
} else {
|
||||
result.push(item);
|
||||
}
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
const unifyPaths = (paths_) => {
|
||||
/**
|
||||
* @type {Array<String>}
|
||||
*/
|
||||
const paths = flatten(arrify(paths_));
|
||||
if (!paths.every(p => typeof p === STRING_TYPE)) {
|
||||
throw new TypeError(`Non-string provided as watch path: ${paths}`);
|
||||
}
|
||||
return paths.map(normalizePathToUnix);
|
||||
};
|
||||
|
||||
// If SLASH_SLASH occurs at the beginning of path, it is not replaced
|
||||
// because "//StoragePC/DrivePool/Movies" is a valid network path
|
||||
const toUnix = (string) => {
|
||||
let str = string.replace(BACK_SLASH_RE, SLASH);
|
||||
let prepend = false;
|
||||
if (str.startsWith(SLASH_SLASH)) {
|
||||
prepend = true;
|
||||
}
|
||||
while (str.match(DOUBLE_SLASH_RE)) {
|
||||
str = str.replace(DOUBLE_SLASH_RE, SLASH);
|
||||
}
|
||||
if (prepend) {
|
||||
str = SLASH + str;
|
||||
}
|
||||
return str;
|
||||
};
|
||||
|
||||
// Our version of upath.normalize
|
||||
// TODO: this is not equal to path-normalize module - investigate why
|
||||
const normalizePathToUnix = (path) => toUnix(sysPath.normalize(toUnix(path)));
|
||||
|
||||
const normalizeIgnored = (cwd = EMPTY_STR) => (path) => {
|
||||
if (typeof path !== STRING_TYPE) return path;
|
||||
return normalizePathToUnix(sysPath.isAbsolute(path) ? path : sysPath.join(cwd, path));
|
||||
};
|
||||
|
||||
const getAbsolutePath = (path, cwd) => {
|
||||
if (sysPath.isAbsolute(path)) {
|
||||
return path;
|
||||
}
|
||||
if (path.startsWith(BANG)) {
|
||||
return BANG + sysPath.join(cwd, path.slice(1));
|
||||
}
|
||||
return sysPath.join(cwd, path);
|
||||
};
|
||||
|
||||
const undef = (opts, key) => opts[key] === undefined;
|
||||
|
||||
/**
|
||||
* Directory entry.
|
||||
* @property {Path} path
|
||||
* @property {Set<Path>} items
|
||||
*/
|
||||
class DirEntry {
|
||||
/**
|
||||
* @param {Path} dir
|
||||
* @param {Function} removeWatcher
|
||||
*/
|
||||
constructor(dir, removeWatcher) {
|
||||
this.path = dir;
|
||||
this._removeWatcher = removeWatcher;
|
||||
/** @type {Set<Path>} */
|
||||
this.items = new Set();
|
||||
}
|
||||
|
||||
add(item) {
|
||||
const {items} = this;
|
||||
if (!items) return;
|
||||
if (item !== ONE_DOT && item !== TWO_DOTS) items.add(item);
|
||||
}
|
||||
|
||||
async remove(item) {
|
||||
const {items} = this;
|
||||
if (!items) return;
|
||||
items.delete(item);
|
||||
if (items.size > 0) return;
|
||||
|
||||
const dir = this.path;
|
||||
try {
|
||||
await readdir(dir);
|
||||
} catch (err) {
|
||||
if (this._removeWatcher) {
|
||||
this._removeWatcher(sysPath.dirname(dir), sysPath.basename(dir));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
has(item) {
|
||||
const {items} = this;
|
||||
if (!items) return;
|
||||
return items.has(item);
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {Array<String>}
|
||||
*/
|
||||
getChildren() {
|
||||
const {items} = this;
|
||||
if (!items) return;
|
||||
return [...items.values()];
|
||||
}
|
||||
|
||||
dispose() {
|
||||
this.items.clear();
|
||||
delete this.path;
|
||||
delete this._removeWatcher;
|
||||
delete this.items;
|
||||
Object.freeze(this);
|
||||
}
|
||||
}
|
||||
|
||||
const STAT_METHOD_F = 'stat';
|
||||
const STAT_METHOD_L = 'lstat';
|
||||
class WatchHelper {
|
||||
constructor(path, watchPath, follow, fsw) {
|
||||
this.fsw = fsw;
|
||||
this.path = path = path.replace(REPLACER_RE, EMPTY_STR);
|
||||
this.watchPath = watchPath;
|
||||
this.fullWatchPath = sysPath.resolve(watchPath);
|
||||
this.hasGlob = watchPath !== path;
|
||||
/** @type {object|boolean} */
|
||||
if (path === EMPTY_STR) this.hasGlob = false;
|
||||
this.globSymlink = this.hasGlob && follow ? undefined : false;
|
||||
this.globFilter = this.hasGlob ? anymatch(path, undefined, ANYMATCH_OPTS) : false;
|
||||
this.dirParts = this.getDirParts(path);
|
||||
this.dirParts.forEach((parts) => {
|
||||
if (parts.length > 1) parts.pop();
|
||||
});
|
||||
this.followSymlinks = follow;
|
||||
this.statMethod = follow ? STAT_METHOD_F : STAT_METHOD_L;
|
||||
}
|
||||
|
||||
checkGlobSymlink(entry) {
|
||||
// only need to resolve once
|
||||
// first entry should always have entry.parentDir === EMPTY_STR
|
||||
if (this.globSymlink === undefined) {
|
||||
this.globSymlink = entry.fullParentDir === this.fullWatchPath ?
|
||||
false : {realPath: entry.fullParentDir, linkPath: this.fullWatchPath};
|
||||
}
|
||||
|
||||
if (this.globSymlink) {
|
||||
return entry.fullPath.replace(this.globSymlink.realPath, this.globSymlink.linkPath);
|
||||
}
|
||||
|
||||
return entry.fullPath;
|
||||
}
|
||||
|
||||
entryPath(entry) {
|
||||
return sysPath.join(this.watchPath,
|
||||
sysPath.relative(this.watchPath, this.checkGlobSymlink(entry))
|
||||
);
|
||||
}
|
||||
|
||||
filterPath(entry) {
|
||||
const {stats} = entry;
|
||||
if (stats && stats.isSymbolicLink()) return this.filterDir(entry);
|
||||
const resolvedPath = this.entryPath(entry);
|
||||
const matchesGlob = this.hasGlob && typeof this.globFilter === FUNCTION_TYPE ?
|
||||
this.globFilter(resolvedPath) : true;
|
||||
return matchesGlob &&
|
||||
this.fsw._isntIgnored(resolvedPath, stats) &&
|
||||
this.fsw._hasReadPermissions(stats);
|
||||
}
|
||||
|
||||
getDirParts(path) {
|
||||
if (!this.hasGlob) return [];
|
||||
const parts = [];
|
||||
const expandedPath = path.includes(BRACE_START) ? braces.expand(path) : [path];
|
||||
expandedPath.forEach((path) => {
|
||||
parts.push(sysPath.relative(this.watchPath, path).split(SLASH_OR_BACK_SLASH_RE));
|
||||
});
|
||||
return parts;
|
||||
}
|
||||
|
||||
filterDir(entry) {
|
||||
if (this.hasGlob) {
|
||||
const entryParts = this.getDirParts(this.checkGlobSymlink(entry));
|
||||
let globstar = false;
|
||||
this.unmatchedGlob = !this.dirParts.some((parts) => {
|
||||
return parts.every((part, i) => {
|
||||
if (part === GLOBSTAR) globstar = true;
|
||||
return globstar || !entryParts[0][i] || anymatch(part, entryParts[0][i], ANYMATCH_OPTS);
|
||||
});
|
||||
});
|
||||
}
|
||||
return !this.unmatchedGlob && this.fsw._isntIgnored(this.entryPath(entry), entry.stats);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Watches files & directories for changes. Emitted events:
|
||||
* `add`, `addDir`, `change`, `unlink`, `unlinkDir`, `all`, `error`
|
||||
*
|
||||
* new FSWatcher()
|
||||
* .add(directories)
|
||||
* .on('add', path => log('File', path, 'was added'))
|
||||
*/
|
||||
class FSWatcher extends EventEmitter {
|
||||
// Not indenting methods for history sake; for now.
|
||||
constructor(_opts) {
|
||||
super();
|
||||
|
||||
const opts = {};
|
||||
if (_opts) Object.assign(opts, _opts); // for frozen objects
|
||||
|
||||
/** @type {Map<String, DirEntry>} */
|
||||
this._watched = new Map();
|
||||
/** @type {Map<String, Array>} */
|
||||
this._closers = new Map();
|
||||
/** @type {Set<String>} */
|
||||
this._ignoredPaths = new Set();
|
||||
|
||||
/** @type {Map<ThrottleType, Map>} */
|
||||
this._throttled = new Map();
|
||||
|
||||
/** @type {Map<Path, String|Boolean>} */
|
||||
this._symlinkPaths = new Map();
|
||||
|
||||
this._streams = new Set();
|
||||
this.closed = false;
|
||||
|
||||
// Set up default options.
|
||||
if (undef(opts, 'persistent')) opts.persistent = true;
|
||||
if (undef(opts, 'ignoreInitial')) opts.ignoreInitial = false;
|
||||
if (undef(opts, 'ignorePermissionErrors')) opts.ignorePermissionErrors = false;
|
||||
if (undef(opts, 'interval')) opts.interval = 100;
|
||||
if (undef(opts, 'binaryInterval')) opts.binaryInterval = 300;
|
||||
if (undef(opts, 'disableGlobbing')) opts.disableGlobbing = false;
|
||||
opts.enableBinaryInterval = opts.binaryInterval !== opts.interval;
|
||||
|
||||
// Enable fsevents on OS X when polling isn't explicitly enabled.
|
||||
if (undef(opts, 'useFsEvents')) opts.useFsEvents = !opts.usePolling;
|
||||
|
||||
// If we can't use fsevents, ensure the options reflect it's disabled.
|
||||
const canUseFsEvents = FsEventsHandler.canUse();
|
||||
if (!canUseFsEvents) opts.useFsEvents = false;
|
||||
|
||||
// Use polling on Mac if not using fsevents.
|
||||
// Other platforms use non-polling fs_watch.
|
||||
if (undef(opts, 'usePolling') && !opts.useFsEvents) {
|
||||
opts.usePolling = isMacos;
|
||||
}
|
||||
|
||||
// Always default to polling on IBM i because fs.watch() is not available on IBM i.
|
||||
if(isIBMi) {
|
||||
opts.usePolling = true;
|
||||
}
|
||||
|
||||
// Global override (useful for end-developers that need to force polling for all
|
||||
// instances of chokidar, regardless of usage/dependency depth)
|
||||
const envPoll = process.env.CHOKIDAR_USEPOLLING;
|
||||
if (envPoll !== undefined) {
|
||||
const envLower = envPoll.toLowerCase();
|
||||
|
||||
if (envLower === 'false' || envLower === '0') {
|
||||
opts.usePolling = false;
|
||||
} else if (envLower === 'true' || envLower === '1') {
|
||||
opts.usePolling = true;
|
||||
} else {
|
||||
opts.usePolling = !!envLower;
|
||||
}
|
||||
}
|
||||
const envInterval = process.env.CHOKIDAR_INTERVAL;
|
||||
if (envInterval) {
|
||||
opts.interval = Number.parseInt(envInterval, 10);
|
||||
}
|
||||
|
||||
// Editor atomic write normalization enabled by default with fs.watch
|
||||
if (undef(opts, 'atomic')) opts.atomic = !opts.usePolling && !opts.useFsEvents;
|
||||
if (opts.atomic) this._pendingUnlinks = new Map();
|
||||
|
||||
if (undef(opts, 'followSymlinks')) opts.followSymlinks = true;
|
||||
|
||||
if (undef(opts, 'awaitWriteFinish')) opts.awaitWriteFinish = false;
|
||||
if (opts.awaitWriteFinish === true) opts.awaitWriteFinish = {};
|
||||
const awf = opts.awaitWriteFinish;
|
||||
if (awf) {
|
||||
if (!awf.stabilityThreshold) awf.stabilityThreshold = 2000;
|
||||
if (!awf.pollInterval) awf.pollInterval = 100;
|
||||
this._pendingWrites = new Map();
|
||||
}
|
||||
if (opts.ignored) opts.ignored = arrify(opts.ignored);
|
||||
|
||||
let readyCalls = 0;
|
||||
this._emitReady = () => {
|
||||
readyCalls++;
|
||||
if (readyCalls >= this._readyCount) {
|
||||
this._emitReady = EMPTY_FN;
|
||||
this._readyEmitted = true;
|
||||
// use process.nextTick to allow time for listener to be bound
|
||||
process.nextTick(() => this.emit(EV_READY));
|
||||
}
|
||||
};
|
||||
this._emitRaw = (...args) => this.emit(EV_RAW, ...args);
|
||||
this._readyEmitted = false;
|
||||
this.options = opts;
|
||||
|
||||
// Initialize with proper watcher.
|
||||
if (opts.useFsEvents) {
|
||||
this._fsEventsHandler = new FsEventsHandler(this);
|
||||
} else {
|
||||
this._nodeFsHandler = new NodeFsHandler(this);
|
||||
}
|
||||
|
||||
// You’re frozen when your heart’s not open.
|
||||
Object.freeze(opts);
|
||||
}
|
||||
|
||||
// Public methods
|
||||
|
||||
/**
|
||||
* Adds paths to be watched on an existing FSWatcher instance
|
||||
* @param {Path|Array<Path>} paths_
|
||||
* @param {String=} _origAdd private; for handling non-existent paths to be watched
|
||||
* @param {Boolean=} _internal private; indicates a non-user add
|
||||
* @returns {FSWatcher} for chaining
|
||||
*/
|
||||
add(paths_, _origAdd, _internal) {
|
||||
const {cwd, disableGlobbing} = this.options;
|
||||
this.closed = false;
|
||||
let paths = unifyPaths(paths_);
|
||||
if (cwd) {
|
||||
paths = paths.map((path) => {
|
||||
const absPath = getAbsolutePath(path, cwd);
|
||||
|
||||
// Check `path` instead of `absPath` because the cwd portion can't be a glob
|
||||
if (disableGlobbing || !isGlob(path)) {
|
||||
return absPath;
|
||||
}
|
||||
return normalizePath(absPath);
|
||||
});
|
||||
}
|
||||
|
||||
// set aside negated glob strings
|
||||
paths = paths.filter((path) => {
|
||||
if (path.startsWith(BANG)) {
|
||||
this._ignoredPaths.add(path.slice(1));
|
||||
return false;
|
||||
}
|
||||
|
||||
// if a path is being added that was previously ignored, stop ignoring it
|
||||
this._ignoredPaths.delete(path);
|
||||
this._ignoredPaths.delete(path + SLASH_GLOBSTAR);
|
||||
|
||||
// reset the cached userIgnored anymatch fn
|
||||
// to make ignoredPaths changes effective
|
||||
this._userIgnored = undefined;
|
||||
|
||||
return true;
|
||||
});
|
||||
|
||||
if (this.options.useFsEvents && this._fsEventsHandler) {
|
||||
if (!this._readyCount) this._readyCount = paths.length;
|
||||
if (this.options.persistent) this._readyCount += paths.length;
|
||||
paths.forEach((path) => this._fsEventsHandler._addToFsEvents(path));
|
||||
} else {
|
||||
if (!this._readyCount) this._readyCount = 0;
|
||||
this._readyCount += paths.length;
|
||||
Promise.all(
|
||||
paths.map(async path => {
|
||||
const res = await this._nodeFsHandler._addToNodeFs(path, !_internal, 0, 0, _origAdd);
|
||||
if (res) this._emitReady();
|
||||
return res;
|
||||
})
|
||||
).then(results => {
|
||||
if (this.closed) return;
|
||||
results.filter(item => item).forEach(item => {
|
||||
this.add(sysPath.dirname(item), sysPath.basename(_origAdd || item));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Close watchers or start ignoring events from specified paths.
|
||||
* @param {Path|Array<Path>} paths_ - string or array of strings, file/directory paths and/or globs
|
||||
* @returns {FSWatcher} for chaining
|
||||
*/
|
||||
unwatch(paths_) {
|
||||
if (this.closed) return this;
|
||||
const paths = unifyPaths(paths_);
|
||||
const {cwd} = this.options;
|
||||
|
||||
paths.forEach((path) => {
|
||||
// convert to absolute path unless relative path already matches
|
||||
if (!sysPath.isAbsolute(path) && !this._closers.has(path)) {
|
||||
if (cwd) path = sysPath.join(cwd, path);
|
||||
path = sysPath.resolve(path);
|
||||
}
|
||||
|
||||
this._closePath(path);
|
||||
|
||||
this._ignoredPaths.add(path);
|
||||
if (this._watched.has(path)) {
|
||||
this._ignoredPaths.add(path + SLASH_GLOBSTAR);
|
||||
}
|
||||
|
||||
// reset the cached userIgnored anymatch fn
|
||||
// to make ignoredPaths changes effective
|
||||
this._userIgnored = undefined;
|
||||
});
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Close watchers and remove all listeners from watched paths.
|
||||
* @returns {Promise<void>}.
|
||||
*/
|
||||
close() {
|
||||
if (this.closed) return this._closePromise;
|
||||
this.closed = true;
|
||||
|
||||
// Memory management.
|
||||
this.removeAllListeners();
|
||||
const closers = [];
|
||||
this._closers.forEach(closerList => closerList.forEach(closer => {
|
||||
const promise = closer();
|
||||
if (promise instanceof Promise) closers.push(promise);
|
||||
}));
|
||||
this._streams.forEach(stream => stream.destroy());
|
||||
this._userIgnored = undefined;
|
||||
this._readyCount = 0;
|
||||
this._readyEmitted = false;
|
||||
this._watched.forEach(dirent => dirent.dispose());
|
||||
['closers', 'watched', 'streams', 'symlinkPaths', 'throttled'].forEach(key => {
|
||||
this[`_${key}`].clear();
|
||||
});
|
||||
|
||||
this._closePromise = closers.length ? Promise.all(closers).then(() => undefined) : Promise.resolve();
|
||||
return this._closePromise;
|
||||
}
|
||||
|
||||
/**
|
||||
* Expose list of watched paths
|
||||
* @returns {Object} for chaining
|
||||
*/
|
||||
getWatched() {
|
||||
const watchList = {};
|
||||
this._watched.forEach((entry, dir) => {
|
||||
const key = this.options.cwd ? sysPath.relative(this.options.cwd, dir) : dir;
|
||||
watchList[key || ONE_DOT] = entry.getChildren().sort();
|
||||
});
|
||||
return watchList;
|
||||
}
|
||||
|
||||
emitWithAll(event, args) {
|
||||
this.emit(...args);
|
||||
if (event !== EV_ERROR) this.emit(EV_ALL, ...args);
|
||||
}
|
||||
|
||||
// Common helpers
|
||||
// --------------
|
||||
|
||||
/**
|
||||
* Normalize and emit events.
|
||||
* Calling _emit DOES NOT MEAN emit() would be called!
|
||||
* @param {EventName} event Type of event
|
||||
* @param {Path} path File or directory path
|
||||
* @param {*=} val1 arguments to be passed with event
|
||||
* @param {*=} val2
|
||||
* @param {*=} val3
|
||||
* @returns the error if defined, otherwise the value of the FSWatcher instance's `closed` flag
|
||||
*/
|
||||
async _emit(event, path, val1, val2, val3) {
|
||||
if (this.closed) return;
|
||||
|
||||
const opts = this.options;
|
||||
if (isWindows) path = sysPath.normalize(path);
|
||||
if (opts.cwd) path = sysPath.relative(opts.cwd, path);
|
||||
/** @type Array<any> */
|
||||
const args = [event, path];
|
||||
if (val3 !== undefined) args.push(val1, val2, val3);
|
||||
else if (val2 !== undefined) args.push(val1, val2);
|
||||
else if (val1 !== undefined) args.push(val1);
|
||||
|
||||
const awf = opts.awaitWriteFinish;
|
||||
let pw;
|
||||
if (awf && (pw = this._pendingWrites.get(path))) {
|
||||
pw.lastChange = new Date();
|
||||
return this;
|
||||
}
|
||||
|
||||
if (opts.atomic) {
|
||||
if (event === EV_UNLINK) {
|
||||
this._pendingUnlinks.set(path, args);
|
||||
setTimeout(() => {
|
||||
this._pendingUnlinks.forEach((entry, path) => {
|
||||
this.emit(...entry);
|
||||
this.emit(EV_ALL, ...entry);
|
||||
this._pendingUnlinks.delete(path);
|
||||
});
|
||||
}, typeof opts.atomic === 'number' ? opts.atomic : 100);
|
||||
return this;
|
||||
}
|
||||
if (event === EV_ADD && this._pendingUnlinks.has(path)) {
|
||||
event = args[0] = EV_CHANGE;
|
||||
this._pendingUnlinks.delete(path);
|
||||
}
|
||||
}
|
||||
|
||||
if (awf && (event === EV_ADD || event === EV_CHANGE) && this._readyEmitted) {
|
||||
const awfEmit = (err, stats) => {
|
||||
if (err) {
|
||||
event = args[0] = EV_ERROR;
|
||||
args[1] = err;
|
||||
this.emitWithAll(event, args);
|
||||
} else if (stats) {
|
||||
// if stats doesn't exist the file must have been deleted
|
||||
if (args.length > 2) {
|
||||
args[2] = stats;
|
||||
} else {
|
||||
args.push(stats);
|
||||
}
|
||||
this.emitWithAll(event, args);
|
||||
}
|
||||
};
|
||||
|
||||
this._awaitWriteFinish(path, awf.stabilityThreshold, event, awfEmit);
|
||||
return this;
|
||||
}
|
||||
|
||||
if (event === EV_CHANGE) {
|
||||
const isThrottled = !this._throttle(EV_CHANGE, path, 50);
|
||||
if (isThrottled) return this;
|
||||
}
|
||||
|
||||
if (opts.alwaysStat && val1 === undefined &&
|
||||
(event === EV_ADD || event === EV_ADD_DIR || event === EV_CHANGE)
|
||||
) {
|
||||
const fullPath = opts.cwd ? sysPath.join(opts.cwd, path) : path;
|
||||
let stats;
|
||||
try {
|
||||
stats = await stat(fullPath);
|
||||
} catch (err) {}
|
||||
// Suppress event when fs_stat fails, to avoid sending undefined 'stat'
|
||||
if (!stats || this.closed) return;
|
||||
args.push(stats);
|
||||
}
|
||||
this.emitWithAll(event, args);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Common handler for errors
|
||||
* @param {Error} error
|
||||
* @returns {Error|Boolean} The error if defined, otherwise the value of the FSWatcher instance's `closed` flag
|
||||
*/
|
||||
_handleError(error) {
|
||||
const code = error && error.code;
|
||||
if (error && code !== 'ENOENT' && code !== 'ENOTDIR' &&
|
||||
(!this.options.ignorePermissionErrors || (code !== 'EPERM' && code !== 'EACCES'))
|
||||
) {
|
||||
this.emit(EV_ERROR, error);
|
||||
}
|
||||
return error || this.closed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper utility for throttling
|
||||
* @param {ThrottleType} actionType type being throttled
|
||||
* @param {Path} path being acted upon
|
||||
* @param {Number} timeout duration of time to suppress duplicate actions
|
||||
* @returns {Object|false} tracking object or false if action should be suppressed
|
||||
*/
|
||||
_throttle(actionType, path, timeout) {
|
||||
if (!this._throttled.has(actionType)) {
|
||||
this._throttled.set(actionType, new Map());
|
||||
}
|
||||
|
||||
/** @type {Map<Path, Object>} */
|
||||
const action = this._throttled.get(actionType);
|
||||
/** @type {Object} */
|
||||
const actionPath = action.get(path);
|
||||
|
||||
if (actionPath) {
|
||||
actionPath.count++;
|
||||
return false;
|
||||
}
|
||||
|
||||
let timeoutObject;
|
||||
const clear = () => {
|
||||
const item = action.get(path);
|
||||
const count = item ? item.count : 0;
|
||||
action.delete(path);
|
||||
clearTimeout(timeoutObject);
|
||||
if (item) clearTimeout(item.timeoutObject);
|
||||
return count;
|
||||
};
|
||||
timeoutObject = setTimeout(clear, timeout);
|
||||
const thr = {timeoutObject, clear, count: 0};
|
||||
action.set(path, thr);
|
||||
return thr;
|
||||
}
|
||||
|
||||
_incrReadyCount() {
|
||||
return this._readyCount++;
|
||||
}
|
||||
|
||||
/**
|
||||
* Awaits write operation to finish.
|
||||
* Polls a newly created file for size variations. When files size does not change for 'threshold' milliseconds calls callback.
|
||||
* @param {Path} path being acted upon
|
||||
* @param {Number} threshold Time in milliseconds a file size must be fixed before acknowledging write OP is finished
|
||||
* @param {EventName} event
|
||||
* @param {Function} awfEmit Callback to be called when ready for event to be emitted.
|
||||
*/
|
||||
_awaitWriteFinish(path, threshold, event, awfEmit) {
|
||||
let timeoutHandler;
|
||||
|
||||
let fullPath = path;
|
||||
if (this.options.cwd && !sysPath.isAbsolute(path)) {
|
||||
fullPath = sysPath.join(this.options.cwd, path);
|
||||
}
|
||||
|
||||
const now = new Date();
|
||||
|
||||
const awaitWriteFinish = (prevStat) => {
|
||||
fs.stat(fullPath, (err, curStat) => {
|
||||
if (err || !this._pendingWrites.has(path)) {
|
||||
if (err && err.code !== 'ENOENT') awfEmit(err);
|
||||
return;
|
||||
}
|
||||
|
||||
const now = Number(new Date());
|
||||
|
||||
if (prevStat && curStat.size !== prevStat.size) {
|
||||
this._pendingWrites.get(path).lastChange = now;
|
||||
}
|
||||
const pw = this._pendingWrites.get(path);
|
||||
const df = now - pw.lastChange;
|
||||
|
||||
if (df >= threshold) {
|
||||
this._pendingWrites.delete(path);
|
||||
awfEmit(undefined, curStat);
|
||||
} else {
|
||||
timeoutHandler = setTimeout(
|
||||
awaitWriteFinish,
|
||||
this.options.awaitWriteFinish.pollInterval,
|
||||
curStat
|
||||
);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
if (!this._pendingWrites.has(path)) {
|
||||
this._pendingWrites.set(path, {
|
||||
lastChange: now,
|
||||
cancelWait: () => {
|
||||
this._pendingWrites.delete(path);
|
||||
clearTimeout(timeoutHandler);
|
||||
return event;
|
||||
}
|
||||
});
|
||||
timeoutHandler = setTimeout(
|
||||
awaitWriteFinish,
|
||||
this.options.awaitWriteFinish.pollInterval
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
_getGlobIgnored() {
|
||||
return [...this._ignoredPaths.values()];
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines whether user has asked to ignore this path.
|
||||
* @param {Path} path filepath or dir
|
||||
* @param {fs.Stats=} stats result of fs.stat
|
||||
* @returns {Boolean}
|
||||
*/
|
||||
_isIgnored(path, stats) {
|
||||
if (this.options.atomic && DOT_RE.test(path)) return true;
|
||||
if (!this._userIgnored) {
|
||||
const {cwd} = this.options;
|
||||
const ign = this.options.ignored;
|
||||
|
||||
const ignored = ign && ign.map(normalizeIgnored(cwd));
|
||||
const paths = arrify(ignored)
|
||||
.filter((path) => typeof path === STRING_TYPE && !isGlob(path))
|
||||
.map((path) => path + SLASH_GLOBSTAR);
|
||||
const list = this._getGlobIgnored().map(normalizeIgnored(cwd)).concat(ignored, paths);
|
||||
this._userIgnored = anymatch(list, undefined, ANYMATCH_OPTS);
|
||||
}
|
||||
|
||||
return this._userIgnored([path, stats]);
|
||||
}
|
||||
|
||||
_isntIgnored(path, stat) {
|
||||
return !this._isIgnored(path, stat);
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides a set of common helpers and properties relating to symlink and glob handling.
|
||||
* @param {Path} path file, directory, or glob pattern being watched
|
||||
* @param {Number=} depth at any depth > 0, this isn't a glob
|
||||
* @returns {WatchHelper} object containing helpers for this path
|
||||
*/
|
||||
_getWatchHelpers(path, depth) {
|
||||
const watchPath = depth || this.options.disableGlobbing || !isGlob(path) ? path : globParent(path);
|
||||
const follow = this.options.followSymlinks;
|
||||
|
||||
return new WatchHelper(path, watchPath, follow, this);
|
||||
}
|
||||
|
||||
// Directory helpers
|
||||
// -----------------
|
||||
|
||||
/**
|
||||
* Provides directory tracking objects
|
||||
* @param {String} directory path of the directory
|
||||
* @returns {DirEntry} the directory's tracking object
|
||||
*/
|
||||
_getWatchedDir(directory) {
|
||||
if (!this._boundRemove) this._boundRemove = this._remove.bind(this);
|
||||
const dir = sysPath.resolve(directory);
|
||||
if (!this._watched.has(dir)) this._watched.set(dir, new DirEntry(dir, this._boundRemove));
|
||||
return this._watched.get(dir);
|
||||
}
|
||||
|
||||
// File helpers
|
||||
// ------------
|
||||
|
||||
/**
|
||||
* Check for read permissions.
|
||||
* Based on this answer on SO: https://stackoverflow.com/a/11781404/1358405
|
||||
* @param {fs.Stats} stats - object, result of fs_stat
|
||||
* @returns {Boolean} indicates whether the file can be read
|
||||
*/
|
||||
_hasReadPermissions(stats) {
|
||||
if (this.options.ignorePermissionErrors) return true;
|
||||
|
||||
// stats.mode may be bigint
|
||||
const md = stats && Number.parseInt(stats.mode, 10);
|
||||
const st = md & 0o777;
|
||||
const it = Number.parseInt(st.toString(8)[0], 10);
|
||||
return Boolean(4 & it);
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles emitting unlink events for
|
||||
* files and directories, and via recursion, for
|
||||
* files and directories within directories that are unlinked
|
||||
* @param {String} directory within which the following item is located
|
||||
* @param {String} item base path of item/directory
|
||||
* @returns {void}
|
||||
*/
|
||||
_remove(directory, item, isDirectory) {
|
||||
// if what is being deleted is a directory, get that directory's paths
|
||||
// for recursive deleting and cleaning of watched object
|
||||
// if it is not a directory, nestedDirectoryChildren will be empty array
|
||||
const path = sysPath.join(directory, item);
|
||||
const fullPath = sysPath.resolve(path);
|
||||
isDirectory = isDirectory != null
|
||||
? isDirectory
|
||||
: this._watched.has(path) || this._watched.has(fullPath);
|
||||
|
||||
// prevent duplicate handling in case of arriving here nearly simultaneously
|
||||
// via multiple paths (such as _handleFile and _handleDir)
|
||||
if (!this._throttle('remove', path, 100)) return;
|
||||
|
||||
// if the only watched file is removed, watch for its return
|
||||
if (!isDirectory && !this.options.useFsEvents && this._watched.size === 1) {
|
||||
this.add(directory, item, true);
|
||||
}
|
||||
|
||||
// This will create a new entry in the watched object in either case
|
||||
// so we got to do the directory check beforehand
|
||||
const wp = this._getWatchedDir(path);
|
||||
const nestedDirectoryChildren = wp.getChildren();
|
||||
|
||||
// Recursively remove children directories / files.
|
||||
nestedDirectoryChildren.forEach(nested => this._remove(path, nested));
|
||||
|
||||
// Check if item was on the watched list and remove it
|
||||
const parent = this._getWatchedDir(directory);
|
||||
const wasTracked = parent.has(item);
|
||||
parent.remove(item);
|
||||
|
||||
// Fixes issue #1042 -> Relative paths were detected and added as symlinks
|
||||
// (https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L612),
|
||||
// but never removed from the map in case the path was deleted.
|
||||
// This leads to an incorrect state if the path was recreated:
|
||||
// https://github.com/paulmillr/chokidar/blob/e1753ddbc9571bdc33b4a4af172d52cb6e611c10/lib/nodefs-handler.js#L553
|
||||
if (this._symlinkPaths.has(fullPath)) {
|
||||
this._symlinkPaths.delete(fullPath);
|
||||
}
|
||||
|
||||
// If we wait for this file to be fully written, cancel the wait.
|
||||
let relPath = path;
|
||||
if (this.options.cwd) relPath = sysPath.relative(this.options.cwd, path);
|
||||
if (this.options.awaitWriteFinish && this._pendingWrites.has(relPath)) {
|
||||
const event = this._pendingWrites.get(relPath).cancelWait();
|
||||
if (event === EV_ADD) return;
|
||||
}
|
||||
|
||||
// The Entry will either be a directory that just got removed
|
||||
// or a bogus entry to a file, in either case we have to remove it
|
||||
this._watched.delete(path);
|
||||
this._watched.delete(fullPath);
|
||||
const eventName = isDirectory ? EV_UNLINK_DIR : EV_UNLINK;
|
||||
if (wasTracked && !this._isIgnored(path)) this._emit(eventName, path);
|
||||
|
||||
// Avoid conflicts if we later create another file with the same name
|
||||
if (!this.options.useFsEvents) {
|
||||
this._closePath(path);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes all watchers for a path
|
||||
* @param {Path} path
|
||||
*/
|
||||
_closePath(path) {
|
||||
this._closeFile(path)
|
||||
const dir = sysPath.dirname(path);
|
||||
this._getWatchedDir(dir).remove(sysPath.basename(path));
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes only file-specific watchers
|
||||
* @param {Path} path
|
||||
*/
|
||||
_closeFile(path) {
|
||||
const closers = this._closers.get(path);
|
||||
if (!closers) return;
|
||||
closers.forEach(closer => closer());
|
||||
this._closers.delete(path);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {Path} path
|
||||
* @param {Function} closer
|
||||
*/
|
||||
_addPathCloser(path, closer) {
|
||||
if (!closer) return;
|
||||
let list = this._closers.get(path);
|
||||
if (!list) {
|
||||
list = [];
|
||||
this._closers.set(path, list);
|
||||
}
|
||||
list.push(closer);
|
||||
}
|
||||
|
||||
_readdirp(root, opts) {
|
||||
if (this.closed) return;
|
||||
const options = {type: EV_ALL, alwaysStat: true, lstat: true, ...opts};
|
||||
let stream = readdirp(root, options);
|
||||
this._streams.add(stream);
|
||||
stream.once(STR_CLOSE, () => {
|
||||
stream = undefined;
|
||||
});
|
||||
stream.once(STR_END, () => {
|
||||
if (stream) {
|
||||
this._streams.delete(stream);
|
||||
stream = undefined;
|
||||
}
|
||||
});
|
||||
return stream;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Export FSWatcher class
|
||||
exports.FSWatcher = FSWatcher;
|
||||
|
||||
/**
|
||||
* Instantiates watcher with paths to be tracked.
|
||||
* @param {String|Array<String>} paths file/directory paths and/or globs
|
||||
* @param {Object=} options chokidar opts
|
||||
* @returns an instance of FSWatcher for chaining.
|
||||
*/
|
||||
const watch = (paths, options) => {
|
||||
const watcher = new FSWatcher(options);
|
||||
watcher.add(paths);
|
||||
return watcher;
|
||||
};
|
||||
|
||||
exports.watch = watch;
|
||||
66
node_modules/chokidar/lib/constants.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,66 @@
|
|||
'use strict';
|
||||
|
||||
const {sep} = require('path');
|
||||
const {platform} = process;
|
||||
const os = require('os');
|
||||
|
||||
exports.EV_ALL = 'all';
|
||||
exports.EV_READY = 'ready';
|
||||
exports.EV_ADD = 'add';
|
||||
exports.EV_CHANGE = 'change';
|
||||
exports.EV_ADD_DIR = 'addDir';
|
||||
exports.EV_UNLINK = 'unlink';
|
||||
exports.EV_UNLINK_DIR = 'unlinkDir';
|
||||
exports.EV_RAW = 'raw';
|
||||
exports.EV_ERROR = 'error';
|
||||
|
||||
exports.STR_DATA = 'data';
|
||||
exports.STR_END = 'end';
|
||||
exports.STR_CLOSE = 'close';
|
||||
|
||||
exports.FSEVENT_CREATED = 'created';
|
||||
exports.FSEVENT_MODIFIED = 'modified';
|
||||
exports.FSEVENT_DELETED = 'deleted';
|
||||
exports.FSEVENT_MOVED = 'moved';
|
||||
exports.FSEVENT_CLONED = 'cloned';
|
||||
exports.FSEVENT_UNKNOWN = 'unknown';
|
||||
exports.FSEVENT_FLAG_MUST_SCAN_SUBDIRS = 1;
|
||||
exports.FSEVENT_TYPE_FILE = 'file';
|
||||
exports.FSEVENT_TYPE_DIRECTORY = 'directory';
|
||||
exports.FSEVENT_TYPE_SYMLINK = 'symlink';
|
||||
|
||||
exports.KEY_LISTENERS = 'listeners';
|
||||
exports.KEY_ERR = 'errHandlers';
|
||||
exports.KEY_RAW = 'rawEmitters';
|
||||
exports.HANDLER_KEYS = [exports.KEY_LISTENERS, exports.KEY_ERR, exports.KEY_RAW];
|
||||
|
||||
exports.DOT_SLASH = `.${sep}`;
|
||||
|
||||
exports.BACK_SLASH_RE = /\\/g;
|
||||
exports.DOUBLE_SLASH_RE = /\/\//;
|
||||
exports.SLASH_OR_BACK_SLASH_RE = /[/\\]/;
|
||||
exports.DOT_RE = /\..*\.(sw[px])$|~$|\.subl.*\.tmp/;
|
||||
exports.REPLACER_RE = /^\.[/\\]/;
|
||||
|
||||
exports.SLASH = '/';
|
||||
exports.SLASH_SLASH = '//';
|
||||
exports.BRACE_START = '{';
|
||||
exports.BANG = '!';
|
||||
exports.ONE_DOT = '.';
|
||||
exports.TWO_DOTS = '..';
|
||||
exports.STAR = '*';
|
||||
exports.GLOBSTAR = '**';
|
||||
exports.ROOT_GLOBSTAR = '/**/*';
|
||||
exports.SLASH_GLOBSTAR = '/**';
|
||||
exports.DIR_SUFFIX = 'Dir';
|
||||
exports.ANYMATCH_OPTS = {dot: true};
|
||||
exports.STRING_TYPE = 'string';
|
||||
exports.FUNCTION_TYPE = 'function';
|
||||
exports.EMPTY_STR = '';
|
||||
exports.EMPTY_FN = () => {};
|
||||
exports.IDENTITY_FN = val => val;
|
||||
|
||||
exports.isWindows = platform === 'win32';
|
||||
exports.isMacos = platform === 'darwin';
|
||||
exports.isLinux = platform === 'linux';
|
||||
exports.isIBMi = os.type() === 'OS400';
|
||||
526
node_modules/chokidar/lib/fsevents-handler.js
generated
vendored
Normal file
|
|
@ -0,0 +1,526 @@
|
|||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const sysPath = require('path');
|
||||
const { promisify } = require('util');
|
||||
|
||||
let fsevents;
|
||||
try {
|
||||
fsevents = require('fsevents');
|
||||
} catch (error) {
|
||||
if (process.env.CHOKIDAR_PRINT_FSEVENTS_REQUIRE_ERROR) console.error(error);
|
||||
}
|
||||
|
||||
if (fsevents) {
|
||||
// TODO: real check
|
||||
const mtch = process.version.match(/v(\d+)\.(\d+)/);
|
||||
if (mtch && mtch[1] && mtch[2]) {
|
||||
const maj = Number.parseInt(mtch[1], 10);
|
||||
const min = Number.parseInt(mtch[2], 10);
|
||||
if (maj === 8 && min < 16) {
|
||||
fsevents = undefined;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const {
|
||||
EV_ADD,
|
||||
EV_CHANGE,
|
||||
EV_ADD_DIR,
|
||||
EV_UNLINK,
|
||||
EV_ERROR,
|
||||
STR_DATA,
|
||||
STR_END,
|
||||
FSEVENT_CREATED,
|
||||
FSEVENT_MODIFIED,
|
||||
FSEVENT_DELETED,
|
||||
FSEVENT_MOVED,
|
||||
// FSEVENT_CLONED,
|
||||
FSEVENT_UNKNOWN,
|
||||
FSEVENT_FLAG_MUST_SCAN_SUBDIRS,
|
||||
FSEVENT_TYPE_FILE,
|
||||
FSEVENT_TYPE_DIRECTORY,
|
||||
FSEVENT_TYPE_SYMLINK,
|
||||
|
||||
ROOT_GLOBSTAR,
|
||||
DIR_SUFFIX,
|
||||
DOT_SLASH,
|
||||
FUNCTION_TYPE,
|
||||
EMPTY_FN,
|
||||
IDENTITY_FN
|
||||
} = require('./constants');
|
||||
|
||||
const Depth = (value) => isNaN(value) ? {} : {depth: value};
|
||||
|
||||
const stat = promisify(fs.stat);
|
||||
const lstat = promisify(fs.lstat);
|
||||
const realpath = promisify(fs.realpath);
|
||||
|
||||
const statMethods = { stat, lstat };
|
||||
|
||||
/**
|
||||
* @typedef {String} Path
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} FsEventsWatchContainer
|
||||
* @property {Set<Function>} listeners
|
||||
* @property {Function} rawEmitter
|
||||
* @property {{stop: Function}} watcher
|
||||
*/
|
||||
|
||||
// fsevents instance helper functions
|
||||
/**
|
||||
* Object to hold per-process fsevents instances (may be shared across chokidar FSWatcher instances)
|
||||
* @type {Map<Path,FsEventsWatchContainer>}
|
||||
*/
|
||||
const FSEventsWatchers = new Map();
|
||||
|
||||
// Threshold of duplicate path prefixes at which to start
|
||||
// consolidating going forward
|
||||
const consolidateThreshhold = 10;
|
||||
|
||||
const wrongEventFlags = new Set([
|
||||
69888, 70400, 71424, 72704, 73472, 131328, 131840, 262912
|
||||
]);
|
||||
|
||||
/**
|
||||
* Instantiates the fsevents interface
|
||||
* @param {Path} path path to be watched
|
||||
* @param {Function} callback called when fsevents is bound and ready
|
||||
* @returns {{stop: Function}} new fsevents instance
|
||||
*/
|
||||
const createFSEventsInstance = (path, callback) => {
|
||||
const stop = fsevents.watch(path, callback);
|
||||
return {stop};
|
||||
};
|
||||
|
||||
/**
|
||||
* Instantiates the fsevents interface or binds listeners to an existing one covering
|
||||
* the same file tree.
|
||||
* @param {Path} path - to be watched
|
||||
* @param {Path} realPath - real path for symlinks
|
||||
* @param {Function} listener - called when fsevents emits events
|
||||
* @param {Function} rawEmitter - passes data to listeners of the 'raw' event
|
||||
* @returns {Function} closer
|
||||
*/
|
||||
function setFSEventsListener(path, realPath, listener, rawEmitter) {
|
||||
let watchPath = sysPath.extname(realPath) ? sysPath.dirname(realPath) : realPath;
|
||||
|
||||
const parentPath = sysPath.dirname(watchPath);
|
||||
let cont = FSEventsWatchers.get(watchPath);
|
||||
|
||||
// If we've accumulated a substantial number of paths that
|
||||
// could have been consolidated by watching one directory
|
||||
// above the current one, create a watcher on the parent
|
||||
// path instead, so that we do consolidate going forward.
|
||||
if (couldConsolidate(parentPath)) {
|
||||
watchPath = parentPath;
|
||||
}
|
||||
|
||||
const resolvedPath = sysPath.resolve(path);
|
||||
const hasSymlink = resolvedPath !== realPath;
|
||||
|
||||
const filteredListener = (fullPath, flags, info) => {
|
||||
if (hasSymlink) fullPath = fullPath.replace(realPath, resolvedPath);
|
||||
if (
|
||||
fullPath === resolvedPath ||
|
||||
!fullPath.indexOf(resolvedPath + sysPath.sep)
|
||||
) listener(fullPath, flags, info);
|
||||
};
|
||||
|
||||
// check if there is already a watcher on a parent path
|
||||
// modifies `watchPath` to the parent path when it finds a match
|
||||
let watchedParent = false;
|
||||
for (const watchedPath of FSEventsWatchers.keys()) {
|
||||
if (realPath.indexOf(sysPath.resolve(watchedPath) + sysPath.sep) === 0) {
|
||||
watchPath = watchedPath;
|
||||
cont = FSEventsWatchers.get(watchPath);
|
||||
watchedParent = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (cont || watchedParent) {
|
||||
cont.listeners.add(filteredListener);
|
||||
} else {
|
||||
cont = {
|
||||
listeners: new Set([filteredListener]),
|
||||
rawEmitter,
|
||||
watcher: createFSEventsInstance(watchPath, (fullPath, flags) => {
|
||||
if (!cont.listeners.size) return;
|
||||
if (flags & FSEVENT_FLAG_MUST_SCAN_SUBDIRS) return;
|
||||
const info = fsevents.getInfo(fullPath, flags);
|
||||
cont.listeners.forEach(list => {
|
||||
list(fullPath, flags, info);
|
||||
});
|
||||
|
||||
cont.rawEmitter(info.event, fullPath, info);
|
||||
})
|
||||
};
|
||||
FSEventsWatchers.set(watchPath, cont);
|
||||
}
|
||||
|
||||
// removes this instance's listeners and closes the underlying fsevents
|
||||
// instance if there are no more listeners left
|
||||
return () => {
|
||||
const lst = cont.listeners;
|
||||
|
||||
lst.delete(filteredListener);
|
||||
if (!lst.size) {
|
||||
FSEventsWatchers.delete(watchPath);
|
||||
if (cont.watcher) return cont.watcher.stop().then(() => {
|
||||
cont.rawEmitter = cont.watcher = undefined;
|
||||
Object.freeze(cont);
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Decide whether or not we should start a new higher-level
|
||||
// parent watcher
|
||||
const couldConsolidate = (path) => {
|
||||
let count = 0;
|
||||
for (const watchPath of FSEventsWatchers.keys()) {
|
||||
if (watchPath.indexOf(path) === 0) {
|
||||
count++;
|
||||
if (count >= consolidateThreshhold) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
// returns boolean indicating whether fsevents can be used
|
||||
const canUse = () => fsevents && FSEventsWatchers.size < 128;
|
||||
|
||||
// determines subdirectory traversal levels from root to path
|
||||
const calcDepth = (path, root) => {
|
||||
let i = 0;
|
||||
while (!path.indexOf(root) && (path = sysPath.dirname(path)) !== root) i++;
|
||||
return i;
|
||||
};
|
||||
|
||||
// returns boolean indicating whether the fsevents' event info has the same type
|
||||
// as the one returned by fs.stat
|
||||
const sameTypes = (info, stats) => (
|
||||
info.type === FSEVENT_TYPE_DIRECTORY && stats.isDirectory() ||
|
||||
info.type === FSEVENT_TYPE_SYMLINK && stats.isSymbolicLink() ||
|
||||
info.type === FSEVENT_TYPE_FILE && stats.isFile()
|
||||
)
|
||||
|
||||
/**
|
||||
* @mixin
|
||||
*/
|
||||
class FsEventsHandler {
|
||||
|
||||
/**
|
||||
* @param {import('../index').FSWatcher} fsw
|
||||
*/
|
||||
constructor(fsw) {
|
||||
this.fsw = fsw;
|
||||
}
|
||||
checkIgnored(path, stats) {
|
||||
const ipaths = this.fsw._ignoredPaths;
|
||||
if (this.fsw._isIgnored(path, stats)) {
|
||||
ipaths.add(path);
|
||||
if (stats && stats.isDirectory()) {
|
||||
ipaths.add(path + ROOT_GLOBSTAR);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
ipaths.delete(path);
|
||||
ipaths.delete(path + ROOT_GLOBSTAR);
|
||||
}
|
||||
|
||||
addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts) {
|
||||
const event = watchedDir.has(item) ? EV_CHANGE : EV_ADD;
|
||||
this.handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts);
|
||||
}
|
||||
|
||||
async checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts) {
|
||||
try {
|
||||
const stats = await stat(path)
|
||||
if (this.fsw.closed) return;
|
||||
if (sameTypes(info, stats)) {
|
||||
this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
|
||||
} else {
|
||||
this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts);
|
||||
}
|
||||
} catch (error) {
|
||||
if (error.code === 'EACCES') {
|
||||
this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
|
||||
} else {
|
||||
this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
handleEvent(event, path, fullPath, realPath, parent, watchedDir, item, info, opts) {
|
||||
if (this.fsw.closed || this.checkIgnored(path)) return;
|
||||
|
||||
if (event === EV_UNLINK) {
|
||||
const isDirectory = info.type === FSEVENT_TYPE_DIRECTORY
|
||||
// suppress unlink events on never before seen files
|
||||
if (isDirectory || watchedDir.has(item)) {
|
||||
this.fsw._remove(parent, item, isDirectory);
|
||||
}
|
||||
} else {
|
||||
if (event === EV_ADD) {
|
||||
// track new directories
|
||||
if (info.type === FSEVENT_TYPE_DIRECTORY) this.fsw._getWatchedDir(path);
|
||||
|
||||
if (info.type === FSEVENT_TYPE_SYMLINK && opts.followSymlinks) {
|
||||
// push symlinks back to the top of the stack to get handled
|
||||
const curDepth = opts.depth === undefined ?
|
||||
undefined : calcDepth(fullPath, realPath) + 1;
|
||||
return this._addToFsEvents(path, false, true, curDepth);
|
||||
}
|
||||
|
||||
// track new paths
|
||||
// (other than symlinks being followed, which will be tracked soon)
|
||||
this.fsw._getWatchedDir(parent).add(item);
|
||||
}
|
||||
/**
|
||||
* @type {'add'|'addDir'|'unlink'|'unlinkDir'}
|
||||
*/
|
||||
const eventName = info.type === FSEVENT_TYPE_DIRECTORY ? event + DIR_SUFFIX : event;
|
||||
this.fsw._emit(eventName, path);
|
||||
if (eventName === EV_ADD_DIR) this._addToFsEvents(path, false, true);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Watch a file/dir path with fsevents and dispatch its events to the listener
|
||||
* @param {String} watchPath - file/dir path to be watched with fsevents
|
||||
* @param {String} realPath - real path (in case of symlinks)
|
||||
* @param {Function} transform - path transformer
|
||||
* @param {Function} globFilter - path filter in case a glob pattern was provided
|
||||
* @returns {Function} closer for the watcher instance
|
||||
*/
|
||||
_watchWithFsEvents(watchPath, realPath, transform, globFilter) {
|
||||
if (this.fsw.closed || this.fsw._isIgnored(watchPath)) return;
|
||||
const opts = this.fsw.options;
|
||||
const watchCallback = async (fullPath, flags, info) => {
|
||||
if (this.fsw.closed) return;
|
||||
if (
|
||||
opts.depth !== undefined &&
|
||||
calcDepth(fullPath, realPath) > opts.depth
|
||||
) return;
|
||||
const path = transform(sysPath.join(
|
||||
watchPath, sysPath.relative(watchPath, fullPath)
|
||||
));
|
||||
if (globFilter && !globFilter(path)) return;
|
||||
// ensure directories are tracked
|
||||
const parent = sysPath.dirname(path);
|
||||
const item = sysPath.basename(path);
|
||||
const watchedDir = this.fsw._getWatchedDir(
|
||||
info.type === FSEVENT_TYPE_DIRECTORY ? path : parent
|
||||
);
|
||||
|
||||
// correct for wrong events emitted
|
||||
if (wrongEventFlags.has(flags) || info.event === FSEVENT_UNKNOWN) {
|
||||
if (typeof opts.ignored === FUNCTION_TYPE) {
|
||||
let stats;
|
||||
try {
|
||||
stats = await stat(path);
|
||||
} catch (error) {}
|
||||
if (this.fsw.closed) return;
|
||||
if (this.checkIgnored(path, stats)) return;
|
||||
if (sameTypes(info, stats)) {
|
||||
this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
|
||||
} else {
|
||||
this.handleEvent(EV_UNLINK, path, fullPath, realPath, parent, watchedDir, item, info, opts);
|
||||
}
|
||||
} else {
|
||||
this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts);
|
||||
}
|
||||
} else {
|
||||
switch (info.event) {
|
||||
case FSEVENT_CREATED:
|
||||
case FSEVENT_MODIFIED:
|
||||
return this.addOrChange(path, fullPath, realPath, parent, watchedDir, item, info, opts);
|
||||
case FSEVENT_DELETED:
|
||||
case FSEVENT_MOVED:
|
||||
return this.checkExists(path, fullPath, realPath, parent, watchedDir, item, info, opts);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const closer = setFSEventsListener(
|
||||
watchPath,
|
||||
realPath,
|
||||
watchCallback,
|
||||
this.fsw._emitRaw
|
||||
);
|
||||
|
||||
this.fsw._emitReady();
|
||||
return closer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle symlinks encountered during directory scan
|
||||
* @param {String} linkPath path to symlink
|
||||
* @param {String} fullPath absolute path to the symlink
|
||||
* @param {Function} transform pre-existing path transformer
|
||||
* @param {Number} curDepth level of subdirectories traversed to where symlink is
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async _handleFsEventsSymlink(linkPath, fullPath, transform, curDepth) {
|
||||
// don't follow the same symlink more than once
|
||||
if (this.fsw.closed || this.fsw._symlinkPaths.has(fullPath)) return;
|
||||
|
||||
this.fsw._symlinkPaths.set(fullPath, true);
|
||||
this.fsw._incrReadyCount();
|
||||
|
||||
try {
|
||||
const linkTarget = await realpath(linkPath);
|
||||
if (this.fsw.closed) return;
|
||||
if (this.fsw._isIgnored(linkTarget)) {
|
||||
return this.fsw._emitReady();
|
||||
}
|
||||
|
||||
this.fsw._incrReadyCount();
|
||||
|
||||
// add the linkTarget for watching with a wrapper for transform
|
||||
// that causes emitted paths to incorporate the link's path
|
||||
this._addToFsEvents(linkTarget || linkPath, (path) => {
|
||||
let aliasedPath = linkPath;
|
||||
if (linkTarget && linkTarget !== DOT_SLASH) {
|
||||
aliasedPath = path.replace(linkTarget, linkPath);
|
||||
} else if (path !== DOT_SLASH) {
|
||||
aliasedPath = sysPath.join(linkPath, path);
|
||||
}
|
||||
return transform(aliasedPath);
|
||||
}, false, curDepth);
|
||||
} catch(error) {
|
||||
if (this.fsw._handleError(error)) {
|
||||
return this.fsw._emitReady();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {Path} newPath
|
||||
* @param {fs.Stats} stats
|
||||
*/
|
||||
emitAdd(newPath, stats, processPath, opts, forceAdd) {
|
||||
const pp = processPath(newPath);
|
||||
const isDir = stats.isDirectory();
|
||||
const dirObj = this.fsw._getWatchedDir(sysPath.dirname(pp));
|
||||
const base = sysPath.basename(pp);
|
||||
|
||||
// ensure empty dirs get tracked
|
||||
if (isDir) this.fsw._getWatchedDir(pp);
|
||||
if (dirObj.has(base)) return;
|
||||
dirObj.add(base);
|
||||
|
||||
if (!opts.ignoreInitial || forceAdd === true) {
|
||||
this.fsw._emit(isDir ? EV_ADD_DIR : EV_ADD, pp, stats);
|
||||
}
|
||||
}
|
||||
|
||||
initWatch(realPath, path, wh, processPath) {
|
||||
if (this.fsw.closed) return;
|
||||
const closer = this._watchWithFsEvents(
|
||||
wh.watchPath,
|
||||
sysPath.resolve(realPath || wh.watchPath),
|
||||
processPath,
|
||||
wh.globFilter
|
||||
);
|
||||
this.fsw._addPathCloser(path, closer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle added path with fsevents
|
||||
* @param {String} path file/dir path or glob pattern
|
||||
* @param {Function|Boolean=} transform converts working path to what the user expects
|
||||
* @param {Boolean=} forceAdd ensure add is emitted
|
||||
* @param {Number=} priorDepth Level of subdirectories already traversed.
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
async _addToFsEvents(path, transform, forceAdd, priorDepth) {
|
||||
if (this.fsw.closed) {
|
||||
return;
|
||||
}
|
||||
const opts = this.fsw.options;
|
||||
const processPath = typeof transform === FUNCTION_TYPE ? transform : IDENTITY_FN;
|
||||
|
||||
const wh = this.fsw._getWatchHelpers(path);
|
||||
|
||||
// evaluate what is at the path we're being asked to watch
|
||||
try {
|
||||
const stats = await statMethods[wh.statMethod](wh.watchPath);
|
||||
if (this.fsw.closed) return;
|
||||
if (this.fsw._isIgnored(wh.watchPath, stats)) {
|
||||
throw null;
|
||||
}
|
||||
if (stats.isDirectory()) {
|
||||
// emit addDir unless this is a glob parent
|
||||
if (!wh.globFilter) this.emitAdd(processPath(path), stats, processPath, opts, forceAdd);
|
||||
|
||||
// don't recurse further if it would exceed depth setting
|
||||
if (priorDepth && priorDepth > opts.depth) return;
|
||||
|
||||
// scan the contents of the dir
|
||||
this.fsw._readdirp(wh.watchPath, {
|
||||
fileFilter: entry => wh.filterPath(entry),
|
||||
directoryFilter: entry => wh.filterDir(entry),
|
||||
...Depth(opts.depth - (priorDepth || 0))
|
||||
}).on(STR_DATA, (entry) => {
|
||||
// need to check filterPath on dirs b/c filterDir is less restrictive
|
||||
if (this.fsw.closed) {
|
||||
return;
|
||||
}
|
||||
if (entry.stats.isDirectory() && !wh.filterPath(entry)) return;
|
||||
|
||||
const joinedPath = sysPath.join(wh.watchPath, entry.path);
|
||||
const {fullPath} = entry;
|
||||
|
||||
if (wh.followSymlinks && entry.stats.isSymbolicLink()) {
|
||||
// preserve the current depth here since it can't be derived from
|
||||
// real paths past the symlink
|
||||
const curDepth = opts.depth === undefined ?
|
||||
undefined : calcDepth(joinedPath, sysPath.resolve(wh.watchPath)) + 1;
|
||||
|
||||
this._handleFsEventsSymlink(joinedPath, fullPath, processPath, curDepth);
|
||||
} else {
|
||||
this.emitAdd(joinedPath, entry.stats, processPath, opts, forceAdd);
|
||||
}
|
||||
}).on(EV_ERROR, EMPTY_FN).on(STR_END, () => {
|
||||
this.fsw._emitReady();
|
||||
});
|
||||
} else {
|
||||
this.emitAdd(wh.watchPath, stats, processPath, opts, forceAdd);
|
||||
this.fsw._emitReady();
|
||||
}
|
||||
} catch (error) {
|
||||
if (!error || this.fsw._handleError(error)) {
|
||||
// TODO: Strange thing: "should not choke on an ignored watch path" fails without 2 ready calls -__-
|
||||
this.fsw._emitReady();
|
||||
this.fsw._emitReady();
|
||||
}
|
||||
}
|
||||
|
||||
if (opts.persistent && forceAdd !== true) {
|
||||
if (typeof transform === FUNCTION_TYPE) {
|
||||
// realpath has already been resolved
|
||||
this.initWatch(undefined, path, wh, processPath);
|
||||
} else {
|
||||
let realPath;
|
||||
try {
|
||||
realPath = await realpath(wh.watchPath);
|
||||
} catch (e) {}
|
||||
this.initWatch(realPath, path, wh, processPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = FsEventsHandler;
|
||||
module.exports.canUse = canUse;
|
||||
654
node_modules/chokidar/lib/nodefs-handler.js
generated
vendored
Normal file
|
|
@ -0,0 +1,654 @@
|
|||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const sysPath = require('path');
|
||||
const { promisify } = require('util');
|
||||
const isBinaryPath = require('is-binary-path');
|
||||
const {
|
||||
isWindows,
|
||||
isLinux,
|
||||
EMPTY_FN,
|
||||
EMPTY_STR,
|
||||
KEY_LISTENERS,
|
||||
KEY_ERR,
|
||||
KEY_RAW,
|
||||
HANDLER_KEYS,
|
||||
EV_CHANGE,
|
||||
EV_ADD,
|
||||
EV_ADD_DIR,
|
||||
EV_ERROR,
|
||||
STR_DATA,
|
||||
STR_END,
|
||||
BRACE_START,
|
||||
STAR
|
||||
} = require('./constants');
|
||||
|
||||
const THROTTLE_MODE_WATCH = 'watch';
|
||||
|
||||
const open = promisify(fs.open);
|
||||
const stat = promisify(fs.stat);
|
||||
const lstat = promisify(fs.lstat);
|
||||
const close = promisify(fs.close);
|
||||
const fsrealpath = promisify(fs.realpath);
|
||||
|
||||
const statMethods = { lstat, stat };
|
||||
|
||||
// TODO: emit errors properly. Example: EMFILE on Macos.
|
||||
const foreach = (val, fn) => {
|
||||
if (val instanceof Set) {
|
||||
val.forEach(fn);
|
||||
} else {
|
||||
fn(val);
|
||||
}
|
||||
};
|
||||
|
||||
const addAndConvert = (main, prop, item) => {
|
||||
let container = main[prop];
|
||||
if (!(container instanceof Set)) {
|
||||
main[prop] = container = new Set([container]);
|
||||
}
|
||||
container.add(item);
|
||||
};
|
||||
|
||||
const clearItem = cont => key => {
|
||||
const set = cont[key];
|
||||
if (set instanceof Set) {
|
||||
set.clear();
|
||||
} else {
|
||||
delete cont[key];
|
||||
}
|
||||
};
|
||||
|
||||
const delFromSet = (main, prop, item) => {
|
||||
const container = main[prop];
|
||||
if (container instanceof Set) {
|
||||
container.delete(item);
|
||||
} else if (container === item) {
|
||||
delete main[prop];
|
||||
}
|
||||
};
|
||||
|
||||
const isEmptySet = (val) => val instanceof Set ? val.size === 0 : !val;
|
||||
|
||||
/**
|
||||
* @typedef {String} Path
|
||||
*/
|
||||
|
||||
// fs_watch helpers
|
||||
|
||||
// object to hold per-process fs_watch instances
|
||||
// (may be shared across chokidar FSWatcher instances)
|
||||
|
||||
/**
|
||||
* @typedef {Object} FsWatchContainer
|
||||
* @property {Set} listeners
|
||||
* @property {Set} errHandlers
|
||||
* @property {Set} rawEmitters
|
||||
* @property {fs.FSWatcher=} watcher
|
||||
* @property {Boolean=} watcherUnusable
|
||||
*/
|
||||
|
||||
/**
|
||||
* @type {Map<String,FsWatchContainer>}
|
||||
*/
|
||||
const FsWatchInstances = new Map();
|
||||
|
||||
/**
|
||||
* Instantiates the fs_watch interface
|
||||
* @param {String} path to be watched
|
||||
* @param {Object} options to be passed to fs_watch
|
||||
* @param {Function} listener main event handler
|
||||
* @param {Function} errHandler emits info about errors
|
||||
* @param {Function} emitRaw emits raw event data
|
||||
* @returns {fs.FSWatcher} new fsevents instance
|
||||
*/
|
||||
function createFsWatchInstance(path, options, listener, errHandler, emitRaw) {
|
||||
const handleEvent = (rawEvent, evPath) => {
|
||||
listener(path);
|
||||
emitRaw(rawEvent, evPath, {watchedPath: path});
|
||||
|
||||
// emit based on events occurring for files from a directory's watcher in
|
||||
// case the file's watcher misses it (and rely on throttling to de-dupe)
|
||||
if (evPath && path !== evPath) {
|
||||
fsWatchBroadcast(
|
||||
sysPath.resolve(path, evPath), KEY_LISTENERS, sysPath.join(path, evPath)
|
||||
);
|
||||
}
|
||||
};
|
||||
try {
|
||||
return fs.watch(path, options, handleEvent);
|
||||
} catch (error) {
|
||||
errHandler(error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper for passing fs_watch event data to a collection of listeners
|
||||
* @param {Path} fullPath absolute path bound to fs_watch instance
|
||||
* @param {String} type listener type
|
||||
* @param {*=} val1 arguments to be passed to listeners
|
||||
* @param {*=} val2
|
||||
* @param {*=} val3
|
||||
*/
|
||||
const fsWatchBroadcast = (fullPath, type, val1, val2, val3) => {
|
||||
const cont = FsWatchInstances.get(fullPath);
|
||||
if (!cont) return;
|
||||
foreach(cont[type], (listener) => {
|
||||
listener(val1, val2, val3);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Instantiates the fs_watch interface or binds listeners
|
||||
* to an existing one covering the same file system entry
|
||||
* @param {String} path
|
||||
* @param {String} fullPath absolute path
|
||||
* @param {Object} options to be passed to fs_watch
|
||||
* @param {Object} handlers container for event listener functions
|
||||
*/
|
||||
const setFsWatchListener = (path, fullPath, options, handlers) => {
|
||||
const {listener, errHandler, rawEmitter} = handlers;
|
||||
let cont = FsWatchInstances.get(fullPath);
|
||||
|
||||
/** @type {fs.FSWatcher=} */
|
||||
let watcher;
|
||||
if (!options.persistent) {
|
||||
watcher = createFsWatchInstance(
|
||||
path, options, listener, errHandler, rawEmitter
|
||||
);
|
||||
return watcher.close.bind(watcher);
|
||||
}
|
||||
if (cont) {
|
||||
addAndConvert(cont, KEY_LISTENERS, listener);
|
||||
addAndConvert(cont, KEY_ERR, errHandler);
|
||||
addAndConvert(cont, KEY_RAW, rawEmitter);
|
||||
} else {
|
||||
watcher = createFsWatchInstance(
|
||||
path,
|
||||
options,
|
||||
fsWatchBroadcast.bind(null, fullPath, KEY_LISTENERS),
|
||||
errHandler, // no need to use broadcast here
|
||||
fsWatchBroadcast.bind(null, fullPath, KEY_RAW)
|
||||
);
|
||||
if (!watcher) return;
|
||||
watcher.on(EV_ERROR, async (error) => {
|
||||
const broadcastErr = fsWatchBroadcast.bind(null, fullPath, KEY_ERR);
|
||||
cont.watcherUnusable = true; // documented since Node 10.4.1
|
||||
// Workaround for https://github.com/joyent/node/issues/4337
|
||||
if (isWindows && error.code === 'EPERM') {
|
||||
try {
|
||||
const fd = await open(path, 'r');
|
||||
await close(fd);
|
||||
broadcastErr(error);
|
||||
} catch (err) {}
|
||||
} else {
|
||||
broadcastErr(error);
|
||||
}
|
||||
});
|
||||
cont = {
|
||||
listeners: listener,
|
||||
errHandlers: errHandler,
|
||||
rawEmitters: rawEmitter,
|
||||
watcher
|
||||
};
|
||||
FsWatchInstances.set(fullPath, cont);
|
||||
}
|
||||
// const index = cont.listeners.indexOf(listener);
|
||||
|
||||
// removes this instance's listeners and closes the underlying fs_watch
|
||||
// instance if there are no more listeners left
|
||||
return () => {
|
||||
delFromSet(cont, KEY_LISTENERS, listener);
|
||||
delFromSet(cont, KEY_ERR, errHandler);
|
||||
delFromSet(cont, KEY_RAW, rawEmitter);
|
||||
if (isEmptySet(cont.listeners)) {
|
||||
// Check to protect against issue gh-730.
|
||||
// if (cont.watcherUnusable) {
|
||||
cont.watcher.close();
|
||||
// }
|
||||
FsWatchInstances.delete(fullPath);
|
||||
HANDLER_KEYS.forEach(clearItem(cont));
|
||||
cont.watcher = undefined;
|
||||
Object.freeze(cont);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
// fs_watchFile helpers
|
||||
|
||||
// object to hold per-process fs_watchFile instances
|
||||
// (may be shared across chokidar FSWatcher instances)
|
||||
const FsWatchFileInstances = new Map();
|
||||
|
||||
/**
|
||||
* Instantiates the fs_watchFile interface or binds listeners
|
||||
* to an existing one covering the same file system entry
|
||||
* @param {String} path to be watched
|
||||
* @param {String} fullPath absolute path
|
||||
* @param {Object} options options to be passed to fs_watchFile
|
||||
* @param {Object} handlers container for event listener functions
|
||||
* @returns {Function} closer
|
||||
*/
|
||||
const setFsWatchFileListener = (path, fullPath, options, handlers) => {
|
||||
const {listener, rawEmitter} = handlers;
|
||||
let cont = FsWatchFileInstances.get(fullPath);
|
||||
|
||||
/* eslint-disable no-unused-vars, prefer-destructuring */
|
||||
let listeners = new Set();
|
||||
let rawEmitters = new Set();
|
||||
|
||||
const copts = cont && cont.options;
|
||||
if (copts && (copts.persistent < options.persistent || copts.interval > options.interval)) {
|
||||
// "Upgrade" the watcher to persistence or a quicker interval.
|
||||
// This creates some unlikely edge case issues if the user mixes
|
||||
// settings in a very weird way, but solving for those cases
|
||||
// doesn't seem worthwhile for the added complexity.
|
||||
listeners = cont.listeners;
|
||||
rawEmitters = cont.rawEmitters;
|
||||
fs.unwatchFile(fullPath);
|
||||
cont = undefined;
|
||||
}
|
||||
|
||||
/* eslint-enable no-unused-vars, prefer-destructuring */
|
||||
|
||||
if (cont) {
|
||||
addAndConvert(cont, KEY_LISTENERS, listener);
|
||||
addAndConvert(cont, KEY_RAW, rawEmitter);
|
||||
} else {
|
||||
// TODO
|
||||
// listeners.add(listener);
|
||||
// rawEmitters.add(rawEmitter);
|
||||
cont = {
|
||||
listeners: listener,
|
||||
rawEmitters: rawEmitter,
|
||||
options,
|
||||
watcher: fs.watchFile(fullPath, options, (curr, prev) => {
|
||||
foreach(cont.rawEmitters, (rawEmitter) => {
|
||||
rawEmitter(EV_CHANGE, fullPath, {curr, prev});
|
||||
});
|
||||
const currmtime = curr.mtimeMs;
|
||||
if (curr.size !== prev.size || currmtime > prev.mtimeMs || currmtime === 0) {
|
||||
foreach(cont.listeners, (listener) => listener(path, curr));
|
||||
}
|
||||
})
|
||||
};
|
||||
FsWatchFileInstances.set(fullPath, cont);
|
||||
}
|
||||
// const index = cont.listeners.indexOf(listener);
|
||||
|
||||
// Removes this instance's listeners and closes the underlying fs_watchFile
|
||||
// instance if there are no more listeners left.
|
||||
return () => {
|
||||
delFromSet(cont, KEY_LISTENERS, listener);
|
||||
delFromSet(cont, KEY_RAW, rawEmitter);
|
||||
if (isEmptySet(cont.listeners)) {
|
||||
FsWatchFileInstances.delete(fullPath);
|
||||
fs.unwatchFile(fullPath);
|
||||
cont.options = cont.watcher = undefined;
|
||||
Object.freeze(cont);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* @mixin
|
||||
*/
|
||||
class NodeFsHandler {
|
||||
|
||||
/**
|
||||
* @param {import("../index").FSWatcher} fsW
|
||||
*/
|
||||
constructor(fsW) {
|
||||
this.fsw = fsW;
|
||||
this._boundHandleError = (error) => fsW._handleError(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* Watch file for changes with fs_watchFile or fs_watch.
|
||||
* @param {String} path to file or dir
|
||||
* @param {Function} listener on fs change
|
||||
* @returns {Function} closer for the watcher instance
|
||||
*/
|
||||
_watchWithNodeFs(path, listener) {
|
||||
const opts = this.fsw.options;
|
||||
const directory = sysPath.dirname(path);
|
||||
const basename = sysPath.basename(path);
|
||||
const parent = this.fsw._getWatchedDir(directory);
|
||||
parent.add(basename);
|
||||
const absolutePath = sysPath.resolve(path);
|
||||
const options = {persistent: opts.persistent};
|
||||
if (!listener) listener = EMPTY_FN;
|
||||
|
||||
let closer;
|
||||
if (opts.usePolling) {
|
||||
options.interval = opts.enableBinaryInterval && isBinaryPath(basename) ?
|
||||
opts.binaryInterval : opts.interval;
|
||||
closer = setFsWatchFileListener(path, absolutePath, options, {
|
||||
listener,
|
||||
rawEmitter: this.fsw._emitRaw
|
||||
});
|
||||
} else {
|
||||
closer = setFsWatchListener(path, absolutePath, options, {
|
||||
listener,
|
||||
errHandler: this._boundHandleError,
|
||||
rawEmitter: this.fsw._emitRaw
|
||||
});
|
||||
}
|
||||
return closer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Watch a file and emit add event if warranted.
|
||||
* @param {Path} file Path
|
||||
* @param {fs.Stats} stats result of fs_stat
|
||||
* @param {Boolean} initialAdd was the file added at watch instantiation?
|
||||
* @returns {Function} closer for the watcher instance
|
||||
*/
|
||||
_handleFile(file, stats, initialAdd) {
|
||||
if (this.fsw.closed) {
|
||||
return;
|
||||
}
|
||||
const dirname = sysPath.dirname(file);
|
||||
const basename = sysPath.basename(file);
|
||||
const parent = this.fsw._getWatchedDir(dirname);
|
||||
// stats is always present
|
||||
let prevStats = stats;
|
||||
|
||||
// if the file is already being watched, do nothing
|
||||
if (parent.has(basename)) return;
|
||||
|
||||
const listener = async (path, newStats) => {
|
||||
if (!this.fsw._throttle(THROTTLE_MODE_WATCH, file, 5)) return;
|
||||
if (!newStats || newStats.mtimeMs === 0) {
|
||||
try {
|
||||
const newStats = await stat(file);
|
||||
if (this.fsw.closed) return;
|
||||
// Check that the change event was not fired only because the access time changed.
|
||||
const at = newStats.atimeMs;
|
||||
const mt = newStats.mtimeMs;
|
||||
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
|
||||
this.fsw._emit(EV_CHANGE, file, newStats);
|
||||
}
|
||||
if (isLinux && prevStats.ino !== newStats.ino) {
|
||||
this.fsw._closeFile(path)
|
||||
prevStats = newStats;
|
||||
this.fsw._addPathCloser(path, this._watchWithNodeFs(file, listener));
|
||||
} else {
|
||||
prevStats = newStats;
|
||||
}
|
||||
} catch (error) {
|
||||
// Fix issues where mtime is null but file is still present
|
||||
this.fsw._remove(dirname, basename);
|
||||
}
|
||||
// add is about to be emitted if file not already tracked in parent
|
||||
} else if (parent.has(basename)) {
|
||||
// Check that the change event was not fired only because the access time changed.
|
||||
const at = newStats.atimeMs;
|
||||
const mt = newStats.mtimeMs;
|
||||
if (!at || at <= mt || mt !== prevStats.mtimeMs) {
|
||||
this.fsw._emit(EV_CHANGE, file, newStats);
|
||||
}
|
||||
prevStats = newStats;
|
||||
}
|
||||
}
|
||||
// kick off the watcher
|
||||
const closer = this._watchWithNodeFs(file, listener);
|
||||
|
||||
// emit an add event if we're supposed to
|
||||
if (!(initialAdd && this.fsw.options.ignoreInitial) && this.fsw._isntIgnored(file)) {
|
||||
if (!this.fsw._throttle(EV_ADD, file, 0)) return;
|
||||
this.fsw._emit(EV_ADD, file, stats);
|
||||
}
|
||||
|
||||
return closer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle symlinks encountered while reading a dir.
|
||||
* @param {Object} entry returned by readdirp
|
||||
* @param {String} directory path of dir being read
|
||||
* @param {String} path of this item
|
||||
* @param {String} item basename of this item
|
||||
* @returns {Promise<Boolean>} true if no more processing is needed for this entry.
|
||||
*/
|
||||
async _handleSymlink(entry, directory, path, item) {
|
||||
if (this.fsw.closed) {
|
||||
return;
|
||||
}
|
||||
const full = entry.fullPath;
|
||||
const dir = this.fsw._getWatchedDir(directory);
|
||||
|
||||
if (!this.fsw.options.followSymlinks) {
|
||||
// watch symlink directly (don't follow) and detect changes
|
||||
this.fsw._incrReadyCount();
|
||||
|
||||
let linkPath;
|
||||
try {
|
||||
linkPath = await fsrealpath(path);
|
||||
} catch (e) {
|
||||
this.fsw._emitReady();
|
||||
return true;
|
||||
}
|
||||
|
||||
if (this.fsw.closed) return;
|
||||
if (dir.has(item)) {
|
||||
if (this.fsw._symlinkPaths.get(full) !== linkPath) {
|
||||
this.fsw._symlinkPaths.set(full, linkPath);
|
||||
this.fsw._emit(EV_CHANGE, path, entry.stats);
|
||||
}
|
||||
} else {
|
||||
dir.add(item);
|
||||
this.fsw._symlinkPaths.set(full, linkPath);
|
||||
this.fsw._emit(EV_ADD, path, entry.stats);
|
||||
}
|
||||
this.fsw._emitReady();
|
||||
return true;
|
||||
}
|
||||
|
||||
// don't follow the same symlink more than once
|
||||
if (this.fsw._symlinkPaths.has(full)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
this.fsw._symlinkPaths.set(full, true);
|
||||
}
|
||||
|
||||
_handleRead(directory, initialAdd, wh, target, dir, depth, throttler) {
|
||||
// Normalize the directory name on Windows
|
||||
directory = sysPath.join(directory, EMPTY_STR);
|
||||
|
||||
if (!wh.hasGlob) {
|
||||
throttler = this.fsw._throttle('readdir', directory, 1000);
|
||||
if (!throttler) return;
|
||||
}
|
||||
|
||||
const previous = this.fsw._getWatchedDir(wh.path);
|
||||
const current = new Set();
|
||||
|
||||
let stream = this.fsw._readdirp(directory, {
|
||||
fileFilter: entry => wh.filterPath(entry),
|
||||
directoryFilter: entry => wh.filterDir(entry),
|
||||
depth: 0
|
||||
}).on(STR_DATA, async (entry) => {
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
const item = entry.path;
|
||||
let path = sysPath.join(directory, item);
|
||||
current.add(item);
|
||||
|
||||
if (entry.stats.isSymbolicLink() && await this._handleSymlink(entry, directory, path, item)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
// Files that are present in the current directory snapshot
// but absent in the previous one are added to the watch list
// and an `add` event is emitted.
|
||||
if (item === target || !target && !previous.has(item)) {
|
||||
this.fsw._incrReadyCount();
|
||||
|
||||
// ensure relativeness of path is preserved in case of watcher reuse
|
||||
path = sysPath.join(dir, sysPath.relative(dir, path));
|
||||
|
||||
this._addToNodeFs(path, initialAdd, wh, depth + 1);
|
||||
}
|
||||
}).on(EV_ERROR, this._boundHandleError);
|
||||
|
||||
return new Promise(resolve =>
|
||||
stream.once(STR_END, () => {
|
||||
if (this.fsw.closed) {
|
||||
stream = undefined;
|
||||
return;
|
||||
}
|
||||
const wasThrottled = throttler ? throttler.clear() : false;
|
||||
|
||||
resolve();
|
||||
|
||||
// Files that are absent in the current directory snapshot
// but present in the previous one emit a `remove` event
// and are removed from @watched[directory].
|
||||
previous.getChildren().filter((item) => {
|
||||
return item !== directory &&
|
||||
!current.has(item) &&
|
||||
// in case of intersecting globs;
|
||||
// a path may have been filtered out of this readdir, but
|
||||
// shouldn't be removed because it matches a different glob
|
||||
(!wh.hasGlob || wh.filterPath({
|
||||
fullPath: sysPath.resolve(directory, item)
|
||||
}));
|
||||
}).forEach((item) => {
|
||||
this.fsw._remove(directory, item);
|
||||
});
|
||||
|
||||
stream = undefined;
|
||||
|
||||
// read once more to catch anything missed in case changes came in extremely quickly
|
||||
if (wasThrottled) this._handleRead(directory, false, wh, target, dir, depth, throttler);
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Read directory to add / remove files from `@watched` list and re-read it on change.
|
||||
* @param {String} dir fs path
|
||||
* @param {fs.Stats} stats
|
||||
* @param {Boolean} initialAdd
|
||||
* @param {Number} depth relative to user-supplied path
|
||||
* @param {String} target child path targeted for watch
|
||||
* @param {Object} wh Common watch helpers for this path
|
||||
* @param {String} realpath
|
||||
* @returns {Promise<Function>} closer for the watcher instance.
|
||||
*/
|
||||
async _handleDir(dir, stats, initialAdd, depth, target, wh, realpath) {
|
||||
const parentDir = this.fsw._getWatchedDir(sysPath.dirname(dir));
|
||||
const tracked = parentDir.has(sysPath.basename(dir));
|
||||
if (!(initialAdd && this.fsw.options.ignoreInitial) && !target && !tracked) {
|
||||
if (!wh.hasGlob || wh.globFilter(dir)) this.fsw._emit(EV_ADD_DIR, dir, stats);
|
||||
}
|
||||
|
||||
// ensure dir is tracked (harmless if redundant)
|
||||
parentDir.add(sysPath.basename(dir));
|
||||
this.fsw._getWatchedDir(dir);
|
||||
let throttler;
|
||||
let closer;
|
||||
|
||||
const oDepth = this.fsw.options.depth;
|
||||
if ((oDepth == null || depth <= oDepth) && !this.fsw._symlinkPaths.has(realpath)) {
|
||||
if (!target) {
|
||||
await this._handleRead(dir, initialAdd, wh, target, dir, depth, throttler);
|
||||
if (this.fsw.closed) return;
|
||||
}
|
||||
|
||||
closer = this._watchWithNodeFs(dir, (dirPath, stats) => {
|
||||
// if current directory is removed, do nothing
|
||||
if (stats && stats.mtimeMs === 0) return;
|
||||
|
||||
this._handleRead(dirPath, false, wh, target, dir, depth, throttler);
|
||||
});
|
||||
}
|
||||
return closer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle added file, directory, or glob pattern.
|
||||
* Delegates call to _handleFile / _handleDir after checks.
|
||||
* @param {String} path to file or dir
* @param {Boolean} initialAdd was the file added at watch instantiation?
* @param {Object} priorWh watch helpers of the parent path, used to inherit its glob filters
* @param {Number} depth relative to user-supplied path
|
||||
* @param {String=} target Child path actually targeted for watch
|
||||
* @returns {Promise}
|
||||
*/
|
||||
async _addToNodeFs(path, initialAdd, priorWh, depth, target) {
|
||||
const ready = this.fsw._emitReady;
|
||||
if (this.fsw._isIgnored(path) || this.fsw.closed) {
|
||||
ready();
|
||||
return false;
|
||||
}
|
||||
|
||||
const wh = this.fsw._getWatchHelpers(path, depth);
|
||||
if (!wh.hasGlob && priorWh) {
|
||||
wh.hasGlob = priorWh.hasGlob;
|
||||
wh.globFilter = priorWh.globFilter;
|
||||
wh.filterPath = entry => priorWh.filterPath(entry);
|
||||
wh.filterDir = entry => priorWh.filterDir(entry);
|
||||
}
|
||||
|
||||
// evaluate what is at the path we're being asked to watch
|
||||
try {
|
||||
const stats = await statMethods[wh.statMethod](wh.watchPath);
|
||||
if (this.fsw.closed) return;
|
||||
if (this.fsw._isIgnored(wh.watchPath, stats)) {
|
||||
ready();
|
||||
return false;
|
||||
}
|
||||
|
||||
const follow = this.fsw.options.followSymlinks && !path.includes(STAR) && !path.includes(BRACE_START);
|
||||
let closer;
|
||||
if (stats.isDirectory()) {
|
||||
const absPath = sysPath.resolve(path);
|
||||
const targetPath = follow ? await fsrealpath(path) : path;
|
||||
if (this.fsw.closed) return;
|
||||
closer = await this._handleDir(wh.watchPath, stats, initialAdd, depth, target, wh, targetPath);
|
||||
if (this.fsw.closed) return;
|
||||
// preserve this symlink's target path
|
||||
if (absPath !== targetPath && targetPath !== undefined) {
|
||||
this.fsw._symlinkPaths.set(absPath, targetPath);
|
||||
}
|
||||
} else if (stats.isSymbolicLink()) {
|
||||
const targetPath = follow ? await fsrealpath(path) : path;
|
||||
if (this.fsw.closed) return;
|
||||
const parent = sysPath.dirname(wh.watchPath);
|
||||
this.fsw._getWatchedDir(parent).add(wh.watchPath);
|
||||
this.fsw._emit(EV_ADD, wh.watchPath, stats);
|
||||
closer = await this._handleDir(parent, stats, initialAdd, depth, path, wh, targetPath);
|
||||
if (this.fsw.closed) return;
|
||||
|
||||
// preserve this symlink's target path
|
||||
if (targetPath !== undefined) {
|
||||
this.fsw._symlinkPaths.set(sysPath.resolve(path), targetPath);
|
||||
}
|
||||
} else {
|
||||
closer = this._handleFile(wh.watchPath, stats, initialAdd);
|
||||
}
|
||||
ready();
|
||||
|
||||
this.fsw._addPathCloser(path, closer);
|
||||
return false;
|
||||
|
||||
} catch (error) {
|
||||
if (this.fsw._handleError(error)) {
|
||||
ready();
|
||||
return path;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = NodeFsHandler;
|
||||
70
node_modules/chokidar/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
{
|
||||
"name": "chokidar",
|
||||
"description": "Minimal and efficient cross-platform file watching library",
|
||||
"version": "3.6.0",
|
||||
"homepage": "https://github.com/paulmillr/chokidar",
|
||||
"author": "Paul Miller (https://paulmillr.com)",
|
||||
"contributors": [
|
||||
"Paul Miller (https://paulmillr.com)",
|
||||
"Elan Shanker"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 8.10.0"
|
||||
},
|
||||
"main": "index.js",
|
||||
"types": "./types/index.d.ts",
|
||||
"dependencies": {
|
||||
"anymatch": "~3.1.2",
|
||||
"braces": "~3.0.2",
|
||||
"glob-parent": "~5.1.2",
|
||||
"is-binary-path": "~2.1.0",
|
||||
"is-glob": "~4.0.1",
|
||||
"normalize-path": "~3.0.0",
|
||||
"readdirp": "~3.6.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"fsevents": "~2.3.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^14",
|
||||
"chai": "^4.3",
|
||||
"dtslint": "^3.3.0",
|
||||
"eslint": "^7.0.0",
|
||||
"mocha": "^7.0.0",
|
||||
"rimraf": "^3.0.0",
|
||||
"sinon": "^9.0.1",
|
||||
"sinon-chai": "^3.3.0",
|
||||
"typescript": "^4.4.3",
|
||||
"upath": "^1.2.0"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"lib/*.js",
|
||||
"types/index.d.ts"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/paulmillr/chokidar.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/paulmillr/chokidar/issues"
|
||||
},
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"dtslint": "dtslint types",
|
||||
"lint": "eslint --report-unused-disable-directives --ignore-path .gitignore .",
|
||||
"build": "npm ls",
|
||||
"mocha": "mocha --exit --timeout 90000",
|
||||
"test": "npm run lint && npm run mocha"
|
||||
},
|
||||
"keywords": [
|
||||
"fs",
|
||||
"watch",
|
||||
"watchFile",
|
||||
"watcher",
|
||||
"watching",
|
||||
"file",
|
||||
"fsevents"
|
||||
],
|
||||
"funding": "https://paulmillr.com/funding/"
|
||||
}
|
||||
192
node_modules/chokidar/types/index.d.ts
generated
vendored
Normal file
|
|
@ -0,0 +1,192 @@
|
|||
// TypeScript Version: 3.0
|
||||
|
||||
/// <reference types="node" />
|
||||
|
||||
import * as fs from "fs";
|
||||
import { EventEmitter } from "events";
|
||||
import { Matcher } from 'anymatch';
|
||||
|
||||
export class FSWatcher extends EventEmitter implements fs.FSWatcher {
|
||||
options: WatchOptions;
|
||||
|
||||
/**
|
||||
* Constructs a new FSWatcher instance with optional WatchOptions parameter.
|
||||
*/
|
||||
constructor(options?: WatchOptions);
|
||||
|
||||
/**
|
||||
* Add files, directories, or glob patterns for tracking. Takes an array of strings or just one
|
||||
* string.
|
||||
*/
|
||||
add(paths: string | ReadonlyArray<string>): this;
|
||||
|
||||
/**
|
||||
* Stop watching files, directories, or glob patterns. Takes an array of strings or just one
|
||||
* string.
|
||||
*/
|
||||
unwatch(paths: string | ReadonlyArray<string>): this;
|
||||
|
||||
/**
|
||||
* Returns an object representing all the paths on the file system being watched by this
|
||||
* `FSWatcher` instance. The object's keys are all the directories (using absolute paths unless
|
||||
* the `cwd` option was used), and the values are arrays of the names of the items contained in
|
||||
* each directory.
|
||||
*/
|
||||
getWatched(): {
|
||||
[directory: string]: string[];
|
||||
};
|
||||
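// Illustrative sketch (not part of the vendored declarations): for a watcher
// created on a hypothetical "/project/src" tree, the object returned by
// getWatched() might look roughly like:
//
//   {
//     '/project/src': ['app.ts', 'lib'],
//     '/project/src/lib': ['util.ts']
//   }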
|
||||
/**
|
||||
* Removes all listeners from watched files.
|
||||
*/
|
||||
close(): Promise<void>;
|
||||
|
||||
on(event: 'add'|'addDir'|'change', listener: (path: string, stats?: fs.Stats) => void): this;
|
||||
|
||||
on(event: 'all', listener: (eventName: 'add'|'addDir'|'change'|'unlink'|'unlinkDir', path: string, stats?: fs.Stats) => void): this;
|
||||
|
||||
/**
|
||||
* Error occurred
|
||||
*/
|
||||
on(event: 'error', listener: (error: Error) => void): this;
|
||||
|
||||
/**
|
||||
* Exposes the native Node `fs.FSWatcher` events
|
||||
*/
|
||||
on(event: 'raw', listener: (eventName: string, path: string, details: any) => void): this;
|
||||
|
||||
/**
|
||||
* Fires when the initial scan is complete
|
||||
*/
|
||||
on(event: 'ready', listener: () => void): this;
|
||||
|
||||
on(event: 'unlink'|'unlinkDir', listener: (path: string) => void): this;
|
||||
|
||||
on(event: string, listener: (...args: any[]) => void): this;
|
||||
|
||||
ref(): this;
|
||||
|
||||
unref(): this;
|
||||
}
|
||||
|
||||
export interface WatchOptions {
|
||||
/**
|
||||
* Indicates whether the process should continue to run as long as files are being watched. If
|
||||
* set to `false` when using `fsevents` to watch, no more events will be emitted after `ready`,
|
||||
* even if the process continues to run.
|
||||
*/
|
||||
persistent?: boolean;
|
||||
|
||||
/**
|
||||
* ([anymatch](https://github.com/micromatch/anymatch)-compatible definition) Defines files/paths to
|
||||
* be ignored. The whole relative or absolute path is tested, not just filename. If a function
|
||||
* with two arguments is provided, it gets called twice per path - once with a single argument
|
||||
* (the path), second time with two arguments (the path and the
|
||||
* [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) object of that path).
|
||||
*/
|
||||
ignored?: Matcher;
|
||||
|
||||
/**
|
||||
* If set to `false` then `add`/`addDir` events are also emitted for matching paths while
|
||||
* the watcher is being instantiated, as chokidar discovers these file paths (before the `ready` event).
|
||||
*/
|
||||
ignoreInitial?: boolean;
|
||||
|
||||
/**
|
||||
* When `false`, only the symlinks themselves will be watched for changes instead of following
|
||||
* the link references and bubbling events through the link's path.
|
||||
*/
|
||||
followSymlinks?: boolean;
|
||||
|
||||
/**
|
||||
* The base directory from which watch `paths` are to be derived. Paths emitted with events will
|
||||
* be relative to this.
|
||||
*/
|
||||
cwd?: string;
|
||||
|
||||
/**
|
||||
* If set to true then the strings passed to .watch() and .add() are treated as literal path
|
||||
* names, even if they look like globs. Default: false.
|
||||
*/
|
||||
disableGlobbing?: boolean;
|
||||
|
||||
/**
|
||||
* Whether to use fs.watchFile (backed by polling), or fs.watch. If polling leads to high CPU
|
||||
* utilization, consider setting this to `false`. It is typically necessary to **set this to
|
||||
* `true` to successfully watch files over a network**, and it may be necessary to successfully
|
||||
* watch files in other non-standard situations. Setting to `true` explicitly on OS X overrides
|
||||
* the `useFsEvents` default.
|
||||
*/
|
||||
usePolling?: boolean;
|
||||
|
||||
/**
|
||||
* Whether to use the `fsevents` watching interface if available. When set to `true` explicitly
|
||||
* and `fsevents` is available, this supersedes the `usePolling` setting. When set to `false` on
|
||||
* OS X, `usePolling: true` becomes the default.
|
||||
*/
|
||||
useFsEvents?: boolean;
|
||||
|
||||
/**
|
||||
* If relying upon the [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats) object that
|
||||
* may get passed with `add`, `addDir`, and `change` events, set this to `true` to ensure it is
|
||||
* provided even in cases where it wasn't already available from the underlying watch events.
|
||||
*/
|
||||
alwaysStat?: boolean;
|
||||
|
||||
/**
|
||||
* If set, limits how many levels of subdirectories will be traversed.
|
||||
*/
|
||||
depth?: number;
|
||||
|
||||
/**
|
||||
* Interval of file system polling.
|
||||
*/
|
||||
interval?: number;
|
||||
|
||||
/**
|
||||
* Interval of file system polling for binary files. ([see list of binary extensions](https://github.com/sindresorhus/binary-extensions/blob/master/binary-extensions.json))
|
||||
*/
|
||||
binaryInterval?: number;
|
||||
|
||||
/**
|
||||
* Indicates whether to watch files that don't have read permissions if possible. If watching
|
||||
* fails due to `EPERM` or `EACCES` with this set to `true`, the errors will be suppressed
|
||||
* silently.
|
||||
*/
|
||||
ignorePermissionErrors?: boolean;
|
||||
|
||||
/**
|
||||
* (Default: `true` if `useFsEvents` and `usePolling` are `false`.) Automatically filters out artifacts
|
||||
* that occur when using editors that use "atomic writes" instead of writing directly to the
|
||||
* source file. If a file is re-added within 100 ms of being deleted, Chokidar emits a `change`
|
||||
* event rather than `unlink` then `add`. If the default of 100 ms does not work well for you,
|
||||
* you can override it by setting `atomic` to a custom value, in milliseconds.
|
||||
*/
|
||||
atomic?: boolean | number;
|
||||
|
||||
/**
|
||||
* can be set to an object in order to adjust timing params:
|
||||
*/
|
||||
awaitWriteFinish?: AwaitWriteFinishOptions | boolean;
|
||||
}
|
||||
|
||||
export interface AwaitWriteFinishOptions {
|
||||
/**
|
||||
* Amount of time in milliseconds for a file size to remain constant before emitting its event.
|
||||
*/
|
||||
stabilityThreshold?: number;
|
||||
|
||||
/**
|
||||
* File size polling interval.
|
||||
*/
|
||||
pollInterval?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* produces an instance of `FSWatcher`.
|
||||
*/
|
||||
export function watch(
|
||||
paths: string | ReadonlyArray<string>,
|
||||
options?: WatchOptions
|
||||
): FSWatcher;
|
||||
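// Illustrative sketch (not part of the vendored declarations): combining the
// typed options above. The paths and values below are placeholders.
//
//   import { watch, WatchOptions } from 'chokidar';
//
//   const options: WatchOptions = {
//     ignored: /(^|[\/\\])\../,       // skip dotfiles
//     ignoreInitial: true,            // skip the initial add/addDir burst
//     awaitWriteFinish: { stabilityThreshold: 200, pollInterval: 50 }
//   };
//
//   watch('src', options).on('change', (path, stats) => {
//     console.log(path, stats && stats.size);
//   });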
38
node_modules/clap/HISTORY.md
generated
vendored
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
## 1.2.3 (September 20, 2017)
|
||||
|
||||
- Rolled back passing params to `args()` back as array
|
||||
|
||||
## 1.2.2 (September 18, 2017)
|
||||
|
||||
- Fixed context passed to `Command#args()`, now it's a command as expected (#10)
|
||||
- Fixed consuming of literal arguments that were wrongly concatenated with other arguments (i.e. anything after `--` was concatenated with the arguments before `--`)
|
||||
|
||||
## 1.2.1 (September 18, 2017)
|
||||
|
||||
- Fixed multi value option processing (@tyanas & @smelukov, #9)
|
||||
|
||||
## 1.2.0 (June 13, 2017)
|
||||
|
||||
- Improved multi value option processing (@smelukov, #7)
|
||||
|
||||
## 1.1.3 (March 16, 2017)
|
||||
|
||||
- Fixed `Command#normalize()` issue when set a value for option with argument and no default value
|
||||
|
||||
## 1.1.2 (December 3, 2016)
|
||||
|
||||
- Fix exception on `Command#normalize()`
|
||||
|
||||
## 1.1.1 (May 10, 2016)
|
||||
|
||||
- Fix `chalk` version
|
||||
|
||||
## 1.1.0 (March 19, 2016)
|
||||
|
||||
- `Command#extend()` accepts parameters for passed function now
|
||||
- Implement `Command#end()` method to return to parent command definition
|
||||
- Fix suggestion bugs and add tests
|
||||
|
||||
## 1.0.0 (Oct 12, 2014)
|
||||
|
||||
- Initial release
|
||||
19
node_modules/clap/LICENSE
generated
vendored
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
Copyright (C) 2014-2016 by Roman Dvornov <rdvornov@gmail.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
11
node_modules/clap/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
[](https://www.npmjs.com/package/clap)
|
||||
[](https://david-dm.org/lahmatiy/clap)
|
||||
[](https://travis-ci.org/lahmatiy/clap)
|
||||
|
||||
# Clap.js
|
||||
|
||||
Argument parser for command-line interfaces. Its primary target is large tool sets that provide a lot of subcommands. Support for argument coercion and completion makes tasks much easier to run, even if you don't use the CLI directly.
|
||||
|
||||
Inspired by TJ Holowaychuk [Commander](https://github.com/visionmedia/commander.js).
|
||||
|
||||
[TODO: Complete readme]
|
||||
950
node_modules/clap/index.js
generated
vendored
Normal file
|
|
@ -0,0 +1,950 @@
|
|||
var MAX_LINE_WIDTH = process.stdout.columns || 200;
|
||||
var MIN_OFFSET = 25;
|
||||
|
||||
var errorHandler;
|
||||
var commandsPath;
|
||||
|
||||
var reAstral = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g;
|
||||
var ansiRegex = /\x1B\[([0-9]{1,3}(;[0-9]{1,3})*)?[m|K]/g;
|
||||
var hasOwnProperty = Object.prototype.hasOwnProperty;
|
||||
|
||||
function stringLength(str){
|
||||
return str
|
||||
.replace(ansiRegex, '')
|
||||
.replace(reAstral, ' ')
|
||||
.length;
|
||||
}
|
||||
|
||||
function camelize(name){
|
||||
return name.replace(/-(.)/g, function(m, ch){
|
||||
return ch.toUpperCase();
|
||||
});
|
||||
}
|
||||
|
||||
function assign(dest, source){
|
||||
for (var key in source)
|
||||
if (hasOwnProperty.call(source, key))
|
||||
dest[key] = source[key];
|
||||
|
||||
return dest;
|
||||
}
|
||||
|
||||
function returnFirstArg(value){
|
||||
return value;
|
||||
}
|
||||
|
||||
function pad(width, str){
|
||||
return str + Array(Math.max(0, width - stringLength(str)) + 1).join(' ');
|
||||
}
|
||||
|
||||
function noop(){
|
||||
// nothing to do
|
||||
}
|
||||
|
||||
function parseParams(str){
|
||||
// params [..<required>] [..[optional]]
|
||||
// <foo> - required
|
||||
// [foo] - optional
|
||||
var tmp;
|
||||
var left = str.trim();
|
||||
var result = {
|
||||
minArgsCount: 0,
|
||||
maxArgsCount: 0,
|
||||
args: []
|
||||
};
|
||||
|
||||
do {
|
||||
tmp = left;
|
||||
left = left.replace(/^<([a-zA-Z][a-zA-Z0-9\-\_]*)>\s*/, function(m, name){
|
||||
result.args.push(new Argument(name, true));
|
||||
result.minArgsCount++;
|
||||
result.maxArgsCount++;
|
||||
|
||||
return '';
|
||||
});
|
||||
}
|
||||
while (tmp != left);
|
||||
|
||||
do {
|
||||
tmp = left;
|
||||
left = left.replace(/^\[([a-zA-Z][a-zA-Z0-9\-\_]*)\]\s*/, function(m, name){
|
||||
result.args.push(new Argument(name, false));
|
||||
result.maxArgsCount++;
|
||||
|
||||
return '';
|
||||
});
|
||||
}
|
||||
while (tmp != left);
|
||||
|
||||
if (left)
|
||||
throw new SyntaxError('Bad parameter description: ' + str);
|
||||
|
||||
return result.args.length ? result : false;
|
||||
}
|
||||
|
||||
/**
|
||||
* @class
|
||||
*/
|
||||
|
||||
var SyntaxError = function(message){
|
||||
this.message = message;
|
||||
};
|
||||
SyntaxError.prototype = Object.create(Error.prototype);
|
||||
SyntaxError.prototype.name = 'SyntaxError';
|
||||
SyntaxError.prototype.clap = true;
|
||||
|
||||
/**
|
||||
* @class
|
||||
*/
|
||||
var Argument = function(name, required){
|
||||
this.name = name;
|
||||
this.required = required;
|
||||
};
|
||||
Argument.prototype = {
|
||||
required: false,
|
||||
name: '',
|
||||
normalize: returnFirstArg,
|
||||
suggest: function(){
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @class
|
||||
* @param {string} usage
|
||||
* @param {string} description
|
||||
*/
|
||||
var Option = function(usage, description){
|
||||
var self = this;
|
||||
var params;
|
||||
var left = usage.trim()
|
||||
// short usage
|
||||
// -x
|
||||
.replace(/^-([a-zA-Z])(?:\s*,\s*|\s+)/, function(m, name){
|
||||
self.short = name;
|
||||
|
||||
return '';
|
||||
})
|
||||
// long usage
|
||||
// --flag
|
||||
// --no-flag - invert value if flag is boolean
|
||||
.replace(/^--([a-zA-Z][a-zA-Z0-9\-\_]+)\s*/, function(m, name){
|
||||
self.long = name;
|
||||
self.name = name.replace(/(^|-)no-/, '$1');
|
||||
self.defValue = self.name != self.long;
|
||||
|
||||
return '';
|
||||
});
|
||||
|
||||
if (!this.long)
|
||||
throw new SyntaxError('Usage has no long name: ' + usage);
|
||||
|
||||
try {
|
||||
params = parseParams(left);
|
||||
} catch(e) {
|
||||
throw new SyntaxError('Bad parameter description in usage for option: ' + usage, e);
|
||||
}
|
||||
|
||||
if (params)
|
||||
{
|
||||
left = '';
|
||||
this.name = this.long;
|
||||
this.defValue = undefined;
|
||||
|
||||
assign(this, params);
|
||||
}
|
||||
|
||||
if (left)
|
||||
throw new SyntaxError('Bad usage description for option: ' + usage);
|
||||
|
||||
if (!this.name)
|
||||
this.name = this.long;
|
||||
|
||||
this.description = description || '';
|
||||
this.usage = usage.trim();
|
||||
this.camelName = camelize(this.name);
|
||||
};
|
||||
|
||||
Option.prototype = {
|
||||
name: '',
|
||||
description: '',
|
||||
short: '',
|
||||
long: '',
|
||||
|
||||
beforeInit: false,
|
||||
required: false,
|
||||
minArgsCount: 0,
|
||||
maxArgsCount: 0,
|
||||
args: null,
|
||||
|
||||
defValue: undefined,
|
||||
normalize: returnFirstArg
|
||||
};
|
||||
|
||||
|
||||
//
|
||||
// Command
|
||||
//
|
||||
|
||||
function createOption(usage, description, opt_1, opt_2){
|
||||
var option = new Option(usage, description);
|
||||
|
||||
// if (option.bool && arguments.length > 2)
|
||||
// throw new SyntaxError('bool flags can\'t have a default value or validator');
|
||||
|
||||
if (arguments.length == 3)
|
||||
{
|
||||
if (opt_1 && opt_1.constructor === Object)
|
||||
{
|
||||
for (var key in opt_1)
|
||||
if (key == 'normalize' ||
|
||||
key == 'defValue' ||
|
||||
key == 'beforeInit')
|
||||
option[key] = opt_1[key];
|
||||
|
||||
// old name for `beforeInit` setting is `hot`
|
||||
if (opt_1.hot)
|
||||
option.beforeInit = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (typeof opt_1 == 'function')
|
||||
option.normalize = opt_1;
|
||||
else
|
||||
option.defValue = opt_1;
|
||||
}
|
||||
}
|
||||
|
||||
if (arguments.length == 4)
|
||||
{
|
||||
if (typeof opt_1 == 'function')
|
||||
option.normalize = opt_1;
|
||||
|
||||
option.defValue = opt_2;
|
||||
}
|
||||
|
||||
return option;
|
||||
}
|
||||
|
||||
function addOptionToCommand(command, option){
|
||||
var commandOption;
|
||||
|
||||
// short
|
||||
if (option.short)
|
||||
{
|
||||
commandOption = command.short[option.short];
|
||||
|
||||
if (commandOption)
|
||||
throw new SyntaxError('Short option name -' + option.short + ' already in use by ' + commandOption.usage + ' ' + commandOption.description);
|
||||
|
||||
command.short[option.short] = option;
|
||||
}
|
||||
|
||||
// long
|
||||
commandOption = command.long[option.long];
|
||||
|
||||
if (commandOption)
|
||||
throw new SyntaxError('Long option --' + option.long + ' already in use by ' + commandOption.usage + ' ' + commandOption.description);
|
||||
|
||||
command.long[option.long] = option;
|
||||
|
||||
// camel
|
||||
commandOption = command.options[option.camelName];
|
||||
|
||||
if (commandOption)
|
||||
throw new SyntaxError('Name option ' + option.camelName + ' already in use by ' + commandOption.usage + ' ' + commandOption.description);
|
||||
|
||||
command.options[option.camelName] = option;
|
||||
|
||||
// set default value
|
||||
if (typeof option.defValue != 'undefined')
|
||||
command.setOption(option.camelName, option.defValue, true);
|
||||
|
||||
// add to suggestions
|
||||
command.suggestions.push('--' + option.long);
|
||||
|
||||
return option;
|
||||
}
|
||||
|
||||
function findVariants(obj, entry){
|
||||
return obj.suggestions.filter(function(item){
|
||||
return item.substr(0, entry.length) == entry;
|
||||
});
|
||||
}
|
||||
|
||||
function processArgs(command, args, suggest){
|
||||
function processOption(option, command){
|
||||
var params = [];
|
||||
|
||||
if (option.maxArgsCount)
|
||||
{
|
||||
for (var j = 0; j < option.maxArgsCount; j++)
|
||||
{
|
||||
var suggestPoint = suggest && i + 1 + j >= args.length - 1;
|
||||
var nextToken = args[i + 1];
|
||||
|
||||
// TODO: suggestions for options
|
||||
if (suggestPoint)
|
||||
{
|
||||
// search for suggest
|
||||
noSuggestions = true;
|
||||
i = args.length;
|
||||
return;
|
||||
}
|
||||
|
||||
if (!nextToken || nextToken[0] == '-')
|
||||
break;
|
||||
|
||||
params.push(args[++i]);
|
||||
}
|
||||
|
||||
if (params.length < option.minArgsCount)
|
||||
throw new SyntaxError('Option ' + token + ' should be used with at least ' + option.minArgsCount + ' argument(s)\nUsage: ' + option.usage);
|
||||
|
||||
if (option.maxArgsCount == 1)
|
||||
params = params[0];
|
||||
}
|
||||
else
|
||||
{
|
||||
params = !option.defValue;
|
||||
}
|
||||
|
||||
//command.values[option.camelName] = newValue;
|
||||
resultToken.options.push({
|
||||
option: option,
|
||||
value: params
|
||||
});
|
||||
}
|
||||
|
||||
var resultToken = {
|
||||
command: command,
|
||||
args: [],
|
||||
literalArgs: [],
|
||||
options: []
|
||||
};
|
||||
var result = [resultToken];
|
||||
|
||||
var suggestStartsWith = '';
|
||||
var noSuggestions = false;
|
||||
var collectArgs = false;
|
||||
var commandArgs = [];
|
||||
var noOptionsYet = true;
|
||||
var option;
|
||||
|
||||
commandsPath = [command.name];
|
||||
|
||||
for (var i = 0; i < args.length; i++)
|
||||
{
|
||||
var suggestPoint = suggest && i == args.length - 1;
|
||||
var token = args[i];
|
||||
|
||||
if (collectArgs)
|
||||
{
|
||||
commandArgs.push(token);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (suggestPoint && (token == '--' || token == '-' || token[0] != '-'))
|
||||
{
|
||||
suggestStartsWith = token;
|
||||
break; // returns long option & command list outside the loop
|
||||
}
|
||||
|
||||
if (token == '--')
|
||||
{
|
||||
resultToken.args = commandArgs;
|
||||
commandArgs = [];
|
||||
noOptionsYet = false;
|
||||
collectArgs = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (token[0] == '-')
|
||||
{
|
||||
noOptionsYet = false;
|
||||
|
||||
if (commandArgs.length)
|
||||
{
|
||||
//command.args_.apply(command, commandArgs);
|
||||
resultToken.args = commandArgs;
|
||||
commandArgs = [];
|
||||
}
|
||||
|
||||
if (token[1] == '-')
|
||||
{
|
||||
// long option
|
||||
option = command.long[token.substr(2)];
|
||||
|
||||
if (!option)
|
||||
{
|
||||
// option doesn't exist
|
||||
if (suggestPoint)
|
||||
return findVariants(command, token);
|
||||
else
|
||||
throw new SyntaxError('Unknown option: ' + token);
|
||||
}
|
||||
|
||||
// process option
|
||||
processOption(option, command);
|
||||
}
|
||||
else
|
||||
{
|
||||
// short flags sequence
|
||||
if (!/^-[a-zA-Z]+$/.test(token))
|
||||
throw new SyntaxError('Wrong short option sequence: ' + token);
|
||||
|
||||
if (token.length == 2)
|
||||
{
|
||||
option = command.short[token[1]];
|
||||
|
||||
if (!option)
|
||||
throw new SyntaxError('Unknown short option name: -' + token[1]);
|
||||
|
||||
// single option
|
||||
processOption(option, command);
|
||||
}
|
||||
else
|
||||
{
|
||||
// short options sequence
|
||||
for (var j = 1; j < token.length; j++)
|
||||
{
|
||||
option = command.short[token[j]];
|
||||
|
||||
if (!option)
|
||||
throw new SyntaxError('Unknown short option name: -' + token[j]);
|
||||
|
||||
if (option.maxArgsCount)
|
||||
throw new SyntaxError('Non-boolean option -' + token[j] + ' can\'t be used in short option sequence: ' + token);
|
||||
|
||||
processOption(option, command);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if (command.commands[token] && (!command.params || commandArgs.length >= command.params.minArgsCount))
|
||||
{
|
||||
if (noOptionsYet)
|
||||
{
|
||||
resultToken.args = commandArgs;
|
||||
commandArgs = [];
|
||||
}
|
||||
|
||||
if (command.params && resultToken.args.length < command.params.minArgsCount)
|
||||
throw new SyntaxError('Missed required argument(s) for command `' + command.name + '`');
|
||||
|
||||
// switch control to another command
|
||||
command = command.commands[token];
|
||||
noOptionsYet = true;
|
||||
|
||||
commandsPath.push(command.name);
|
||||
|
||||
resultToken = {
|
||||
command: command,
|
||||
args: [],
|
||||
literalArgs: [],
|
||||
options: []
|
||||
};
|
||||
result.push(resultToken);
|
||||
}
|
||||
else
|
||||
{
|
||||
if (noOptionsYet && command.params && commandArgs.length < command.params.maxArgsCount)
|
||||
{
|
||||
commandArgs.push(token);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (suggestPoint)
|
||||
return findVariants(command, token);
|
||||
else
|
||||
throw new SyntaxError('Unknown command: ' + token);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (suggest)
|
||||
{
|
||||
if (collectArgs || noSuggestions)
|
||||
return [];
|
||||
|
||||
return findVariants(command, suggestStartsWith);
|
||||
}
|
||||
else
|
||||
{
|
||||
if (!noOptionsYet)
|
||||
resultToken.literalArgs = commandArgs;
|
||||
else
|
||||
resultToken.args = commandArgs;
|
||||
|
||||
if (command.params && resultToken.args.length < command.params.minArgsCount)
|
||||
throw new SyntaxError('Missed required argument(s) for command `' + command.name + '`');
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function setFunctionFactory(name){
|
||||
return function(fn){
|
||||
var property = name + '_';
|
||||
|
||||
if (this[property] !== noop)
|
||||
throw new SyntaxError('Method `' + name + '` could be invoked only once');
|
||||
|
||||
if (typeof fn != 'function')
|
||||
throw new SyntaxError('Value for `' + name + '` method should be a function');
|
||||
|
||||
this[property] = fn;
|
||||
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @class
|
||||
*/
|
||||
var Command = function(name, params){
|
||||
this.name = name;
|
||||
this.params = false;
|
||||
|
||||
try {
|
||||
if (params)
|
||||
this.params = parseParams(params);
|
||||
} catch(e) {
|
||||
throw new SyntaxError('Bad parameter description in command definition: ' + this.name + ' ' + params);
|
||||
}
|
||||
|
||||
this.commands = {};
|
||||
|
||||
this.options = {};
|
||||
this.short = {};
|
||||
this.long = {};
|
||||
this.values = {};
|
||||
this.defaults_ = {};
|
||||
|
||||
this.suggestions = [];
|
||||
|
||||
this.option('-h, --help', 'Output usage information', function(){
|
||||
this.showHelp();
|
||||
process.exit(0);
|
||||
}, undefined);
|
||||
};
|
||||
|
||||
Command.prototype = {
|
||||
params: null,
|
||||
commands: null,
|
||||
options: null,
|
||||
short: null,
|
||||
long: null,
|
||||
values: null,
|
||||
defaults_: null,
|
||||
suggestions: null,
|
||||
|
||||
description_: '',
|
||||
version_: '',
|
||||
initContext_: noop,
|
||||
init_: noop,
|
||||
delegate_: noop,
|
||||
action_: noop,
|
||||
args_: noop,
|
||||
end_: null,
|
||||
|
||||
option: function(usage, description, opt_1, opt_2){
|
||||
addOptionToCommand(this, createOption.apply(null, arguments));
|
||||
|
||||
return this;
|
||||
},
|
||||
shortcut: function(usage, description, fn, opt_1, opt_2){
|
||||
if (typeof fn != 'function')
|
||||
throw new SyntaxError('fn should be a function');
|
||||
|
||||
var command = this;
|
||||
var option = addOptionToCommand(this, createOption(usage, description, opt_1, opt_2));
|
||||
var normalize = option.normalize;
|
||||
|
||||
option.normalize = function(value){
|
||||
var values;
|
||||
|
||||
value = normalize.call(command, value);
|
||||
values = fn(value);
|
||||
|
||||
for (var name in values)
|
||||
if (hasOwnProperty.call(values, name))
|
||||
if (hasOwnProperty.call(command.options, name))
|
||||
command.setOption(name, values[name]);
|
||||
else
|
||||
command.values[name] = values[name];
|
||||
|
||||
command.values[option.name] = value;
|
||||
|
||||
return value;
|
||||
};
|
||||
|
||||
return this;
|
||||
},
|
||||
hasOption: function(name){
|
||||
return hasOwnProperty.call(this.options, name);
|
||||
},
|
||||
hasOptions: function(){
|
||||
return Object.keys(this.options).length > 0;
|
||||
},
|
||||
setOption: function(name, value, isDefault){
|
||||
if (!this.hasOption(name))
|
||||
throw new SyntaxError('Option `' + name + '` is not defined');
|
||||
|
||||
var option = this.options[name];
|
||||
var oldValue = this.values[name];
|
||||
var newValue = option.normalize.call(this, value, oldValue);
|
||||
|
||||
this.values[name] = option.maxArgsCount ? newValue : value;
|
||||
|
||||
if (isDefault && !hasOwnProperty.call(this.defaults_, name))
|
||||
this.defaults_[name] = this.values[name];
|
||||
},
|
||||
setOptions: function(values){
|
||||
for (var name in values)
|
||||
if (hasOwnProperty.call(values, name) && this.hasOption(name))
|
||||
this.setOption(name, values[name]);
|
||||
},
|
||||
reset: function(){
|
||||
this.values = {};
|
||||
|
||||
assign(this.values, this.defaults_);
|
||||
},
|
||||
|
||||
command: function(nameOrCommand, params){
|
||||
var name;
|
||||
var command;
|
||||
|
||||
if (nameOrCommand instanceof Command)
|
||||
{
|
||||
command = nameOrCommand;
|
||||
name = command.name;
|
||||
}
|
||||
else
|
||||
{
|
||||
name = nameOrCommand;
|
||||
|
||||
if (!/^[a-zA-Z][a-zA-Z0-9\-\_]*$/.test(name))
|
||||
throw new SyntaxError('Wrong command name: ' + name);
|
||||
}
|
||||
|
||||
// search for existing one
|
||||
var subcommand = this.commands[name];
|
||||
|
||||
if (!subcommand)
|
||||
{
|
||||
// create new one if not exists
|
||||
subcommand = command || new Command(name, params);
|
||||
subcommand.end_ = this;
|
||||
this.commands[name] = subcommand;
|
||||
this.suggestions.push(name);
|
||||
}
|
||||
|
||||
return subcommand;
|
||||
},
|
||||
end: function() {
|
||||
return this.end_;
|
||||
},
|
||||
hasCommands: function(){
|
||||
return Object.keys(this.commands).length > 0;
|
||||
},
|
||||
|
||||
version: function(version, usage, description){
|
||||
if (this.version_)
|
||||
throw new SyntaxError('Version for command could be set only once');
|
||||
|
||||
this.version_ = version;
|
||||
this.option(
|
||||
usage || '-v, --version',
|
||||
description || 'Output version',
|
||||
function(){
|
||||
console.log(this.version_);
|
||||
process.exit(0);
|
||||
},
|
||||
undefined
|
||||
);
|
||||
|
||||
return this;
|
||||
},
|
||||
description: function(description){
|
||||
if (this.description_)
|
||||
throw new SyntaxError('Description for command could be set only once');
|
||||
|
||||
this.description_ = description;
|
||||
|
||||
return this;
|
||||
},
|
||||
|
||||
init: setFunctionFactory('init'),
|
||||
initContext: setFunctionFactory('initContext'),
|
||||
args: setFunctionFactory('args'),
|
||||
delegate: setFunctionFactory('delegate'),
|
||||
action: setFunctionFactory('action'),
|
||||
|
||||
extend: function(fn){
|
||||
fn.apply(null, [this].concat(Array.prototype.slice.call(arguments, 1)));
|
||||
return this;
|
||||
},
|
||||
|
||||
parse: function(args, suggest){
|
||||
if (!args)
|
||||
args = process.argv.slice(2);
|
||||
|
||||
if (!errorHandler)
|
||||
return processArgs(this, args, suggest);
|
||||
else
|
||||
try {
|
||||
return processArgs(this, args, suggest);
|
||||
} catch(e) {
|
||||
errorHandler(e.message || e);
|
||||
}
|
||||
},
|
||||
run: function(args, context){
|
||||
var commands = this.parse(args);
|
||||
|
||||
if (!commands)
|
||||
return;
|
||||
|
||||
var prevCommand;
|
||||
var context = assign({}, context || this.initContext_());
|
||||
for (var i = 0; i < commands.length; i++)
|
||||
{
|
||||
var item = commands[i];
|
||||
var command = item.command;
|
||||
|
||||
// reset command values
|
||||
command.reset();
|
||||
command.context = context;
|
||||
command.root = this;
|
||||
|
||||
if (prevCommand)
|
||||
prevCommand.delegate_(command);
|
||||
|
||||
// apply beforeInit options
|
||||
item.options.forEach(function(entry){
|
||||
if (entry.option.beforeInit)
|
||||
command.setOption(entry.option.camelName, entry.value);
|
||||
});
|
||||
|
||||
command.init_(item.args.slice()); // use slice to avoid args mutation in handler
|
||||
|
||||
if (item.args.length)
|
||||
command.args_(item.args.slice()); // use slice to avoid args mutation in handler
|
||||
|
||||
// apply regular options
|
||||
item.options.forEach(function(entry){
|
||||
if (!entry.option.beforeInit)
|
||||
command.setOption(entry.option.camelName, entry.value);
|
||||
});
|
||||
|
||||
prevCommand = command;
|
||||
}
|
||||
|
||||
// return last command action result
|
||||
if (command)
|
||||
return command.action_(item.args, item.literalArgs);
|
||||
},
|
||||
|
||||
normalize: function(values){
|
||||
var result = {};
|
||||
|
||||
if (!values)
|
||||
values = {};
|
||||
|
||||
for (var name in this.values)
|
||||
if (hasOwnProperty.call(this.values, name))
|
||||
result[name] = hasOwnProperty.call(values, name) && hasOwnProperty.call(this.options, name)
|
||||
? this.options[name].normalize.call(this, values[name])
|
||||
: this.values[name];
|
||||
|
||||
for (var name in values)
|
||||
if (hasOwnProperty.call(values, name) && !hasOwnProperty.call(result, name))
|
||||
result[name] = values[name];
|
||||
|
||||
return result;
|
||||
},
|
||||
|
||||
showHelp: function(){
|
||||
console.log(showCommandHelp(this));
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
//
|
||||
// help
|
||||
//
|
||||
|
||||
/**
|
||||
* Return program help documentation.
|
||||
*
|
||||
* @return {String}
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function showCommandHelp(command){
|
||||
function breakByLines(str, offset){
|
||||
var words = str.split(' ');
|
||||
var maxWidth = MAX_LINE_WIDTH - offset || 0;
|
||||
var lines = [];
|
||||
var line = '';
|
||||
|
||||
while (words.length)
|
||||
{
|
||||
var word = words.shift();
|
||||
if (!line || (line.length + word.length + 1) < maxWidth)
|
||||
{
|
||||
line += (line ? ' ' : '') + word;
|
||||
}
|
||||
else
|
||||
{
|
||||
lines.push(line);
|
||||
words.unshift(word);
|
||||
line = '';
|
||||
}
|
||||
}
|
||||
|
||||
lines.push(line);
|
||||
|
||||
return lines.map(function(line, idx){
|
||||
return (idx && offset ? pad(offset, '') : '') + line;
|
||||
}).join('\n');
|
||||
}
|
||||
|
||||
function args(command){
|
||||
return command.params.args.map(function(arg){
|
||||
return arg.required
|
||||
? '<' + arg.name + '>'
|
||||
: '[' + arg.name + ']';
|
||||
}).join(' ');
|
||||
}
|
||||
|
||||
function commandsHelp(){
|
||||
if (!command.hasCommands())
|
||||
return '';
|
||||
|
||||
var maxNameLength = MIN_OFFSET - 2;
|
||||
var lines = Object.keys(command.commands).sort().map(function(name){
|
||||
var subcommand = command.commands[name];
|
||||
|
||||
var line = {
|
||||
name: chalk.green(name) + chalk.gray(
|
||||
(subcommand.params ? ' ' + args(subcommand) : '')
|
||||
// (subcommand.hasOptions() ? ' [options]' : '')
|
||||
),
|
||||
description: subcommand.description_ || ''
|
||||
};
|
||||
|
||||
maxNameLength = Math.max(maxNameLength, stringLength(line.name));
|
||||
|
||||
return line;
|
||||
});
|
||||
|
||||
return [
|
||||
'',
|
||||
'Commands:',
|
||||
'',
|
||||
lines.map(function(line){
|
||||
return ' ' + pad(maxNameLength, line.name) + ' ' + breakByLines(line.description, maxNameLength + 4);
|
||||
}).join('\n'),
|
||||
''
|
||||
].join('\n');
|
||||
}
|
||||
|
||||
function optionsHelp(){
|
||||
if (!command.hasOptions())
|
||||
return '';
|
||||
|
||||
var hasShortOptions = Object.keys(command.short).length > 0;
|
||||
var maxNameLength = MIN_OFFSET - 2;
|
||||
var lines = Object.keys(command.long).sort().map(function(name){
|
||||
var option = command.long[name];
|
||||
var line = {
|
||||
name: option.usage
|
||||
.replace(/^(?:-., |)/, function(m){
|
||||
return m || (hasShortOptions ? ' ' : '');
|
||||
})
|
||||
.replace(/(^|\s)(-[^\s,]+)/ig, function(m, p, flag){
|
||||
return p + chalk.yellow(flag);
|
||||
}),
|
||||
description: option.description
|
||||
};
|
||||
|
||||
maxNameLength = Math.max(maxNameLength, stringLength(line.name));
|
||||
|
||||
return line;
|
||||
});
|
||||
|
||||
// Prepend the help information
|
||||
return [
|
||||
'',
|
||||
'Options:',
|
||||
'',
|
||||
lines.map(function(line){
|
||||
return ' ' + pad(maxNameLength, line.name) + ' ' + breakByLines(line.description, maxNameLength + 4);
|
||||
}).join('\n'),
|
||||
''
|
||||
].join('\n');
|
||||
}
|
||||
|
||||
var output = [];
|
||||
var chalk = require('chalk');
|
||||
|
||||
chalk.enabled = module.exports.color && process.stdout.isTTY;
|
||||
|
||||
if (command.description_)
|
||||
output.push(command.description_ + '\n');
|
||||
|
||||
output.push(
|
||||
'Usage:\n\n ' +
|
||||
chalk.cyan(commandsPath ? commandsPath.join(' ') : command.name) +
|
||||
(command.params ? ' ' + chalk.magenta(args(command)) : '') +
|
||||
(command.hasOptions() ? ' [' + chalk.yellow('options') + ']' : '') +
|
||||
(command.hasCommands() ? ' [' + chalk.green('command') + ']' : ''),
|
||||
commandsHelp() +
|
||||
optionsHelp()
|
||||
);
|
||||
|
||||
return output.join('\n');
|
||||
};
|
||||
|
||||
|
||||
//
|
||||
// export
|
||||
//
|
||||
|
||||
module.exports = {
|
||||
color: true,
|
||||
|
||||
Error: SyntaxError,
|
||||
Argument: Argument,
|
||||
Command: Command,
|
||||
Option: Option,
|
||||
|
||||
error: function(fn){
|
||||
if (errorHandler)
|
||||
throw new SyntaxError('Error handler should be set only once');
|
||||
|
||||
if (typeof fn != 'function')
|
||||
throw new SyntaxError('Error handler should be a function');
|
||||
|
||||
errorHandler = fn;
|
||||
|
||||
return this;
|
||||
},
|
||||
|
||||
create: function(name, params){
|
||||
return new Command(name || require('path').basename(process.argv[1]) || 'cli', params);
|
||||
},
|
||||
|
||||
confirm: function(message, fn){
|
||||
process.stdout.write(message);
|
||||
process.stdin.setEncoding('utf8');
|
||||
process.stdin.once('data', function(val){
|
||||
process.stdin.pause();
|
||||
fn(/^y|yes|ok|true$/i.test(val.trim()));
|
||||
});
|
||||
process.stdin.resume();
|
||||
}
|
||||
};
|
||||
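A hedged sketch of the subcommand and error-handler API defined in the file above; the `pkg`/`build` names, options and file paths are made up for illustration:

```js
var cli = require('clap');

// Replace thrown syntax errors with a printed message and a non-zero exit code
cli.error(function(message){
    console.error(message);
    process.exit(2);
});

var program = cli.create('pkg', '[config]');     // hypothetical root command

program
    .command('build', '<entry> [output]')        // subcommand with a required argument
    .description('Build a bundle')
    .option('-m, --minify', 'Minify the result')
    .action(function(args){
        console.log('build', args, this.values.minify);
    });

program.run(['build', 'src/index.js', 'dist/index.js', '--minify']);
// -> build [ 'src/index.js', 'dist/index.js' ] true
```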
36
node_modules/clap/package.json
generated
vendored
Normal file
36
node_modules/clap/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
{
  "name": "clap",
  "title": "Command line argument parser",
  "description": "Command line argument parser",
  "author": "Roman Dvornov <rdvornov@gmail.com>",
  "license": "MIT",
  "version": "1.2.3",
  "keywords": [
    "cli",
    "command",
    "option",
    "argument",
    "completion"
  ],
  "homepage": "https://github.com/lahmatiy/clap",
  "repository": "lahmatiy/clap",
  "main": "index.js",
  "files": [
    "index.js",
    "HISTORY.md",
    "LICENSE",
    "README.md"
  ],
  "engines": {
    "node": ">=0.10.0"
  },
  "dependencies": {
    "chalk": "^1.1.3"
  },
  "devDependencies": {
    "mocha": "^2.4.5"
  },
  "scripts": {
    "test": "mocha test -R spec"
  }
}
590
node_modules/css-tree/CHANGELOG.md
generated
vendored
Normal file
590
node_modules/css-tree/CHANGELOG.md
generated
vendored
Normal file
|
|
@ -0,0 +1,590 @@
|
|||
## 1.1.3 (March 31, 2021)
|
||||
|
||||
- Fixed matching on CSS wide keywords for at-rule's prelude and descriptors
|
||||
- Added `fit-content` to the `width` property patch, as browsers support it as a keyword (nonstandard) while the spec defines it as a function
|
||||
- Fixed parsing of a value that contains parentheses or brackets when the `parseValue` option is set to `false`; in that case `!important` was included in the value but must not be (#155)
|
||||
|
||||
## 1.1.2 (November 26, 2020)
|
||||
|
||||
- Rolled back the use of spread syntax in object literals since it is not supported by Node.js < 8.3 (#145)
|
||||
|
||||
## 1.1.1 (November 18, 2020)
|
||||
|
||||
- Fixed edge cases in mismatch location computation for `SyntaxMatchError`
|
||||
|
||||
## 1.1.0 (November 17, 2020)
|
||||
|
||||
- Bumped `mdn-data` to 2.0.14
|
||||
- Extended the `fork()` method to allow appending syntax instead of overriding it for `types`, `properties` and `atrules`, e.g. `csstree.fork({ types: { color: '| foo | bar' } })`; see the sketch after this entry
|
||||
- Extended lexer API for validation
|
||||
- Added `Lexer#checkAtruleName(atruleName)`, `Lexer#checkAtrulePrelude(atruleName, prelude)`, `Lexer#checkAtruleDescriptorName(atruleName, descriptorName)` and `Lexer#checkPropertyName(propertyName)`
|
||||
- Added `Lexer#getAtrule(atruleName, fallbackBasename)` method
|
||||
- Extended `Lexer#getAtrulePrelude()` and `Lexer#getProperty()` methods to take `fallbackBasename` parameter
|
||||
- Improved `SyntaxMatchError` location details
|
||||
- Changed error messages
|
||||
|
||||
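A minimal sketch of the `fork()` append behaviour quoted in this entry, assuming the CommonJS entry point and the `matchProperty()` result shape described in the older entries below; the `foo`/`bar` keywords are the placeholders from the entry itself:

```js
const csstree = require('css-tree');

// Append two keywords to the <color> type instead of replacing its syntax
const syntax = csstree.fork({
    types: {
        color: '| foo | bar'
    }
});

const value = csstree.parse('foo', { context: 'value' });
const result = syntax.lexer.matchProperty('color', value);

// With the appended keyword the match is expected to succeed (result.error === null)
console.log(result.error);
```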
## 1.0.1 (November 11, 2020)
|
||||
|
||||
- Fixed edge cases for parsing of custom property value with a single whitespace when `parseCustomProperty:true`
|
||||
|
||||
## 1.0.0 (October 27, 2020)
|
||||
|
||||
- Added `onComment` option to parser config
|
||||
- Added support for `break` and `skip` values in `walk()` to control traversal
|
||||
- Added `List#reduce()` and `List#reduceRight()` methods
|
||||
- Bumped `mdn-data` to 2.0.12
|
||||
- Exposed version of the lib (i.e. `import { version } from 'css-tree'`)
|
||||
- Fixed `Lexer#dump()` to dump atrules syntaxes as well
|
||||
- Fixed matching comma separated `<urange>` list (#135)
|
||||
- Renamed `HexColor` node type into `Hash`
|
||||
- Removed `element()` specific parsing rules
|
||||
- Removed `dist/default-syntax.json` from package
|
||||
|
||||
## 1.0.0-alpha.39 (December 5, 2019)
|
||||
|
||||
- Fixed walker with `visit: "Declaration"` to iterate `DeclarationList` (#114)
|
||||
|
||||
## 1.0.0-alpha.38 (November 25, 2019)
|
||||
|
||||
- Bumped `mdn-data` to `2.0.6`
|
||||
- Added initial implementation for at-rule matching via `Lexer#matchAtrulePrelude()` and `Lexer#matchAtruleDescriptor()` methods
|
||||
- Added `-moz-control-character-visibility`, `-ms-grid-columns`, `-ms-grid-rows` and `-ms-hyphenate-limit-last` properties to patch (#111)
|
||||
- Added `flow`, `flow-root` and `table-caption` values to patched `display` (#112)
|
||||
|
||||
## 1.0.0-alpha.37 (October 22, 2019)
|
||||
|
||||
- Bumped `source-map` version to `^0.6.1` to fix source map generation inconsistency across node.js versions due to mappings sorting bug and v8 moving to [a stable Array#sort](https://v8.dev/blog/array-sort) ([fix commit](https://github.com/mozilla/source-map/commit/f35a2e4212dd025cb5e1fc219e7ac8a4b96c2cc9) in `source-map`)
|
||||
|
||||
## 1.0.0-alpha.36 (October 13, 2019)
|
||||
|
||||
- Dropped support for Node < 8
|
||||
- Updated dev deps (fixed `npm audit` issues)
|
||||
- Reworked build pipeline
|
||||
- Package provides `dist/csstree.js` and `dist/csstree.min.js` now (instead of a single `dist/csstree.js` that was the minified version)
|
||||
- Bundle size (min version) reduced from 191Kb to 158Kb due to some optimisations
|
||||
- Definition syntax
|
||||
- Renamed `grammar` into `definitionSyntax` (named per spec)
|
||||
- Added `compact` option to `generate()` method to avoid formatting (spaces) when possible
|
||||
- Lexer
|
||||
- Changed `dump()` method to produce syntaxes in compact form by default
|
||||
|
||||
## 1.0.0-alpha.35 (October 7, 2019)
|
||||
|
||||
- Walker
|
||||
- Changed implementation to avoid runtime compilation due to CSP issues (see #91, #109)
|
||||
- Added `find()`, `findLast()` and `findAll()` methods (e.g. `csstree.find(ast, node => node.type === 'ClassSelector')`); see the sketch after this entry
|
||||
|
||||
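A small usage sketch of the find helpers mentioned in this entry; the sample stylesheet is arbitrary and the expected counts assume it:

```js
const csstree = require('css-tree');

const ast = csstree.parse('.a { color: red } .b.a { color: green }');

// First matching node (or null if nothing matches)
const first = csstree.find(ast, node => node.type === 'ClassSelector' && node.name === 'a');

// Every matching node
const all = csstree.findAll(ast, node => node.type === 'ClassSelector');

console.log(first.name);  // 'a'
console.log(all.length);  // 3 class selectors in the sample above
```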
## 1.0.0-alpha.34 (July 27, 2019)
|
||||
|
||||
- Tokenizer
|
||||
- Added `isBOM()` function
|
||||
- Added `charCodeCategory()` function
|
||||
- Removed `firstCharOffset()` function (use `isBOM()` instead)
|
||||
- Removed `CHARCODE` dictionary
|
||||
- Removed `INPUT_STREAM_CODE*` dictionaries
|
||||
- Lexer
|
||||
- Allowed comments in matching value (just ignore them like whitespaces)
|
||||
- Increased iteration count in value matching from 10k up to 15k
|
||||
- Fixed missed `debugger` (#104)
|
||||
|
||||
## 1.0.0-alpha.33 (July 11, 2019)
|
||||
|
||||
- Lexer
|
||||
- Fixed low priority productions matching by changing an approach for robust one (#103)
|
||||
|
||||
## 1.0.0-alpha.32 (July 11, 2019)
|
||||
|
||||
- Lexer
|
||||
- Fixed low priority productions matching in long `||-` and `&&-` groups (#103)
|
||||
|
||||
## 1.0.0-alpha.31 (July 11, 2019)
|
||||
|
||||
- Bumped `mdn/data` to `2.0.4` (#99)
|
||||
- Lexer
|
||||
- Added [bracketed range notation](https://drafts.csswg.org/css-values-4/#numeric-ranges) support and related refactoring
|
||||
- Removed `<number-zero-one>`, `<number-one-or-greater>` and `<positive-integer>` from generic types. In fact, types moved to patch, because those types can be expressed in a regular grammar due to bracketed range notation implemented
|
||||
- Added support for multiple token string matching
|
||||
- Improved `<custom-ident>` production matching to claim the keyword only if no other unfulfilled production can claim it (#101)
|
||||
- Improved `<length>` production matching to claim "unitless zero" only if no other unfulfilled production can claim it
|
||||
- Changed lexer's constructor to prevent generic types override when used
|
||||
- Fixed large `||`- and `&&`-group matching, matching continues from the beginning on term match (#85)
|
||||
- Fixed checking that a value has `var()` occurrences when the value is a string (such values can't be matched against syntax currently and fail with a specific error that validation tools can use to ignore them)
|
||||
- Fixed `<declaration-value>` and `<any-value>` matching when a value contains a function, parentheses or braces
|
||||
|
||||
## 1.0.0-alpha.30 (July 3, 2019)
|
||||
|
||||
- Bumped `mdn/data` to `~2.0.3`
|
||||
- Removed type removals from `mdn/data` due to lack of some generic types and specific lexer restrictions (since the lexer was reworked, see below)
|
||||
- Reduced and updated patches
|
||||
- Tokenizer
|
||||
- Reworked tokenizer itself to compliment [CSS Syntax Module Level 3](https://drafts.csswg.org/css-syntax/#tokenization)
|
||||
- `Tokenizer` class splitted into several abstractions:
|
||||
- Added `TokenStream` class
|
||||
- Added `OffsetToLocation` class
|
||||
- Added `tokenize()` function that creates `TokenStream` instance for given string or updates a `TokenStream` instance passed as second parameter
|
||||
- Removed `Tokenizer` class
|
||||
- Removed `Raw` token type
|
||||
- Renamed `Identifier` token type to `Ident`
|
||||
- Added token types: `Hash`, `BadString`, `BadUrl`, `Delim`, `Percentage`, `Dimension`, `Colon`, `Semicolon`, `Comma`, `LeftSquareBracket`, `RightSquareBracket`, `LeftParenthesis`, `RightParenthesis`, `LeftCurlyBracket`, `RightCurlyBracket`
|
||||
- Replaced `Punctuator` with `Delim` token type, that excludes specific characters with its own token type like `Colon`, `Semicolon` etc
|
||||
- Removed `findCommentEnd`, `findStringEnd`, `findDecimalNumberEnd`, `findNumberEnd`, `findEscapeEnd`, `findIdentifierEnd` and `findUrlRawEnd` helper function
|
||||
- Removed `SYMBOL_TYPE`, `PUNCTUATION` and `STOP_URL_RAW` dictionaries
|
||||
- Added `isDigit`, `isHexDigit`, `isUppercaseLetter`, `isLowercaseLetter`, `isLetter`, `isNonAscii`, `isNameStart`, `isName`, `isNonPrintable`, `isNewline`, `isWhiteSpace`, `isValidEscape`, `isIdentifierStart`, `isNumberStart`, `consumeEscaped`, `consumeName`, `consumeNumber` and `consumeBadUrlRemnants` helper functions
|
||||
- Parser
|
||||
- Changed parsing algorithms to work with new token type set
|
||||
- Changed `HexColor` consumption to relax value checking, i.e. now `value` is a sequence of one or more name chars
|
||||
- Added `&` as a property hack
|
||||
- Relaxed `var()` parsing to only check that a first arguments is an identifier (not a custom property name as before)
|
||||
- Lexer
|
||||
- Reworked syntax matching to relay on token set only (having AST is optional now)
|
||||
- Extended `Lexer#match()`, `Lexer#matchType()` and `Lexer#matchProperty()` methods to take a string as value, beside AST as a value
|
||||
- Extended `Lexer#match()` method to take a string as a syntax, beside of syntax descriptor
|
||||
- Reworked generic types:
|
||||
- Removed `<attr()>`, `<url>` (moved to patch) and `<progid>` types
|
||||
- Added types:
|
||||
- Related to token types: `<ident-token>`, `<function-token>`, `<at-keyword-token>`, `<hash-token>`, `<string-token>`, `<bad-string-token>`, `<url-token>`, `<bad-url-token>`, `<delim-token>`, `<number-token>`, `<percentage-token>`, `<dimension-token>`, `<whitespace-token>`, `<CDO-token>`, `<CDC-token>`, `<colon-token>`, `<semicolon-token>`, `<comma-token>`, `<[-token>`, `<]-token>`, `<(-token>`, `<)-token>`, `<{-token>` and `<}-token>`
|
||||
- Complex types: `<an-plus-b>`, `<urange>`, `<custom-property-name>`, `<declaration-value>`, `<any-value>` and `<zero>`
|
||||
- Renamed `<unicode-range>` to `<urange>` as per spec
|
||||
- Renamed `<expression>` (IE legacy extension) to `<-ms-legacy-expression>`; it may be removed in future releases
|
||||
|
||||
## 1.0.0-alpha.29 (May 30, 2018)
|
||||
|
||||
- Lexer
|
||||
- Syntax matching was completely reworked. Now it's token-based and uses a state machine. The public API has not changed; however, some internal data structures have. The most significant change is the syntax match result tree structure, which became token-based instead of node-based.
|
||||
- Grammar
|
||||
- Changed grammar tree format:
|
||||
- Added `Token` node type to represent a single code point (`<delim-token>`)
|
||||
- Added `Multiplier` that wraps a single node (`term` property)
|
||||
- Added `AtKeyword` to represent `<at-keyword-token>`
|
||||
- Removed `Slash` and `Percent` node types, they are replaced for a node with `Token` type
|
||||
- Changed `Function` to represent `<function-token>` with no children
|
||||
- Removed `multiplier` property from `Group`
|
||||
- Changed `generate()` method:
|
||||
- Method takes an `options` as second argument now (`generate(node, forceBraces, decorator)` -> `generate(node, options)`). Two options are supported: `forceBraces` and `decorator`
|
||||
- When the second parameter is a function it is treated as the `decorate` option value, i.e. `generate(node, fn)` -> `generate(node, { decorate: fn })`
|
||||
- The decorate function is invoked with an additional parameter – a reference to the node
|
||||
- Tokenizer
|
||||
- Renamed `Atrule` const to `AtKeyword`
|
||||
|
||||
## 1.0.0-alpha.28 (February 19, 2018)
|
||||
|
||||
- Renamed `lexer.grammar.translate()` method into `generate()`
|
||||
- Fixed `<'-webkit-font-smoothing'>` and `<'-moz-osx-font-smoothing'>` syntaxes (#75)
|
||||
- Added vendor keywords for `<'overflow'>` property syntax (#76)
|
||||
- Pinned `mdn-data` to `~1.1.0` and fixed issues with some updated property syntaxes
|
||||
|
||||
## 1.0.0-alpha.27 (January 14, 2018)
|
||||
|
||||
- Generator
|
||||
- Changed node's `generate()` methods invocation, methods now take a node as a single argument and context (i.e. `this`) that have methods: `chunk()`, `node()` and `children()`
|
||||
- Renamed `translate()` to `generate()` and changed to take `options` argument
|
||||
- Removed `translateMarkup(ast, enter, leave)` method, use `generate(ast, { decorator: (handlers) => { ... }})` instead
|
||||
- Removed `translateWithSourceMap(ast)`, use `generate(ast, { sourceMap: true })` instead
|
||||
- Changed to support children as an array
|
||||
- Walker
|
||||
- Changed `walk()` to take an `options` argument instead of a handler, with `enter`, `leave`, `visit` and `reverse` options (`walk(ast, fn)` still works and is equivalent to `walk(ast, { enter: fn })`); see the sketch after this entry
|
||||
- Removed `walkUp(ast, fn)`, use `walk(ast, { leave: fn })`
|
||||
- Removed `walkRules(ast, fn)`, use `walk(ast, { visit: 'Rule', enter: fn })` instead
|
||||
- Removed `walkRulesRight(ast, fn)`, use `walk(ast, { visit: 'Rule', reverse: true, enter: fn })` instead
|
||||
- Removed `walkDeclarations(ast, fn)`, use `walk(ast, { visit: 'Declaration', enter: fn })` instead
|
||||
- Changed to support children as an array in most cases (`reverse: true` will fail on arrays since they have no `forEachRight()` method)
|
||||
- Misc
|
||||
- List
|
||||
- Added `List#forEach()` method
|
||||
- Added `List#forEachRight()` method
|
||||
- Added `List#filter()` method
|
||||
- Changed `List#map()` method to return a `List` instance instead of `Array`
|
||||
- Added `List#push()` method, similar to `List#appendData()` but returns nothing
|
||||
- Added `List#pop()` method
|
||||
- Added `List#unshift()` method, similar to `List#prependData()` but returns nothing
|
||||
- Added `List#shift()` method
|
||||
- Added `List#prependList()` method
|
||||
- Changed `List#insert()`, `List#insertData()`, `List#appendList()` and `List#insertList()` methods to return the list the operation was performed on
|
||||
- Changed `keyword()` method
|
||||
- Changed `name` field to include a vendor prefix
|
||||
- Added `basename` field to contain a name without a vendor prefix
|
||||
- Added `custom` field that contains `true` when the keyword is a custom property reference
|
||||
- Changed `property()` method
|
||||
- Changed `name` field to include a vendor prefix
|
||||
- Added `basename` field to contain a name without any prefixes, i.e. a hack and a vendor prefix
|
||||
- Added `vendorPrefix()` method
|
||||
- Added `isCustomProperty()` method
|
||||
|
||||
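A brief sketch of the `walk()` options described in this entry (the `visit: 'Rule'` form that replaces the removed `walkRules()`); the input CSS is arbitrary:

```js
const csstree = require('css-tree');

const ast = csstree.parse('@media (min-width: 10px) { .a { color: red } } .b { color: blue }');

// Equivalent of the removed walkRules(ast, fn)
csstree.walk(ast, {
    visit: 'Rule',
    enter(node) {
        // node.prelude is the rule's SelectorList
        console.log(csstree.generate(node.prelude));
    }
});
// -> '.a' then '.b'
```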
## 1.0.0-alpha.26 (November 9, 2017)
|
||||
|
||||
- Tokenizer
|
||||
- Added `Tokenizer#isBalanceEdge()` method
|
||||
- Removed `Tokenizer.endsWith()` method
|
||||
- Parser
|
||||
- Made the parser tolerant to errors by default
|
||||
- Removed `tolerant` parser option (no parsing modes anymore)
|
||||
- Removed `property` parser option (a value parsing does not depend on property name anymore)
|
||||
- Canceled error for a hanging semicolon in a block
|
||||
- Canceled error for unclosed `Brackets`, `Function` and `Parentheses` when EOF is reached
|
||||
- Fixed error when prelude ends with a comment for at-rules with custom prelude consumer
|
||||
- Relaxed at-rule parsing:
|
||||
- Canceled error when EOF is reached after a prelude
|
||||
- Canceled error for an at-rule with custom block consumer when at-rule has no block (just don't apply consumer in that case)
|
||||
- Canceled error on at-rule parsing when it occurs outside prelude or block (at-rule is converting to `Raw` node)
|
||||
- Allowed any at-rule to have a prelude and a block, even if it's invalid per the at-rule syntax (the responsibility for this check is moved to the lexer, since it's possible to construct an AST with such errors)
|
||||
- Made a declaration value a safe parsing point (i.e. error on value parsing lead to a value is turning into `Raw` node, not a declaration as before)
|
||||
- Excluded surrounding white spaces and comments from a `Raw` node that represents a declaration value
|
||||
- Changed `Value` parse handler to return a node only with type `Value` (previously it returned a `Raw` node in some cases)
|
||||
- Fixed issue with `onParseError()` not being invoked for errors that occurred on selector or declaration value parsing in some cases
|
||||
- Changed using of `onParseError()` to stop parsing if handler throws an exception
|
||||
- Lexer
|
||||
- Changed `grammar.walk()` to invoke passed handler on entering to node rather than on leaving the node
|
||||
- Improved `grammar.walk()` to take a walk handler pair as an object, i.e. `walk(node, { enter: fn, leave: fn })`
|
||||
- Changed `Lexer#match*()` methods to take a node of any type, but with a `children` field
|
||||
- Added `Lexer#match(syntax, node)` method
|
||||
- Fixed `Lexer#matchType()` method to stop return a positive result for the CSS wide keywords
|
||||
|
||||
## 1.0.0-alpha25 (October 9, 2017)
|
||||
|
||||
- Parser
|
||||
- Added fallback node as argument to `onParseError()` handler
|
||||
- Fixed raw consuming in tolerant mode when selector is invalid (greedy consuming and redundant warnings)
|
||||
- Fixed exception in tolerant mode caused by unknown at-rule with unclosed block
|
||||
- Changed handling of semicolons:
|
||||
- A hanging semicolon inside declaration blocks raises an error or turns into a `Raw` node in tolerant mode instead of being ignored
|
||||
- Semicolon outside of declaration blocks opens a `Rule` node as part of selector instead of being ignored
|
||||
- Aligned `parseAtrulePrelude` behaviour to `parseRulePrelude`
|
||||
- Removed `Raw` node wraping into `AtrulePrelude` when `parseAtrulePrelude` is disabled
|
||||
- Removed emitting an error when an at-rule has a custom prelude consumer but no prelude is found (it should be validated by the lexer later)
|
||||
- Generator
|
||||
- Fixed performance issue with `translateWithSourceMap()`, flattening the string (because of mixing building string and indexing into it) turned it into a quadratic algorithm (approximate numbers can be found in [the quiz created by this case](https://gist.github.com/lahmatiy/ea25d0e623d88ca9848384b5707d52d9))
|
||||
- Added support for a single solidus hack for `property()`
|
||||
- Minor fixes for custom errors
|
||||
|
||||
## 1.0.0-alpha24 (September 14, 2017)
|
||||
|
||||
- Improved CSSTree to be stable when standard built-in objects are extended (#58)
|
||||
- Parser
|
||||
- Renamed rule's `selector` to `prelude`. The reasons: [spec names this part so](https://www.w3.org/TR/css-syntax-3/#qualified-rule), and this branch can contain not only a selector (`SelectorList`) but also a raw payload (`Raw`). What's changed:
|
||||
- Renamed `Rule.selector` to `Rule.prelude`
|
||||
- Renamed `parseSelector` parser option to `parseRulePrelude`
|
||||
- Removed option for selector parse in `SelectorList`
|
||||
- Lexer
|
||||
- Fixed undefined positions in a error when match a syntax to empty or white space only value
|
||||
- Improved `Lexer#checkStructure()`
|
||||
- Return a warning as an object with node reference and message
|
||||
- No exception on unknown node type, return a warning instead
|
||||
|
||||
## 1.0.0-alpha23 (September 10, 2017)
|
||||
|
||||
- Fixed `Tokenizer#getRawLength()`'s false positive balance match to the end of input in some cases (#56)
|
||||
- Rename walker's entry point methods to be the same as CSSTree exposed methods (i.e. `walk()`, `walkUp()` etc)
|
||||
- Rename at-rule's `expression` to `prelude` (since [spec names it so](https://www.w3.org/TR/css-syntax-3/#at-rule))
|
||||
- `AtruleExpression` node type → `AtrulePrelude`
|
||||
- `Atrule.expression` field → `Atrule.prelude`
|
||||
- `parseAtruleExpression` parser's option → `parseAtrulePrelude`
|
||||
- `atruleExpression` parse context → `atrulePrelude`
|
||||
- `atruleExpression` walk context reference → `atrulePrelude`
|
||||
|
||||
## 1.0.0-alpha22 (September 8, 2017)
|
||||
|
||||
- Parser
|
||||
- Fixed exception on parsing of unclosed `{}-block` in tolerant mode
|
||||
- Added tolerant mode support for `DeclarationList`
|
||||
- Added standalone entry point, i.e. default parser can be used via `require('css-tree/lib/parser')` (#47)
|
||||
- Generator
|
||||
- Changed generator to produce `+n` when `AnPlusB.a` is `+1` to be "round-trip" with parser
|
||||
- Added standalone entry point, i.e. default generators can be used via `require('css-tree/lib/generator')`
|
||||
- Walker
|
||||
- Added standalone entry point, i.e. default walkers can be used via `require('css-tree/lib/walker')` (#47)
|
||||
- Lexer
|
||||
- Added `default` keyword to the list of invalid values for `<custom-ident>` (since it is reserved per [spec](https://www.w3.org/TR/css-values/#custom-idents))
|
||||
- Convertors (`toPlainObject()` and `fromPlainObject()`) moved to `lib/convertor` (entry point is `require('css-tree/lib/convertor')`)
|
||||
|
||||
## 1.0.0-alpha21 (September 5, 2017)
|
||||
|
||||
- Tokenizer
|
||||
- Added `Raw` token type
|
||||
- Improved tokenization of `url()` with a raw value as the URL to be more spec-compliant
|
||||
- Added `Tokenizer#balance` array computation on token layout
|
||||
- Added `Tokenizer#getRawLength()` to compute a raw length with respect of block balance
|
||||
- Added `Tokenizer#getTokenStart(offset)` method to get token start offset by token index
|
||||
- Added `idx` and `balance` fields to each token of `Tokenizer#dump()` method result
|
||||
- Parser
|
||||
- Added `onParseError` option
|
||||
- Reworked node parsers that consume a `Raw` node to use a new approach. Since now a `Raw` node builds in `parser#Raw()` function only
|
||||
- Changed semantic of `parser#Raw()`, it takes 5 parameters now (it might to be changed in future)
|
||||
- Changed `parser#tolerantParse()` to pass a start token index to fallback function instead of source offset
|
||||
- Fixed `AtruleExpression` consuming in tolerant mode
|
||||
- Atrule handler to convert an empty `AtruleExpression` node into `null`
|
||||
- Changed `AtruleExpression` handler to always return a node (before it could return a `null` in some cases)
|
||||
- Lexer
|
||||
- Fixed comma match node for `#` multiplier
|
||||
- Added reference name to `SyntaxReferenceError`
|
||||
- Additional fixes on custom errors
|
||||
- Reduced possible corruption of base config by `syntax.fork()`
|
||||
|
||||
## 1.0.0-alpha20 (August 28, 2017)
|
||||
|
||||
- Tokenizer
|
||||
- Added `Atrule` token type (`<at-rule-token>` per spec)
|
||||
- Added `Function` token type (`<function-token>` per spec)
|
||||
- Added `Url` token type
|
||||
- Replaced `Tokenizer#getTypes()` method with `Tokenizer#dump()` to get all tokens as an array
|
||||
- Renamed `Tokenizer.TYPE.Whitespace` to `Tokenizer.TYPE.WhiteSpace`
|
||||
- Renamed `Tokenizer.findWhitespaceEnd()` to `Tokenizer.findWhiteSpaceEnd()`
|
||||
- Parser
|
||||
- Added initial implementation of tolerant mode (turned on by passing the `tolerant: true` option). In this mode parse errors never occur and any invalid part of CSS turns into a `Raw` node. Current safe points: `Atrule`, `AtruleExpression`, `Rule`, `Selector` and `Declaration`. The feature is experimental and further improvements are planned.
|
||||
- Changed `Atrule.expression` to contain a `AtruleExpression` node or `null` only (other node types is wrapping into a `AtruleExpression` node)
|
||||
- Renamed `AttributeSelector.operator` to `AttributeSelector.matcher`
|
||||
- Generator
|
||||
- The `translate()` method can now take a function as a second argument that receives every generated chunk. When no function is passed, the default handler is used: it concatenates all the chunks and the method returns a string.
|
||||
- Lexer
|
||||
- Used [mdn/data](https://github.com/mdn/data) package as source of lexer's grammar instead of local dictionaries
|
||||
- Added `x` unit to `<resolution>` generic type
|
||||
- Improved match tree:
|
||||
- Omitted Group (sequences) match nodes
|
||||
- Omitted empty match nodes (for terms with `zero or more` multipliers)
|
||||
- Added `ASTNode` node type to contain a reference to AST node
|
||||
- Fixed node duplication (uncompleted match were added to tree)
|
||||
- Added AST node reference in match nodes
|
||||
- Added comma match node by `#` multiplier
|
||||
- Grammar
|
||||
- Changed the `translate()` function to take an optional handler as a third argument. That handler receives the result of node translation and can be used for decoration purposes. See [example](https://github.com/csstree/docs/blob/04c65af44477b5ea05feb373482898122b2a4528/docs/syntax.html#L619-L627)
|
||||
- Added `SyntaxParseError` to grammar export
|
||||
- Reworked group and multipliers representation in syntax tree:
|
||||
- Replaced `Sequence` for `Group` node type (`Sequence` node type removed)
|
||||
- Added `explicit` boolean property for `Group`
|
||||
- Only groups can have a multiplier now (other node types is wrapping into a single term implicit group when multiplier is applied)
|
||||
- Renamed `nonEmpty` Group's property to `disallowEmpty`
|
||||
- Added optimisation for syntax tree by dropping redundant root `Group` when it contains a single `Group` term (return this `Group` as a result)
|
||||
- Changed lexer's match functionality
|
||||
- Changed `Lexer#matchProperty()` and `Lexer#matchType()` to return an object instead of match tree. A match tree stores in `matched` field when AST is matched to grammar successfully, otherwise an error in `error` field. The result object also has some methods to test AST node against a match tree: `getTrace()`, `isType()`, `isProperty()` and `isKeyword()`
|
||||
- Added `Lexer#matchDeclaration()` method
|
||||
- Removed `Lexer#lastMatchError` (error stores in match result object in `error` field)
|
||||
- Added initial implementation of search for AST segments (new lexer methods: `Lexer#findValueSegments()`, `Lexer#findDeclarationValueSegments()` and `Lexer#findAllSegments`)
|
||||
- Implemented `SyntaxReferenceError` for unknown property and type references
|
||||
- Renamed field in resulting object of `property()` function: `variable` → `custom`
|
||||
- Fixed issue with readonly properties (e.g. `line` and `column`) of `Error` and exception on attempt to write in iOS Safari
|
||||
|
||||
## 1.0.0-alpha19 (April 24, 2017)
|
||||
|
||||
- Extended `List` class with new methods:
|
||||
- `List#prepend(item)`
|
||||
- `List#prependData(data)`
|
||||
- `List#insertData(data)`
|
||||
- `List#insertList(list)`
|
||||
- `List#replace(item, itemOrList)`
|
||||
|
||||
## 1.0.0-alpha18 (April 3, 2017)
|
||||
|
||||
- Added `atrule` walk context (#39)
|
||||
- Changed a result of generate method for `AnPlusB`, `AttributeSelector`, `Function`, `MediaFeature` and `Ratio` ([1e95877](https://github.com/csstree/csstree/commit/1e9587710efa8e9338bcf0bc794b4b45f286231d))
|
||||
- Fixed typo in `List` exception messages (@strarsis, #42)
|
||||
- Improved tokenizer to convert an input to a string
|
||||
|
||||
## 1.0.0-alpha17 (March 13, 2017)
|
||||
|
||||
- Implemented new concept of `syntax`
|
||||
- Changed main `exports` to expose a default syntax
|
||||
- Defined initial [CSS syntax](lib/syntax/default.js)
|
||||
- Implemented `createSyntax()` method to create a new syntax from scratch
|
||||
- Implemented `fork()` method to create a new syntax based on given via extension
|
||||
- Parser
|
||||
- Implemented `mediaQueryList` and `mediaQuery` parsing contexts
|
||||
- Implemented `CDO` and `CDC` node types
|
||||
- Implemented additional declaration property prefix hacks (`#` and `+`)
|
||||
- Added support for UTF-16LE BOM
|
||||
- Added support for `@font-face` at-rule
|
||||
- Added `chroma()` to legacy IE filter functions
|
||||
- Improved `HexColor` to consume hex only
|
||||
- Improved support for `\0` and `\9` hacks (#2)
|
||||
- Relaxed number check for `Ratio` terms
|
||||
- Allowed fractional values as a `Ratio` term
|
||||
- Disallowed zero number as a `Ratio` term
|
||||
- Changed important clause parsing
|
||||
- Allowed any identifier for important (to support hacks like `!ie`)
|
||||
- Store `true` in the `important` field when the identifier equals `important`, and the string otherwise
|
||||
- Fixed parse error formatted message rendering to take into account tabs
|
||||
- Removed exposing of `Parser` class
|
||||
- Removed `readSelectorSequence()`, `readSequenceFallback()` and `readSelectorSequenceFallback` methods
|
||||
- Used single universal sequence consumer for `AtruleExpression`, `Selector` and `Value`
|
||||
- Generator
|
||||
- Reworked generator to use auto-generated functions based on syntax definition (additional work to be done in next releases)
|
||||
- Implemented `translateMarkup(ast, before, after)` method for complex cases
|
||||
- Reworked `translateWithSourceMap` to be more flexible (based on `translateMarkup`, additional work to be done in next releases)
|
||||
- Walker
|
||||
- Reworked walker to use auto-generated function based on syntax definition (additional work to be done in next releases)
|
||||
- Lexer
|
||||
- Prepared for better extensibility (additional work to be done in next releases)
|
||||
- Implemented `checkStructure(ast)` method to check AST structure based on syntax definition
|
||||
- Update syntax dictionaries to latest `mdn/data`
|
||||
- Add missing `<'offset-position'>` syntax
|
||||
- Extended `<position>` property with `-webkit-sticky` (@sergejmueller, #37)
|
||||
- Improved mismatch error position
|
||||
- Implemented script (`gen:syntax`) to generate AST format reference page (`docs/ast.md`) using syntax definition
|
||||
|
||||
## 1.0.0-alpha16 (February 12, 2017)
|
||||
|
||||
- Exposed `Parser` class
|
||||
- Added `startOffset` option to `Tokenizer` (constructor and `setSource()` method)
|
||||
- Added fallback functions for default (`readSequenceFallback`) and selector (`readSelectorSequenceFallback`) sequence readers
|
||||
- Fixed edge cases for `AnPlusB`
|
||||
- Fixed wrong whitespace ignoring in `Selector` consumer
|
||||
|
||||
## 1.0.0-alpha15 (February 8, 2017)
|
||||
|
||||
- Fixed broken `atruleExpression` context
|
||||
- Fixed vendor prefix detection in `keyword()` and `property()`
|
||||
- Fixed `property()` to not lowercase custom property names
|
||||
- Added `variable` boolean flag in `property()` result
|
||||
- Renamed `scanner` into `tokenizer`
|
||||
- Renamed `syntax` into `lexer`
|
||||
- Moved `docs/*.html` files to [csstree/docs](https://github.com/csstree/docs) repo
|
||||
- Added `element()` function for `Value` context (`-moz-element()` supported as well)
|
||||
- Merged `Universal` node type into `Type`
|
||||
- Renamed node types:
|
||||
- `Id` -> `IdSelector`
|
||||
- `Class` -> `ClassSelector`
|
||||
- `Type` -> `TypeSelector`
|
||||
- `Attribute` -> `AttributeSelector`
|
||||
- `PseudoClass` -> `PseudoClassSelector`
|
||||
- `PseudoElement` -> `PseudoElementSelector`
|
||||
- `Hash` -> `HexColor`
|
||||
- `Space` -> `WhiteSpace`
|
||||
- `An+B` -> `AnPlusB`
|
||||
- Removed `Progid` node type
|
||||
- Relaxed `MediaQuery` consumer to not validate syntax on parse and to include whitespaces in children sequence as is
|
||||
- Added `WhiteSpace.value` property to store whitespace sequence
|
||||
- Implemented parser options to specify what should be parsed in details (when option is `false` some part of CSS represents as balanced `Raw`):
|
||||
- `parseAtruleExpression` – to parse at-rule expressions (`true` by default)
|
||||
- `parseSelector` – to parse rule's selector (`true` by default)
|
||||
- `parseValue` - to parse declaration's value (`true` by default)
|
||||
- `parseCustomProperty` – to parse value and fallback of custom property (`false` by default)
|
||||
- Changed tokenization to stick leading hyphen minus to identifier token
|
||||
- Changed selector parsing:
|
||||
- Don't convert spaces into descendant combinator
|
||||
- Don't validate selector structure on parsing (selectors may be checked by lexer later)
|
||||
- Initial refactoring of [docs](https://github.com/csstree/csstree/blob/master/docs)
|
||||
- Various improvements and fixes
|
||||
|
||||
## 1.0.0-alpha14 (February 3, 2017)
|
||||
|
||||
- Implemented `DeclarationList`, `MediaQueryList`, `MediaQuery`, `MediaFeature` and `Ratio` node types
|
||||
- Implemented `declarationList` context (useful to parse HTML `style` attribute content)
|
||||
- Implemented custom consumers for `@import`, `@media`, `@page` and `@supports` at-rules
|
||||
- Implemented `atrule` option for `parse()` config, is used for `atruleExpession` context to specify custom consumer for at-rule if any
|
||||
- Added `Scanner#skipWS()`, `Scanner#eatNonWS()`, `Scanner#consume()` and `Scanner#consumeNonWS()` helper methods
|
||||
- Added custom consumers for known functional-pseudos, consume unknown functional-pseudo content as balanced `Raw`
|
||||
- Allowed any `PseudoElement` to be a functional-pseudo (#33)
|
||||
- Improved walker implementations to reduce GC thrashing by reusing cursors
|
||||
- Changed `Atrule.block` to contain a `Block` node type only if any
|
||||
- Changed `Block.loc` positions to include curly brackets
|
||||
- Changed `Atrule.expression` to store a `null` if no expression
|
||||
- Changed the parser to use the `StyleSheet` node type only for the top-level node (when the context is `stylesheet`, which is the default)
|
||||
- Changed `Parentheses`, `Brackets` and `Function` consumers to use passed sequence reader instead of its own
|
||||
- Changed `Value` and `AtruleExpression` consumers to use common sequence reader (that reader was used by `Value` consumer before)
|
||||
- Changed default sequence reader to exclude storage of spaces around `Comma`
|
||||
- Changed processing of custom properties:
|
||||
- Consume declaration value as balanced `Raw`
|
||||
- Consume `var()` fallback value as balanced `Raw`
|
||||
- Validate first argument of `var()` starts with double dash
|
||||
- A custom property's value and fallback include the surrounding spaces
|
||||
- Fixed `Nth` to have a `loc` property
|
||||
- Fixed `SelectorList.loc` and `Selector.loc` positions to exclude spaces
|
||||
- Fixed an issue where a Browserify build failed with a `default-syntax.json` is not found error (#32, @philschatz)
|
||||
- Disallowed `Type` selector starting with dash (parser throws an error in this case now)
|
||||
- Disallowed empty selectors for `Rule` (not sure if it's correct but looks reasonable)
|
||||
- Removed `>>` combinator support until some browser supports it (no signals about that yet)
|
||||
- Removed `PseudoElement.legacy` property
|
||||
- Removed special case for `:before`, `:after`, `:first-letter` and `:first-line` to represent them as `PseudoElement`, now those pseudos are represented as `PseudoClass` nodes
|
||||
- Removed deprecated `Syntax#match()` method
|
||||
- Split the parser into modules and made related changes, one step closer to an extensible parser
|
||||
- Various fixes and improvements, all changes have negligible impact on performance
|
||||
|
||||
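The sketch referenced above for the `declarationList` context (hedged; context name as listed in this entry), parsing the content of an HTML `style` attribute:

```js
// Hedged sketch: parse a style attribute's content, which is a list of declarations
// rather than a full stylesheet, using the declarationList context.
var csstree = require('css-tree');

var ast = csstree.parse('color: red; padding: 10px', {
    context: 'declarationList'
});

csstree.walk(ast, function(node) {
    if (node.type === 'Declaration') {
        console.log(node.property); // color, padding
    }
});
```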
## 1.0.0-alpha13 (January 19, 2017)
|
||||
|
||||
- Changed location storing in `SyntaxMatchError`
|
||||
- Changed the property that stores the mismatch offset to `mismatchOffset`
|
||||
- Changed `offset` property to store bad node offset in source CSS if any
|
||||
- Added `loc` property that stores bad node `loc` if any
|
||||
|
||||
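A hedged sketch of inspecting these error properties (names as described in this entry) via the lexer:

```js
// Hedged sketch: a failed property match exposes a SyntaxMatchError with the
// properties described above (mismatchOffset, offset, loc).
var csstree = require('css-tree');

var ast = csstree.parse('red 1px liquid', { context: 'value', positions: true });
var result = csstree.lexer.matchProperty('border', ast);

if (result.error) {
    console.log(result.error.message);
    console.log(result.error.mismatchOffset); // offset of the mismatch
    console.log(result.error.loc);            // bad node location, if any
}
```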
## 1.0.0-alpha12 (January 19, 2017)
|
||||
|
||||
- Fixed `Syntax#matchProperty()` method to always return a positive result for custom properties since syntax is never defined for them (#31)
|
||||
- Implemented `fromPlainObject()` and `toPlainObject()` to convert plain object to AST or AST to plain object (currently converts `List` <-> `Array`)
|
||||
|
||||
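A small hedged sketch of these helpers (the AST's `children` is a `List`; after conversion it is a plain array):

```js
// Hedged sketch of toPlainObject()/fromPlainObject(): List <-> Array conversion.
var csstree = require('css-tree');

var ast = csstree.parse('.a { color: red }');
console.log(ast.children instanceof Array);   // false: children is a List

var plain = csstree.toPlainObject(ast);
console.log(Array.isArray(plain.children));   // true: children is a plain array

var restored = csstree.fromPlainObject(plain);
console.log(csstree.generate(restored));      // .a{color:red}
```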
## 1.0.0-alpha11 (January 18, 2017)
|
||||
|
||||
- Added support for `:matches(<selector-list>)` (#28)
|
||||
- Added support for `:has(<relative-selector-list>)`
|
||||
- Added support for `::slotted(<compound-selector>)`
|
||||
- Implemented `Brackets` node type
|
||||
- Implemented basic support for at-rule inside rule block (#24)
|
||||
- Renamed `Selector` node type to `SelectorList`
|
||||
- Renamed `SimpleSelector` node type to `Selector`
|
||||
- Renamed `UnicodeRange.name` property to `UnicodeRange.value`
|
||||
- Replaced the `Negation` node type with a regular `PseudoClass`
|
||||
- Unified the name of the node property that stores nested nodes; it is always `children` now:
|
||||
- `StyleSheet.rules` -> `StyleSheet.children`
|
||||
- `SelectorList.selectors` -> `SelectorList.children`
|
||||
- `Block.declarations` -> `Block.children`
|
||||
- `*.sequence` -> `*.children`
|
||||
- Fixed edge cases in parsing `Hex` and `UnicodeRange` when the number is not an integer
|
||||
- Changed `nth-` pseudos parsing
|
||||
- Implemented `An+B` node type to represent expressions like `2n + 1` or `-3n`
|
||||
- Fixed edge cases when `a` or `b` is not an integer
|
||||
- Changed `odd` and `even` keyword processing; keywords are now stored as `Identifier` nodes
|
||||
- Changed `Nth` node type format to store a `nth`-query and an optional `selector`
|
||||
- Implemented `of` clause for `nth-` pseudos (e.g. `:nth-child(2n + 1 of li, img)`)
|
||||
- Limited `Nth` parsing rules to `:nth-child()`, `:nth-last-child()`, `:nth-of-type()` and `:nth-last-of-type()` pseudos
|
||||
- Changed the way to store locations
|
||||
- Renamed `info` node property to `loc`
|
||||
- Changed format of `loc` to store `start` and `end` positions
|
||||
|
||||
## 1.0.0-alpha10 (January 11, 2017)
|
||||
|
||||
- Reworked `Scanner` to be a single entry point for its functionality
|
||||
- Exposed `Scanner` class to be useful for external projects
|
||||
- Changed `walk()` function behaviour to traverse AST nodes in natural order
|
||||
- Implemented `walkUp()` function to traverse AST nodes from deepest to parent (behaves as `walk()` before)
|
||||
|
||||
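A hedged sketch of the traversal split described above, using the API named in this entry (later releases replace `walkUp()` with a `leave` callback passed to `walk()`):

```js
// Hedged sketch: walk() visits nodes in natural (top-down) order; walkUp() visits
// children before their parent, i.e. the old walk() behaviour.
var csstree = require('css-tree');
var ast = csstree.parse('.a { color: red }');

csstree.walk(ast, function(node) {
    console.log('walk:', node.type);    // StyleSheet, Rule, SelectorList, ...
});

csstree.walkUp(ast, function(node) {    // in later versions: csstree.walk(ast, { leave: fn })
    console.log('walkUp:', node.type);  // deepest nodes first, parents afterwards
});
```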
## 1.0.0-alpha9 (December 21, 2016)
|
||||
|
||||
- Fixed `<angle>` generic according to specs that allow a `<number>` equals to zero to be used as valid value (#30)
|
||||
|
||||
## 1.0.0-alpha8 (November 11, 2016)
|
||||
|
||||
- Fixed a `Scanner#skip()` issue when the cursor moves to the end of the source
|
||||
- Simplified `Progid` node
|
||||
- Changed behaviour for bad selector processing; parsing now fails instead of ignoring the selector
|
||||
- Fixed `<id-selector>` generic syntax
|
||||
- Added `q` unit for `<length>` generic syntax
|
||||
- Refactored syntax parser (performance)
|
||||
- Reduced startup time by implementing lazy syntax parsing (the default syntax is not parsed on module load)
|
||||
- Updated syntax dictionaries and used [`mdn/data`](https://github.com/mdn/data) instead of `Template:CSSData`
|
||||
- Renamed `syntax.stringify()` method to `syntax.translate()`
|
||||
- Simplified generic syntax functions, those functions receive a single AST node for checking and should return `true` or `false`
|
||||
- Added an exception for values that contain `var()`; such values are always considered valid for now
|
||||
- Added more tests and increased code coverage to `98.5%`
|
||||
|
||||
## 1.0.0-alpha7 (October 7, 2016)
|
||||
|
||||
- Added support for explicit descendant combinator (`>>`)
|
||||
- Implemented `Type` and `Universal` type nodes
|
||||
- Improved `Number` parsing by including sign and exponent (#26)
|
||||
- Parse `before`, `after`, `first-letter` and `first-line` pseudos with single colon as `PseudoElement`
|
||||
- Changed `FunctionalPseudo` node type to `PseudoClass`
|
||||
- Fixed attribute selector name parsing (namespace edge cases)
|
||||
- Fixed location calculation for specified offset when `eof` is reached
|
||||
- Added more non-standard colors (#25)
|
||||
- Removed obsolete `Syntax#getAll()` method
|
||||
- Fixed various edge cases, code clean up and performance improvements
|
||||
|
||||
## 1.0.0-alpha6 (September 23, 2016)
|
||||
|
||||
- More accurate positions for syntax mismatch errors
|
||||
- Added [`apple`](https://webkit.org/blog/3709/using-the-system-font-in-web-content/) specific font keywords (#20)
|
||||
- Changed `Property` node structure from object to string
|
||||
- Renamed `Ruleset` node type to `Rule`
|
||||
- Removed `Argument` node type
|
||||
- Fixed `Dimension` and `Percentage` position computation
|
||||
- Fixed bad selector parsing (temporary solution)
|
||||
- Fixed location computation for CSS with very long lines, which could make parsing with `positions: true` extremely slow (or even freeze)
|
||||
- Fixed `line` and `column` computation for `SyntaxMatch` error
|
||||
- Improved performance of parsing and translation. Now CSSTree is under 10ms in [PostCSS benchmark](https://github.com/postcss/benchmark).
|
||||
19
node_modules/css-tree/LICENSE
generated
vendored
Normal file
|
|
@@ -0,0 +1,19 @@
|
|||
Copyright (C) 2016-2019 by Roman Dvornov
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
130
node_modules/css-tree/README.md
generated
vendored
Normal file
|
|
@@ -0,0 +1,130 @@
|
|||
<img align="right" width="111" height="111"
|
||||
alt="CSSTree logo"
|
||||
src="https://cloud.githubusercontent.com/assets/270491/19243723/6f9136c6-8f21-11e6-82ac-eeeee4c6c452.png"/>
|
||||
|
||||
# CSSTree
|
||||
|
||||
[](https://www.npmjs.com/package/css-tree)
|
||||
[](https://travis-ci.org/csstree/csstree)
|
||||
[](https://coveralls.io/github/csstree/csstree?branch=master)
|
||||
[](https://www.npmjs.com/package/css-tree)
|
||||
[](https://twitter.com/csstree)
|
||||
|
||||
CSSTree is a tool set for CSS: [fast](https://github.com/postcss/benchmark) detailed parser (CSS → AST), walker (AST traversal), generator (AST → CSS) and lexer (validation and matching) based on specs and browser implementations. The main goal is to be efficient and W3C specs compliant, with focus on CSS analyzing and source-to-source transforming tasks.
|
||||
|
||||
> NOTE: The library isn't in final shape and needs further improvements (e.g. AST format and API are subject to change in future major versions). However, it's stable enough and is used by projects like [CSSO](https://github.com/css/csso) (CSS minifier) and [SVGO](https://github.com/svg/svgo) (SVG optimizer) in production.
|
||||
|
||||
## Features
|
||||
|
||||
- **Detailed parsing with an adjustable level of detail**
|
||||
|
||||
By default CSSTree parses CSS in as much detail as possible, i.e. each logical part is represented by its own AST node (see [AST format](docs/ast.md) for all possible node types). The level of detail can be changed through [parser options](docs/parsing.md#parsesource-options); for example, you can disable parsing of selectors or declaration values into component parts.
|
||||
|
||||
- **Tolerant to errors by design**
|
||||
|
||||
The parser behaves as the [spec says](https://www.w3.org/TR/css-syntax-3/#error-handling): "When errors occur in CSS, the parser attempts to recover gracefully, throwing away only the minimum amount of content before returning to parsing as normal". The only way the parser departs from the specification is that it doesn't throw bad content away, but wraps it in a special node type (`Raw`) so it can be processed later.
|
||||
|
||||
- **Fast and efficient**
|
||||
|
||||
CSSTree is built with a focus on performance and efficient memory consumption, which makes it [one of the fastest CSS parsers](https://github.com/postcss/benchmark) at the moment.
|
||||
|
||||
- **Syntax validation**
|
||||
|
||||
The built-in lexer can test CSS against syntaxes defined by the W3C. CSSTree uses [mdn/data](https://github.com/mdn/data/) as the basis for the lexer's dictionaries and extends them with vendor-specific and legacy syntaxes. Currently the lexer can only check declaration values, but this will be extended to other parts of CSS in the future.
|
||||
|
||||
## Documentation
|
||||
|
||||
- [AST format](docs/ast.md)
|
||||
- [Parsing CSS → AST](docs/parsing.md)
|
||||
- [parse(source[, options])](docs/parsing.md#parsesource-options)
|
||||
- [Serialization AST → CSS](docs/generate.md)
|
||||
- [generate(ast[, options])](docs/generate.md#generateast-options)
|
||||
- [AST traversal](docs/traversal.md)
|
||||
- [walk(ast, options)](docs/traversal.md#walkast-options)
|
||||
- [find(ast, fn)](docs/traversal.md#findast-fn)
|
||||
- [findLast(ast, fn)](docs/traversal.md#findlastast-fn)
|
||||
- [findAll(ast, fn)](docs/traversal.md#findallast-fn)
|
||||
- [Utils for AST](docs/utils.md)
|
||||
- [property(name)](docs/utils.md#propertyname)
|
||||
- [keyword(name)](docs/utils.md#keywordname)
|
||||
- [clone(ast)](docs/utils.md#cloneast)
|
||||
- [fromPlainObject(object)](docs/utils.md#fromplainobjectobject)
|
||||
- [toPlainObject(ast)](docs/utils.md#toplainobjectast)
|
||||
- [Value Definition Syntax](docs/definition-syntax.md)
|
||||
- [parse(source)](docs/definition-syntax.md#parsesource)
|
||||
- [walk(node, options, context)](docs/definition-syntax.md#walknode-options-context)
|
||||
- [generate(node, options)](docs/definition-syntax.md#generatenode-options)
|
||||
- [AST format](docs/definition-syntax.md#ast-format)
|
||||
|
||||
## Tools
|
||||
|
||||
* [AST Explorer](https://astexplorer.net/#/gist/244e2fb4da940df52bf0f4b94277db44/e79aff44611020b22cfd9708f3a99ce09b7d67a8) – explore CSSTree AST format with zero setup
|
||||
* [CSS syntax reference](https://csstree.github.io/docs/syntax.html)
|
||||
* [CSS syntax validator](https://csstree.github.io/docs/validator.html)
|
||||
|
||||
## Related projects
|
||||
|
||||
* [csstree-validator](https://github.com/csstree/validator) – NPM package to validate CSS
|
||||
* [stylelint-csstree-validator](https://github.com/csstree/stylelint-validator) – plugin for stylelint to validate CSS
|
||||
* [Grunt plugin](https://github.com/sergejmueller/grunt-csstree-validator)
|
||||
* [Gulp plugin](https://github.com/csstree/gulp-csstree)
|
||||
* [Sublime plugin](https://github.com/csstree/SublimeLinter-contrib-csstree)
|
||||
* [VS Code plugin](https://github.com/csstree/vscode-plugin)
|
||||
* [Atom plugin](https://github.com/csstree/atom-plugin)
|
||||
|
||||
## Usage
|
||||
|
||||
Install with npm:
|
||||
|
||||
```
|
||||
> npm install css-tree
|
||||
```
|
||||
|
||||
Basic usage:
|
||||
|
||||
```js
|
||||
var csstree = require('css-tree');
|
||||
|
||||
// parse CSS to AST
|
||||
var ast = csstree.parse('.example { world: "!" }');
|
||||
|
||||
// traverse AST and modify it
|
||||
csstree.walk(ast, function(node) {
|
||||
if (node.type === 'ClassSelector' && node.name === 'example') {
|
||||
node.name = 'hello';
|
||||
}
|
||||
});
|
||||
|
||||
// generate CSS from AST
|
||||
console.log(csstree.generate(ast));
|
||||
// .hello{world:"!"}
|
||||
```
|
||||
|
||||
Syntax matching:
|
||||
|
||||
```js
|
||||
// parse CSS to AST as a declaration value
|
||||
var ast = csstree.parse('red 1px solid', { context: 'value' });
|
||||
|
||||
// match to syntax of `border` property
|
||||
var matchResult = csstree.lexer.matchProperty('border', ast);
|
||||
|
||||
// check first value node is a <color>
|
||||
console.log(matchResult.isType(ast.children.first(), 'color'));
|
||||
// true
|
||||
|
||||
// get a type list matched to a node
|
||||
console.log(matchResult.getTrace(ast.children.first()));
|
||||
// [ { type: 'Property', name: 'border' },
|
||||
// { type: 'Type', name: 'color' },
|
||||
// { type: 'Type', name: 'named-color' },
|
||||
// { type: 'Keyword', name: 'red' } ]
|
||||
```
|
||||
|
||||
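Finding a single node, a minimal hedged sketch using the `find()` helper listed in the documentation above:

```js
// Hedged sketch: find() returns the first node for which the callback returns true
// (see docs/traversal.md above), or null if nothing matches.
var csstree = require('css-tree');
var ast = csstree.parse('.example { color: red; padding: 10px }');

var firstDimension = csstree.find(ast, function(node) {
    return node.type === 'Dimension';
});

console.log(firstDimension && firstDimension.unit); // px
```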
## Top level API
|
||||
|
||||

|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
103
node_modules/css-tree/data/index.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,103 @@
|
|||
const mdnAtrules = require('mdn-data/css/at-rules.json');
|
||||
const mdnProperties = require('mdn-data/css/properties.json');
|
||||
const mdnSyntaxes = require('mdn-data/css/syntaxes.json');
|
||||
const patch = require('./patch.json');
|
||||
const extendSyntax = /^\s*\|\s*/;
|
||||
|
||||
function preprocessAtrules(dict) {
|
||||
const result = Object.create(null);
|
||||
|
||||
for (const atruleName in dict) {
|
||||
const atrule = dict[atruleName];
|
||||
let descriptors = null;
|
||||
|
||||
if (atrule.descriptors) {
|
||||
descriptors = Object.create(null);
|
||||
|
||||
for (const descriptor in atrule.descriptors) {
|
||||
descriptors[descriptor] = atrule.descriptors[descriptor].syntax;
|
||||
}
|
||||
}
|
||||
|
||||
result[atruleName.substr(1)] = {
|
||||
prelude: atrule.syntax.trim().match(/^@\S+\s+([^;\{]*)/)[1].trim() || null,
|
||||
descriptors
|
||||
};
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function patchDictionary(dict, patchDict) {
|
||||
const result = {};
|
||||
|
||||
// copy all syntaxes for an original dict
|
||||
for (const key in dict) {
|
||||
result[key] = dict[key].syntax || dict[key];
|
||||
}
|
||||
|
||||
// apply a patch
|
||||
for (const key in patchDict) {
|
||||
if (key in dict) {
|
||||
if (patchDict[key].syntax) {
|
||||
result[key] = extendSyntax.test(patchDict[key].syntax)
|
||||
? result[key] + ' ' + patchDict[key].syntax.trim()
|
||||
: patchDict[key].syntax;
|
||||
} else {
|
||||
delete result[key];
|
||||
}
|
||||
} else {
|
||||
if (patchDict[key].syntax) {
|
||||
result[key] = patchDict[key].syntax.replace(extendSyntax, '');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function unpackSyntaxes(dict) {
|
||||
const result = {};
|
||||
|
||||
for (const key in dict) {
|
||||
result[key] = dict[key].syntax;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function patchAtrules(dict, patchDict) {
|
||||
const result = {};
|
||||
|
||||
// copy all syntaxes for an original dict
|
||||
for (const key in dict) {
|
||||
const patchDescriptors = (patchDict[key] && patchDict[key].descriptors) || null;
|
||||
|
||||
result[key] = {
|
||||
prelude: key in patchDict && 'prelude' in patchDict[key]
|
||||
? patchDict[key].prelude
|
||||
: dict[key].prelude || null,
|
||||
descriptors: dict[key].descriptors
|
||||
? patchDictionary(dict[key].descriptors, patchDescriptors || {})
|
||||
: patchDescriptors && unpackSyntaxes(patchDescriptors)
|
||||
};
|
||||
}
|
||||
|
||||
// apply a patch
|
||||
for (const key in patchDict) {
|
||||
if (!hasOwnProperty.call(dict, key)) {
|
||||
result[key] = {
|
||||
prelude: patchDict[key].prelude || null,
|
||||
descriptors: patchDict[key].descriptors && unpackSyntaxes(patchDict[key].descriptors)
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
types: patchDictionary(mdnSyntaxes, patch.syntaxes),
|
||||
atrules: patchAtrules(preprocessAtrules(mdnAtrules), patch.atrules),
|
||||
properties: patchDictionary(mdnProperties, patch.properties)
|
||||
};
|
||||
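A hedged usage sketch for this data module (the require path assumes the vendored location `node_modules/css-tree/data/` shown in this commit):

```js
// Hedged sketch: inspecting the patched dictionaries exported by data/index.js above.
const data = require('css-tree/data');

console.log(Object.keys(data));                       // [ 'types', 'atrules', 'properties' ]
console.log(data.atrules.charset.prelude);            // '<string>' (patched via patch.json)
console.log(data.properties['-webkit-user-select']);  // patched syntax string
```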
721
node_modules/css-tree/data/patch.json
generated
vendored
Normal file
|
|
@@ -0,0 +1,721 @@
|
|||
{
|
||||
"atrules": {
|
||||
"charset": {
|
||||
"prelude": "<string>"
|
||||
},
|
||||
"font-face": {
|
||||
"descriptors": {
|
||||
"unicode-range": {
|
||||
"comment": "replaces <unicode-range>, an old production name",
|
||||
"syntax": "<urange>#"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"properties": {
|
||||
"-moz-background-clip": {
|
||||
"comment": "deprecated syntax in old Firefox, https://developer.mozilla.org/en/docs/Web/CSS/background-clip",
|
||||
"syntax": "padding | border"
|
||||
},
|
||||
"-moz-border-radius-bottomleft": {
|
||||
"comment": "https://developer.mozilla.org/en-US/docs/Web/CSS/border-bottom-left-radius",
|
||||
"syntax": "<'border-bottom-left-radius'>"
|
||||
},
|
||||
"-moz-border-radius-bottomright": {
|
||||
"comment": "https://developer.mozilla.org/en-US/docs/Web/CSS/border-bottom-right-radius",
|
||||
"syntax": "<'border-bottom-right-radius'>"
|
||||
},
|
||||
"-moz-border-radius-topleft": {
|
||||
"comment": "https://developer.mozilla.org/en-US/docs/Web/CSS/border-top-left-radius",
|
||||
"syntax": "<'border-top-left-radius'>"
|
||||
},
|
||||
"-moz-border-radius-topright": {
|
||||
"comment": "https://developer.mozilla.org/en-US/docs/Web/CSS/border-bottom-right-radius",
|
||||
"syntax": "<'border-bottom-right-radius'>"
|
||||
},
|
||||
"-moz-control-character-visibility": {
|
||||
"comment": "firefox specific keywords, https://bugzilla.mozilla.org/show_bug.cgi?id=947588",
|
||||
"syntax": "visible | hidden"
|
||||
},
|
||||
"-moz-osx-font-smoothing": {
|
||||
"comment": "misssed old syntax https://developer.mozilla.org/en-US/docs/Web/CSS/font-smooth",
|
||||
"syntax": "auto | grayscale"
|
||||
},
|
||||
"-moz-user-select": {
|
||||
"comment": "https://developer.mozilla.org/en-US/docs/Web/CSS/user-select",
|
||||
"syntax": "none | text | all | -moz-none"
|
||||
},
|
||||
"-ms-flex-align": {
|
||||
"comment": "misssed old syntax implemented in IE, https://www.w3.org/TR/2012/WD-css3-flexbox-20120322/#flex-align",
|
||||
"syntax": "start | end | center | baseline | stretch"
|
||||
},
|
||||
"-ms-flex-item-align": {
|
||||
"comment": "misssed old syntax implemented in IE, https://www.w3.org/TR/2012/WD-css3-flexbox-20120322/#flex-align",
|
||||
"syntax": "auto | start | end | center | baseline | stretch"
|
||||
},
|
||||
"-ms-flex-line-pack": {
|
||||
"comment": "misssed old syntax implemented in IE, https://www.w3.org/TR/2012/WD-css3-flexbox-20120322/#flex-line-pack",
|
||||
"syntax": "start | end | center | justify | distribute | stretch"
|
||||
},
|
||||
"-ms-flex-negative": {
|
||||
"comment": "misssed old syntax implemented in IE; TODO: find references for comfirmation",
|
||||
"syntax": "<'flex-shrink'>"
|
||||
},
|
||||
"-ms-flex-pack": {
|
||||
"comment": "misssed old syntax implemented in IE, https://www.w3.org/TR/2012/WD-css3-flexbox-20120322/#flex-pack",
|
||||
"syntax": "start | end | center | justify | distribute"
|
||||
},
|
||||
"-ms-flex-order": {
|
||||
"comment": "misssed old syntax implemented in IE; https://msdn.microsoft.com/en-us/library/jj127303(v=vs.85).aspx",
|
||||
"syntax": "<integer>"
|
||||
},
|
||||
"-ms-flex-positive": {
|
||||
"comment": "misssed old syntax implemented in IE; TODO: find references for comfirmation",
|
||||
"syntax": "<'flex-grow'>"
|
||||
},
|
||||
"-ms-flex-preferred-size": {
|
||||
"comment": "misssed old syntax implemented in IE; TODO: find references for comfirmation",
|
||||
"syntax": "<'flex-basis'>"
|
||||
},
|
||||
"-ms-interpolation-mode": {
|
||||
"comment": "https://msdn.microsoft.com/en-us/library/ff521095(v=vs.85).aspx",
|
||||
"syntax": "nearest-neighbor | bicubic"
|
||||
},
|
||||
"-ms-grid-column-align": {
|
||||
"comment": "add this property first since it uses as fallback for flexbox, https://msdn.microsoft.com/en-us/library/windows/apps/hh466338.aspx",
|
||||
"syntax": "start | end | center | stretch"
|
||||
},
|
||||
"-ms-grid-row-align": {
|
||||
"comment": "add this property first since it uses as fallback for flexbox, https://msdn.microsoft.com/en-us/library/windows/apps/hh466348.aspx",
|
||||
"syntax": "start | end | center | stretch"
|
||||
},
|
||||
"-ms-hyphenate-limit-last": {
|
||||
"comment": "misssed old syntax implemented in IE; https://www.w3.org/TR/css-text-4/#hyphenate-line-limits",
|
||||
"syntax": "none | always | column | page | spread"
|
||||
},
|
||||
"-webkit-appearance": {
|
||||
"comment": "webkit specific keywords",
|
||||
"references": [
|
||||
"http://css-infos.net/property/-webkit-appearance"
|
||||
],
|
||||
"syntax": "none | button | button-bevel | caps-lock-indicator | caret | checkbox | default-button | inner-spin-button | listbox | listitem | media-controls-background | media-controls-fullscreen-background | media-current-time-display | media-enter-fullscreen-button | media-exit-fullscreen-button | media-fullscreen-button | media-mute-button | media-overlay-play-button | media-play-button | media-seek-back-button | media-seek-forward-button | media-slider | media-sliderthumb | media-time-remaining-display | media-toggle-closed-captions-button | media-volume-slider | media-volume-slider-container | media-volume-sliderthumb | menulist | menulist-button | menulist-text | menulist-textfield | meter | progress-bar | progress-bar-value | push-button | radio | scrollbarbutton-down | scrollbarbutton-left | scrollbarbutton-right | scrollbarbutton-up | scrollbargripper-horizontal | scrollbargripper-vertical | scrollbarthumb-horizontal | scrollbarthumb-vertical | scrollbartrack-horizontal | scrollbartrack-vertical | searchfield | searchfield-cancel-button | searchfield-decoration | searchfield-results-button | searchfield-results-decoration | slider-horizontal | slider-vertical | sliderthumb-horizontal | sliderthumb-vertical | square-button | textarea | textfield | -apple-pay-button"
|
||||
},
|
||||
"-webkit-background-clip": {
|
||||
"comment": "https://developer.mozilla.org/en/docs/Web/CSS/background-clip",
|
||||
"syntax": "[ <box> | border | padding | content | text ]#"
|
||||
},
|
||||
"-webkit-column-break-after": {
|
||||
"comment": "added, http://help.dottoro.com/lcrthhhv.php",
|
||||
"syntax": "always | auto | avoid"
|
||||
},
|
||||
"-webkit-column-break-before": {
|
||||
"comment": "added, http://help.dottoro.com/lcxquvkf.php",
|
||||
"syntax": "always | auto | avoid"
|
||||
},
|
||||
"-webkit-column-break-inside": {
|
||||
"comment": "added, http://help.dottoro.com/lclhnthl.php",
|
||||
"syntax": "always | auto | avoid"
|
||||
},
|
||||
"-webkit-font-smoothing": {
|
||||
"comment": "https://developer.mozilla.org/en-US/docs/Web/CSS/font-smooth",
|
||||
"syntax": "auto | none | antialiased | subpixel-antialiased"
|
||||
},
|
||||
"-webkit-mask-box-image": {
|
||||
"comment": "missed; https://developer.mozilla.org/en-US/docs/Web/CSS/-webkit-mask-box-image",
|
||||
"syntax": "[ <url> | <gradient> | none ] [ <length-percentage>{4} <-webkit-mask-box-repeat>{2} ]?"
|
||||
},
|
||||
"-webkit-print-color-adjust": {
|
||||
"comment": "missed",
|
||||
"references": [
|
||||
"https://developer.mozilla.org/en/docs/Web/CSS/-webkit-print-color-adjust"
|
||||
],
|
||||
"syntax": "economy | exact"
|
||||
},
|
||||
"-webkit-text-security": {
|
||||
"comment": "missed; http://help.dottoro.com/lcbkewgt.php",
|
||||
"syntax": "none | circle | disc | square"
|
||||
},
|
||||
"-webkit-user-drag": {
|
||||
"comment": "missed; http://help.dottoro.com/lcbixvwm.php",
|
||||
"syntax": "none | element | auto"
|
||||
},
|
||||
"-webkit-user-select": {
|
||||
"comment": "auto is supported by old webkit, https://developer.mozilla.org/en-US/docs/Web/CSS/user-select",
|
||||
"syntax": "auto | none | text | all"
|
||||
},
|
||||
"alignment-baseline": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/text.html#AlignmentBaselineProperty"
|
||||
],
|
||||
"syntax": "auto | baseline | before-edge | text-before-edge | middle | central | after-edge | text-after-edge | ideographic | alphabetic | hanging | mathematical"
|
||||
},
|
||||
"baseline-shift": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/text.html#BaselineShiftProperty"
|
||||
],
|
||||
"syntax": "baseline | sub | super | <svg-length>"
|
||||
},
|
||||
"behavior": {
|
||||
"comment": "added old IE property https://msdn.microsoft.com/en-us/library/ms530723(v=vs.85).aspx",
|
||||
"syntax": "<url>+"
|
||||
},
|
||||
"clip-rule": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/masking.html#ClipRuleProperty"
|
||||
],
|
||||
"syntax": "nonzero | evenodd"
|
||||
},
|
||||
"cue": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "<'cue-before'> <'cue-after'>?"
|
||||
},
|
||||
"cue-after": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "<url> <decibel>? | none"
|
||||
},
|
||||
"cue-before": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "<url> <decibel>? | none"
|
||||
},
|
||||
"cursor": {
|
||||
"comment": "added legacy keywords: hand, -webkit-grab. -webkit-grabbing, -webkit-zoom-in, -webkit-zoom-out, -moz-grab, -moz-grabbing, -moz-zoom-in, -moz-zoom-out",
|
||||
"references": [
|
||||
"https://www.sitepoint.com/css3-cursor-styles/"
|
||||
],
|
||||
"syntax": "[ [ <url> [ <x> <y> ]? , ]* [ auto | default | none | context-menu | help | pointer | progress | wait | cell | crosshair | text | vertical-text | alias | copy | move | no-drop | not-allowed | e-resize | n-resize | ne-resize | nw-resize | s-resize | se-resize | sw-resize | w-resize | ew-resize | ns-resize | nesw-resize | nwse-resize | col-resize | row-resize | all-scroll | zoom-in | zoom-out | grab | grabbing | hand | -webkit-grab | -webkit-grabbing | -webkit-zoom-in | -webkit-zoom-out | -moz-grab | -moz-grabbing | -moz-zoom-in | -moz-zoom-out ] ]"
|
||||
},
|
||||
"display": {
|
||||
"comment": "extended with -ms-flexbox",
|
||||
"syntax": "| <-non-standard-display>"
|
||||
},
|
||||
"position": {
|
||||
"comment": "extended with -webkit-sticky",
|
||||
"syntax": "| -webkit-sticky"
|
||||
},
|
||||
"dominant-baseline": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/text.html#DominantBaselineProperty"
|
||||
],
|
||||
"syntax": "auto | use-script | no-change | reset-size | ideographic | alphabetic | hanging | mathematical | central | middle | text-after-edge | text-before-edge"
|
||||
},
|
||||
"image-rendering": {
|
||||
"comment": "extended with <-non-standard-image-rendering>, added SVG keywords optimizeSpeed and optimizeQuality",
|
||||
"references": [
|
||||
"https://developer.mozilla.org/en/docs/Web/CSS/image-rendering",
|
||||
"https://www.w3.org/TR/SVG/painting.html#ImageRenderingProperty"
|
||||
],
|
||||
"syntax": "| optimizeSpeed | optimizeQuality | <-non-standard-image-rendering>"
|
||||
},
|
||||
"fill": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#FillProperty"
|
||||
],
|
||||
"syntax": "<paint>"
|
||||
},
|
||||
"fill-opacity": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#FillProperty"
|
||||
],
|
||||
"syntax": "<number-zero-one>"
|
||||
},
|
||||
"fill-rule": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#FillProperty"
|
||||
],
|
||||
"syntax": "nonzero | evenodd"
|
||||
},
|
||||
"filter": {
|
||||
"comment": "extend with IE legacy syntaxes",
|
||||
"syntax": "| <-ms-filter-function-list>"
|
||||
},
|
||||
"glyph-orientation-horizontal": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/text.html#GlyphOrientationHorizontalProperty"
|
||||
],
|
||||
"syntax": "<angle>"
|
||||
},
|
||||
"glyph-orientation-vertical": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/text.html#GlyphOrientationVerticalProperty"
|
||||
],
|
||||
"syntax": "<angle>"
|
||||
},
|
||||
"kerning": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/text.html#KerningProperty"
|
||||
],
|
||||
"syntax": "auto | <svg-length>"
|
||||
},
|
||||
"letter-spacing": {
|
||||
"comment": "fix syntax <length> -> <length-percentage>",
|
||||
"references": [
|
||||
"https://developer.mozilla.org/en-US/docs/Web/SVG/Attribute/letter-spacing"
|
||||
],
|
||||
"syntax": "normal | <length-percentage>"
|
||||
},
|
||||
"marker": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#MarkerProperties"
|
||||
],
|
||||
"syntax": "none | <url>"
|
||||
},
|
||||
"marker-end": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#MarkerProperties"
|
||||
],
|
||||
"syntax": "none | <url>"
|
||||
},
|
||||
"marker-mid": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#MarkerProperties"
|
||||
],
|
||||
"syntax": "none | <url>"
|
||||
},
|
||||
"marker-start": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#MarkerProperties"
|
||||
],
|
||||
"syntax": "none | <url>"
|
||||
},
|
||||
"max-width": {
|
||||
"comment": "fix auto -> none (https://github.com/mdn/data/pull/431); extend by non-standard width keywords https://developer.mozilla.org/en-US/docs/Web/CSS/max-width",
|
||||
"syntax": "none | <length-percentage> | min-content | max-content | fit-content(<length-percentage>) | <-non-standard-width>"
|
||||
},
|
||||
"width": {
|
||||
"comment": "per spec fit-content should be a function, however browsers are supporting it as a keyword (https://github.com/csstree/stylelint-validator/issues/29)",
|
||||
"syntax": "| fit-content | -moz-fit-content | -webkit-fit-content"
|
||||
},
|
||||
"min-width": {
|
||||
"comment": "extend by non-standard width keywords https://developer.mozilla.org/en-US/docs/Web/CSS/width",
|
||||
"syntax": "auto | <length-percentage> | min-content | max-content | fit-content(<length-percentage>) | <-non-standard-width>"
|
||||
},
|
||||
"overflow": {
|
||||
"comment": "extend by vendor keywords https://developer.mozilla.org/en-US/docs/Web/CSS/overflow",
|
||||
"syntax": "| <-non-standard-overflow>"
|
||||
},
|
||||
"pause": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "<'pause-before'> <'pause-after'>?"
|
||||
},
|
||||
"pause-after": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "<time> | none | x-weak | weak | medium | strong | x-strong"
|
||||
},
|
||||
"pause-before": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "<time> | none | x-weak | weak | medium | strong | x-strong"
|
||||
},
|
||||
"rest": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "<'rest-before'> <'rest-after'>?"
|
||||
},
|
||||
"rest-after": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "<time> | none | x-weak | weak | medium | strong | x-strong"
|
||||
},
|
||||
"rest-before": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "<time> | none | x-weak | weak | medium | strong | x-strong"
|
||||
},
|
||||
"shape-rendering": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#ShapeRenderingPropert"
|
||||
],
|
||||
"syntax": "auto | optimizeSpeed | crispEdges | geometricPrecision"
|
||||
},
|
||||
"src": {
|
||||
"comment": "added @font-face's src property https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/src",
|
||||
"syntax": "[ <url> [ format( <string># ) ]? | local( <family-name> ) ]#"
|
||||
},
|
||||
"speak": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "auto | none | normal"
|
||||
},
|
||||
"speak-as": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "normal | spell-out || digits || [ literal-punctuation | no-punctuation ]"
|
||||
},
|
||||
"stroke": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#StrokeProperties"
|
||||
],
|
||||
"syntax": "<paint>"
|
||||
},
|
||||
"stroke-dasharray": {
|
||||
"comment": "added SVG property; a list of comma and/or white space separated <length>s and <percentage>s",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#StrokeProperties"
|
||||
],
|
||||
"syntax": "none | [ <svg-length>+ ]#"
|
||||
},
|
||||
"stroke-dashoffset": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#StrokeProperties"
|
||||
],
|
||||
"syntax": "<svg-length>"
|
||||
},
|
||||
"stroke-linecap": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#StrokeProperties"
|
||||
],
|
||||
"syntax": "butt | round | square"
|
||||
},
|
||||
"stroke-linejoin": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#StrokeProperties"
|
||||
],
|
||||
"syntax": "miter | round | bevel"
|
||||
},
|
||||
"stroke-miterlimit": {
|
||||
"comment": "added SVG property (<miterlimit> = <number-one-or-greater>) ",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#StrokeProperties"
|
||||
],
|
||||
"syntax": "<number-one-or-greater>"
|
||||
},
|
||||
"stroke-opacity": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#StrokeProperties"
|
||||
],
|
||||
"syntax": "<number-zero-one>"
|
||||
},
|
||||
"stroke-width": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/painting.html#StrokeProperties"
|
||||
],
|
||||
"syntax": "<svg-length>"
|
||||
},
|
||||
"text-anchor": {
|
||||
"comment": "added SVG property",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG/text.html#TextAlignmentProperties"
|
||||
],
|
||||
"syntax": "start | middle | end"
|
||||
},
|
||||
"unicode-bidi": {
|
||||
"comment": "added prefixed keywords https://developer.mozilla.org/en-US/docs/Web/CSS/unicode-bidi",
|
||||
"syntax": "| -moz-isolate | -moz-isolate-override | -moz-plaintext | -webkit-isolate | -webkit-isolate-override | -webkit-plaintext"
|
||||
},
|
||||
"unicode-range": {
|
||||
"comment": "added missed property https://developer.mozilla.org/en-US/docs/Web/CSS/%40font-face/unicode-range",
|
||||
"syntax": "<urange>#"
|
||||
},
|
||||
"voice-balance": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "<number> | left | center | right | leftwards | rightwards"
|
||||
},
|
||||
"voice-duration": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "auto | <time>"
|
||||
},
|
||||
"voice-family": {
|
||||
"comment": "<name> -> <family-name>, https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "[ [ <family-name> | <generic-voice> ] , ]* [ <family-name> | <generic-voice> ] | preserve"
|
||||
},
|
||||
"voice-pitch": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "<frequency> && absolute | [ [ x-low | low | medium | high | x-high ] || [ <frequency> | <semitones> | <percentage> ] ]"
|
||||
},
|
||||
"voice-range": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "<frequency> && absolute | [ [ x-low | low | medium | high | x-high ] || [ <frequency> | <semitones> | <percentage> ] ]"
|
||||
},
|
||||
"voice-rate": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "[ normal | x-slow | slow | medium | fast | x-fast ] || <percentage>"
|
||||
},
|
||||
"voice-stress": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "normal | strong | moderate | none | reduced"
|
||||
},
|
||||
"voice-volume": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#property-index",
|
||||
"syntax": "silent | [ [ x-soft | soft | medium | loud | x-loud ] || <decibel> ]"
|
||||
},
|
||||
"writing-mode": {
|
||||
"comment": "extend with SVG keywords",
|
||||
"syntax": "| <svg-writing-mode>"
|
||||
}
|
||||
},
|
||||
"syntaxes": {
|
||||
"-legacy-gradient": {
|
||||
"comment": "added collection of legacy gradient syntaxes",
|
||||
"syntax": "<-webkit-gradient()> | <-legacy-linear-gradient> | <-legacy-repeating-linear-gradient> | <-legacy-radial-gradient> | <-legacy-repeating-radial-gradient>"
|
||||
},
|
||||
"-legacy-linear-gradient": {
|
||||
"comment": "like standard syntax but w/o `to` keyword https://developer.mozilla.org/en-US/docs/Web/CSS/linear-gradient",
|
||||
"syntax": "-moz-linear-gradient( <-legacy-linear-gradient-arguments> ) | -webkit-linear-gradient( <-legacy-linear-gradient-arguments> ) | -o-linear-gradient( <-legacy-linear-gradient-arguments> )"
|
||||
},
|
||||
"-legacy-repeating-linear-gradient": {
|
||||
"comment": "like standard syntax but w/o `to` keyword https://developer.mozilla.org/en-US/docs/Web/CSS/linear-gradient",
|
||||
"syntax": "-moz-repeating-linear-gradient( <-legacy-linear-gradient-arguments> ) | -webkit-repeating-linear-gradient( <-legacy-linear-gradient-arguments> ) | -o-repeating-linear-gradient( <-legacy-linear-gradient-arguments> )"
|
||||
},
|
||||
"-legacy-linear-gradient-arguments": {
|
||||
"comment": "like standard syntax but w/o `to` keyword https://developer.mozilla.org/en-US/docs/Web/CSS/linear-gradient",
|
||||
"syntax": "[ <angle> | <side-or-corner> ]? , <color-stop-list>"
|
||||
},
|
||||
"-legacy-radial-gradient": {
|
||||
"comment": "deprecated syntax that implemented by some browsers https://www.w3.org/TR/2011/WD-css3-images-20110908/#radial-gradients",
|
||||
"syntax": "-moz-radial-gradient( <-legacy-radial-gradient-arguments> ) | -webkit-radial-gradient( <-legacy-radial-gradient-arguments> ) | -o-radial-gradient( <-legacy-radial-gradient-arguments> )"
|
||||
},
|
||||
"-legacy-repeating-radial-gradient": {
|
||||
"comment": "deprecated syntax that implemented by some browsers https://www.w3.org/TR/2011/WD-css3-images-20110908/#radial-gradients",
|
||||
"syntax": "-moz-repeating-radial-gradient( <-legacy-radial-gradient-arguments> ) | -webkit-repeating-radial-gradient( <-legacy-radial-gradient-arguments> ) | -o-repeating-radial-gradient( <-legacy-radial-gradient-arguments> )"
|
||||
},
|
||||
"-legacy-radial-gradient-arguments": {
|
||||
"comment": "deprecated syntax that implemented by some browsers https://www.w3.org/TR/2011/WD-css3-images-20110908/#radial-gradients",
|
||||
"syntax": "[ <position> , ]? [ [ [ <-legacy-radial-gradient-shape> || <-legacy-radial-gradient-size> ] | [ <length> | <percentage> ]{2} ] , ]? <color-stop-list>"
|
||||
},
|
||||
"-legacy-radial-gradient-size": {
|
||||
"comment": "before a standard it contains 2 extra keywords (`contain` and `cover`) https://www.w3.org/TR/2011/WD-css3-images-20110908/#ltsize",
|
||||
"syntax": "closest-side | closest-corner | farthest-side | farthest-corner | contain | cover"
|
||||
},
|
||||
"-legacy-radial-gradient-shape": {
|
||||
"comment": "define to double sure it doesn't extends in future https://www.w3.org/TR/2011/WD-css3-images-20110908/#ltshape",
|
||||
"syntax": "circle | ellipse"
|
||||
},
|
||||
"-non-standard-font": {
|
||||
"comment": "non standard fonts",
|
||||
"references": [
|
||||
"https://webkit.org/blog/3709/using-the-system-font-in-web-content/"
|
||||
],
|
||||
"syntax": "-apple-system-body | -apple-system-headline | -apple-system-subheadline | -apple-system-caption1 | -apple-system-caption2 | -apple-system-footnote | -apple-system-short-body | -apple-system-short-headline | -apple-system-short-subheadline | -apple-system-short-caption1 | -apple-system-short-footnote | -apple-system-tall-body"
|
||||
},
|
||||
"-non-standard-color": {
|
||||
"comment": "non standard colors",
|
||||
"references": [
|
||||
"http://cssdot.ru/%D0%A1%D0%BF%D1%80%D0%B0%D0%B2%D0%BE%D1%87%D0%BD%D0%B8%D0%BA_CSS/color-i305.html",
|
||||
"https://developer.mozilla.org/en-US/docs/Web/CSS/color_value#Mozilla_Color_Preference_Extensions"
|
||||
],
|
||||
"syntax": "-moz-ButtonDefault | -moz-ButtonHoverFace | -moz-ButtonHoverText | -moz-CellHighlight | -moz-CellHighlightText | -moz-Combobox | -moz-ComboboxText | -moz-Dialog | -moz-DialogText | -moz-dragtargetzone | -moz-EvenTreeRow | -moz-Field | -moz-FieldText | -moz-html-CellHighlight | -moz-html-CellHighlightText | -moz-mac-accentdarkestshadow | -moz-mac-accentdarkshadow | -moz-mac-accentface | -moz-mac-accentlightesthighlight | -moz-mac-accentlightshadow | -moz-mac-accentregularhighlight | -moz-mac-accentregularshadow | -moz-mac-chrome-active | -moz-mac-chrome-inactive | -moz-mac-focusring | -moz-mac-menuselect | -moz-mac-menushadow | -moz-mac-menutextselect | -moz-MenuHover | -moz-MenuHoverText | -moz-MenuBarText | -moz-MenuBarHoverText | -moz-nativehyperlinktext | -moz-OddTreeRow | -moz-win-communicationstext | -moz-win-mediatext | -moz-activehyperlinktext | -moz-default-background-color | -moz-default-color | -moz-hyperlinktext | -moz-visitedhyperlinktext | -webkit-activelink | -webkit-focus-ring-color | -webkit-link | -webkit-text"
|
||||
},
|
||||
"-non-standard-image-rendering": {
|
||||
"comment": "non-standard keywords http://phrogz.net/tmp/canvas_image_zoom.html",
|
||||
"syntax": "optimize-contrast | -moz-crisp-edges | -o-crisp-edges | -webkit-optimize-contrast"
|
||||
},
|
||||
"-non-standard-overflow": {
|
||||
"comment": "non-standard keywords https://developer.mozilla.org/en-US/docs/Web/CSS/overflow",
|
||||
"syntax": "-moz-scrollbars-none | -moz-scrollbars-horizontal | -moz-scrollbars-vertical | -moz-hidden-unscrollable"
|
||||
},
|
||||
"-non-standard-width": {
|
||||
"comment": "non-standard keywords https://developer.mozilla.org/en-US/docs/Web/CSS/width",
|
||||
"syntax": "fill-available | min-intrinsic | intrinsic | -moz-available | -moz-fit-content | -moz-min-content | -moz-max-content | -webkit-min-content | -webkit-max-content"
|
||||
},
|
||||
"-webkit-gradient()": {
|
||||
"comment": "first Apple proposal gradient syntax https://webkit.org/blog/175/introducing-css-gradients/ - TODO: simplify when after match algorithm improvement ( [, point, radius | , point] -> [, radius]? , point )",
|
||||
"syntax": "-webkit-gradient( <-webkit-gradient-type>, <-webkit-gradient-point> [, <-webkit-gradient-point> | , <-webkit-gradient-radius>, <-webkit-gradient-point> ] [, <-webkit-gradient-radius>]? [, <-webkit-gradient-color-stop>]* )"
|
||||
},
|
||||
"-webkit-gradient-color-stop": {
|
||||
"comment": "first Apple proposal gradient syntax https://webkit.org/blog/175/introducing-css-gradients/",
|
||||
"syntax": "from( <color> ) | color-stop( [ <number-zero-one> | <percentage> ] , <color> ) | to( <color> )"
|
||||
},
|
||||
"-webkit-gradient-point": {
|
||||
"comment": "first Apple proposal gradient syntax https://webkit.org/blog/175/introducing-css-gradients/",
|
||||
"syntax": "[ left | center | right | <length-percentage> ] [ top | center | bottom | <length-percentage> ]"
|
||||
},
|
||||
"-webkit-gradient-radius": {
|
||||
"comment": "first Apple proposal gradient syntax https://webkit.org/blog/175/introducing-css-gradients/",
|
||||
"syntax": "<length> | <percentage>"
|
||||
},
|
||||
"-webkit-gradient-type": {
|
||||
"comment": "first Apple proposal gradient syntax https://webkit.org/blog/175/introducing-css-gradients/",
|
||||
"syntax": "linear | radial"
|
||||
},
|
||||
"-webkit-mask-box-repeat": {
|
||||
"comment": "missed; https://developer.mozilla.org/en-US/docs/Web/CSS/-webkit-mask-box-image",
|
||||
"syntax": "repeat | stretch | round"
|
||||
},
|
||||
"-webkit-mask-clip-style": {
|
||||
"comment": "missed; there is no enough information about `-webkit-mask-clip` property, but looks like all those keywords are working",
|
||||
"syntax": "border | border-box | padding | padding-box | content | content-box | text"
|
||||
},
|
||||
"-ms-filter-function-list": {
|
||||
"comment": "https://developer.mozilla.org/en-US/docs/Web/CSS/-ms-filter",
|
||||
"syntax": "<-ms-filter-function>+"
|
||||
},
|
||||
"-ms-filter-function": {
|
||||
"comment": "https://developer.mozilla.org/en-US/docs/Web/CSS/-ms-filter",
|
||||
"syntax": "<-ms-filter-function-progid> | <-ms-filter-function-legacy>"
|
||||
},
|
||||
"-ms-filter-function-progid": {
|
||||
"comment": "https://developer.mozilla.org/en-US/docs/Web/CSS/-ms-filter",
|
||||
"syntax": "'progid:' [ <ident-token> '.' ]* [ <ident-token> | <function-token> <any-value>? ) ]"
|
||||
},
|
||||
"-ms-filter-function-legacy": {
|
||||
"comment": "https://developer.mozilla.org/en-US/docs/Web/CSS/-ms-filter",
|
||||
"syntax": "<ident-token> | <function-token> <any-value>? )"
|
||||
},
|
||||
"-ms-filter": {
|
||||
"syntax": "<string>"
|
||||
},
|
||||
"age": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#voice-family",
|
||||
"syntax": "child | young | old"
|
||||
},
|
||||
"attr-name": {
|
||||
"syntax": "<wq-name>"
|
||||
},
|
||||
"attr-fallback": {
|
||||
"syntax": "<any-value>"
|
||||
},
|
||||
"border-radius": {
|
||||
"comment": "missed, https://drafts.csswg.org/css-backgrounds-3/#the-border-radius",
|
||||
"syntax": "<length-percentage>{1,2}"
|
||||
},
|
||||
"bottom": {
|
||||
"comment": "missed; not sure we should add it, but no others except `shape` is using it so it's ok for now; https://drafts.fxtf.org/css-masking-1/#funcdef-clip-rect",
|
||||
"syntax": "<length> | auto"
|
||||
},
|
||||
"content-list": {
|
||||
"comment": "missed -> https://drafts.csswg.org/css-content/#typedef-content-list (document-url, <target> and leader() is omitted util stabilization)",
|
||||
"syntax": "[ <string> | contents | <image> | <quote> | <target> | <leader()> | <attr()> | counter( <ident>, <'list-style-type'>? ) ]+"
|
||||
},
|
||||
"element()": {
|
||||
"comment": "https://drafts.csswg.org/css-gcpm/#element-syntax & https://drafts.csswg.org/css-images-4/#element-notation",
|
||||
"syntax": "element( <custom-ident> , [ first | start | last | first-except ]? ) | element( <id-selector> )"
|
||||
},
|
||||
"generic-voice": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#voice-family",
|
||||
"syntax": "[ <age>? <gender> <integer>? ]"
|
||||
},
|
||||
"gender": {
|
||||
"comment": "https://www.w3.org/TR/css3-speech/#voice-family",
|
||||
"syntax": "male | female | neutral"
|
||||
},
|
||||
"generic-family": {
|
||||
"comment": "added -apple-system",
|
||||
"references": [
|
||||
"https://webkit.org/blog/3709/using-the-system-font-in-web-content/"
|
||||
],
|
||||
"syntax": "| -apple-system"
|
||||
},
|
||||
"gradient": {
|
||||
"comment": "added legacy syntaxes support",
|
||||
"syntax": "| <-legacy-gradient>"
|
||||
},
|
||||
"left": {
|
||||
"comment": "missed; not sure we should add it, but no others except `shape` is using it so it's ok for now; https://drafts.fxtf.org/css-masking-1/#funcdef-clip-rect",
|
||||
"syntax": "<length> | auto"
|
||||
},
|
||||
"mask-image": {
|
||||
"comment": "missed; https://drafts.fxtf.org/css-masking-1/#the-mask-image",
|
||||
"syntax": "<mask-reference>#"
|
||||
},
|
||||
"name-repeat": {
|
||||
"comment": "missed, and looks like obsolete, keep it as is since other property syntaxes should be changed too; https://www.w3.org/TR/2015/WD-css-grid-1-20150917/#typedef-name-repeat",
|
||||
"syntax": "repeat( [ <positive-integer> | auto-fill ], <line-names>+)"
|
||||
},
|
||||
"named-color": {
|
||||
"comment": "added non standard color names",
|
||||
"syntax": "| <-non-standard-color>"
|
||||
},
|
||||
"paint": {
|
||||
"comment": "used by SVG https://www.w3.org/TR/SVG/painting.html#SpecifyingPaint",
|
||||
"syntax": "none | <color> | <url> [ none | <color> ]? | context-fill | context-stroke"
|
||||
},
|
||||
"page-size": {
|
||||
"comment": "https://www.w3.org/TR/css-page-3/#typedef-page-size-page-size",
|
||||
"syntax": "A5 | A4 | A3 | B5 | B4 | JIS-B5 | JIS-B4 | letter | legal | ledger"
|
||||
},
|
||||
"ratio": {
|
||||
"comment": "missed, https://drafts.csswg.org/mediaqueries-4/#typedef-ratio",
|
||||
"syntax": "<integer> / <integer>"
|
||||
},
|
||||
"right": {
|
||||
"comment": "missed; not sure we should add it, but no others except `shape` is using it so it's ok for now; https://drafts.fxtf.org/css-masking-1/#funcdef-clip-rect",
|
||||
"syntax": "<length> | auto"
|
||||
},
|
||||
"shape": {
|
||||
"comment": "missed spaces in function body and add backwards compatible syntax",
|
||||
"syntax": "rect( <top>, <right>, <bottom>, <left> ) | rect( <top> <right> <bottom> <left> )"
|
||||
},
|
||||
"svg-length": {
|
||||
"comment": "All coordinates and lengths in SVG can be specified with or without a unit identifier",
|
||||
"references": [
|
||||
"https://www.w3.org/TR/SVG11/coords.html#Units"
|
||||
],
|
||||
"syntax": "<percentage> | <length> | <number>"
|
||||
},
|
||||
"svg-writing-mode": {
|
||||
"comment": "SVG specific keywords (deprecated for CSS)",
|
||||
"references": [
|
||||
"https://developer.mozilla.org/en/docs/Web/CSS/writing-mode",
|
||||
"https://www.w3.org/TR/SVG/text.html#WritingModeProperty"
|
||||
],
|
||||
"syntax": "lr-tb | rl-tb | tb-rl | lr | rl | tb"
|
||||
},
|
||||
"top": {
|
||||
"comment": "missed; not sure we should add it, but no others except `shape` is using it so it's ok for now; https://drafts.fxtf.org/css-masking-1/#funcdef-clip-rect",
|
||||
"syntax": "<length> | auto"
|
||||
},
|
||||
"track-group": {
|
||||
"comment": "used by old grid-columns and grid-rows syntax v0",
|
||||
"syntax": "'(' [ <string>* <track-minmax> <string>* ]+ ')' [ '[' <positive-integer> ']' ]? | <track-minmax>"
|
||||
},
|
||||
"track-list-v0": {
|
||||
"comment": "used by old grid-columns and grid-rows syntax v0",
|
||||
"syntax": "[ <string>* <track-group> <string>* ]+ | none"
|
||||
},
|
||||
"track-minmax": {
|
||||
"comment": "used by old grid-columns and grid-rows syntax v0",
|
||||
"syntax": "minmax( <track-breadth> , <track-breadth> ) | auto | <track-breadth> | fit-content"
|
||||
},
|
||||
"x": {
|
||||
"comment": "missed; not sure we should add it, but no others except `cursor` is using it so it's ok for now; https://drafts.csswg.org/css-ui-3/#cursor",
|
||||
"syntax": "<number>"
|
||||
},
|
||||
"y": {
|
||||
"comment": "missed; not sure we should add it, but no others except `cursor` is using so it's ok for now; https://drafts.csswg.org/css-ui-3/#cursor",
|
||||
"syntax": "<number>"
|
||||
},
|
||||
"declaration": {
|
||||
"comment": "missed, restored by https://drafts.csswg.org/css-syntax",
|
||||
"syntax": "<ident-token> : <declaration-value>? [ '!' important ]?"
|
||||
},
|
||||
"declaration-list": {
|
||||
"comment": "missed, restored by https://drafts.csswg.org/css-syntax",
|
||||
"syntax": "[ <declaration>? ';' ]* <declaration>?"
|
||||
},
|
||||
"url": {
|
||||
"comment": "https://drafts.csswg.org/css-values-4/#urls",
|
||||
"syntax": "url( <string> <url-modifier>* ) | <url-token>"
|
||||
},
|
||||
"url-modifier": {
|
||||
"comment": "https://drafts.csswg.org/css-values-4/#typedef-url-modifier",
|
||||
"syntax": "<ident> | <function-token> <any-value> )"
|
||||
},
|
||||
"number-zero-one": {
|
||||
"syntax": "<number [0,1]>"
|
||||
},
|
||||
"number-one-or-greater": {
|
||||
"syntax": "<number [1,∞]>"
|
||||
},
|
||||
"positive-integer": {
|
||||
"syntax": "<integer [0,∞]>"
|
||||
},
|
||||
"-non-standard-display": {
|
||||
"syntax": "-ms-inline-flexbox | -ms-grid | -ms-inline-grid | -webkit-flex | -webkit-inline-flex | -webkit-box | -webkit-inline-box | -moz-inline-stack | -moz-box | -moz-inline-box"
|
||||
}
|
||||
}
|
||||
}
|
||||
12606
node_modules/css-tree/dist/csstree.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
1
node_modules/css-tree/dist/csstree.min.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
576
node_modules/css-tree/lib/common/List.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,576 @@
|
|||
//
|
||||
// list
|
||||
// ┌──────┐
|
||||
// ┌──────────────┼─head │
|
||||
// │ │ tail─┼──────────────┐
|
||||
// │ └──────┘ │
|
||||
// ▼ ▼
|
||||
// item item item item
|
||||
// ┌──────┐ ┌──────┐ ┌──────┐ ┌──────┐
|
||||
// null ◀──┼─prev │◀───┼─prev │◀───┼─prev │◀───┼─prev │
|
||||
// │ next─┼───▶│ next─┼───▶│ next─┼───▶│ next─┼──▶ null
|
||||
// ├──────┤ ├──────┤ ├──────┤ ├──────┤
|
||||
// │ data │ │ data │ │ data │ │ data │
|
||||
// └──────┘ └──────┘ └──────┘ └──────┘
|
||||
//
|
||||
|
||||
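// Usage sketch (hedged illustration; the require path assumes the vendored location
// node_modules/css-tree/lib/common/List.js of this file):
//
//   var List = require('css-tree/lib/common/List');
//   var list = new List().fromArray([1, 2, 3]);
//   list.first();    // 1
//   list.last();     // 3
//   list.toArray();  // [1, 2, 3]
//   list.forEach(function(data) { console.log(data); });
//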
function createItem(data) {
|
||||
return {
|
||||
prev: null,
|
||||
next: null,
|
||||
data: data
|
||||
};
|
||||
}
|
||||
|
||||
function allocateCursor(node, prev, next) {
|
||||
var cursor;
|
||||
|
||||
if (cursors !== null) {
|
||||
cursor = cursors;
|
||||
cursors = cursors.cursor;
|
||||
cursor.prev = prev;
|
||||
cursor.next = next;
|
||||
cursor.cursor = node.cursor;
|
||||
} else {
|
||||
cursor = {
|
||||
prev: prev,
|
||||
next: next,
|
||||
cursor: node.cursor
|
||||
};
|
||||
}
|
||||
|
||||
node.cursor = cursor;
|
||||
|
||||
return cursor;
|
||||
}
|
||||
|
||||
function releaseCursor(node) {
|
||||
var cursor = node.cursor;
|
||||
|
||||
node.cursor = cursor.cursor;
|
||||
cursor.prev = null;
|
||||
cursor.next = null;
|
||||
cursor.cursor = cursors;
|
||||
cursors = cursor;
|
||||
}
|
||||
|
||||
var cursors = null;
|
||||
var List = function() {
|
||||
this.cursor = null;
|
||||
this.head = null;
|
||||
this.tail = null;
|
||||
};
|
||||
|
||||
List.createItem = createItem;
|
||||
List.prototype.createItem = createItem;
|
||||
|
||||
List.prototype.updateCursors = function(prevOld, prevNew, nextOld, nextNew) {
|
||||
var cursor = this.cursor;
|
||||
|
||||
while (cursor !== null) {
|
||||
if (cursor.prev === prevOld) {
|
||||
cursor.prev = prevNew;
|
||||
}
|
||||
|
||||
if (cursor.next === nextOld) {
|
||||
cursor.next = nextNew;
|
||||
}
|
||||
|
||||
cursor = cursor.cursor;
|
||||
}
|
||||
};
|
||||
|
||||
List.prototype.getSize = function() {
|
||||
var size = 0;
|
||||
var cursor = this.head;
|
||||
|
||||
while (cursor) {
|
||||
size++;
|
||||
cursor = cursor.next;
|
||||
}
|
||||
|
||||
return size;
|
||||
};
|
||||
|
||||
List.prototype.fromArray = function(array) {
|
||||
var cursor = null;
|
||||
|
||||
this.head = null;
|
||||
|
||||
for (var i = 0; i < array.length; i++) {
|
||||
var item = createItem(array[i]);
|
||||
|
||||
if (cursor !== null) {
|
||||
cursor.next = item;
|
||||
} else {
|
||||
this.head = item;
|
||||
}
|
||||
|
||||
item.prev = cursor;
|
||||
cursor = item;
|
||||
}
|
||||
|
||||
this.tail = cursor;
|
||||
|
||||
return this;
|
||||
};
|
||||
|
||||
List.prototype.toArray = function() {
|
||||
var cursor = this.head;
|
||||
var result = [];
|
||||
|
||||
while (cursor) {
|
||||
result.push(cursor.data);
|
||||
cursor = cursor.next;
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
List.prototype.toJSON = List.prototype.toArray;
|
||||
|
||||
List.prototype.isEmpty = function() {
|
||||
return this.head === null;
|
||||
};
|
||||
|
||||
List.prototype.first = function() {
|
||||
return this.head && this.head.data;
|
||||
};
|
||||
|
||||
List.prototype.last = function() {
|
||||
return this.tail && this.tail.data;
|
||||
};
|
||||
|
||||
List.prototype.each = function(fn, context) {
|
||||
var item;
|
||||
|
||||
if (context === undefined) {
|
||||
context = this;
|
||||
}
|
||||
|
||||
// push cursor
|
||||
var cursor = allocateCursor(this, null, this.head);
|
||||
|
||||
while (cursor.next !== null) {
|
||||
item = cursor.next;
|
||||
cursor.next = item.next;
|
||||
|
||||
fn.call(context, item.data, item, this);
|
||||
}
|
||||
|
||||
// pop cursor
|
||||
releaseCursor(this);
|
||||
};
|
||||
|
||||
List.prototype.forEach = List.prototype.each;
|
||||
|
||||
List.prototype.eachRight = function(fn, context) {
|
||||
var item;
|
||||
|
||||
if (context === undefined) {
|
||||
context = this;
|
||||
}
|
||||
|
||||
// push cursor
|
||||
var cursor = allocateCursor(this, this.tail, null);
|
||||
|
||||
while (cursor.prev !== null) {
|
||||
item = cursor.prev;
|
||||
cursor.prev = item.prev;
|
||||
|
||||
fn.call(context, item.data, item, this);
|
||||
}
|
||||
|
||||
// pop cursor
|
||||
releaseCursor(this);
|
||||
};
|
||||
|
||||
List.prototype.forEachRight = List.prototype.eachRight;
|
||||
|
||||
List.prototype.reduce = function(fn, initialValue, context) {
|
||||
var item;
|
||||
|
||||
if (context === undefined) {
|
||||
context = this;
|
||||
}
|
||||
|
||||
// push cursor
|
||||
var cursor = allocateCursor(this, null, this.head);
|
||||
var acc = initialValue;
|
||||
|
||||
while (cursor.next !== null) {
|
||||
item = cursor.next;
|
||||
cursor.next = item.next;
|
||||
|
||||
acc = fn.call(context, acc, item.data, item, this);
|
||||
}
|
||||
|
||||
// pop cursor
|
||||
releaseCursor(this);
|
||||
|
||||
return acc;
|
||||
};
|
||||
|
||||
List.prototype.reduceRight = function(fn, initialValue, context) {
|
||||
var item;
|
||||
|
||||
if (context === undefined) {
|
||||
context = this;
|
||||
}
|
||||
|
||||
// push cursor
|
||||
var cursor = allocateCursor(this, this.tail, null);
|
||||
var acc = initialValue;
|
||||
|
||||
while (cursor.prev !== null) {
|
||||
item = cursor.prev;
|
||||
cursor.prev = item.prev;
|
||||
|
||||
acc = fn.call(context, acc, item.data, item, this);
|
||||
}
|
||||
|
||||
// pop cursor
|
||||
releaseCursor(this);
|
||||
|
||||
return acc;
|
||||
};
|
||||
|
||||
List.prototype.nextUntil = function(start, fn, context) {
|
||||
if (start === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
var item;
|
||||
|
||||
if (context === undefined) {
|
||||
context = this;
|
||||
}
|
||||
|
||||
// push cursor
|
||||
var cursor = allocateCursor(this, null, start);
|
||||
|
||||
while (cursor.next !== null) {
|
||||
item = cursor.next;
|
||||
cursor.next = item.next;
|
||||
|
||||
if (fn.call(context, item.data, item, this)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// pop cursor
|
||||
releaseCursor(this);
|
||||
};
|
||||
|
||||
List.prototype.prevUntil = function(start, fn, context) {
|
||||
if (start === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
var item;
|
||||
|
||||
if (context === undefined) {
|
||||
context = this;
|
||||
}
|
||||
|
||||
// push cursor
|
||||
var cursor = allocateCursor(this, start, null);
|
||||
|
||||
while (cursor.prev !== null) {
|
||||
item = cursor.prev;
|
||||
cursor.prev = item.prev;
|
||||
|
||||
if (fn.call(context, item.data, item, this)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// pop cursor
|
||||
releaseCursor(this);
|
||||
};
|
||||
|
||||
List.prototype.some = function(fn, context) {
|
||||
var cursor = this.head;
|
||||
|
||||
if (context === undefined) {
|
||||
context = this;
|
||||
}
|
||||
|
||||
while (cursor !== null) {
|
||||
if (fn.call(context, cursor.data, cursor, this)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
cursor = cursor.next;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
List.prototype.map = function(fn, context) {
|
||||
var result = new List();
|
||||
var cursor = this.head;
|
||||
|
||||
if (context === undefined) {
|
||||
context = this;
|
||||
}
|
||||
|
||||
while (cursor !== null) {
|
||||
result.appendData(fn.call(context, cursor.data, cursor, this));
|
||||
cursor = cursor.next;
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
List.prototype.filter = function(fn, context) {
|
||||
var result = new List();
|
||||
var cursor = this.head;
|
||||
|
||||
if (context === undefined) {
|
||||
context = this;
|
||||
}
|
||||
|
||||
while (cursor !== null) {
|
||||
if (fn.call(context, cursor.data, cursor, this)) {
|
||||
result.appendData(cursor.data);
|
||||
}
|
||||
cursor = cursor.next;
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
List.prototype.clear = function() {
|
||||
this.head = null;
|
||||
this.tail = null;
|
||||
};
|
||||
|
||||
List.prototype.copy = function() {
|
||||
var result = new List();
|
||||
var cursor = this.head;
|
||||
|
||||
while (cursor !== null) {
|
||||
result.insert(createItem(cursor.data));
|
||||
cursor = cursor.next;
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
List.prototype.prepend = function(item) {
|
||||
// head
|
||||
// ^
|
||||
// item
|
||||
this.updateCursors(null, item, this.head, item);
|
||||
|
||||
// insert to the beginning of the list
|
||||
if (this.head !== null) {
|
||||
// new item <- first item
|
||||
this.head.prev = item;
|
||||
|
||||
// new item -> first item
|
||||
item.next = this.head;
|
||||
} else {
|
||||
// if list has no head, then it also has no tail
|
||||
// in this case tail points to the new item
|
||||
this.tail = item;
|
||||
}
|
||||
|
||||
// head always points to new item
|
||||
this.head = item;
|
||||
|
||||
return this;
|
||||
};
|
||||
|
||||
List.prototype.prependData = function(data) {
|
||||
return this.prepend(createItem(data));
|
||||
};
|
||||
|
||||
List.prototype.append = function(item) {
|
||||
return this.insert(item);
|
||||
};
|
||||
|
||||
List.prototype.appendData = function(data) {
|
||||
return this.insert(createItem(data));
|
||||
};
|
||||
|
||||
List.prototype.insert = function(item, before) {
|
||||
if (before !== undefined && before !== null) {
|
||||
// prev before
|
||||
// ^
|
||||
// item
|
||||
this.updateCursors(before.prev, item, before, item);
|
||||
|
||||
if (before.prev === null) {
|
||||
// insert to the beginning of list
|
||||
if (this.head !== before) {
|
||||
throw new Error('before doesn\'t belong to list');
|
||||
}
|
||||
|
||||
// since head points to before therefore list doesn't empty
|
||||
// no need to check tail
|
||||
this.head = item;
|
||||
before.prev = item;
|
||||
item.next = before;
|
||||
|
||||
this.updateCursors(null, item);
|
||||
} else {
|
||||
|
||||
// insert between two items
|
||||
before.prev.next = item;
|
||||
item.prev = before.prev;
|
||||
|
||||
before.prev = item;
|
||||
item.next = before;
|
||||
}
|
||||
} else {
|
||||
// tail
|
||||
// ^
|
||||
// item
|
||||
this.updateCursors(this.tail, item, null, item);
|
||||
|
||||
// insert to the ending of the list
|
||||
if (this.tail !== null) {
|
||||
// last item -> new item
|
||||
this.tail.next = item;
|
||||
|
||||
// last item <- new item
|
||||
item.prev = this.tail;
|
||||
} else {
|
||||
// if list has no tail, then it also has no head
|
||||
// in this case head points to new item
|
||||
this.head = item;
|
||||
}
|
||||
|
||||
// tail always points to new item
|
||||
this.tail = item;
|
||||
}
|
||||
|
||||
return this;
|
||||
};
|
||||
|
||||
List.prototype.insertData = function(data, before) {
|
||||
return this.insert(createItem(data), before);
|
||||
};
|
||||
|
||||
List.prototype.remove = function(item) {
|
||||
// item
|
||||
// ^
|
||||
// prev next
|
||||
this.updateCursors(item, item.prev, item, item.next);
|
||||
|
||||
if (item.prev !== null) {
|
||||
item.prev.next = item.next;
|
||||
} else {
|
||||
if (this.head !== item) {
|
||||
throw new Error('item doesn\'t belong to list');
|
||||
}
|
||||
|
||||
this.head = item.next;
|
||||
}
|
||||
|
||||
if (item.next !== null) {
|
||||
item.next.prev = item.prev;
|
||||
} else {
|
||||
if (this.tail !== item) {
|
||||
throw new Error('item doesn\'t belong to list');
|
||||
}
|
||||
|
||||
this.tail = item.prev;
|
||||
}
|
||||
|
||||
item.prev = null;
|
||||
item.next = null;
|
||||
|
||||
return item;
|
||||
};
|
||||
|
||||
List.prototype.push = function(data) {
|
||||
this.insert(createItem(data));
|
||||
};
|
||||
|
||||
List.prototype.pop = function() {
|
||||
if (this.tail !== null) {
|
||||
return this.remove(this.tail);
|
||||
}
|
||||
};
|
||||
|
||||
List.prototype.unshift = function(data) {
|
||||
this.prepend(createItem(data));
|
||||
};
|
||||
|
||||
List.prototype.shift = function() {
|
||||
if (this.head !== null) {
|
||||
return this.remove(this.head);
|
||||
}
|
||||
};
|
||||
|
||||
List.prototype.prependList = function(list) {
|
||||
return this.insertList(list, this.head);
|
||||
};
|
||||
|
||||
List.prototype.appendList = function(list) {
|
||||
return this.insertList(list);
|
||||
};
|
||||
|
||||
List.prototype.insertList = function(list, before) {
|
||||
// ignore empty lists
|
||||
if (list.head === null) {
|
||||
return this;
|
||||
}
|
||||
|
||||
if (before !== undefined && before !== null) {
|
||||
this.updateCursors(before.prev, list.tail, before, list.head);
|
||||
|
||||
// insert in the middle of dist list
|
||||
if (before.prev !== null) {
|
||||
// before.prev <-> list.head
|
||||
before.prev.next = list.head;
|
||||
list.head.prev = before.prev;
|
||||
} else {
|
||||
this.head = list.head;
|
||||
}
|
||||
|
||||
before.prev = list.tail;
|
||||
list.tail.next = before;
|
||||
} else {
|
||||
this.updateCursors(this.tail, list.tail, null, list.head);
|
||||
|
||||
// insert to end of the list
|
||||
if (this.tail !== null) {
|
||||
// if destination list has a tail, then it also has a head,
|
||||
// but head doesn't change
|
||||
|
||||
// dest tail -> source head
|
||||
this.tail.next = list.head;
|
||||
|
||||
// dest tail <- source head
|
||||
list.head.prev = this.tail;
|
||||
} else {
|
||||
// if list has no a tail, then it also has no a head
|
||||
// in this case points head to new item
|
||||
this.head = list.head;
|
||||
}
|
||||
|
||||
// tail always start point to new item
|
||||
this.tail = list.tail;
|
||||
}
|
||||
|
||||
list.head = null;
|
||||
list.tail = null;
|
||||
|
||||
return this;
|
||||
};
|
||||
|
||||
List.prototype.replace = function(oldItem, newItemOrList) {
|
||||
if ('head' in newItemOrList) {
|
||||
this.insertList(newItemOrList, oldItem);
|
||||
} else {
|
||||
this.insert(newItemOrList, oldItem);
|
||||
}
|
||||
|
||||
this.remove(oldItem);
|
||||
};
|
||||
|
||||
module.exports = List;
|
||||
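The List class above is css-tree's cursor-based doubly linked list. A minimal usage sketch of the API shown in this file (illustrative only; the require path below assumes the vendored layout from this commit):

var List = require('./node_modules/css-tree/lib/common/List');

var list = new List().fromArray(['a', 'b', 'c']);
list.appendData('d');      // a, b, c, d
list.prependData('start'); // start, a, b, c, d

console.log(list.getSize());            // 5
console.log(list.first(), list.last()); // 'start' 'd'
console.log(list.filter(function(x) {
    return x !== 'start';
}).toArray());                          // [ 'a', 'b', 'c', 'd' ]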
91 node_modules/css-tree/lib/common/OffsetToLocation.js generated vendored Normal file
@@ -0,0 +1,91 @@
var adoptBuffer = require('./adopt-buffer');
|
||||
var isBOM = require('../tokenizer').isBOM;
|
||||
|
||||
var N = 10;
|
||||
var F = 12;
|
||||
var R = 13;
|
||||
|
||||
function computeLinesAndColumns(host, source) {
|
||||
var sourceLength = source.length;
|
||||
var lines = adoptBuffer(host.lines, sourceLength); // +1
|
||||
var line = host.startLine;
|
||||
var columns = adoptBuffer(host.columns, sourceLength);
|
||||
var column = host.startColumn;
|
||||
var startOffset = source.length > 0 ? isBOM(source.charCodeAt(0)) : 0;
|
||||
|
||||
for (var i = startOffset; i < sourceLength; i++) { // -1
|
||||
var code = source.charCodeAt(i);
|
||||
|
||||
lines[i] = line;
|
||||
columns[i] = column++;
|
||||
|
||||
if (code === N || code === R || code === F) {
|
||||
if (code === R && i + 1 < sourceLength && source.charCodeAt(i + 1) === N) {
|
||||
i++;
|
||||
lines[i] = line;
|
||||
columns[i] = column;
|
||||
}
|
||||
|
||||
line++;
|
||||
column = 1;
|
||||
}
|
||||
}
|
||||
|
||||
lines[i] = line;
|
||||
columns[i] = column;
|
||||
|
||||
host.lines = lines;
|
||||
host.columns = columns;
|
||||
}
|
||||
|
||||
var OffsetToLocation = function() {
|
||||
this.lines = null;
|
||||
this.columns = null;
|
||||
this.linesAndColumnsComputed = false;
|
||||
};
|
||||
|
||||
OffsetToLocation.prototype = {
|
||||
setSource: function(source, startOffset, startLine, startColumn) {
|
||||
this.source = source;
|
||||
this.startOffset = typeof startOffset === 'undefined' ? 0 : startOffset;
|
||||
this.startLine = typeof startLine === 'undefined' ? 1 : startLine;
|
||||
this.startColumn = typeof startColumn === 'undefined' ? 1 : startColumn;
|
||||
this.linesAndColumnsComputed = false;
|
||||
},
|
||||
|
||||
ensureLinesAndColumnsComputed: function() {
|
||||
if (!this.linesAndColumnsComputed) {
|
||||
computeLinesAndColumns(this, this.source);
|
||||
this.linesAndColumnsComputed = true;
|
||||
}
|
||||
},
|
||||
getLocation: function(offset, filename) {
|
||||
this.ensureLinesAndColumnsComputed();
|
||||
|
||||
return {
|
||||
source: filename,
|
||||
offset: this.startOffset + offset,
|
||||
line: this.lines[offset],
|
||||
column: this.columns[offset]
|
||||
};
|
||||
},
|
||||
getLocationRange: function(start, end, filename) {
|
||||
this.ensureLinesAndColumnsComputed();
|
||||
|
||||
return {
|
||||
source: filename,
|
||||
start: {
|
||||
offset: this.startOffset + start,
|
||||
line: this.lines[start],
|
||||
column: this.columns[start]
|
||||
},
|
||||
end: {
|
||||
offset: this.startOffset + end,
|
||||
line: this.lines[end],
|
||||
column: this.columns[end]
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = OffsetToLocation;
|
||||
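OffsetToLocation lazily builds per-character line and column tables so absolute offsets can be mapped back to source positions. A small sketch of how it behaves (the require path is assumed from this diff):

var OffsetToLocation = require('./node_modules/css-tree/lib/common/OffsetToLocation');

var mapper = new OffsetToLocation();
mapper.setSource('.a {\n  color: red;\n}');

console.log(mapper.getLocation(7, 'demo.css'));
// -> { source: 'demo.css', offset: 7, line: 2, column: 3 }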
82 node_modules/css-tree/lib/common/SyntaxError.js generated vendored Normal file
@@ -0,0 +1,82 @@
var createCustomError = require('../utils/createCustomError');
|
||||
var MAX_LINE_LENGTH = 100;
|
||||
var OFFSET_CORRECTION = 60;
|
||||
var TAB_REPLACEMENT = ' ';
|
||||
|
||||
function sourceFragment(error, extraLines) {
|
||||
function processLines(start, end) {
|
||||
return lines.slice(start, end).map(function(line, idx) {
|
||||
var num = String(start + idx + 1);
|
||||
|
||||
while (num.length < maxNumLength) {
|
||||
num = ' ' + num;
|
||||
}
|
||||
|
||||
return num + ' |' + line;
|
||||
}).join('\n');
|
||||
}
|
||||
|
||||
var lines = error.source.split(/\r\n?|\n|\f/);
|
||||
var line = error.line;
|
||||
var column = error.column;
|
||||
var startLine = Math.max(1, line - extraLines) - 1;
|
||||
var endLine = Math.min(line + extraLines, lines.length + 1);
|
||||
var maxNumLength = Math.max(4, String(endLine).length) + 1;
|
||||
var cutLeft = 0;
|
||||
|
||||
// column correction according to replaced tab before column
|
||||
column += (TAB_REPLACEMENT.length - 1) * (lines[line - 1].substr(0, column - 1).match(/\t/g) || []).length;
|
||||
|
||||
if (column > MAX_LINE_LENGTH) {
|
||||
cutLeft = column - OFFSET_CORRECTION + 3;
|
||||
column = OFFSET_CORRECTION - 2;
|
||||
}
|
||||
|
||||
for (var i = startLine; i <= endLine; i++) {
|
||||
if (i >= 0 && i < lines.length) {
|
||||
lines[i] = lines[i].replace(/\t/g, TAB_REPLACEMENT);
|
||||
lines[i] =
|
||||
(cutLeft > 0 && lines[i].length > cutLeft ? '\u2026' : '') +
|
||||
lines[i].substr(cutLeft, MAX_LINE_LENGTH - 2) +
|
||||
(lines[i].length > cutLeft + MAX_LINE_LENGTH - 1 ? '\u2026' : '');
|
||||
}
|
||||
}
|
||||
|
||||
return [
|
||||
processLines(startLine, line),
|
||||
new Array(column + maxNumLength + 2).join('-') + '^',
|
||||
processLines(line, endLine)
|
||||
].filter(Boolean).join('\n');
|
||||
}
|
||||
|
||||
var SyntaxError = function(message, source, offset, line, column) {
|
||||
var error = createCustomError('SyntaxError', message);
|
||||
|
||||
error.source = source;
|
||||
error.offset = offset;
|
||||
error.line = line;
|
||||
error.column = column;
|
||||
|
||||
error.sourceFragment = function(extraLines) {
|
||||
return sourceFragment(error, isNaN(extraLines) ? 0 : extraLines);
|
||||
};
|
||||
Object.defineProperty(error, 'formattedMessage', {
|
||||
get: function() {
|
||||
return (
|
||||
'Parse error: ' + error.message + '\n' +
|
||||
sourceFragment(error, 2)
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
// for backward capability
|
||||
error.parseError = {
|
||||
offset: offset,
|
||||
line: line,
|
||||
column: column
|
||||
};
|
||||
|
||||
return error;
|
||||
};
|
||||
|
||||
module.exports = SyntaxError;
|
||||
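The factory above decorates a generic error with position data and a lazily rendered source fragment. A sketch of typical use; the message, source and positions are made up for illustration:

var SyntaxError = require('./node_modules/css-tree/lib/common/SyntaxError');

var error = SyntaxError('Colon is expected', '.a { color red }', 10, 1, 11);

console.log(error.message);          // 'Colon is expected'
console.log(error.parseError);       // { offset: 10, line: 1, column: 11 }
console.log(error.formattedMessage); // 'Parse error: ...' plus a caret-annotated source fragment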
219 node_modules/css-tree/lib/common/TokenStream.js generated vendored Normal file
@@ -0,0 +1,219 @@
var constants = require('../tokenizer/const');
|
||||
var TYPE = constants.TYPE;
|
||||
var NAME = constants.NAME;
|
||||
|
||||
var utils = require('../tokenizer/utils');
|
||||
var cmpStr = utils.cmpStr;
|
||||
|
||||
var EOF = TYPE.EOF;
|
||||
var WHITESPACE = TYPE.WhiteSpace;
|
||||
var COMMENT = TYPE.Comment;
|
||||
|
||||
var OFFSET_MASK = 0x00FFFFFF;
|
||||
var TYPE_SHIFT = 24;
|
||||
|
||||
var TokenStream = function() {
|
||||
this.offsetAndType = null;
|
||||
this.balance = null;
|
||||
|
||||
this.reset();
|
||||
};
|
||||
|
||||
TokenStream.prototype = {
|
||||
reset: function() {
|
||||
this.eof = false;
|
||||
this.tokenIndex = -1;
|
||||
this.tokenType = 0;
|
||||
this.tokenStart = this.firstCharOffset;
|
||||
this.tokenEnd = this.firstCharOffset;
|
||||
},
|
||||
|
||||
lookupType: function(offset) {
|
||||
offset += this.tokenIndex;
|
||||
|
||||
if (offset < this.tokenCount) {
|
||||
return this.offsetAndType[offset] >> TYPE_SHIFT;
|
||||
}
|
||||
|
||||
return EOF;
|
||||
},
|
||||
lookupOffset: function(offset) {
|
||||
offset += this.tokenIndex;
|
||||
|
||||
if (offset < this.tokenCount) {
|
||||
return this.offsetAndType[offset - 1] & OFFSET_MASK;
|
||||
}
|
||||
|
||||
return this.source.length;
|
||||
},
|
||||
lookupValue: function(offset, referenceStr) {
|
||||
offset += this.tokenIndex;
|
||||
|
||||
if (offset < this.tokenCount) {
|
||||
return cmpStr(
|
||||
this.source,
|
||||
this.offsetAndType[offset - 1] & OFFSET_MASK,
|
||||
this.offsetAndType[offset] & OFFSET_MASK,
|
||||
referenceStr
|
||||
);
|
||||
}
|
||||
|
||||
return false;
|
||||
},
|
||||
getTokenStart: function(tokenIndex) {
|
||||
if (tokenIndex === this.tokenIndex) {
|
||||
return this.tokenStart;
|
||||
}
|
||||
|
||||
if (tokenIndex > 0) {
|
||||
return tokenIndex < this.tokenCount
|
||||
? this.offsetAndType[tokenIndex - 1] & OFFSET_MASK
|
||||
: this.offsetAndType[this.tokenCount] & OFFSET_MASK;
|
||||
}
|
||||
|
||||
return this.firstCharOffset;
|
||||
},
|
||||
|
||||
// TODO: -> skipUntilBalanced
|
||||
getRawLength: function(startToken, mode) {
|
||||
var cursor = startToken;
|
||||
var balanceEnd;
|
||||
var offset = this.offsetAndType[Math.max(cursor - 1, 0)] & OFFSET_MASK;
|
||||
var type;
|
||||
|
||||
loop:
|
||||
for (; cursor < this.tokenCount; cursor++) {
|
||||
balanceEnd = this.balance[cursor];
|
||||
|
||||
// stop scanning on balance edge that points to offset before start token
|
||||
if (balanceEnd < startToken) {
|
||||
break loop;
|
||||
}
|
||||
|
||||
type = this.offsetAndType[cursor] >> TYPE_SHIFT;
|
||||
|
||||
// check token is stop type
|
||||
switch (mode(type, this.source, offset)) {
|
||||
case 1:
|
||||
break loop;
|
||||
|
||||
case 2:
|
||||
cursor++;
|
||||
break loop;
|
||||
|
||||
default:
|
||||
// fast forward to the end of balanced block
|
||||
if (this.balance[balanceEnd] === cursor) {
|
||||
cursor = balanceEnd;
|
||||
}
|
||||
|
||||
offset = this.offsetAndType[cursor] & OFFSET_MASK;
|
||||
}
|
||||
}
|
||||
|
||||
return cursor - this.tokenIndex;
|
||||
},
|
||||
isBalanceEdge: function(pos) {
|
||||
return this.balance[this.tokenIndex] < pos;
|
||||
},
|
||||
isDelim: function(code, offset) {
|
||||
if (offset) {
|
||||
return (
|
||||
this.lookupType(offset) === TYPE.Delim &&
|
||||
this.source.charCodeAt(this.lookupOffset(offset)) === code
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
this.tokenType === TYPE.Delim &&
|
||||
this.source.charCodeAt(this.tokenStart) === code
|
||||
);
|
||||
},
|
||||
|
||||
getTokenValue: function() {
|
||||
return this.source.substring(this.tokenStart, this.tokenEnd);
|
||||
},
|
||||
getTokenLength: function() {
|
||||
return this.tokenEnd - this.tokenStart;
|
||||
},
|
||||
substrToCursor: function(start) {
|
||||
return this.source.substring(start, this.tokenStart);
|
||||
},
|
||||
|
||||
skipWS: function() {
|
||||
for (var i = this.tokenIndex, skipTokenCount = 0; i < this.tokenCount; i++, skipTokenCount++) {
|
||||
if ((this.offsetAndType[i] >> TYPE_SHIFT) !== WHITESPACE) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (skipTokenCount > 0) {
|
||||
this.skip(skipTokenCount);
|
||||
}
|
||||
},
|
||||
skipSC: function() {
|
||||
while (this.tokenType === WHITESPACE || this.tokenType === COMMENT) {
|
||||
this.next();
|
||||
}
|
||||
},
|
||||
skip: function(tokenCount) {
|
||||
var next = this.tokenIndex + tokenCount;
|
||||
|
||||
if (next < this.tokenCount) {
|
||||
this.tokenIndex = next;
|
||||
this.tokenStart = this.offsetAndType[next - 1] & OFFSET_MASK;
|
||||
next = this.offsetAndType[next];
|
||||
this.tokenType = next >> TYPE_SHIFT;
|
||||
this.tokenEnd = next & OFFSET_MASK;
|
||||
} else {
|
||||
this.tokenIndex = this.tokenCount;
|
||||
this.next();
|
||||
}
|
||||
},
|
||||
next: function() {
|
||||
var next = this.tokenIndex + 1;
|
||||
|
||||
if (next < this.tokenCount) {
|
||||
this.tokenIndex = next;
|
||||
this.tokenStart = this.tokenEnd;
|
||||
next = this.offsetAndType[next];
|
||||
this.tokenType = next >> TYPE_SHIFT;
|
||||
this.tokenEnd = next & OFFSET_MASK;
|
||||
} else {
|
||||
this.tokenIndex = this.tokenCount;
|
||||
this.eof = true;
|
||||
this.tokenType = EOF;
|
||||
this.tokenStart = this.tokenEnd = this.source.length;
|
||||
}
|
||||
},
|
||||
|
||||
forEachToken(fn) {
|
||||
for (var i = 0, offset = this.firstCharOffset; i < this.tokenCount; i++) {
|
||||
var start = offset;
|
||||
var item = this.offsetAndType[i];
|
||||
var end = item & OFFSET_MASK;
|
||||
var type = item >> TYPE_SHIFT;
|
||||
|
||||
offset = end;
|
||||
|
||||
fn(type, start, end, i);
|
||||
}
|
||||
},
|
||||
|
||||
dump() {
|
||||
var tokens = new Array(this.tokenCount);
|
||||
|
||||
this.forEachToken((type, start, end, index) => {
|
||||
tokens[index] = {
|
||||
idx: index,
|
||||
type: NAME[type],
|
||||
chunk: this.source.substring(start, end),
|
||||
balance: this.balance[index]
|
||||
};
|
||||
});
|
||||
|
||||
return tokens;
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = TokenStream;
|
||||
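Each entry of offsetAndType packs a token's type into the top 8 bits and its end offset into the low 24 bits, which is what the TYPE_SHIFT and OFFSET_MASK constants above encode. A tiny sketch of the packing scheme (not code from the commit):

var OFFSET_MASK = 0x00FFFFFF;
var TYPE_SHIFT = 24;

function pack(type, endOffset) {
    return (type << TYPE_SHIFT) | endOffset;
}

var packed = pack(5, 1234);
console.log(packed >> TYPE_SHIFT); // 5    (token type)
console.log(packed & OFFSET_MASK); // 1234 (end offset of the token)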
10 node_modules/css-tree/lib/common/adopt-buffer.js generated vendored Normal file
@@ -0,0 +1,10 @@
var MIN_SIZE = 16 * 1024;
var SafeUint32Array = typeof Uint32Array !== 'undefined' ? Uint32Array : Array; // fallback on Array when TypedArray is not supported

module.exports = function adoptBuffer(buffer, size) {
    if (buffer === null || buffer.length < size) {
        return new SafeUint32Array(Math.max(size + 1024, MIN_SIZE));
    }

    return buffer;
};
28 node_modules/css-tree/lib/convertor/create.js generated vendored Normal file
@@ -0,0 +1,28 @@
var List = require('../common/List');

module.exports = function createConvertors(walk) {
    return {
        fromPlainObject: function(ast) {
            walk(ast, {
                enter: function(node) {
                    if (node.children && node.children instanceof List === false) {
                        node.children = new List().fromArray(node.children);
                    }
                }
            });

            return ast;
        },
        toPlainObject: function(ast) {
            walk(ast, {
                leave: function(node) {
                    if (node.children && node.children instanceof List) {
                        node.children = node.children.toArray();
                    }
                }
            });

            return ast;
        }
    };
};
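createConvertors is wired to the AST walker in lib/convertor/index.js below; on the public css-tree API the resulting functions are exposed as toPlainObject and fromPlainObject. A usage sketch, assuming the package's documented entry point:

var csstree = require('css-tree');

var ast = csstree.parse('.a { color: red }');
var plain = csstree.toPlainObject(ast);    // children: List -> plain arrays (JSON-friendly)
var back = csstree.fromPlainObject(plain); // children: arrays -> List instances again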
3 node_modules/css-tree/lib/convertor/index.js generated vendored Normal file
@@ -0,0 +1,3 @@
var createConvertor = require('./create');

module.exports = createConvertor(require('../walker'));
14 node_modules/css-tree/lib/definition-syntax/SyntaxError.js generated vendored Normal file
@@ -0,0 +1,14 @@
var createCustomError = require('../utils/createCustomError');

module.exports = function SyntaxError(message, input, offset) {
    var error = createCustomError('SyntaxError', message);

    error.input = input;
    error.offset = offset;
    error.rawMessage = message;
    error.message = error.rawMessage + '\n' +
        ' ' + error.input + '\n' +
        '--' + new Array((error.offset || error.input.length) + 1).join('-') + '^';

    return error;
};
129 node_modules/css-tree/lib/definition-syntax/generate.js generated vendored Normal file
@@ -0,0 +1,129 @@
function noop(value) {
|
||||
return value;
|
||||
}
|
||||
|
||||
function generateMultiplier(multiplier) {
|
||||
if (multiplier.min === 0 && multiplier.max === 0) {
|
||||
return '*';
|
||||
}
|
||||
|
||||
if (multiplier.min === 0 && multiplier.max === 1) {
|
||||
return '?';
|
||||
}
|
||||
|
||||
if (multiplier.min === 1 && multiplier.max === 0) {
|
||||
return multiplier.comma ? '#' : '+';
|
||||
}
|
||||
|
||||
if (multiplier.min === 1 && multiplier.max === 1) {
|
||||
return '';
|
||||
}
|
||||
|
||||
return (
|
||||
(multiplier.comma ? '#' : '') +
|
||||
(multiplier.min === multiplier.max
|
||||
? '{' + multiplier.min + '}'
|
||||
: '{' + multiplier.min + ',' + (multiplier.max !== 0 ? multiplier.max : '') + '}'
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
function generateTypeOpts(node) {
|
||||
switch (node.type) {
|
||||
case 'Range':
|
||||
return (
|
||||
' [' +
|
||||
(node.min === null ? '-∞' : node.min) +
|
||||
',' +
|
||||
(node.max === null ? '∞' : node.max) +
|
||||
']'
|
||||
);
|
||||
|
||||
default:
|
||||
throw new Error('Unknown node type `' + node.type + '`');
|
||||
}
|
||||
}
|
||||
|
||||
function generateSequence(node, decorate, forceBraces, compact) {
|
||||
var combinator = node.combinator === ' ' || compact ? node.combinator : ' ' + node.combinator + ' ';
|
||||
var result = node.terms.map(function(term) {
|
||||
return generate(term, decorate, forceBraces, compact);
|
||||
}).join(combinator);
|
||||
|
||||
if (node.explicit || forceBraces) {
|
||||
result = (compact || result[0] === ',' ? '[' : '[ ') + result + (compact ? ']' : ' ]');
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function generate(node, decorate, forceBraces, compact) {
|
||||
var result;
|
||||
|
||||
switch (node.type) {
|
||||
case 'Group':
|
||||
result =
|
||||
generateSequence(node, decorate, forceBraces, compact) +
|
||||
(node.disallowEmpty ? '!' : '');
|
||||
break;
|
||||
|
||||
case 'Multiplier':
|
||||
// return since node is a composition
|
||||
return (
|
||||
generate(node.term, decorate, forceBraces, compact) +
|
||||
decorate(generateMultiplier(node), node)
|
||||
);
|
||||
|
||||
case 'Type':
|
||||
result = '<' + node.name + (node.opts ? decorate(generateTypeOpts(node.opts), node.opts) : '') + '>';
|
||||
break;
|
||||
|
||||
case 'Property':
|
||||
result = '<\'' + node.name + '\'>';
|
||||
break;
|
||||
|
||||
case 'Keyword':
|
||||
result = node.name;
|
||||
break;
|
||||
|
||||
case 'AtKeyword':
|
||||
result = '@' + node.name;
|
||||
break;
|
||||
|
||||
case 'Function':
|
||||
result = node.name + '(';
|
||||
break;
|
||||
|
||||
case 'String':
|
||||
case 'Token':
|
||||
result = node.value;
|
||||
break;
|
||||
|
||||
case 'Comma':
|
||||
result = ',';
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new Error('Unknown node type `' + node.type + '`');
|
||||
}
|
||||
|
||||
return decorate(result, node);
|
||||
}
|
||||
|
||||
module.exports = function(node, options) {
|
||||
var decorate = noop;
|
||||
var forceBraces = false;
|
||||
var compact = false;
|
||||
|
||||
if (typeof options === 'function') {
|
||||
decorate = options;
|
||||
} else if (options) {
|
||||
forceBraces = Boolean(options.forceBraces);
|
||||
compact = Boolean(options.compact);
|
||||
if (typeof options.decorate === 'function') {
|
||||
decorate = options.decorate;
|
||||
}
|
||||
}
|
||||
|
||||
return generate(node, decorate, forceBraces, compact);
|
||||
};
|
||||
6 node_modules/css-tree/lib/definition-syntax/index.js generated vendored Normal file
@@ -0,0 +1,6 @@
module.exports = {
    SyntaxError: require('./SyntaxError'),
    parse: require('./parse'),
    generate: require('./generate'),
    walk: require('./walk')
};
568 node_modules/css-tree/lib/definition-syntax/parse.js generated vendored Normal file
@@ -0,0 +1,568 @@
var Tokenizer = require('./tokenizer');
|
||||
var TAB = 9;
|
||||
var N = 10;
|
||||
var F = 12;
|
||||
var R = 13;
|
||||
var SPACE = 32;
|
||||
var EXCLAMATIONMARK = 33; // !
|
||||
var NUMBERSIGN = 35; // #
|
||||
var AMPERSAND = 38; // &
|
||||
var APOSTROPHE = 39; // '
|
||||
var LEFTPARENTHESIS = 40; // (
|
||||
var RIGHTPARENTHESIS = 41; // )
|
||||
var ASTERISK = 42; // *
|
||||
var PLUSSIGN = 43; // +
|
||||
var COMMA = 44; // ,
|
||||
var HYPERMINUS = 45; // -
|
||||
var LESSTHANSIGN = 60; // <
|
||||
var GREATERTHANSIGN = 62; // >
|
||||
var QUESTIONMARK = 63; // ?
|
||||
var COMMERCIALAT = 64; // @
|
||||
var LEFTSQUAREBRACKET = 91; // [
|
||||
var RIGHTSQUAREBRACKET = 93; // ]
|
||||
var LEFTCURLYBRACKET = 123; // {
|
||||
var VERTICALLINE = 124; // |
|
||||
var RIGHTCURLYBRACKET = 125; // }
|
||||
var INFINITY = 8734; // ∞
|
||||
var NAME_CHAR = createCharMap(function(ch) {
|
||||
return /[a-zA-Z0-9\-]/.test(ch);
|
||||
});
|
||||
var COMBINATOR_PRECEDENCE = {
|
||||
' ': 1,
|
||||
'&&': 2,
|
||||
'||': 3,
|
||||
'|': 4
|
||||
};
|
||||
|
||||
function createCharMap(fn) {
|
||||
var array = typeof Uint32Array === 'function' ? new Uint32Array(128) : new Array(128);
|
||||
for (var i = 0; i < 128; i++) {
|
||||
array[i] = fn(String.fromCharCode(i)) ? 1 : 0;
|
||||
}
|
||||
return array;
|
||||
}
|
||||
|
||||
function scanSpaces(tokenizer) {
|
||||
return tokenizer.substringToPos(
|
||||
tokenizer.findWsEnd(tokenizer.pos)
|
||||
);
|
||||
}
|
||||
|
||||
function scanWord(tokenizer) {
|
||||
var end = tokenizer.pos;
|
||||
|
||||
for (; end < tokenizer.str.length; end++) {
|
||||
var code = tokenizer.str.charCodeAt(end);
|
||||
if (code >= 128 || NAME_CHAR[code] === 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (tokenizer.pos === end) {
|
||||
tokenizer.error('Expect a keyword');
|
||||
}
|
||||
|
||||
return tokenizer.substringToPos(end);
|
||||
}
|
||||
|
||||
function scanNumber(tokenizer) {
|
||||
var end = tokenizer.pos;
|
||||
|
||||
for (; end < tokenizer.str.length; end++) {
|
||||
var code = tokenizer.str.charCodeAt(end);
|
||||
if (code < 48 || code > 57) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (tokenizer.pos === end) {
|
||||
tokenizer.error('Expect a number');
|
||||
}
|
||||
|
||||
return tokenizer.substringToPos(end);
|
||||
}
|
||||
|
||||
function scanString(tokenizer) {
|
||||
var end = tokenizer.str.indexOf('\'', tokenizer.pos + 1);
|
||||
|
||||
if (end === -1) {
|
||||
tokenizer.pos = tokenizer.str.length;
|
||||
tokenizer.error('Expect an apostrophe');
|
||||
}
|
||||
|
||||
return tokenizer.substringToPos(end + 1);
|
||||
}
|
||||
|
||||
function readMultiplierRange(tokenizer) {
|
||||
var min = null;
|
||||
var max = null;
|
||||
|
||||
tokenizer.eat(LEFTCURLYBRACKET);
|
||||
|
||||
min = scanNumber(tokenizer);
|
||||
|
||||
if (tokenizer.charCode() === COMMA) {
|
||||
tokenizer.pos++;
|
||||
if (tokenizer.charCode() !== RIGHTCURLYBRACKET) {
|
||||
max = scanNumber(tokenizer);
|
||||
}
|
||||
} else {
|
||||
max = min;
|
||||
}
|
||||
|
||||
tokenizer.eat(RIGHTCURLYBRACKET);
|
||||
|
||||
return {
|
||||
min: Number(min),
|
||||
max: max ? Number(max) : 0
|
||||
};
|
||||
}
|
||||
|
||||
function readMultiplier(tokenizer) {
|
||||
var range = null;
|
||||
var comma = false;
|
||||
|
||||
switch (tokenizer.charCode()) {
|
||||
case ASTERISK:
|
||||
tokenizer.pos++;
|
||||
|
||||
range = {
|
||||
min: 0,
|
||||
max: 0
|
||||
};
|
||||
|
||||
break;
|
||||
|
||||
case PLUSSIGN:
|
||||
tokenizer.pos++;
|
||||
|
||||
range = {
|
||||
min: 1,
|
||||
max: 0
|
||||
};
|
||||
|
||||
break;
|
||||
|
||||
case QUESTIONMARK:
|
||||
tokenizer.pos++;
|
||||
|
||||
range = {
|
||||
min: 0,
|
||||
max: 1
|
||||
};
|
||||
|
||||
break;
|
||||
|
||||
case NUMBERSIGN:
|
||||
tokenizer.pos++;
|
||||
|
||||
comma = true;
|
||||
|
||||
if (tokenizer.charCode() === LEFTCURLYBRACKET) {
|
||||
range = readMultiplierRange(tokenizer);
|
||||
} else {
|
||||
range = {
|
||||
min: 1,
|
||||
max: 0
|
||||
};
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case LEFTCURLYBRACKET:
|
||||
range = readMultiplierRange(tokenizer);
|
||||
break;
|
||||
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Multiplier',
|
||||
comma: comma,
|
||||
min: range.min,
|
||||
max: range.max,
|
||||
term: null
|
||||
};
|
||||
}
|
||||
|
||||
function maybeMultiplied(tokenizer, node) {
|
||||
var multiplier = readMultiplier(tokenizer);
|
||||
|
||||
if (multiplier !== null) {
|
||||
multiplier.term = node;
|
||||
return multiplier;
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
function maybeToken(tokenizer) {
|
||||
var ch = tokenizer.peek();
|
||||
|
||||
if (ch === '') {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Token',
|
||||
value: ch
|
||||
};
|
||||
}
|
||||
|
||||
function readProperty(tokenizer) {
|
||||
var name;
|
||||
|
||||
tokenizer.eat(LESSTHANSIGN);
|
||||
tokenizer.eat(APOSTROPHE);
|
||||
|
||||
name = scanWord(tokenizer);
|
||||
|
||||
tokenizer.eat(APOSTROPHE);
|
||||
tokenizer.eat(GREATERTHANSIGN);
|
||||
|
||||
return maybeMultiplied(tokenizer, {
|
||||
type: 'Property',
|
||||
name: name
|
||||
});
|
||||
}
|
||||
|
||||
// https://drafts.csswg.org/css-values-3/#numeric-ranges
|
||||
// 4.1. Range Restrictions and Range Definition Notation
|
||||
//
|
||||
// Range restrictions can be annotated in the numeric type notation using CSS bracketed
|
||||
// range notation—[min,max]—within the angle brackets, after the identifying keyword,
|
||||
// indicating a closed range between (and including) min and max.
|
||||
// For example, <integer [0, 10]> indicates an integer between 0 and 10, inclusive.
|
||||
function readTypeRange(tokenizer) {
|
||||
// use null for Infinity to make AST format JSON serializable/deserializable
|
||||
var min = null; // -Infinity
|
||||
var max = null; // Infinity
|
||||
var sign = 1;
|
||||
|
||||
tokenizer.eat(LEFTSQUAREBRACKET);
|
||||
|
||||
if (tokenizer.charCode() === HYPERMINUS) {
|
||||
tokenizer.peek();
|
||||
sign = -1;
|
||||
}
|
||||
|
||||
if (sign == -1 && tokenizer.charCode() === INFINITY) {
|
||||
tokenizer.peek();
|
||||
} else {
|
||||
min = sign * Number(scanNumber(tokenizer));
|
||||
}
|
||||
|
||||
scanSpaces(tokenizer);
|
||||
tokenizer.eat(COMMA);
|
||||
scanSpaces(tokenizer);
|
||||
|
||||
if (tokenizer.charCode() === INFINITY) {
|
||||
tokenizer.peek();
|
||||
} else {
|
||||
sign = 1;
|
||||
|
||||
if (tokenizer.charCode() === HYPERMINUS) {
|
||||
tokenizer.peek();
|
||||
sign = -1;
|
||||
}
|
||||
|
||||
max = sign * Number(scanNumber(tokenizer));
|
||||
}
|
||||
|
||||
tokenizer.eat(RIGHTSQUAREBRACKET);
|
||||
|
||||
// If no range is indicated, either by using the bracketed range notation
|
||||
// or in the property description, then [−∞,∞] is assumed.
|
||||
if (min === null && max === null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Range',
|
||||
min: min,
|
||||
max: max
|
||||
};
|
||||
}
|
||||
|
||||
function readType(tokenizer) {
|
||||
var name;
|
||||
var opts = null;
|
||||
|
||||
tokenizer.eat(LESSTHANSIGN);
|
||||
name = scanWord(tokenizer);
|
||||
|
||||
if (tokenizer.charCode() === LEFTPARENTHESIS &&
|
||||
tokenizer.nextCharCode() === RIGHTPARENTHESIS) {
|
||||
tokenizer.pos += 2;
|
||||
name += '()';
|
||||
}
|
||||
|
||||
if (tokenizer.charCodeAt(tokenizer.findWsEnd(tokenizer.pos)) === LEFTSQUAREBRACKET) {
|
||||
scanSpaces(tokenizer);
|
||||
opts = readTypeRange(tokenizer);
|
||||
}
|
||||
|
||||
tokenizer.eat(GREATERTHANSIGN);
|
||||
|
||||
return maybeMultiplied(tokenizer, {
|
||||
type: 'Type',
|
||||
name: name,
|
||||
opts: opts
|
||||
});
|
||||
}
|
||||
|
||||
function readKeywordOrFunction(tokenizer) {
|
||||
var name;
|
||||
|
||||
name = scanWord(tokenizer);
|
||||
|
||||
if (tokenizer.charCode() === LEFTPARENTHESIS) {
|
||||
tokenizer.pos++;
|
||||
|
||||
return {
|
||||
type: 'Function',
|
||||
name: name
|
||||
};
|
||||
}
|
||||
|
||||
return maybeMultiplied(tokenizer, {
|
||||
type: 'Keyword',
|
||||
name: name
|
||||
});
|
||||
}
|
||||
|
||||
function regroupTerms(terms, combinators) {
|
||||
function createGroup(terms, combinator) {
|
||||
return {
|
||||
type: 'Group',
|
||||
terms: terms,
|
||||
combinator: combinator,
|
||||
disallowEmpty: false,
|
||||
explicit: false
|
||||
};
|
||||
}
|
||||
|
||||
combinators = Object.keys(combinators).sort(function(a, b) {
|
||||
return COMBINATOR_PRECEDENCE[a] - COMBINATOR_PRECEDENCE[b];
|
||||
});
|
||||
|
||||
while (combinators.length > 0) {
|
||||
var combinator = combinators.shift();
|
||||
for (var i = 0, subgroupStart = 0; i < terms.length; i++) {
|
||||
var term = terms[i];
|
||||
if (term.type === 'Combinator') {
|
||||
if (term.value === combinator) {
|
||||
if (subgroupStart === -1) {
|
||||
subgroupStart = i - 1;
|
||||
}
|
||||
terms.splice(i, 1);
|
||||
i--;
|
||||
} else {
|
||||
if (subgroupStart !== -1 && i - subgroupStart > 1) {
|
||||
terms.splice(
|
||||
subgroupStart,
|
||||
i - subgroupStart,
|
||||
createGroup(terms.slice(subgroupStart, i), combinator)
|
||||
);
|
||||
i = subgroupStart + 1;
|
||||
}
|
||||
subgroupStart = -1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (subgroupStart !== -1 && combinators.length) {
|
||||
terms.splice(
|
||||
subgroupStart,
|
||||
i - subgroupStart,
|
||||
createGroup(terms.slice(subgroupStart, i), combinator)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return combinator;
|
||||
}
|
||||
|
||||
function readImplicitGroup(tokenizer) {
|
||||
var terms = [];
|
||||
var combinators = {};
|
||||
var token;
|
||||
var prevToken = null;
|
||||
var prevTokenPos = tokenizer.pos;
|
||||
|
||||
while (token = peek(tokenizer)) {
|
||||
if (token.type !== 'Spaces') {
|
||||
if (token.type === 'Combinator') {
|
||||
// check for combinator in group beginning and double combinator sequence
|
||||
if (prevToken === null || prevToken.type === 'Combinator') {
|
||||
tokenizer.pos = prevTokenPos;
|
||||
tokenizer.error('Unexpected combinator');
|
||||
}
|
||||
|
||||
combinators[token.value] = true;
|
||||
} else if (prevToken !== null && prevToken.type !== 'Combinator') {
|
||||
combinators[' '] = true; // a b
|
||||
terms.push({
|
||||
type: 'Combinator',
|
||||
value: ' '
|
||||
});
|
||||
}
|
||||
|
||||
terms.push(token);
|
||||
prevToken = token;
|
||||
prevTokenPos = tokenizer.pos;
|
||||
}
|
||||
}
|
||||
|
||||
// check for combinator in group ending
|
||||
if (prevToken !== null && prevToken.type === 'Combinator') {
|
||||
tokenizer.pos -= prevTokenPos;
|
||||
tokenizer.error('Unexpected combinator');
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Group',
|
||||
terms: terms,
|
||||
combinator: regroupTerms(terms, combinators) || ' ',
|
||||
disallowEmpty: false,
|
||||
explicit: false
|
||||
};
|
||||
}
|
||||
|
||||
function readGroup(tokenizer) {
|
||||
var result;
|
||||
|
||||
tokenizer.eat(LEFTSQUAREBRACKET);
|
||||
result = readImplicitGroup(tokenizer);
|
||||
tokenizer.eat(RIGHTSQUAREBRACKET);
|
||||
|
||||
result.explicit = true;
|
||||
|
||||
if (tokenizer.charCode() === EXCLAMATIONMARK) {
|
||||
tokenizer.pos++;
|
||||
result.disallowEmpty = true;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function peek(tokenizer) {
|
||||
var code = tokenizer.charCode();
|
||||
|
||||
if (code < 128 && NAME_CHAR[code] === 1) {
|
||||
return readKeywordOrFunction(tokenizer);
|
||||
}
|
||||
|
||||
switch (code) {
|
||||
case RIGHTSQUAREBRACKET:
|
||||
// don't eat, stop scan a group
|
||||
break;
|
||||
|
||||
case LEFTSQUAREBRACKET:
|
||||
return maybeMultiplied(tokenizer, readGroup(tokenizer));
|
||||
|
||||
case LESSTHANSIGN:
|
||||
return tokenizer.nextCharCode() === APOSTROPHE
|
||||
? readProperty(tokenizer)
|
||||
: readType(tokenizer);
|
||||
|
||||
case VERTICALLINE:
|
||||
return {
|
||||
type: 'Combinator',
|
||||
value: tokenizer.substringToPos(
|
||||
tokenizer.nextCharCode() === VERTICALLINE
|
||||
? tokenizer.pos + 2
|
||||
: tokenizer.pos + 1
|
||||
)
|
||||
};
|
||||
|
||||
case AMPERSAND:
|
||||
tokenizer.pos++;
|
||||
tokenizer.eat(AMPERSAND);
|
||||
|
||||
return {
|
||||
type: 'Combinator',
|
||||
value: '&&'
|
||||
};
|
||||
|
||||
case COMMA:
|
||||
tokenizer.pos++;
|
||||
return {
|
||||
type: 'Comma'
|
||||
};
|
||||
|
||||
case APOSTROPHE:
|
||||
return maybeMultiplied(tokenizer, {
|
||||
type: 'String',
|
||||
value: scanString(tokenizer)
|
||||
});
|
||||
|
||||
case SPACE:
|
||||
case TAB:
|
||||
case N:
|
||||
case R:
|
||||
case F:
|
||||
return {
|
||||
type: 'Spaces',
|
||||
value: scanSpaces(tokenizer)
|
||||
};
|
||||
|
||||
case COMMERCIALAT:
|
||||
code = tokenizer.nextCharCode();
|
||||
|
||||
if (code < 128 && NAME_CHAR[code] === 1) {
|
||||
tokenizer.pos++;
|
||||
return {
|
||||
type: 'AtKeyword',
|
||||
name: scanWord(tokenizer)
|
||||
};
|
||||
}
|
||||
|
||||
return maybeToken(tokenizer);
|
||||
|
||||
case ASTERISK:
|
||||
case PLUSSIGN:
|
||||
case QUESTIONMARK:
|
||||
case NUMBERSIGN:
|
||||
case EXCLAMATIONMARK:
|
||||
// prohibited tokens (used as a multiplier start)
|
||||
break;
|
||||
|
||||
case LEFTCURLYBRACKET:
|
||||
// LEFTCURLYBRACKET is allowed since mdn/data uses it w/o quoting
|
||||
// check next char isn't a number, because it's likely a disjoined multiplier
|
||||
code = tokenizer.nextCharCode();
|
||||
|
||||
if (code < 48 || code > 57) {
|
||||
return maybeToken(tokenizer);
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
default:
|
||||
return maybeToken(tokenizer);
|
||||
}
|
||||
}
|
||||
|
||||
function parse(source) {
|
||||
var tokenizer = new Tokenizer(source);
|
||||
var result = readImplicitGroup(tokenizer);
|
||||
|
||||
if (tokenizer.pos !== source.length) {
|
||||
tokenizer.error('Unexpected input');
|
||||
}
|
||||
|
||||
// reduce redundant groups with single group term
|
||||
if (result.terms.length === 1 && result.terms[0].type === 'Group') {
|
||||
result = result.terms[0];
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// warm up parse to elimitate code branches that never execute
|
||||
// fix soft deoptimizations (insufficient type feedback)
|
||||
parse('[a&&<b>#|<\'c\'>*||e() f{2} /,(% g#{1,2} h{2,})]!');
|
||||
|
||||
module.exports = parse;
|
||||
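parse() turns a CSS value definition string into an AST of Group/Type/Keyword/Multiplier nodes, and generate() (earlier in this diff) serializes such an AST back to a string. A round-trip sketch, assuming the vendored definition-syntax entry point:

var defSyntax = require('./node_modules/css-tree/lib/definition-syntax');

var ast = defSyntax.parse('<number>{1,4}');
console.log(ast.type);                // 'Group' (the implicit top-level group)
console.log(defSyntax.generate(ast)); // '<number>{1,4}'

console.log(defSyntax.generate(defSyntax.parse('a | b | c'))); // 'a | b | c'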
55 node_modules/css-tree/lib/definition-syntax/tokenizer.js generated vendored Normal file
@@ -0,0 +1,55 @@
var SyntaxError = require('./SyntaxError');
|
||||
|
||||
var TAB = 9;
|
||||
var N = 10;
|
||||
var F = 12;
|
||||
var R = 13;
|
||||
var SPACE = 32;
|
||||
|
||||
var Tokenizer = function(str) {
|
||||
this.str = str;
|
||||
this.pos = 0;
|
||||
};
|
||||
|
||||
Tokenizer.prototype = {
|
||||
charCodeAt: function(pos) {
|
||||
return pos < this.str.length ? this.str.charCodeAt(pos) : 0;
|
||||
},
|
||||
charCode: function() {
|
||||
return this.charCodeAt(this.pos);
|
||||
},
|
||||
nextCharCode: function() {
|
||||
return this.charCodeAt(this.pos + 1);
|
||||
},
|
||||
nextNonWsCode: function(pos) {
|
||||
return this.charCodeAt(this.findWsEnd(pos));
|
||||
},
|
||||
findWsEnd: function(pos) {
|
||||
for (; pos < this.str.length; pos++) {
|
||||
var code = this.str.charCodeAt(pos);
|
||||
if (code !== R && code !== N && code !== F && code !== SPACE && code !== TAB) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return pos;
|
||||
},
|
||||
substringToPos: function(end) {
|
||||
return this.str.substring(this.pos, this.pos = end);
|
||||
},
|
||||
eat: function(code) {
|
||||
if (this.charCode() !== code) {
|
||||
this.error('Expect `' + String.fromCharCode(code) + '`');
|
||||
}
|
||||
|
||||
this.pos++;
|
||||
},
|
||||
peek: function() {
|
||||
return this.pos < this.str.length ? this.str.charAt(this.pos++) : '';
|
||||
},
|
||||
error: function(message) {
|
||||
throw new SyntaxError(message, this.str, this.pos);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = Tokenizer;
|
||||
52 node_modules/css-tree/lib/definition-syntax/walk.js generated vendored Normal file
@@ -0,0 +1,52 @@
var noop = function() {};
|
||||
|
||||
function ensureFunction(value) {
|
||||
return typeof value === 'function' ? value : noop;
|
||||
}
|
||||
|
||||
module.exports = function(node, options, context) {
|
||||
function walk(node) {
|
||||
enter.call(context, node);
|
||||
|
||||
switch (node.type) {
|
||||
case 'Group':
|
||||
node.terms.forEach(walk);
|
||||
break;
|
||||
|
||||
case 'Multiplier':
|
||||
walk(node.term);
|
||||
break;
|
||||
|
||||
case 'Type':
|
||||
case 'Property':
|
||||
case 'Keyword':
|
||||
case 'AtKeyword':
|
||||
case 'Function':
|
||||
case 'String':
|
||||
case 'Token':
|
||||
case 'Comma':
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new Error('Unknown type: ' + node.type);
|
||||
}
|
||||
|
||||
leave.call(context, node);
|
||||
}
|
||||
|
||||
var enter = noop;
|
||||
var leave = noop;
|
||||
|
||||
if (typeof options === 'function') {
|
||||
enter = options;
|
||||
} else if (options) {
|
||||
enter = ensureFunction(options.enter);
|
||||
leave = ensureFunction(options.leave);
|
||||
}
|
||||
|
||||
if (enter === noop && leave === noop) {
|
||||
throw new Error('Neither `enter` nor `leave` walker handler is set or both aren\'t a function');
|
||||
}
|
||||
|
||||
walk(node, context);
|
||||
};
|
||||
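The walker visits every node of a parsed definition, calling the given enter/leave handlers. A short sketch (same assumed entry point as above):

var defSyntax = require('./node_modules/css-tree/lib/definition-syntax');

var ast = defSyntax.parse('<length> | <percentage>');

defSyntax.walk(ast, function(node) {
    console.log(node.type); // Group, Type, Type
});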
66 node_modules/css-tree/lib/generator/create.js generated vendored Normal file
@@ -0,0 +1,66 @@
var sourceMap = require('./sourceMap');
|
||||
var hasOwnProperty = Object.prototype.hasOwnProperty;
|
||||
|
||||
function processChildren(node, delimeter) {
|
||||
var list = node.children;
|
||||
var prev = null;
|
||||
|
||||
if (typeof delimeter !== 'function') {
|
||||
list.forEach(this.node, this);
|
||||
} else {
|
||||
list.forEach(function(node) {
|
||||
if (prev !== null) {
|
||||
delimeter.call(this, prev);
|
||||
}
|
||||
|
||||
this.node(node);
|
||||
prev = node;
|
||||
}, this);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = function createGenerator(config) {
|
||||
function processNode(node) {
|
||||
if (hasOwnProperty.call(types, node.type)) {
|
||||
types[node.type].call(this, node);
|
||||
} else {
|
||||
throw new Error('Unknown node type: ' + node.type);
|
||||
}
|
||||
}
|
||||
|
||||
var types = {};
|
||||
|
||||
if (config.node) {
|
||||
for (var name in config.node) {
|
||||
types[name] = config.node[name].generate;
|
||||
}
|
||||
}
|
||||
|
||||
return function(node, options) {
|
||||
var buffer = '';
|
||||
var handlers = {
|
||||
children: processChildren,
|
||||
node: processNode,
|
||||
chunk: function(chunk) {
|
||||
buffer += chunk;
|
||||
},
|
||||
result: function() {
|
||||
return buffer;
|
||||
}
|
||||
};
|
||||
|
||||
if (options) {
|
||||
if (typeof options.decorator === 'function') {
|
||||
handlers = options.decorator(handlers);
|
||||
}
|
||||
|
||||
if (options.sourceMap) {
|
||||
handlers = sourceMap(handlers);
|
||||
}
|
||||
}
|
||||
|
||||
handlers.node(node);
|
||||
|
||||
return handlers.result();
|
||||
};
|
||||
};
|
||||
4 node_modules/css-tree/lib/generator/index.js generated vendored Normal file
@@ -0,0 +1,4 @@
var createGenerator = require('./create');
var config = require('../syntax/config/parser');

module.exports = createGenerator(config);
95 node_modules/css-tree/lib/generator/sourceMap.js generated vendored Normal file
@@ -0,0 +1,95 @@
var SourceMapGenerator = require('source-map/lib/source-map-generator').SourceMapGenerator;
|
||||
var trackNodes = {
|
||||
Atrule: true,
|
||||
Selector: true,
|
||||
Declaration: true
|
||||
};
|
||||
|
||||
module.exports = function generateSourceMap(handlers) {
|
||||
var map = new SourceMapGenerator();
|
||||
var line = 1;
|
||||
var column = 0;
|
||||
var generated = {
|
||||
line: 1,
|
||||
column: 0
|
||||
};
|
||||
var original = {
|
||||
line: 0, // should be zero to add first mapping
|
||||
column: 0
|
||||
};
|
||||
var sourceMappingActive = false;
|
||||
var activatedGenerated = {
|
||||
line: 1,
|
||||
column: 0
|
||||
};
|
||||
var activatedMapping = {
|
||||
generated: activatedGenerated
|
||||
};
|
||||
|
||||
var handlersNode = handlers.node;
|
||||
handlers.node = function(node) {
|
||||
if (node.loc && node.loc.start && trackNodes.hasOwnProperty(node.type)) {
|
||||
var nodeLine = node.loc.start.line;
|
||||
var nodeColumn = node.loc.start.column - 1;
|
||||
|
||||
if (original.line !== nodeLine ||
|
||||
original.column !== nodeColumn) {
|
||||
original.line = nodeLine;
|
||||
original.column = nodeColumn;
|
||||
|
||||
generated.line = line;
|
||||
generated.column = column;
|
||||
|
||||
if (sourceMappingActive) {
|
||||
sourceMappingActive = false;
|
||||
if (generated.line !== activatedGenerated.line ||
|
||||
generated.column !== activatedGenerated.column) {
|
||||
map.addMapping(activatedMapping);
|
||||
}
|
||||
}
|
||||
|
||||
sourceMappingActive = true;
|
||||
map.addMapping({
|
||||
source: node.loc.source,
|
||||
original: original,
|
||||
generated: generated
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
handlersNode.call(this, node);
|
||||
|
||||
if (sourceMappingActive && trackNodes.hasOwnProperty(node.type)) {
|
||||
activatedGenerated.line = line;
|
||||
activatedGenerated.column = column;
|
||||
}
|
||||
};
|
||||
|
||||
var handlersChunk = handlers.chunk;
|
||||
handlers.chunk = function(chunk) {
|
||||
for (var i = 0; i < chunk.length; i++) {
|
||||
if (chunk.charCodeAt(i) === 10) { // \n
|
||||
line++;
|
||||
column = 0;
|
||||
} else {
|
||||
column++;
|
||||
}
|
||||
}
|
||||
|
||||
handlersChunk(chunk);
|
||||
};
|
||||
|
||||
var handlersResult = handlers.result;
|
||||
handlers.result = function() {
|
||||
if (sourceMappingActive) {
|
||||
map.addMapping(activatedMapping);
|
||||
}
|
||||
|
||||
return {
|
||||
css: handlersResult(),
|
||||
map: map
|
||||
};
|
||||
};
|
||||
|
||||
return handlers;
|
||||
};
|
||||
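When the generator is created with a sourceMap option, the chunk/result handlers above are wrapped so that Atrule, Selector and Declaration nodes emit mappings. On the public API this looks roughly like the following sketch; option names follow css-tree's documented generate API and are not part of this commit's text:

var csstree = require('css-tree');

var ast = csstree.parse('.a { color: red }', { positions: true });
var result = csstree.generate(ast, { sourceMap: true });

console.log(result.css);            // generated CSS string
console.log(result.map.toString()); // serialized source map (a SourceMapGenerator)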
1 node_modules/css-tree/lib/index.js generated vendored Normal file
@@ -0,0 +1 @@
module.exports = require('./syntax');
466 node_modules/css-tree/lib/lexer/Lexer.js generated vendored Normal file
@@ -0,0 +1,466 @@
var SyntaxReferenceError = require('./error').SyntaxReferenceError;
|
||||
var SyntaxMatchError = require('./error').SyntaxMatchError;
|
||||
var names = require('../utils/names');
|
||||
var generic = require('./generic');
|
||||
var parse = require('../definition-syntax/parse');
|
||||
var generate = require('../definition-syntax/generate');
|
||||
var walk = require('../definition-syntax/walk');
|
||||
var prepareTokens = require('./prepare-tokens');
|
||||
var buildMatchGraph = require('./match-graph').buildMatchGraph;
|
||||
var matchAsTree = require('./match').matchAsTree;
|
||||
var trace = require('./trace');
|
||||
var search = require('./search');
|
||||
var getStructureFromConfig = require('./structure').getStructureFromConfig;
|
||||
var cssWideKeywords = buildMatchGraph('inherit | initial | unset');
|
||||
var cssWideKeywordsWithExpression = buildMatchGraph('inherit | initial | unset | <-ms-legacy-expression>');
|
||||
|
||||
function dumpMapSyntax(map, compact, syntaxAsAst) {
|
||||
var result = {};
|
||||
|
||||
for (var name in map) {
|
||||
if (map[name].syntax) {
|
||||
result[name] = syntaxAsAst
|
||||
? map[name].syntax
|
||||
: generate(map[name].syntax, { compact: compact });
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function dumpAtruleMapSyntax(map, compact, syntaxAsAst) {
|
||||
const result = {};
|
||||
|
||||
for (const [name, atrule] of Object.entries(map)) {
|
||||
result[name] = {
|
||||
prelude: atrule.prelude && (
|
||||
syntaxAsAst
|
||||
? atrule.prelude.syntax
|
||||
: generate(atrule.prelude.syntax, { compact })
|
||||
),
|
||||
descriptors: atrule.descriptors && dumpMapSyntax(atrule.descriptors, compact, syntaxAsAst)
|
||||
};
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function valueHasVar(tokens) {
|
||||
for (var i = 0; i < tokens.length; i++) {
|
||||
if (tokens[i].value.toLowerCase() === 'var(') {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function buildMatchResult(match, error, iterations) {
|
||||
return {
|
||||
matched: match,
|
||||
iterations: iterations,
|
||||
error: error,
|
||||
getTrace: trace.getTrace,
|
||||
isType: trace.isType,
|
||||
isProperty: trace.isProperty,
|
||||
isKeyword: trace.isKeyword
|
||||
};
|
||||
}
|
||||
|
||||
function matchSyntax(lexer, syntax, value, useCommon) {
|
||||
var tokens = prepareTokens(value, lexer.syntax);
|
||||
var result;
|
||||
|
||||
if (valueHasVar(tokens)) {
|
||||
return buildMatchResult(null, new Error('Matching for a tree with var() is not supported'));
|
||||
}
|
||||
|
||||
if (useCommon) {
|
||||
result = matchAsTree(tokens, lexer.valueCommonSyntax, lexer);
|
||||
}
|
||||
|
||||
if (!useCommon || !result.match) {
|
||||
result = matchAsTree(tokens, syntax.match, lexer);
|
||||
if (!result.match) {
|
||||
return buildMatchResult(
|
||||
null,
|
||||
new SyntaxMatchError(result.reason, syntax.syntax, value, result),
|
||||
result.iterations
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return buildMatchResult(result.match, null, result.iterations);
|
||||
}
|
||||
|
||||
var Lexer = function(config, syntax, structure) {
|
||||
this.valueCommonSyntax = cssWideKeywords;
|
||||
this.syntax = syntax;
|
||||
this.generic = false;
|
||||
this.atrules = {};
|
||||
this.properties = {};
|
||||
this.types = {};
|
||||
this.structure = structure || getStructureFromConfig(config);
|
||||
|
||||
if (config) {
|
||||
if (config.types) {
|
||||
for (var name in config.types) {
|
||||
this.addType_(name, config.types[name]);
|
||||
}
|
||||
}
|
||||
|
||||
if (config.generic) {
|
||||
this.generic = true;
|
||||
for (var name in generic) {
|
||||
this.addType_(name, generic[name]);
|
||||
}
|
||||
}
|
||||
|
||||
if (config.atrules) {
|
||||
for (var name in config.atrules) {
|
||||
this.addAtrule_(name, config.atrules[name]);
|
||||
}
|
||||
}
|
||||
|
||||
if (config.properties) {
|
||||
for (var name in config.properties) {
|
||||
this.addProperty_(name, config.properties[name]);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Lexer.prototype = {
|
||||
structure: {},
|
||||
checkStructure: function(ast) {
|
||||
function collectWarning(node, message) {
|
||||
warns.push({
|
||||
node: node,
|
||||
message: message
|
||||
});
|
||||
}
|
||||
|
||||
var structure = this.structure;
|
||||
var warns = [];
|
||||
|
||||
this.syntax.walk(ast, function(node) {
|
||||
if (structure.hasOwnProperty(node.type)) {
|
||||
structure[node.type].check(node, collectWarning);
|
||||
} else {
|
||||
collectWarning(node, 'Unknown node type `' + node.type + '`');
|
||||
}
|
||||
});
|
||||
|
||||
return warns.length ? warns : false;
|
||||
},
|
||||
|
||||
createDescriptor: function(syntax, type, name, parent = null) {
|
||||
var ref = {
|
||||
type: type,
|
||||
name: name
|
||||
};
|
||||
var descriptor = {
|
||||
type: type,
|
||||
name: name,
|
||||
parent: parent,
|
||||
syntax: null,
|
||||
match: null
|
||||
};
|
||||
|
||||
if (typeof syntax === 'function') {
|
||||
descriptor.match = buildMatchGraph(syntax, ref);
|
||||
} else {
|
||||
if (typeof syntax === 'string') {
|
||||
// lazy parsing on first access
|
||||
Object.defineProperty(descriptor, 'syntax', {
|
||||
get: function() {
|
||||
Object.defineProperty(descriptor, 'syntax', {
|
||||
value: parse(syntax)
|
||||
});
|
||||
|
||||
return descriptor.syntax;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
descriptor.syntax = syntax;
|
||||
}
|
||||
|
||||
// lazy graph build on first access
|
||||
Object.defineProperty(descriptor, 'match', {
|
||||
get: function() {
|
||||
Object.defineProperty(descriptor, 'match', {
|
||||
value: buildMatchGraph(descriptor.syntax, ref)
|
||||
});
|
||||
|
||||
return descriptor.match;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return descriptor;
|
||||
},
|
||||
addAtrule_: function(name, syntax) {
|
||||
if (!syntax) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.atrules[name] = {
|
||||
type: 'Atrule',
|
||||
name: name,
|
||||
prelude: syntax.prelude ? this.createDescriptor(syntax.prelude, 'AtrulePrelude', name) : null,
|
||||
descriptors: syntax.descriptors
|
||||
? Object.keys(syntax.descriptors).reduce((res, descName) => {
|
||||
res[descName] = this.createDescriptor(syntax.descriptors[descName], 'AtruleDescriptor', descName, name);
|
||||
return res;
|
||||
}, {})
|
||||
: null
|
||||
};
|
||||
},
|
||||
addProperty_: function(name, syntax) {
|
||||
if (!syntax) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.properties[name] = this.createDescriptor(syntax, 'Property', name);
|
||||
},
|
||||
addType_: function(name, syntax) {
|
||||
if (!syntax) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.types[name] = this.createDescriptor(syntax, 'Type', name);
|
||||
|
||||
if (syntax === generic['-ms-legacy-expression']) {
|
||||
this.valueCommonSyntax = cssWideKeywordsWithExpression;
|
||||
}
|
||||
},
|
||||
|
||||
checkAtruleName: function(atruleName) {
|
||||
if (!this.getAtrule(atruleName)) {
|
||||
return new SyntaxReferenceError('Unknown at-rule', '@' + atruleName);
|
||||
}
|
||||
},
|
||||
checkAtrulePrelude: function(atruleName, prelude) {
|
||||
let error = this.checkAtruleName(atruleName);
|
||||
|
||||
if (error) {
|
||||
return error;
|
||||
}
|
||||
|
||||
var atrule = this.getAtrule(atruleName);
|
||||
|
||||
if (!atrule.prelude && prelude) {
|
||||
return new SyntaxError('At-rule `@' + atruleName + '` should not contain a prelude');
|
||||
}
|
||||
|
||||
if (atrule.prelude && !prelude) {
|
||||
return new SyntaxError('At-rule `@' + atruleName + '` should contain a prelude');
|
||||
}
|
||||
},
|
||||
checkAtruleDescriptorName: function(atruleName, descriptorName) {
|
||||
let error = this.checkAtruleName(atruleName);
|
||||
|
||||
if (error) {
|
||||
return error;
|
||||
}
|
||||
|
||||
var atrule = this.getAtrule(atruleName);
|
||||
var descriptor = names.keyword(descriptorName);
|
||||
|
||||
if (!atrule.descriptors) {
|
||||
return new SyntaxError('At-rule `@' + atruleName + '` has no known descriptors');
|
||||
}
|
||||
|
||||
if (!atrule.descriptors[descriptor.name] &&
|
||||
!atrule.descriptors[descriptor.basename]) {
|
||||
return new SyntaxReferenceError('Unknown at-rule descriptor', descriptorName);
|
||||
}
|
||||
},
|
||||
checkPropertyName: function(propertyName) {
|
||||
var property = names.property(propertyName);
|
||||
|
||||
// don't match syntax for a custom property
|
||||
if (property.custom) {
|
||||
return new Error('Lexer matching doesn\'t applicable for custom properties');
|
||||
}
|
||||
|
||||
if (!this.getProperty(propertyName)) {
|
||||
return new SyntaxReferenceError('Unknown property', propertyName);
|
||||
}
|
||||
},
|
||||
|
||||
matchAtrulePrelude: function(atruleName, prelude) {
|
||||
var error = this.checkAtrulePrelude(atruleName, prelude);
|
||||
|
||||
if (error) {
|
||||
return buildMatchResult(null, error);
|
||||
}
|
||||
|
||||
if (!prelude) {
|
||||
return buildMatchResult(null, null);
|
||||
}
|
||||
|
||||
return matchSyntax(this, this.getAtrule(atruleName).prelude, prelude, false);
|
||||
},
|
||||
matchAtruleDescriptor: function(atruleName, descriptorName, value) {
|
||||
var error = this.checkAtruleDescriptorName(atruleName, descriptorName);
|
||||
|
||||
if (error) {
|
||||
return buildMatchResult(null, error);
|
||||
}
|
||||
|
||||
var atrule = this.getAtrule(atruleName);
|
||||
var descriptor = names.keyword(descriptorName);
|
||||
|
||||
return matchSyntax(this, atrule.descriptors[descriptor.name] || atrule.descriptors[descriptor.basename], value, false);
|
||||
},
|
||||
matchDeclaration: function(node) {
|
||||
if (node.type !== 'Declaration') {
|
||||
return buildMatchResult(null, new Error('Not a Declaration node'));
|
||||
}
|
||||
|
||||
return this.matchProperty(node.property, node.value);
|
||||
},
|
||||
matchProperty: function(propertyName, value) {
|
||||
var error = this.checkPropertyName(propertyName);
|
||||
|
||||
if (error) {
|
||||
return buildMatchResult(null, error);
|
||||
}
|
||||
|
||||
return matchSyntax(this, this.getProperty(propertyName), value, true);
|
||||
},
|
||||
matchType: function(typeName, value) {
|
||||
var typeSyntax = this.getType(typeName);
|
||||
|
||||
if (!typeSyntax) {
|
||||
return buildMatchResult(null, new SyntaxReferenceError('Unknown type', typeName));
|
||||
}
|
||||
|
||||
return matchSyntax(this, typeSyntax, value, false);
|
||||
},
|
||||
match: function(syntax, value) {
|
||||
if (typeof syntax !== 'string' && (!syntax || !syntax.type)) {
|
||||
return buildMatchResult(null, new SyntaxReferenceError('Bad syntax'));
|
||||
}
|
||||
|
||||
if (typeof syntax === 'string' || !syntax.match) {
|
||||
syntax = this.createDescriptor(syntax, 'Type', 'anonymous');
|
||||
}
|
||||
|
||||
return matchSyntax(this, syntax, value, false);
|
||||
},
|
||||
|
||||
findValueFragments: function(propertyName, value, type, name) {
|
||||
return search.matchFragments(this, value, this.matchProperty(propertyName, value), type, name);
|
||||
},
|
||||
findDeclarationValueFragments: function(declaration, type, name) {
|
||||
return search.matchFragments(this, declaration.value, this.matchDeclaration(declaration), type, name);
|
||||
},
|
||||
findAllFragments: function(ast, type, name) {
|
||||
var result = [];
|
||||
|
||||
this.syntax.walk(ast, {
|
||||
visit: 'Declaration',
|
||||
enter: function(declaration) {
|
||||
result.push.apply(result, this.findDeclarationValueFragments(declaration, type, name));
|
||||
}.bind(this)
|
||||
});
|
||||
|
||||
return result;
|
||||
},
|
||||
|
||||
getAtrule: function(atruleName, fallbackBasename = true) {
|
||||
var atrule = names.keyword(atruleName);
|
||||
var atruleEntry = atrule.vendor && fallbackBasename
|
||||
? this.atrules[atrule.name] || this.atrules[atrule.basename]
|
||||
: this.atrules[atrule.name];
|
||||
|
||||
return atruleEntry || null;
|
||||
},
|
||||
getAtrulePrelude: function(atruleName, fallbackBasename = true) {
|
||||
const atrule = this.getAtrule(atruleName, fallbackBasename);
|
||||
|
||||
return atrule && atrule.prelude || null;
|
||||
},
|
||||
getAtruleDescriptor: function(atruleName, name) {
|
||||
return this.atrules.hasOwnProperty(atruleName) && this.atrules.declarators
|
||||
? this.atrules[atruleName].declarators[name] || null
|
||||
: null;
|
||||
},
|
||||
getProperty: function(propertyName, fallbackBasename = true) {
|
||||
var property = names.property(propertyName);
|
||||
var propertyEntry = property.vendor && fallbackBasename
|
||||
? this.properties[property.name] || this.properties[property.basename]
|
||||
: this.properties[property.name];
|
||||
|
||||
return propertyEntry || null;
|
||||
},
|
||||
getType: function(name) {
|
||||
return this.types.hasOwnProperty(name) ? this.types[name] : null;
|
||||
},
|
||||
|
||||
validate: function() {
|
||||
function validate(syntax, name, broken, descriptor) {
|
||||
if (broken.hasOwnProperty(name)) {
|
||||
return broken[name];
|
||||
}
|
||||
|
||||
broken[name] = false;
|
||||
if (descriptor.syntax !== null) {
|
||||
walk(descriptor.syntax, function(node) {
|
||||
if (node.type !== 'Type' && node.type !== 'Property') {
|
||||
return;
|
||||
}
|
||||
|
||||
var map = node.type === 'Type' ? syntax.types : syntax.properties;
|
||||
var brokenMap = node.type === 'Type' ? brokenTypes : brokenProperties;
|
||||
|
||||
if (!map.hasOwnProperty(node.name) || validate(syntax, node.name, brokenMap, map[node.name])) {
|
||||
broken[name] = true;
|
||||
}
|
||||
}, this);
|
||||
}
|
||||
}
|
||||
|
||||
var brokenTypes = {};
|
||||
var brokenProperties = {};
|
||||
|
||||
for (var key in this.types) {
|
||||
validate(this, key, brokenTypes, this.types[key]);
|
||||
}
|
||||
|
||||
for (var key in this.properties) {
|
||||
validate(this, key, brokenProperties, this.properties[key]);
|
||||
}
|
||||
|
||||
brokenTypes = Object.keys(brokenTypes).filter(function(name) {
|
||||
return brokenTypes[name];
|
||||
});
|
||||
brokenProperties = Object.keys(brokenProperties).filter(function(name) {
|
||||
return brokenProperties[name];
|
||||
});
|
||||
|
||||
if (brokenTypes.length || brokenProperties.length) {
|
||||
return {
|
||||
types: brokenTypes,
|
||||
properties: brokenProperties
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
dump: function(syntaxAsAst, pretty) {
|
||||
return {
|
||||
generic: this.generic,
|
||||
types: dumpMapSyntax(this.types, !pretty, syntaxAsAst),
|
||||
properties: dumpMapSyntax(this.properties, !pretty, syntaxAsAst),
|
||||
atrules: dumpAtruleMapSyntax(this.atrules, !pretty, syntaxAsAst)
|
||||
};
|
||||
},
|
||||
toString: function() {
|
||||
return JSON.stringify(this.dump());
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = Lexer;
127
node_modules/css-tree/lib/lexer/error.js
generated
vendored
Normal file
@@ -0,0 +1,127 @@
const createCustomError = require('../utils/createCustomError');
const generate = require('../definition-syntax/generate');
const defaultLoc = { offset: 0, line: 1, column: 1 };

function locateMismatch(matchResult, node) {
    const tokens = matchResult.tokens;
    const longestMatch = matchResult.longestMatch;
    const mismatchNode = longestMatch < tokens.length ? tokens[longestMatch].node || null : null;
    const badNode = mismatchNode !== node ? mismatchNode : null;
    let mismatchOffset = 0;
    let mismatchLength = 0;
    let entries = 0;
    let css = '';
    let start;
    let end;

    for (let i = 0; i < tokens.length; i++) {
        const token = tokens[i].value;

        if (i === longestMatch) {
            mismatchLength = token.length;
            mismatchOffset = css.length;
        }

        if (badNode !== null && tokens[i].node === badNode) {
            if (i <= longestMatch) {
                entries++;
            } else {
                entries = 0;
            }
        }

        css += token;
    }

    if (longestMatch === tokens.length || entries > 1) { // last
        start = fromLoc(badNode || node, 'end') || buildLoc(defaultLoc, css);
        end = buildLoc(start);
    } else {
        start = fromLoc(badNode, 'start') ||
            buildLoc(fromLoc(node, 'start') || defaultLoc, css.slice(0, mismatchOffset));
        end = fromLoc(badNode, 'end') ||
            buildLoc(start, css.substr(mismatchOffset, mismatchLength));
    }

    return {
        css,
        mismatchOffset,
        mismatchLength,
        start,
        end
    };
}

function fromLoc(node, point) {
    const value = node && node.loc && node.loc[point];

    if (value) {
        return 'line' in value ? buildLoc(value) : value;
    }

    return null;
}

function buildLoc({ offset, line, column }, extra) {
    const loc = {
        offset,
        line,
        column
    };

    if (extra) {
        const lines = extra.split(/\n|\r\n?|\f/);

        loc.offset += extra.length;
        loc.line += lines.length - 1;
        loc.column = lines.length === 1 ? loc.column + extra.length : lines.pop().length + 1;
    }

    return loc;
}

const SyntaxReferenceError = function(type, referenceName) {
    const error = createCustomError(
        'SyntaxReferenceError',
        type + (referenceName ? ' `' + referenceName + '`' : '')
    );

    error.reference = referenceName;

    return error;
};

const SyntaxMatchError = function(message, syntax, node, matchResult) {
    const error = createCustomError('SyntaxMatchError', message);
    const {
        css,
        mismatchOffset,
        mismatchLength,
        start,
        end
    } = locateMismatch(matchResult, node);

    error.rawMessage = message;
    error.syntax = syntax ? generate(syntax) : '<generic>';
    error.css = css;
    error.mismatchOffset = mismatchOffset;
    error.mismatchLength = mismatchLength;
    error.message = message + '\n' +
        ' syntax: ' + error.syntax + '\n' +
        ' value: ' + (css || '<empty string>') + '\n' +
        ' --------' + new Array(error.mismatchOffset + 1).join('-') + '^';

    Object.assign(error, start);
    error.loc = {
        source: (node && node.loc && node.loc.source) || '<unknown>',
        start,
        end
    };

    return error;
};

module.exports = {
    SyntaxReferenceError,
    SyntaxMatchError
};
236
node_modules/css-tree/lib/lexer/generic-an-plus-b.js
generated
vendored
Normal file
@@ -0,0 +1,236 @@
var isDigit = require('../tokenizer').isDigit;
var cmpChar = require('../tokenizer').cmpChar;
var TYPE = require('../tokenizer').TYPE;

var DELIM = TYPE.Delim;
var WHITESPACE = TYPE.WhiteSpace;
var COMMENT = TYPE.Comment;
var IDENT = TYPE.Ident;
var NUMBER = TYPE.Number;
var DIMENSION = TYPE.Dimension;
var PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
var HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
var N = 0x006E; // U+006E LATIN SMALL LETTER N (n)
var DISALLOW_SIGN = true;
var ALLOW_SIGN = false;

function isDelim(token, code) {
    return token !== null && token.type === DELIM && token.value.charCodeAt(0) === code;
}

function skipSC(token, offset, getNextToken) {
    while (token !== null && (token.type === WHITESPACE || token.type === COMMENT)) {
        token = getNextToken(++offset);
    }

    return offset;
}

function checkInteger(token, valueOffset, disallowSign, offset) {
    if (!token) {
        return 0;
    }

    var code = token.value.charCodeAt(valueOffset);

    if (code === PLUSSIGN || code === HYPHENMINUS) {
        if (disallowSign) {
            // Number sign is not allowed
            return 0;
        }
        valueOffset++;
    }

    for (; valueOffset < token.value.length; valueOffset++) {
        if (!isDigit(token.value.charCodeAt(valueOffset))) {
            // Integer is expected
            return 0;
        }
    }

    return offset + 1;
}

// ... <signed-integer>
// ... ['+' | '-'] <signless-integer>
function consumeB(token, offset_, getNextToken) {
    var sign = false;
    var offset = skipSC(token, offset_, getNextToken);

    token = getNextToken(offset);

    if (token === null) {
        return offset_;
    }

    if (token.type !== NUMBER) {
        if (isDelim(token, PLUSSIGN) || isDelim(token, HYPHENMINUS)) {
            sign = true;
            offset = skipSC(getNextToken(++offset), offset, getNextToken);
            token = getNextToken(offset);

            if (token === null && token.type !== NUMBER) {
                return 0;
            }
        } else {
            return offset_;
        }
    }

    if (!sign) {
        var code = token.value.charCodeAt(0);
        if (code !== PLUSSIGN && code !== HYPHENMINUS) {
            // Number sign is expected
            return 0;
        }
    }

    return checkInteger(token, sign ? 0 : 1, sign, offset);
}

// An+B microsyntax https://www.w3.org/TR/css-syntax-3/#anb
module.exports = function anPlusB(token, getNextToken) {
    /* eslint-disable brace-style*/
    var offset = 0;

    if (!token) {
        return 0;
    }

    // <integer>
    if (token.type === NUMBER) {
        return checkInteger(token, 0, ALLOW_SIGN, offset); // b
    }

    // -n
    // -n <signed-integer>
    // -n ['+' | '-'] <signless-integer>
    // -n- <signless-integer>
    // <dashndashdigit-ident>
    else if (token.type === IDENT && token.value.charCodeAt(0) === HYPHENMINUS) {
        // expect 1st char is N
        if (!cmpChar(token.value, 1, N)) {
            return 0;
        }

        switch (token.value.length) {
            // -n
            // -n <signed-integer>
            // -n ['+' | '-'] <signless-integer>
            case 2:
                return consumeB(getNextToken(++offset), offset, getNextToken);

            // -n- <signless-integer>
            case 3:
                if (token.value.charCodeAt(2) !== HYPHENMINUS) {
                    return 0;
                }

                offset = skipSC(getNextToken(++offset), offset, getNextToken);
                token = getNextToken(offset);

                return checkInteger(token, 0, DISALLOW_SIGN, offset);

            // <dashndashdigit-ident>
            default:
                if (token.value.charCodeAt(2) !== HYPHENMINUS) {
                    return 0;
                }

                return checkInteger(token, 3, DISALLOW_SIGN, offset);
        }
    }

    // '+'? n
    // '+'? n <signed-integer>
    // '+'? n ['+' | '-'] <signless-integer>
    // '+'? n- <signless-integer>
    // '+'? <ndashdigit-ident>
    else if (token.type === IDENT || (isDelim(token, PLUSSIGN) && getNextToken(offset + 1).type === IDENT)) {
        // just ignore a plus
        if (token.type !== IDENT) {
            token = getNextToken(++offset);
        }

        if (token === null || !cmpChar(token.value, 0, N)) {
            return 0;
        }

        switch (token.value.length) {
            // '+'? n
            // '+'? n <signed-integer>
            // '+'? n ['+' | '-'] <signless-integer>
            case 1:
                return consumeB(getNextToken(++offset), offset, getNextToken);

            // '+'? n- <signless-integer>
            case 2:
                if (token.value.charCodeAt(1) !== HYPHENMINUS) {
                    return 0;
                }

                offset = skipSC(getNextToken(++offset), offset, getNextToken);
                token = getNextToken(offset);

                return checkInteger(token, 0, DISALLOW_SIGN, offset);

            // '+'? <ndashdigit-ident>
            default:
                if (token.value.charCodeAt(1) !== HYPHENMINUS) {
                    return 0;
                }

                return checkInteger(token, 2, DISALLOW_SIGN, offset);
        }
    }

    // <ndashdigit-dimension>
    // <ndash-dimension> <signless-integer>
    // <n-dimension>
    // <n-dimension> <signed-integer>
    // <n-dimension> ['+' | '-'] <signless-integer>
    else if (token.type === DIMENSION) {
        var code = token.value.charCodeAt(0);
        var sign = code === PLUSSIGN || code === HYPHENMINUS ? 1 : 0;

        for (var i = sign; i < token.value.length; i++) {
            if (!isDigit(token.value.charCodeAt(i))) {
                break;
            }
        }

        if (i === sign) {
            // Integer is expected
            return 0;
        }

        if (!cmpChar(token.value, i, N)) {
            return 0;
        }

        // <n-dimension>
        // <n-dimension> <signed-integer>
        // <n-dimension> ['+' | '-'] <signless-integer>
        if (i + 1 === token.value.length) {
            return consumeB(getNextToken(++offset), offset, getNextToken);
        } else {
            if (token.value.charCodeAt(i + 1) !== HYPHENMINUS) {
                return 0;
            }

            // <ndash-dimension> <signless-integer>
            if (i + 2 === token.value.length) {
                offset = skipSC(getNextToken(++offset), offset, getNextToken);
                token = getNextToken(offset);

                return checkInteger(token, 0, DISALLOW_SIGN, offset);
            }
            // <ndashdigit-dimension>
            else {
                return checkInteger(token, i + 2, DISALLOW_SIGN, offset);
            }
        }
    }

    return 0;
};
159
node_modules/css-tree/lib/lexer/generic-urange.js
generated
vendored
Normal file
@@ -0,0 +1,159 @@
var isHexDigit = require('../tokenizer').isHexDigit;
var cmpChar = require('../tokenizer').cmpChar;
var TYPE = require('../tokenizer').TYPE;

var IDENT = TYPE.Ident;
var DELIM = TYPE.Delim;
var NUMBER = TYPE.Number;
var DIMENSION = TYPE.Dimension;
var PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
var HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
var QUESTIONMARK = 0x003F; // U+003F QUESTION MARK (?)
var U = 0x0075; // U+0075 LATIN SMALL LETTER U (u)

function isDelim(token, code) {
    return token !== null && token.type === DELIM && token.value.charCodeAt(0) === code;
}

function startsWith(token, code) {
    return token.value.charCodeAt(0) === code;
}

function hexSequence(token, offset, allowDash) {
    for (var pos = offset, hexlen = 0; pos < token.value.length; pos++) {
        var code = token.value.charCodeAt(pos);

        if (code === HYPHENMINUS && allowDash && hexlen !== 0) {
            if (hexSequence(token, offset + hexlen + 1, false) > 0) {
                return 6; // dissallow following question marks
            }

            return 0; // dash at the ending of a hex sequence is not allowed
        }

        if (!isHexDigit(code)) {
            return 0; // not a hex digit
        }

        if (++hexlen > 6) {
            return 0; // too many hex digits
        };
    }

    return hexlen;
}

function withQuestionMarkSequence(consumed, length, getNextToken) {
    if (!consumed) {
        return 0; // nothing consumed
    }

    while (isDelim(getNextToken(length), QUESTIONMARK)) {
        if (++consumed > 6) {
            return 0; // too many question marks
        }

        length++;
    }

    return length;
}

// https://drafts.csswg.org/css-syntax/#urange
// Informally, the <urange> production has three forms:
// U+0001
//      Defines a range consisting of a single code point, in this case the code point "1".
// U+0001-00ff
//      Defines a range of codepoints between the first and the second value, in this case
//      the range between "1" and "ff" (255 in decimal) inclusive.
// U+00??
//      Defines a range of codepoints where the "?" characters range over all hex digits,
//      in this case defining the same as the value U+0000-00ff.
// In each form, a maximum of 6 digits is allowed for each hexadecimal number (if you treat "?" as a hexadecimal digit).
//
// <urange> =
//   u '+' <ident-token> '?'* |
//   u <dimension-token> '?'* |
//   u <number-token> '?'* |
//   u <number-token> <dimension-token> |
//   u <number-token> <number-token> |
//   u '+' '?'+
module.exports = function urange(token, getNextToken) {
    var length = 0;

    // should start with `u` or `U`
    if (token === null || token.type !== IDENT || !cmpChar(token.value, 0, U)) {
        return 0;
    }

    token = getNextToken(++length);
    if (token === null) {
        return 0;
    }

    // u '+' <ident-token> '?'*
    // u '+' '?'+
    if (isDelim(token, PLUSSIGN)) {
        token = getNextToken(++length);
        if (token === null) {
            return 0;
        }

        if (token.type === IDENT) {
            // u '+' <ident-token> '?'*
            return withQuestionMarkSequence(hexSequence(token, 0, true), ++length, getNextToken);
        }

        if (isDelim(token, QUESTIONMARK)) {
            // u '+' '?'+
            return withQuestionMarkSequence(1, ++length, getNextToken);
        }

        // Hex digit or question mark is expected
        return 0;
    }

    // u <number-token> '?'*
    // u <number-token> <dimension-token>
    // u <number-token> <number-token>
    if (token.type === NUMBER) {
        if (!startsWith(token, PLUSSIGN)) {
            return 0;
        }

        var consumedHexLength = hexSequence(token, 1, true);
        if (consumedHexLength === 0) {
            return 0;
        }

        token = getNextToken(++length);
        if (token === null) {
            // u <number-token> <eof>
            return length;
        }

        if (token.type === DIMENSION || token.type === NUMBER) {
            // u <number-token> <dimension-token>
            // u <number-token> <number-token>
            if (!startsWith(token, HYPHENMINUS) || !hexSequence(token, 1, false)) {
                return 0;
            }

            return length + 1;
        }

        // u <number-token> '?'*
        return withQuestionMarkSequence(consumedHexLength, length, getNextToken);
    }

    // u <dimension-token> '?'*
    if (token.type === DIMENSION) {
        if (!startsWith(token, PLUSSIGN)) {
            return 0;
        }

        return withQuestionMarkSequence(hexSequence(token, 1, true), ++length, getNextToken);
    }

    return 0;
};
585
node_modules/css-tree/lib/lexer/generic.js
generated
vendored
Normal file
@@ -0,0 +1,585 @@
var tokenizer = require('../tokenizer');
|
||||
var isIdentifierStart = tokenizer.isIdentifierStart;
|
||||
var isHexDigit = tokenizer.isHexDigit;
|
||||
var isDigit = tokenizer.isDigit;
|
||||
var cmpStr = tokenizer.cmpStr;
|
||||
var consumeNumber = tokenizer.consumeNumber;
|
||||
var TYPE = tokenizer.TYPE;
|
||||
var anPlusB = require('./generic-an-plus-b');
|
||||
var urange = require('./generic-urange');
|
||||
|
||||
var cssWideKeywords = ['unset', 'initial', 'inherit'];
|
||||
var calcFunctionNames = ['calc(', '-moz-calc(', '-webkit-calc('];
|
||||
|
||||
// https://www.w3.org/TR/css-values-3/#lengths
|
||||
var LENGTH = {
|
||||
// absolute length units
|
||||
'px': true,
|
||||
'mm': true,
|
||||
'cm': true,
|
||||
'in': true,
|
||||
'pt': true,
|
||||
'pc': true,
|
||||
'q': true,
|
||||
|
||||
// relative length units
|
||||
'em': true,
|
||||
'ex': true,
|
||||
'ch': true,
|
||||
'rem': true,
|
||||
|
||||
// viewport-percentage lengths
|
||||
'vh': true,
|
||||
'vw': true,
|
||||
'vmin': true,
|
||||
'vmax': true,
|
||||
'vm': true
|
||||
};
|
||||
|
||||
var ANGLE = {
|
||||
'deg': true,
|
||||
'grad': true,
|
||||
'rad': true,
|
||||
'turn': true
|
||||
};
|
||||
|
||||
var TIME = {
|
||||
's': true,
|
||||
'ms': true
|
||||
};
|
||||
|
||||
var FREQUENCY = {
|
||||
'hz': true,
|
||||
'khz': true
|
||||
};
|
||||
|
||||
// https://www.w3.org/TR/css-values-3/#resolution (https://drafts.csswg.org/css-values/#resolution)
|
||||
var RESOLUTION = {
|
||||
'dpi': true,
|
||||
'dpcm': true,
|
||||
'dppx': true,
|
||||
'x': true // https://github.com/w3c/csswg-drafts/issues/461
|
||||
};
|
||||
|
||||
// https://drafts.csswg.org/css-grid/#fr-unit
|
||||
var FLEX = {
|
||||
'fr': true
|
||||
};
|
||||
|
||||
// https://www.w3.org/TR/css3-speech/#mixing-props-voice-volume
|
||||
var DECIBEL = {
|
||||
'db': true
|
||||
};
|
||||
|
||||
// https://www.w3.org/TR/css3-speech/#voice-props-voice-pitch
|
||||
var SEMITONES = {
|
||||
'st': true
|
||||
};
|
||||
|
||||
// safe char code getter
|
||||
function charCode(str, index) {
|
||||
return index < str.length ? str.charCodeAt(index) : 0;
|
||||
}
|
||||
|
||||
function eqStr(actual, expected) {
|
||||
return cmpStr(actual, 0, actual.length, expected);
|
||||
}
|
||||
|
||||
function eqStrAny(actual, expected) {
|
||||
for (var i = 0; i < expected.length; i++) {
|
||||
if (eqStr(actual, expected[i])) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
// IE postfix hack, i.e. 123\0 or 123px\9
|
||||
function isPostfixIeHack(str, offset) {
|
||||
if (offset !== str.length - 2) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return (
|
||||
str.charCodeAt(offset) === 0x005C && // U+005C REVERSE SOLIDUS (\)
|
||||
isDigit(str.charCodeAt(offset + 1))
|
||||
);
|
||||
}
|
||||
|
||||
function outOfRange(opts, value, numEnd) {
|
||||
if (opts && opts.type === 'Range') {
|
||||
var num = Number(
|
||||
numEnd !== undefined && numEnd !== value.length
|
||||
? value.substr(0, numEnd)
|
||||
: value
|
||||
);
|
||||
|
||||
if (isNaN(num)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (opts.min !== null && num < opts.min) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (opts.max !== null && num > opts.max) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function consumeFunction(token, getNextToken) {
|
||||
var startIdx = token.index;
|
||||
var length = 0;
|
||||
|
||||
// balanced token consuming
|
||||
do {
|
||||
length++;
|
||||
|
||||
if (token.balance <= startIdx) {
|
||||
break;
|
||||
}
|
||||
} while (token = getNextToken(length));
|
||||
|
||||
return length;
|
||||
}
|
||||
|
||||
// TODO: implement
|
||||
// can be used wherever <length>, <frequency>, <angle>, <time>, <percentage>, <number>, or <integer> values are allowed
|
||||
// https://drafts.csswg.org/css-values/#calc-notation
|
||||
function calc(next) {
|
||||
return function(token, getNextToken, opts) {
|
||||
if (token === null) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (token.type === TYPE.Function && eqStrAny(token.value, calcFunctionNames)) {
|
||||
return consumeFunction(token, getNextToken);
|
||||
}
|
||||
|
||||
return next(token, getNextToken, opts);
|
||||
};
|
||||
}
|
||||
|
||||
function tokenType(expectedTokenType) {
|
||||
return function(token) {
|
||||
if (token === null || token.type !== expectedTokenType) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
};
|
||||
}
|
||||
|
||||
function func(name) {
|
||||
name = name + '(';
|
||||
|
||||
return function(token, getNextToken) {
|
||||
if (token !== null && eqStr(token.value, name)) {
|
||||
return consumeFunction(token, getNextToken);
|
||||
}
|
||||
|
||||
return 0;
|
||||
};
|
||||
}
|
||||
|
||||
// =========================
|
||||
// Complex types
|
||||
//
|
||||
|
||||
// https://drafts.csswg.org/css-values-4/#custom-idents
|
||||
// 4.2. Author-defined Identifiers: the <custom-ident> type
|
||||
// Some properties accept arbitrary author-defined identifiers as a component value.
|
||||
// This generic data type is denoted by <custom-ident>, and represents any valid CSS identifier
|
||||
// that would not be misinterpreted as a pre-defined keyword in that property’s value definition.
|
||||
//
|
||||
// See also: https://developer.mozilla.org/en-US/docs/Web/CSS/custom-ident
|
||||
function customIdent(token) {
|
||||
if (token === null || token.type !== TYPE.Ident) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
var name = token.value.toLowerCase();
|
||||
|
||||
// The CSS-wide keywords are not valid <custom-ident>s
|
||||
if (eqStrAny(name, cssWideKeywords)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// The default keyword is reserved and is also not a valid <custom-ident>
|
||||
if (eqStr(name, 'default')) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// TODO: ignore property specific keywords (as described https://developer.mozilla.org/en-US/docs/Web/CSS/custom-ident)
|
||||
// Specifications using <custom-ident> must specify clearly what other keywords
|
||||
// are excluded from <custom-ident>, if any—for example by saying that any pre-defined keywords
|
||||
// in that property’s value definition are excluded. Excluded keywords are excluded
|
||||
// in all ASCII case permutations.
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
// https://drafts.csswg.org/css-variables/#typedef-custom-property-name
|
||||
// A custom property is any property whose name starts with two dashes (U+002D HYPHEN-MINUS), like --foo.
|
||||
// The <custom-property-name> production corresponds to this: it’s defined as any valid identifier
|
||||
// that starts with two dashes, except -- itself, which is reserved for future use by CSS.
|
||||
// NOTE: Current implementation treat `--` as a valid name since most (all?) major browsers treat it as valid.
|
||||
function customPropertyName(token) {
|
||||
// ... defined as any valid identifier
|
||||
if (token === null || token.type !== TYPE.Ident) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// ... that starts with two dashes (U+002D HYPHEN-MINUS)
|
||||
if (charCode(token.value, 0) !== 0x002D || charCode(token.value, 1) !== 0x002D) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
// https://drafts.csswg.org/css-color-4/#hex-notation
|
||||
// The syntax of a <hex-color> is a <hash-token> token whose value consists of 3, 4, 6, or 8 hexadecimal digits.
|
||||
// In other words, a hex color is written as a hash character, "#", followed by some number of digits 0-9 or
|
||||
// letters a-f (the case of the letters doesn’t matter - #00ff00 is identical to #00FF00).
|
||||
function hexColor(token) {
|
||||
if (token === null || token.type !== TYPE.Hash) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
var length = token.value.length;
|
||||
|
||||
// valid values (length): #rgb (4), #rgba (5), #rrggbb (7), #rrggbbaa (9)
|
||||
if (length !== 4 && length !== 5 && length !== 7 && length !== 9) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
for (var i = 1; i < length; i++) {
|
||||
if (!isHexDigit(token.value.charCodeAt(i))) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
function idSelector(token) {
|
||||
if (token === null || token.type !== TYPE.Hash) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (!isIdentifierStart(charCode(token.value, 1), charCode(token.value, 2), charCode(token.value, 3))) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
// https://drafts.csswg.org/css-syntax/#any-value
|
||||
// It represents the entirety of what a valid declaration can have as its value.
|
||||
function declarationValue(token, getNextToken) {
|
||||
if (!token) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
var length = 0;
|
||||
var level = 0;
|
||||
var startIdx = token.index;
|
||||
|
||||
// The <declaration-value> production matches any sequence of one or more tokens,
|
||||
// so long as the sequence ...
|
||||
scan:
|
||||
do {
|
||||
switch (token.type) {
|
||||
// ... does not contain <bad-string-token>, <bad-url-token>,
|
||||
case TYPE.BadString:
|
||||
case TYPE.BadUrl:
|
||||
break scan;
|
||||
|
||||
// ... unmatched <)-token>, <]-token>, or <}-token>,
|
||||
case TYPE.RightCurlyBracket:
|
||||
case TYPE.RightParenthesis:
|
||||
case TYPE.RightSquareBracket:
|
||||
if (token.balance > token.index || token.balance < startIdx) {
|
||||
break scan;
|
||||
}
|
||||
|
||||
level--;
|
||||
break;
|
||||
|
||||
// ... or top-level <semicolon-token> tokens
|
||||
case TYPE.Semicolon:
|
||||
if (level === 0) {
|
||||
break scan;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
// ... or <delim-token> tokens with a value of "!"
|
||||
case TYPE.Delim:
|
||||
if (token.value === '!' && level === 0) {
|
||||
break scan;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case TYPE.Function:
|
||||
case TYPE.LeftParenthesis:
|
||||
case TYPE.LeftSquareBracket:
|
||||
case TYPE.LeftCurlyBracket:
|
||||
level++;
|
||||
break;
|
||||
}
|
||||
|
||||
length++;
|
||||
|
||||
// until balance closing
|
||||
if (token.balance <= startIdx) {
|
||||
break;
|
||||
}
|
||||
} while (token = getNextToken(length));
|
||||
|
||||
return length;
|
||||
}
|
||||
|
||||
// https://drafts.csswg.org/css-syntax/#any-value
|
||||
// The <any-value> production is identical to <declaration-value>, but also
|
||||
// allows top-level <semicolon-token> tokens and <delim-token> tokens
|
||||
// with a value of "!". It represents the entirety of what valid CSS can be in any context.
|
||||
function anyValue(token, getNextToken) {
|
||||
if (!token) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
var startIdx = token.index;
|
||||
var length = 0;
|
||||
|
||||
// The <any-value> production matches any sequence of one or more tokens,
|
||||
// so long as the sequence ...
|
||||
scan:
|
||||
do {
|
||||
switch (token.type) {
|
||||
// ... does not contain <bad-string-token>, <bad-url-token>,
|
||||
case TYPE.BadString:
|
||||
case TYPE.BadUrl:
|
||||
break scan;
|
||||
|
||||
// ... unmatched <)-token>, <]-token>, or <}-token>,
|
||||
case TYPE.RightCurlyBracket:
|
||||
case TYPE.RightParenthesis:
|
||||
case TYPE.RightSquareBracket:
|
||||
if (token.balance > token.index || token.balance < startIdx) {
|
||||
break scan;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
length++;
|
||||
|
||||
// until balance closing
|
||||
if (token.balance <= startIdx) {
|
||||
break;
|
||||
}
|
||||
} while (token = getNextToken(length));
|
||||
|
||||
return length;
|
||||
}
|
||||
|
||||
// =========================
|
||||
// Dimensions
|
||||
//
|
||||
|
||||
function dimension(type) {
|
||||
return function(token, getNextToken, opts) {
|
||||
if (token === null || token.type !== TYPE.Dimension) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
var numberEnd = consumeNumber(token.value, 0);
|
||||
|
||||
// check unit
|
||||
if (type !== null) {
|
||||
// check for IE postfix hack, i.e. 123px\0 or 123px\9
|
||||
var reverseSolidusOffset = token.value.indexOf('\\', numberEnd);
|
||||
var unit = reverseSolidusOffset === -1 || !isPostfixIeHack(token.value, reverseSolidusOffset)
|
||||
? token.value.substr(numberEnd)
|
||||
: token.value.substring(numberEnd, reverseSolidusOffset);
|
||||
|
||||
if (type.hasOwnProperty(unit.toLowerCase()) === false) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
// check range if specified
|
||||
if (outOfRange(opts, token.value, numberEnd)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
};
|
||||
}
|
||||
|
||||
// =========================
|
||||
// Percentage
|
||||
//
|
||||
|
||||
// §5.5. Percentages: the <percentage> type
|
||||
// https://drafts.csswg.org/css-values-4/#percentages
|
||||
function percentage(token, getNextToken, opts) {
|
||||
// ... corresponds to the <percentage-token> production
|
||||
if (token === null || token.type !== TYPE.Percentage) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// check range if specified
|
||||
if (outOfRange(opts, token.value, token.value.length - 1)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
// =========================
|
||||
// Numeric
|
||||
//
|
||||
|
||||
// https://drafts.csswg.org/css-values-4/#numbers
|
||||
// The value <zero> represents a literal number with the value 0. Expressions that merely
|
||||
// evaluate to a <number> with the value 0 (for example, calc(0)) do not match <zero>;
|
||||
// only literal <number-token>s do.
|
||||
function zero(next) {
|
||||
if (typeof next !== 'function') {
|
||||
next = function() {
|
||||
return 0;
|
||||
};
|
||||
}
|
||||
|
||||
return function(token, getNextToken, opts) {
|
||||
if (token !== null && token.type === TYPE.Number) {
|
||||
if (Number(token.value) === 0) {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
return next(token, getNextToken, opts);
|
||||
};
|
||||
}
|
||||
|
||||
// § 5.3. Real Numbers: the <number> type
|
||||
// https://drafts.csswg.org/css-values-4/#numbers
|
||||
// Number values are denoted by <number>, and represent real numbers, possibly with a fractional component.
|
||||
// ... It corresponds to the <number-token> production
|
||||
function number(token, getNextToken, opts) {
|
||||
if (token === null) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
var numberEnd = consumeNumber(token.value, 0);
|
||||
var isNumber = numberEnd === token.value.length;
|
||||
if (!isNumber && !isPostfixIeHack(token.value, numberEnd)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// check range if specified
|
||||
if (outOfRange(opts, token.value, numberEnd)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
// §5.2. Integers: the <integer> type
|
||||
// https://drafts.csswg.org/css-values-4/#integers
|
||||
function integer(token, getNextToken, opts) {
|
||||
// ... corresponds to a subset of the <number-token> production
|
||||
if (token === null || token.type !== TYPE.Number) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// The first digit of an integer may be immediately preceded by `-` or `+` to indicate the integer’s sign.
|
||||
var i = token.value.charCodeAt(0) === 0x002B || // U+002B PLUS SIGN (+)
|
||||
token.value.charCodeAt(0) === 0x002D ? 1 : 0; // U+002D HYPHEN-MINUS (-)
|
||||
|
||||
// When written literally, an integer is one or more decimal digits 0 through 9 ...
|
||||
for (; i < token.value.length; i++) {
|
||||
if (!isDigit(token.value.charCodeAt(i))) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
// check range if specified
|
||||
if (outOfRange(opts, token.value, i)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
// token types
|
||||
'ident-token': tokenType(TYPE.Ident),
|
||||
'function-token': tokenType(TYPE.Function),
|
||||
'at-keyword-token': tokenType(TYPE.AtKeyword),
|
||||
'hash-token': tokenType(TYPE.Hash),
|
||||
'string-token': tokenType(TYPE.String),
|
||||
'bad-string-token': tokenType(TYPE.BadString),
|
||||
'url-token': tokenType(TYPE.Url),
|
||||
'bad-url-token': tokenType(TYPE.BadUrl),
|
||||
'delim-token': tokenType(TYPE.Delim),
|
||||
'number-token': tokenType(TYPE.Number),
|
||||
'percentage-token': tokenType(TYPE.Percentage),
|
||||
'dimension-token': tokenType(TYPE.Dimension),
|
||||
'whitespace-token': tokenType(TYPE.WhiteSpace),
|
||||
'CDO-token': tokenType(TYPE.CDO),
|
||||
'CDC-token': tokenType(TYPE.CDC),
|
||||
'colon-token': tokenType(TYPE.Colon),
|
||||
'semicolon-token': tokenType(TYPE.Semicolon),
|
||||
'comma-token': tokenType(TYPE.Comma),
|
||||
'[-token': tokenType(TYPE.LeftSquareBracket),
|
||||
']-token': tokenType(TYPE.RightSquareBracket),
|
||||
'(-token': tokenType(TYPE.LeftParenthesis),
|
||||
')-token': tokenType(TYPE.RightParenthesis),
|
||||
'{-token': tokenType(TYPE.LeftCurlyBracket),
|
||||
'}-token': tokenType(TYPE.RightCurlyBracket),
|
||||
|
||||
// token type aliases
|
||||
'string': tokenType(TYPE.String),
|
||||
'ident': tokenType(TYPE.Ident),
|
||||
|
||||
// complex types
|
||||
'custom-ident': customIdent,
|
||||
'custom-property-name': customPropertyName,
|
||||
'hex-color': hexColor,
|
||||
'id-selector': idSelector, // element( <id-selector> )
|
||||
'an-plus-b': anPlusB,
|
||||
'urange': urange,
|
||||
'declaration-value': declarationValue,
|
||||
'any-value': anyValue,
|
||||
|
||||
// dimensions
|
||||
'dimension': calc(dimension(null)),
|
||||
'angle': calc(dimension(ANGLE)),
|
||||
'decibel': calc(dimension(DECIBEL)),
|
||||
'frequency': calc(dimension(FREQUENCY)),
|
||||
'flex': calc(dimension(FLEX)),
|
||||
'length': calc(zero(dimension(LENGTH))),
|
||||
'resolution': calc(dimension(RESOLUTION)),
|
||||
'semitones': calc(dimension(SEMITONES)),
|
||||
'time': calc(dimension(TIME)),
|
||||
|
||||
// percentage
|
||||
'percentage': calc(percentage),
|
||||
|
||||
// numeric
|
||||
'zero': zero(),
|
||||
'number': calc(number),
|
||||
'integer': calc(integer),
|
||||
|
||||
// old IE stuff
|
||||
'-ms-legacy-expression': func('expression')
|
||||
};
3
node_modules/css-tree/lib/lexer/index.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
module.exports = {
    Lexer: require('./Lexer')
};
455
node_modules/css-tree/lib/lexer/match-graph.js
generated
vendored
Normal file
@@ -0,0 +1,455 @@
var parse = require('../definition-syntax/parse');
|
||||
|
||||
var MATCH = { type: 'Match' };
|
||||
var MISMATCH = { type: 'Mismatch' };
|
||||
var DISALLOW_EMPTY = { type: 'DisallowEmpty' };
|
||||
var LEFTPARENTHESIS = 40; // (
|
||||
var RIGHTPARENTHESIS = 41; // )
|
||||
|
||||
function createCondition(match, thenBranch, elseBranch) {
|
||||
// reduce node count
|
||||
if (thenBranch === MATCH && elseBranch === MISMATCH) {
|
||||
return match;
|
||||
}
|
||||
|
||||
if (match === MATCH && thenBranch === MATCH && elseBranch === MATCH) {
|
||||
return match;
|
||||
}
|
||||
|
||||
if (match.type === 'If' && match.else === MISMATCH && thenBranch === MATCH) {
|
||||
thenBranch = match.then;
|
||||
match = match.match;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'If',
|
||||
match: match,
|
||||
then: thenBranch,
|
||||
else: elseBranch
|
||||
};
|
||||
}
|
||||
|
||||
function isFunctionType(name) {
|
||||
return (
|
||||
name.length > 2 &&
|
||||
name.charCodeAt(name.length - 2) === LEFTPARENTHESIS &&
|
||||
name.charCodeAt(name.length - 1) === RIGHTPARENTHESIS
|
||||
);
|
||||
}
|
||||
|
||||
function isEnumCapatible(term) {
|
||||
return (
|
||||
term.type === 'Keyword' ||
|
||||
term.type === 'AtKeyword' ||
|
||||
term.type === 'Function' ||
|
||||
term.type === 'Type' && isFunctionType(term.name)
|
||||
);
|
||||
}
|
||||
|
||||
function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
|
||||
switch (combinator) {
|
||||
case ' ':
|
||||
// Juxtaposing components means that all of them must occur, in the given order.
|
||||
//
|
||||
// a b c
|
||||
// =
|
||||
// match a
|
||||
// then match b
|
||||
// then match c
|
||||
// then MATCH
|
||||
// else MISMATCH
|
||||
// else MISMATCH
|
||||
// else MISMATCH
|
||||
var result = MATCH;
|
||||
|
||||
for (var i = terms.length - 1; i >= 0; i--) {
|
||||
var term = terms[i];
|
||||
|
||||
result = createCondition(
|
||||
term,
|
||||
result,
|
||||
MISMATCH
|
||||
);
|
||||
};
|
||||
|
||||
return result;
|
||||
|
||||
case '|':
|
||||
// A bar (|) separates two or more alternatives: exactly one of them must occur.
|
||||
//
|
||||
// a | b | c
|
||||
// =
|
||||
// match a
|
||||
// then MATCH
|
||||
// else match b
|
||||
// then MATCH
|
||||
// else match c
|
||||
// then MATCH
|
||||
// else MISMATCH
|
||||
|
||||
var result = MISMATCH;
|
||||
var map = null;
|
||||
|
||||
for (var i = terms.length - 1; i >= 0; i--) {
|
||||
var term = terms[i];
|
||||
|
||||
// reduce sequence of keywords into a Enum
|
||||
if (isEnumCapatible(term)) {
|
||||
if (map === null && i > 0 && isEnumCapatible(terms[i - 1])) {
|
||||
map = Object.create(null);
|
||||
result = createCondition(
|
||||
{
|
||||
type: 'Enum',
|
||||
map: map
|
||||
},
|
||||
MATCH,
|
||||
result
|
||||
);
|
||||
}
|
||||
|
||||
if (map !== null) {
|
||||
var key = (isFunctionType(term.name) ? term.name.slice(0, -1) : term.name).toLowerCase();
|
||||
if (key in map === false) {
|
||||
map[key] = term;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
map = null;
|
||||
|
||||
// create a new conditonal node
|
||||
result = createCondition(
|
||||
term,
|
||||
MATCH,
|
||||
result
|
||||
);
|
||||
};
|
||||
|
||||
return result;
|
||||
|
||||
case '&&':
|
||||
// A double ampersand (&&) separates two or more components,
|
||||
// all of which must occur, in any order.
|
||||
|
||||
// Use MatchOnce for groups with a large number of terms,
|
||||
// since &&-groups produces at least N!-node trees
|
||||
if (terms.length > 5) {
|
||||
return {
|
||||
type: 'MatchOnce',
|
||||
terms: terms,
|
||||
all: true
|
||||
};
|
||||
}
|
||||
|
||||
// Use a combination tree for groups with small number of terms
|
||||
//
|
||||
// a && b && c
|
||||
// =
|
||||
// match a
|
||||
// then [b && c]
|
||||
// else match b
|
||||
// then [a && c]
|
||||
// else match c
|
||||
// then [a && b]
|
||||
// else MISMATCH
|
||||
//
|
||||
// a && b
|
||||
// =
|
||||
// match a
|
||||
// then match b
|
||||
// then MATCH
|
||||
// else MISMATCH
|
||||
// else match b
|
||||
// then match a
|
||||
// then MATCH
|
||||
// else MISMATCH
|
||||
// else MISMATCH
|
||||
var result = MISMATCH;
|
||||
|
||||
for (var i = terms.length - 1; i >= 0; i--) {
|
||||
var term = terms[i];
|
||||
var thenClause;
|
||||
|
||||
if (terms.length > 1) {
|
||||
thenClause = buildGroupMatchGraph(
|
||||
combinator,
|
||||
terms.filter(function(newGroupTerm) {
|
||||
return newGroupTerm !== term;
|
||||
}),
|
||||
false
|
||||
);
|
||||
} else {
|
||||
thenClause = MATCH;
|
||||
}
|
||||
|
||||
result = createCondition(
|
||||
term,
|
||||
thenClause,
|
||||
result
|
||||
);
|
||||
};
|
||||
|
||||
return result;
|
||||
|
||||
case '||':
|
||||
// A double bar (||) separates two or more options:
|
||||
// one or more of them must occur, in any order.
|
||||
|
||||
// Use MatchOnce for groups with a large number of terms,
|
||||
// since ||-groups produces at least N!-node trees
|
||||
if (terms.length > 5) {
|
||||
return {
|
||||
type: 'MatchOnce',
|
||||
terms: terms,
|
||||
all: false
|
||||
};
|
||||
}
|
||||
|
||||
// Use a combination tree for groups with small number of terms
|
||||
//
|
||||
// a || b || c
|
||||
// =
|
||||
// match a
|
||||
// then [b || c]
|
||||
// else match b
|
||||
// then [a || c]
|
||||
// else match c
|
||||
// then [a || b]
|
||||
// else MISMATCH
|
||||
//
|
||||
// a || b
|
||||
// =
|
||||
// match a
|
||||
// then match b
|
||||
// then MATCH
|
||||
// else MATCH
|
||||
// else match b
|
||||
// then match a
|
||||
// then MATCH
|
||||
// else MATCH
|
||||
// else MISMATCH
|
||||
var result = atLeastOneTermMatched ? MATCH : MISMATCH;
|
||||
|
||||
for (var i = terms.length - 1; i >= 0; i--) {
|
||||
var term = terms[i];
|
||||
var thenClause;
|
||||
|
||||
if (terms.length > 1) {
|
||||
thenClause = buildGroupMatchGraph(
|
||||
combinator,
|
||||
terms.filter(function(newGroupTerm) {
|
||||
return newGroupTerm !== term;
|
||||
}),
|
||||
true
|
||||
);
|
||||
} else {
|
||||
thenClause = MATCH;
|
||||
}
|
||||
|
||||
result = createCondition(
|
||||
term,
|
||||
thenClause,
|
||||
result
|
||||
);
|
||||
};
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
function buildMultiplierMatchGraph(node) {
|
||||
var result = MATCH;
|
||||
var matchTerm = buildMatchGraph(node.term);
|
||||
|
||||
if (node.max === 0) {
|
||||
// disable repeating of empty match to prevent infinite loop
|
||||
matchTerm = createCondition(
|
||||
matchTerm,
|
||||
DISALLOW_EMPTY,
|
||||
MISMATCH
|
||||
);
|
||||
|
||||
// an occurrence count is not limited, make a cycle;
|
||||
// to collect more terms on each following matching mismatch
|
||||
result = createCondition(
|
||||
matchTerm,
|
||||
null, // will be a loop
|
||||
MISMATCH
|
||||
);
|
||||
|
||||
result.then = createCondition(
|
||||
MATCH,
|
||||
MATCH,
|
||||
result // make a loop
|
||||
);
|
||||
|
||||
if (node.comma) {
|
||||
result.then.else = createCondition(
|
||||
{ type: 'Comma', syntax: node },
|
||||
result,
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// create a match node chain for [min .. max] interval with optional matches
|
||||
for (var i = node.min || 1; i <= node.max; i++) {
|
||||
if (node.comma && result !== MATCH) {
|
||||
result = createCondition(
|
||||
{ type: 'Comma', syntax: node },
|
||||
result,
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
|
||||
result = createCondition(
|
||||
matchTerm,
|
||||
createCondition(
|
||||
MATCH,
|
||||
MATCH,
|
||||
result
|
||||
),
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (node.min === 0) {
|
||||
// allow zero match
|
||||
result = createCondition(
|
||||
MATCH,
|
||||
MATCH,
|
||||
result
|
||||
);
|
||||
} else {
|
||||
// create a match node chain to collect [0 ... min - 1] required matches
|
||||
for (var i = 0; i < node.min - 1; i++) {
|
||||
if (node.comma && result !== MATCH) {
|
||||
result = createCondition(
|
||||
{ type: 'Comma', syntax: node },
|
||||
result,
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
|
||||
result = createCondition(
|
||||
matchTerm,
|
||||
result,
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function buildMatchGraph(node) {
|
||||
if (typeof node === 'function') {
|
||||
return {
|
||||
type: 'Generic',
|
||||
fn: node
|
||||
};
|
||||
}
|
||||
|
||||
switch (node.type) {
|
||||
case 'Group':
|
||||
var result = buildGroupMatchGraph(
|
||||
node.combinator,
|
||||
node.terms.map(buildMatchGraph),
|
||||
false
|
||||
);
|
||||
|
||||
if (node.disallowEmpty) {
|
||||
result = createCondition(
|
||||
result,
|
||||
DISALLOW_EMPTY,
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
|
||||
return result;
|
||||
|
||||
case 'Multiplier':
|
||||
return buildMultiplierMatchGraph(node);
|
||||
|
||||
case 'Type':
|
||||
case 'Property':
|
||||
return {
|
||||
type: node.type,
|
||||
name: node.name,
|
||||
syntax: node
|
||||
};
|
||||
|
||||
case 'Keyword':
|
||||
return {
|
||||
type: node.type,
|
||||
name: node.name.toLowerCase(),
|
||||
syntax: node
|
||||
};
|
||||
|
||||
case 'AtKeyword':
|
||||
return {
|
||||
type: node.type,
|
||||
name: '@' + node.name.toLowerCase(),
|
||||
syntax: node
|
||||
};
|
||||
|
||||
case 'Function':
|
||||
return {
|
||||
type: node.type,
|
||||
name: node.name.toLowerCase() + '(',
|
||||
syntax: node
|
||||
};
|
||||
|
||||
case 'String':
|
||||
// convert a one char length String to a Token
|
||||
if (node.value.length === 3) {
|
||||
return {
|
||||
type: 'Token',
|
||||
value: node.value.charAt(1),
|
||||
syntax: node
|
||||
};
|
||||
}
|
||||
|
||||
// otherwise use it as is
|
||||
return {
|
||||
type: node.type,
|
||||
value: node.value.substr(1, node.value.length - 2).replace(/\\'/g, '\''),
|
||||
syntax: node
|
||||
};
|
||||
|
||||
case 'Token':
|
||||
return {
|
||||
type: node.type,
|
||||
value: node.value,
|
||||
syntax: node
|
||||
};
|
||||
|
||||
case 'Comma':
|
||||
return {
|
||||
type: node.type,
|
||||
syntax: node
|
||||
};
|
||||
|
||||
default:
|
||||
throw new Error('Unknown node type:', node.type);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
MATCH: MATCH,
|
||||
MISMATCH: MISMATCH,
|
||||
DISALLOW_EMPTY: DISALLOW_EMPTY,
|
||||
buildMatchGraph: function(syntaxTree, ref) {
|
||||
if (typeof syntaxTree === 'string') {
|
||||
syntaxTree = parse(syntaxTree);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'MatchGraph',
|
||||
match: buildMatchGraph(syntaxTree),
|
||||
syntax: ref || null,
|
||||
source: syntaxTree
|
||||
};
|
||||
}
|
||||
};
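A minimal usage sketch for the module above. The require path simply mirrors the file location in this diff, and the grammar string is an illustrative assumption: the exported buildMatchGraph accepts a definition-syntax string, parses it internally, and compiles it with the internal buildMatchGraph shown above.
// Sketch only: compile a small grammar into a match graph.
var matchGraph = require('css-tree/lib/lexer/match-graph');

var graph = matchGraph.buildMatchGraph('left | right | center');

console.log(graph.type);        // 'MatchGraph'
console.log(graph.source.type); // typically 'Group' for an alternation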
|
||||
639
node_modules/css-tree/lib/lexer/match.js
generated
vendored
Normal file
639
node_modules/css-tree/lib/lexer/match.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,639 @@
|
|||
var hasOwnProperty = Object.prototype.hasOwnProperty;
|
||||
var matchGraph = require('./match-graph');
|
||||
var MATCH = matchGraph.MATCH;
|
||||
var MISMATCH = matchGraph.MISMATCH;
|
||||
var DISALLOW_EMPTY = matchGraph.DISALLOW_EMPTY;
|
||||
var TYPE = require('../tokenizer/const').TYPE;
|
||||
|
||||
var STUB = 0;
|
||||
var TOKEN = 1;
|
||||
var OPEN_SYNTAX = 2;
|
||||
var CLOSE_SYNTAX = 3;
|
||||
|
||||
var EXIT_REASON_MATCH = 'Match';
|
||||
var EXIT_REASON_MISMATCH = 'Mismatch';
|
||||
var EXIT_REASON_ITERATION_LIMIT = 'Maximum iteration number exceeded (please fill an issue on https://github.com/csstree/csstree/issues)';
|
||||
|
||||
var ITERATION_LIMIT = 15000;
|
||||
var totalIterationCount = 0;
|
||||
|
||||
function reverseList(list) {
|
||||
var prev = null;
|
||||
var next = null;
|
||||
var item = list;
|
||||
|
||||
while (item !== null) {
|
||||
next = item.prev;
|
||||
item.prev = prev;
|
||||
prev = item;
|
||||
item = next;
|
||||
}
|
||||
|
||||
return prev;
|
||||
}
|
||||
|
||||
function areStringsEqualCaseInsensitive(testStr, referenceStr) {
|
||||
if (testStr.length !== referenceStr.length) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (var i = 0; i < testStr.length; i++) {
|
||||
var testCode = testStr.charCodeAt(i);
|
||||
var referenceCode = referenceStr.charCodeAt(i);
|
||||
|
||||
// lowercase testCode when it is in the range U+0041 LATIN CAPITAL LETTER A (A) .. U+005A LATIN CAPITAL LETTER Z (Z).
|
||||
if (testCode >= 0x0041 && testCode <= 0x005A) {
|
||||
testCode = testCode | 32;
|
||||
}
|
||||
|
||||
if (testCode !== referenceCode) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function isContextEdgeDelim(token) {
|
||||
if (token.type !== TYPE.Delim) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Fix matching for unicode-range: U+30??, U+FF00-FF9F
|
||||
// Probably we need to check the previous match instead
|
||||
return token.value !== '?';
|
||||
}
|
||||
|
||||
function isCommaContextStart(token) {
|
||||
if (token === null) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return (
|
||||
token.type === TYPE.Comma ||
|
||||
token.type === TYPE.Function ||
|
||||
token.type === TYPE.LeftParenthesis ||
|
||||
token.type === TYPE.LeftSquareBracket ||
|
||||
token.type === TYPE.LeftCurlyBracket ||
|
||||
isContextEdgeDelim(token)
|
||||
);
|
||||
}
|
||||
|
||||
function isCommaContextEnd(token) {
|
||||
if (token === null) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return (
|
||||
token.type === TYPE.RightParenthesis ||
|
||||
token.type === TYPE.RightSquareBracket ||
|
||||
token.type === TYPE.RightCurlyBracket ||
|
||||
token.type === TYPE.Delim
|
||||
);
|
||||
}
|
||||
|
||||
function internalMatch(tokens, state, syntaxes) {
|
||||
function moveToNextToken() {
|
||||
do {
|
||||
tokenIndex++;
|
||||
token = tokenIndex < tokens.length ? tokens[tokenIndex] : null;
|
||||
} while (token !== null && (token.type === TYPE.WhiteSpace || token.type === TYPE.Comment));
|
||||
}
|
||||
|
||||
function getNextToken(offset) {
|
||||
var nextIndex = tokenIndex + offset;
|
||||
|
||||
return nextIndex < tokens.length ? tokens[nextIndex] : null;
|
||||
}
|
||||
|
||||
function stateSnapshotFromSyntax(nextState, prev) {
|
||||
return {
|
||||
nextState: nextState,
|
||||
matchStack: matchStack,
|
||||
syntaxStack: syntaxStack,
|
||||
thenStack: thenStack,
|
||||
tokenIndex: tokenIndex,
|
||||
prev: prev
|
||||
};
|
||||
}
|
||||
|
||||
function pushThenStack(nextState) {
|
||||
thenStack = {
|
||||
nextState: nextState,
|
||||
matchStack: matchStack,
|
||||
syntaxStack: syntaxStack,
|
||||
prev: thenStack
|
||||
};
|
||||
}
|
||||
|
||||
function pushElseStack(nextState) {
|
||||
elseStack = stateSnapshotFromSyntax(nextState, elseStack);
|
||||
}
|
||||
|
||||
function addTokenToMatch() {
|
||||
matchStack = {
|
||||
type: TOKEN,
|
||||
syntax: state.syntax,
|
||||
token: token,
|
||||
prev: matchStack
|
||||
};
|
||||
|
||||
moveToNextToken();
|
||||
syntaxStash = null;
|
||||
|
||||
if (tokenIndex > longestMatch) {
|
||||
longestMatch = tokenIndex;
|
||||
}
|
||||
}
|
||||
|
||||
function openSyntax() {
|
||||
syntaxStack = {
|
||||
syntax: state.syntax,
|
||||
opts: state.syntax.opts || (syntaxStack !== null && syntaxStack.opts) || null,
|
||||
prev: syntaxStack
|
||||
};
|
||||
|
||||
matchStack = {
|
||||
type: OPEN_SYNTAX,
|
||||
syntax: state.syntax,
|
||||
token: matchStack.token,
|
||||
prev: matchStack
|
||||
};
|
||||
}
|
||||
|
||||
function closeSyntax() {
|
||||
if (matchStack.type === OPEN_SYNTAX) {
|
||||
matchStack = matchStack.prev;
|
||||
} else {
|
||||
matchStack = {
|
||||
type: CLOSE_SYNTAX,
|
||||
syntax: syntaxStack.syntax,
|
||||
token: matchStack.token,
|
||||
prev: matchStack
|
||||
};
|
||||
}
|
||||
|
||||
syntaxStack = syntaxStack.prev;
|
||||
}
|
||||
|
||||
var syntaxStack = null;
|
||||
var thenStack = null;
|
||||
var elseStack = null;
|
||||
|
||||
// null – stashing allowed, nothing stashed
|
||||
// false – stashing disabled, nothing stashed
|
||||
// anything else – fail stashable syntaxes, some syntax stashed
|
||||
var syntaxStash = null;
|
||||
|
||||
var iterationCount = 0; // count iterations and prevent infinite loop
|
||||
var exitReason = null;
|
||||
|
||||
var token = null;
|
||||
var tokenIndex = -1;
|
||||
var longestMatch = 0;
|
||||
var matchStack = {
|
||||
type: STUB,
|
||||
syntax: null,
|
||||
token: null,
|
||||
prev: null
|
||||
};
|
||||
|
||||
moveToNextToken();
|
||||
|
||||
while (exitReason === null && ++iterationCount < ITERATION_LIMIT) {
|
||||
// function mapList(list, fn) {
|
||||
// var result = [];
|
||||
// while (list) {
|
||||
// result.unshift(fn(list));
|
||||
// list = list.prev;
|
||||
// }
|
||||
// return result;
|
||||
// }
|
||||
// console.log('--\n',
|
||||
// '#' + iterationCount,
|
||||
// require('util').inspect({
|
||||
// match: mapList(matchStack, x => x.type === TOKEN ? x.token && x.token.value : x.syntax ? ({ [OPEN_SYNTAX]: '<', [CLOSE_SYNTAX]: '</' }[x.type] || x.type) + '!' + x.syntax.name : null),
|
||||
// token: token && token.value,
|
||||
// tokenIndex,
|
||||
// syntax: syntax.type + (syntax.id ? ' #' + syntax.id : '')
|
||||
// }, { depth: null })
|
||||
// );
|
||||
switch (state.type) {
|
||||
case 'Match':
|
||||
if (thenStack === null) {
|
||||
// turn to MISMATCH when some tokens are left unmatched
|
||||
if (token !== null) {
|
||||
// don't mismatch if just one token is left and it's an IE hack
|
||||
if (tokenIndex !== tokens.length - 1 || (token.value !== '\\0' && token.value !== '\\9')) {
|
||||
state = MISMATCH;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// break the main loop, return a result - MATCH
|
||||
exitReason = EXIT_REASON_MATCH;
|
||||
break;
|
||||
}
|
||||
|
||||
// go to next syntax (`then` branch)
|
||||
state = thenStack.nextState;
|
||||
|
||||
// check match is not empty
|
||||
if (state === DISALLOW_EMPTY) {
|
||||
if (thenStack.matchStack === matchStack) {
|
||||
state = MISMATCH;
|
||||
break;
|
||||
} else {
|
||||
state = MATCH;
|
||||
}
|
||||
}
|
||||
|
||||
// close syntax if needed
|
||||
while (thenStack.syntaxStack !== syntaxStack) {
|
||||
closeSyntax();
|
||||
}
|
||||
|
||||
// pop stack
|
||||
thenStack = thenStack.prev;
|
||||
break;
|
||||
|
||||
case 'Mismatch':
|
||||
// when some syntax is stashed
|
||||
if (syntaxStash !== null && syntaxStash !== false) {
|
||||
// there are no else branches, or a branch reduced the match stack
|
||||
if (elseStack === null || tokenIndex > elseStack.tokenIndex) {
|
||||
// restore state from the stash
|
||||
elseStack = syntaxStash;
|
||||
syntaxStash = false; // disable stashing
|
||||
}
|
||||
} else if (elseStack === null) {
|
||||
// no else branches -> break the main loop
|
||||
// return a result - MISMATCH
|
||||
exitReason = EXIT_REASON_MISMATCH;
|
||||
break;
|
||||
}
|
||||
|
||||
// go to next syntax (`else` branch)
|
||||
state = elseStack.nextState;
|
||||
|
||||
// restore the rest of the stack state
|
||||
thenStack = elseStack.thenStack;
|
||||
syntaxStack = elseStack.syntaxStack;
|
||||
matchStack = elseStack.matchStack;
|
||||
tokenIndex = elseStack.tokenIndex;
|
||||
token = tokenIndex < tokens.length ? tokens[tokenIndex] : null;
|
||||
|
||||
// pop stack
|
||||
elseStack = elseStack.prev;
|
||||
break;
|
||||
|
||||
case 'MatchGraph':
|
||||
state = state.match;
|
||||
break;
|
||||
|
||||
case 'If':
|
||||
// IMPORTANT: else stack push must go first,
|
||||
// since it stores the state of thenStack before changes
|
||||
if (state.else !== MISMATCH) {
|
||||
pushElseStack(state.else);
|
||||
}
|
||||
|
||||
if (state.then !== MATCH) {
|
||||
pushThenStack(state.then);
|
||||
}
|
||||
|
||||
state = state.match;
|
||||
break;
|
||||
|
||||
case 'MatchOnce':
|
||||
state = {
|
||||
type: 'MatchOnceBuffer',
|
||||
syntax: state,
|
||||
index: 0,
|
||||
mask: 0
|
||||
};
|
||||
break;
|
||||
|
||||
case 'MatchOnceBuffer':
|
||||
var terms = state.syntax.terms;
|
||||
|
||||
if (state.index === terms.length) {
|
||||
// no matches at all, or all terms are required to match
|
||||
if (state.mask === 0 || state.syntax.all) {
|
||||
state = MISMATCH;
|
||||
break;
|
||||
}
|
||||
|
||||
// a partial match is ok
|
||||
state = MATCH;
|
||||
break;
|
||||
}
|
||||
|
||||
// all terms are matched
|
||||
if (state.mask === (1 << terms.length) - 1) {
|
||||
state = MATCH;
|
||||
break;
|
||||
}
|
||||
|
||||
for (; state.index < terms.length; state.index++) {
|
||||
var matchFlag = 1 << state.index;
|
||||
|
||||
if ((state.mask & matchFlag) === 0) {
|
||||
// IMPORTANT: else stack push must go first,
|
||||
// since it stores the state of thenStack before changes
|
||||
pushElseStack(state);
|
||||
pushThenStack({
|
||||
type: 'AddMatchOnce',
|
||||
syntax: state.syntax,
|
||||
mask: state.mask | matchFlag
|
||||
});
|
||||
|
||||
// match
|
||||
state = terms[state.index++];
|
||||
break;
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case 'AddMatchOnce':
|
||||
state = {
|
||||
type: 'MatchOnceBuffer',
|
||||
syntax: state.syntax,
|
||||
index: 0,
|
||||
mask: state.mask
|
||||
};
|
||||
break;
|
||||
|
||||
case 'Enum':
|
||||
if (token !== null) {
|
||||
var name = token.value.toLowerCase();
|
||||
|
||||
// drop \0 and \9 hack from keyword name
|
||||
if (name.indexOf('\\') !== -1) {
|
||||
name = name.replace(/\\[09].*$/, '');
|
||||
}
|
||||
|
||||
if (hasOwnProperty.call(state.map, name)) {
|
||||
state = state.map[name];
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
state = MISMATCH;
|
||||
break;
|
||||
|
||||
case 'Generic':
|
||||
var opts = syntaxStack !== null ? syntaxStack.opts : null;
|
||||
var lastTokenIndex = tokenIndex + Math.floor(state.fn(token, getNextToken, opts));
|
||||
|
||||
if (!isNaN(lastTokenIndex) && lastTokenIndex > tokenIndex) {
|
||||
while (tokenIndex < lastTokenIndex) {
|
||||
addTokenToMatch();
|
||||
}
|
||||
|
||||
state = MATCH;
|
||||
} else {
|
||||
state = MISMATCH;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case 'Type':
|
||||
case 'Property':
|
||||
var syntaxDict = state.type === 'Type' ? 'types' : 'properties';
|
||||
var dictSyntax = hasOwnProperty.call(syntaxes, syntaxDict) ? syntaxes[syntaxDict][state.name] : null;
|
||||
|
||||
if (!dictSyntax || !dictSyntax.match) {
|
||||
throw new Error(
|
||||
'Bad syntax reference: ' +
|
||||
(state.type === 'Type'
|
||||
? '<' + state.name + '>'
|
||||
: '<\'' + state.name + '\'>')
|
||||
);
|
||||
}
|
||||
|
||||
// stash a syntax for types with low priority
|
||||
if (syntaxStash !== false && token !== null && state.type === 'Type') {
|
||||
var lowPriorityMatching =
|
||||
// https://drafts.csswg.org/css-values-4/#custom-idents
|
||||
// When parsing positionally-ambiguous keywords in a property value, a <custom-ident> production
|
||||
// can only claim the keyword if no other unfulfilled production can claim it.
|
||||
(state.name === 'custom-ident' && token.type === TYPE.Ident) ||
|
||||
|
||||
// https://drafts.csswg.org/css-values-4/#lengths
|
||||
// ... if a `0` could be parsed as either a <number> or a <length> in a property (such as line-height),
|
||||
// it must parse as a <number>
|
||||
(state.name === 'length' && token.value === '0');
|
||||
|
||||
if (lowPriorityMatching) {
|
||||
if (syntaxStash === null) {
|
||||
syntaxStash = stateSnapshotFromSyntax(state, elseStack);
|
||||
}
|
||||
|
||||
state = MISMATCH;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
openSyntax();
|
||||
state = dictSyntax.match;
|
||||
break;
|
||||
|
||||
case 'Keyword':
|
||||
var name = state.name;
|
||||
|
||||
if (token !== null) {
|
||||
var keywordName = token.value;
|
||||
|
||||
// drop \0 and \9 hack from keyword name
|
||||
if (keywordName.indexOf('\\') !== -1) {
|
||||
keywordName = keywordName.replace(/\\[09].*$/, '');
|
||||
}
|
||||
|
||||
if (areStringsEqualCaseInsensitive(keywordName, name)) {
|
||||
addTokenToMatch();
|
||||
state = MATCH;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
state = MISMATCH;
|
||||
break;
|
||||
|
||||
case 'AtKeyword':
|
||||
case 'Function':
|
||||
if (token !== null && areStringsEqualCaseInsensitive(token.value, state.name)) {
|
||||
addTokenToMatch();
|
||||
state = MATCH;
|
||||
break;
|
||||
}
|
||||
|
||||
state = MISMATCH;
|
||||
break;
|
||||
|
||||
case 'Token':
|
||||
if (token !== null && token.value === state.value) {
|
||||
addTokenToMatch();
|
||||
state = MATCH;
|
||||
break;
|
||||
}
|
||||
|
||||
state = MISMATCH;
|
||||
break;
|
||||
|
||||
case 'Comma':
|
||||
if (token !== null && token.type === TYPE.Comma) {
|
||||
if (isCommaContextStart(matchStack.token)) {
|
||||
state = MISMATCH;
|
||||
} else {
|
||||
addTokenToMatch();
|
||||
state = isCommaContextEnd(token) ? MISMATCH : MATCH;
|
||||
}
|
||||
} else {
|
||||
state = isCommaContextStart(matchStack.token) || isCommaContextEnd(token) ? MATCH : MISMATCH;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case 'String':
|
||||
var string = '';
|
||||
|
||||
for (var lastTokenIndex = tokenIndex; lastTokenIndex < tokens.length && string.length < state.value.length; lastTokenIndex++) {
|
||||
string += tokens[lastTokenIndex].value;
|
||||
}
|
||||
|
||||
if (areStringsEqualCaseInsensitive(string, state.value)) {
|
||||
while (tokenIndex < lastTokenIndex) {
|
||||
addTokenToMatch();
|
||||
}
|
||||
|
||||
state = MATCH;
|
||||
} else {
|
||||
state = MISMATCH;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new Error('Unknown node type: ' + state.type);
|
||||
}
|
||||
}
|
||||
|
||||
totalIterationCount += iterationCount;
|
||||
|
||||
switch (exitReason) {
|
||||
case null:
|
||||
console.warn('[csstree-match] BREAK after ' + ITERATION_LIMIT + ' iterations');
|
||||
exitReason = EXIT_REASON_ITERATION_LIMIT;
|
||||
matchStack = null;
|
||||
break;
|
||||
|
||||
case EXIT_REASON_MATCH:
|
||||
while (syntaxStack !== null) {
|
||||
closeSyntax();
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
matchStack = null;
|
||||
}
|
||||
|
||||
return {
|
||||
tokens: tokens,
|
||||
reason: exitReason,
|
||||
iterations: iterationCount,
|
||||
match: matchStack,
|
||||
longestMatch: longestMatch
|
||||
};
|
||||
}
|
||||
|
||||
function matchAsList(tokens, matchGraph, syntaxes) {
|
||||
var matchResult = internalMatch(tokens, matchGraph, syntaxes || {});
|
||||
|
||||
if (matchResult.match !== null) {
|
||||
var item = reverseList(matchResult.match).prev;
|
||||
|
||||
matchResult.match = [];
|
||||
|
||||
while (item !== null) {
|
||||
switch (item.type) {
|
||||
case STUB:
|
||||
break;
|
||||
|
||||
case OPEN_SYNTAX:
|
||||
case CLOSE_SYNTAX:
|
||||
matchResult.match.push({
|
||||
type: item.type,
|
||||
syntax: item.syntax
|
||||
});
|
||||
break;
|
||||
|
||||
default:
|
||||
matchResult.match.push({
|
||||
token: item.token.value,
|
||||
node: item.token.node
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
item = item.prev;
|
||||
}
|
||||
}
|
||||
|
||||
return matchResult;
|
||||
}
|
||||
|
||||
function matchAsTree(tokens, matchGraph, syntaxes) {
|
||||
var matchResult = internalMatch(tokens, matchGraph, syntaxes || {});
|
||||
|
||||
if (matchResult.match === null) {
|
||||
return matchResult;
|
||||
}
|
||||
|
||||
var item = matchResult.match;
|
||||
var host = matchResult.match = {
|
||||
syntax: matchGraph.syntax || null,
|
||||
match: []
|
||||
};
|
||||
var hostStack = [host];
|
||||
|
||||
// reverse the list and start with the 2nd item since the 1st is a stub item
|
||||
item = reverseList(item).prev;
|
||||
|
||||
// build a tree
|
||||
while (item !== null) {
|
||||
switch (item.type) {
|
||||
case OPEN_SYNTAX:
|
||||
host.match.push(host = {
|
||||
syntax: item.syntax,
|
||||
match: []
|
||||
});
|
||||
hostStack.push(host);
|
||||
break;
|
||||
|
||||
case CLOSE_SYNTAX:
|
||||
hostStack.pop();
|
||||
host = hostStack[hostStack.length - 1];
|
||||
break;
|
||||
|
||||
default:
|
||||
host.match.push({
|
||||
syntax: item.syntax || null,
|
||||
token: item.token.value,
|
||||
node: item.token.node
|
||||
});
|
||||
}
|
||||
|
||||
item = item.prev;
|
||||
}
|
||||
|
||||
return matchResult;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
matchAsList: matchAsList,
|
||||
matchAsTree: matchAsTree,
|
||||
getTotalIterationCount: function() {
|
||||
return totalIterationCount;
|
||||
}
|
||||
};
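To show how these exports fit together, here is a hedged sketch that matches a value against a keyword-only grammar (so the empty syntaxes dictionary is never consulted). It uses the prepare-tokens helper added further down in this same diff; the require paths mirror the file locations and the logged values are expected results, not captured output.
// Sketch only: keyword-only grammar, so no <type>/<'property'> lookups occur.
var buildMatchGraph = require('css-tree/lib/lexer/match-graph').buildMatchGraph;
var prepareTokens = require('css-tree/lib/lexer/prepare-tokens');
var matchAsTree = require('css-tree/lib/lexer/match').matchAsTree;

var graph = buildMatchGraph('left | right | center');
var tokens = prepareTokens('center', null);

var result = matchAsTree(tokens, graph, {});

console.log(result.reason);         // 'Match' on success, 'Mismatch' otherwise
console.log(result.iterations > 0); // true - internalMatch counts its loop turns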
|
||||
73
node_modules/css-tree/lib/lexer/prepare-tokens.js
generated
vendored
Normal file
73
node_modules/css-tree/lib/lexer/prepare-tokens.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,73 @@
|
|||
var tokenize = require('../tokenizer');
|
||||
var TokenStream = require('../common/TokenStream');
|
||||
var tokenStream = new TokenStream();
|
||||
var astToTokens = {
|
||||
decorator: function(handlers) {
|
||||
var curNode = null;
|
||||
var prev = { len: 0, node: null };
|
||||
var nodes = [prev];
|
||||
var buffer = '';
|
||||
|
||||
return {
|
||||
children: handlers.children,
|
||||
node: function(node) {
|
||||
var tmp = curNode;
|
||||
curNode = node;
|
||||
handlers.node.call(this, node);
|
||||
curNode = tmp;
|
||||
},
|
||||
chunk: function(chunk) {
|
||||
buffer += chunk;
|
||||
if (prev.node !== curNode) {
|
||||
nodes.push({
|
||||
len: chunk.length,
|
||||
node: curNode
|
||||
});
|
||||
} else {
|
||||
prev.len += chunk.length;
|
||||
}
|
||||
},
|
||||
result: function() {
|
||||
return prepareTokens(buffer, nodes);
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
function prepareTokens(str, nodes) {
|
||||
var tokens = [];
|
||||
var nodesOffset = 0;
|
||||
var nodesIndex = 0;
|
||||
var currentNode = nodes ? nodes[nodesIndex].node : null;
|
||||
|
||||
tokenize(str, tokenStream);
|
||||
|
||||
while (!tokenStream.eof) {
|
||||
if (nodes) {
|
||||
while (nodesIndex < nodes.length && nodesOffset + nodes[nodesIndex].len <= tokenStream.tokenStart) {
|
||||
nodesOffset += nodes[nodesIndex++].len;
|
||||
currentNode = nodes[nodesIndex].node;
|
||||
}
|
||||
}
|
||||
|
||||
tokens.push({
|
||||
type: tokenStream.tokenType,
|
||||
value: tokenStream.getTokenValue(),
|
||||
index: tokenStream.tokenIndex, // TODO: remove it, temporary solution
|
||||
balance: tokenStream.balance[tokenStream.tokenIndex], // TODO: remove it, temporary solution
|
||||
node: currentNode
|
||||
});
|
||||
tokenStream.next();
|
||||
// console.log({ ...tokens[tokens.length - 1], node: undefined });
|
||||
}
|
||||
|
||||
return tokens;
|
||||
}
|
||||
|
||||
module.exports = function(value, syntax) {
|
||||
if (typeof value === 'string') {
|
||||
return prepareTokens(value, null);
|
||||
}
|
||||
|
||||
return syntax.generate(value, astToTokens);
|
||||
};
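A small sketch of what this module produces for a plain string input. The token values shown are what the tokenizer is expected to emit for this input; treat them as illustrative rather than normative.
// Sketch only: tokenize a declaration value into the flat descriptors
// consumed by lib/lexer/match.js.
var prepareTokens = require('css-tree/lib/lexer/prepare-tokens');

var tokens = prepareTokens('1px solid red', null);

console.log(tokens.map(function(t) { return t.value; }));
// expected: [ '1px', ' ', 'solid', ' ', 'red' ]
// each entry also carries { type, index, balance, node } (node is null here)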
|
||||
65
node_modules/css-tree/lib/lexer/search.js
generated
vendored
Normal file
65
node_modules/css-tree/lib/lexer/search.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,65 @@
|
|||
var List = require('../common/List');
|
||||
|
||||
function getFirstMatchNode(matchNode) {
|
||||
if ('node' in matchNode) {
|
||||
return matchNode.node;
|
||||
}
|
||||
|
||||
return getFirstMatchNode(matchNode.match[0]);
|
||||
}
|
||||
|
||||
function getLastMatchNode(matchNode) {
|
||||
if ('node' in matchNode) {
|
||||
return matchNode.node;
|
||||
}
|
||||
|
||||
return getLastMatchNode(matchNode.match[matchNode.match.length - 1]);
|
||||
}
|
||||
|
||||
function matchFragments(lexer, ast, match, type, name) {
|
||||
function findFragments(matchNode) {
|
||||
if (matchNode.syntax !== null &&
|
||||
matchNode.syntax.type === type &&
|
||||
matchNode.syntax.name === name) {
|
||||
var start = getFirstMatchNode(matchNode);
|
||||
var end = getLastMatchNode(matchNode);
|
||||
|
||||
lexer.syntax.walk(ast, function(node, item, list) {
|
||||
if (node === start) {
|
||||
var nodes = new List();
|
||||
|
||||
do {
|
||||
nodes.appendData(item.data);
|
||||
|
||||
if (item.data === end) {
|
||||
break;
|
||||
}
|
||||
|
||||
item = item.next;
|
||||
} while (item !== null);
|
||||
|
||||
fragments.push({
|
||||
parent: list,
|
||||
nodes: nodes
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (Array.isArray(matchNode.match)) {
|
||||
matchNode.match.forEach(findFragments);
|
||||
}
|
||||
}
|
||||
|
||||
var fragments = [];
|
||||
|
||||
if (match.matched !== null) {
|
||||
findFragments(match.matched);
|
||||
}
|
||||
|
||||
return fragments;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
matchFragments: matchFragments
|
||||
};
|
||||
163
node_modules/css-tree/lib/lexer/structure.js
generated
vendored
Normal file
163
node_modules/css-tree/lib/lexer/structure.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,163 @@
|
|||
var List = require('../common/List');
|
||||
var hasOwnProperty = Object.prototype.hasOwnProperty;
|
||||
|
||||
function isValidNumber(value) {
|
||||
// Number.isInteger(value) && value >= 0
|
||||
return (
|
||||
typeof value === 'number' &&
|
||||
isFinite(value) &&
|
||||
Math.floor(value) === value &&
|
||||
value >= 0
|
||||
);
|
||||
}
|
||||
|
||||
function isValidLocation(loc) {
|
||||
return (
|
||||
Boolean(loc) &&
|
||||
isValidNumber(loc.offset) &&
|
||||
isValidNumber(loc.line) &&
|
||||
isValidNumber(loc.column)
|
||||
);
|
||||
}
|
||||
|
||||
function createNodeStructureChecker(type, fields) {
|
||||
return function checkNode(node, warn) {
|
||||
if (!node || node.constructor !== Object) {
|
||||
return warn(node, 'Type of node should be an Object');
|
||||
}
|
||||
|
||||
for (var key in node) {
|
||||
var valid = true;
|
||||
|
||||
if (hasOwnProperty.call(node, key) === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (key === 'type') {
|
||||
if (node.type !== type) {
|
||||
warn(node, 'Wrong node type `' + node.type + '`, expected `' + type + '`');
|
||||
}
|
||||
} else if (key === 'loc') {
|
||||
if (node.loc === null) {
|
||||
continue;
|
||||
} else if (node.loc && node.loc.constructor === Object) {
|
||||
if (typeof node.loc.source !== 'string') {
|
||||
key += '.source';
|
||||
} else if (!isValidLocation(node.loc.start)) {
|
||||
key += '.start';
|
||||
} else if (!isValidLocation(node.loc.end)) {
|
||||
key += '.end';
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
valid = false;
|
||||
} else if (fields.hasOwnProperty(key)) {
|
||||
for (var i = 0, valid = false; !valid && i < fields[key].length; i++) {
|
||||
var fieldType = fields[key][i];
|
||||
|
||||
switch (fieldType) {
|
||||
case String:
|
||||
valid = typeof node[key] === 'string';
|
||||
break;
|
||||
|
||||
case Boolean:
|
||||
valid = typeof node[key] === 'boolean';
|
||||
break;
|
||||
|
||||
case null:
|
||||
valid = node[key] === null;
|
||||
break;
|
||||
|
||||
default:
|
||||
if (typeof fieldType === 'string') {
|
||||
valid = node[key] && node[key].type === fieldType;
|
||||
} else if (Array.isArray(fieldType)) {
|
||||
valid = node[key] instanceof List;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
warn(node, 'Unknown field `' + key + '` for ' + type + ' node type');
|
||||
}
|
||||
|
||||
if (!valid) {
|
||||
warn(node, 'Bad value for `' + type + '.' + key + '`');
|
||||
}
|
||||
}
|
||||
|
||||
for (var key in fields) {
|
||||
if (hasOwnProperty.call(fields, key) &&
|
||||
hasOwnProperty.call(node, key) === false) {
|
||||
warn(node, 'Field `' + type + '.' + key + '` is missed');
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function processStructure(name, nodeType) {
|
||||
var structure = nodeType.structure;
|
||||
var fields = {
|
||||
type: String,
|
||||
loc: true
|
||||
};
|
||||
var docs = {
|
||||
type: '"' + name + '"'
|
||||
};
|
||||
|
||||
for (var key in structure) {
|
||||
if (hasOwnProperty.call(structure, key) === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
var docsTypes = [];
|
||||
var fieldTypes = fields[key] = Array.isArray(structure[key])
|
||||
? structure[key].slice()
|
||||
: [structure[key]];
|
||||
|
||||
for (var i = 0; i < fieldTypes.length; i++) {
|
||||
var fieldType = fieldTypes[i];
|
||||
if (fieldType === String || fieldType === Boolean) {
|
||||
docsTypes.push(fieldType.name);
|
||||
} else if (fieldType === null) {
|
||||
docsTypes.push('null');
|
||||
} else if (typeof fieldType === 'string') {
|
||||
docsTypes.push('<' + fieldType + '>');
|
||||
} else if (Array.isArray(fieldType)) {
|
||||
docsTypes.push('List'); // TODO: use type enum
|
||||
} else {
|
||||
throw new Error('Wrong value `' + fieldType + '` in `' + name + '.' + key + '` structure definition');
|
||||
}
|
||||
}
|
||||
|
||||
docs[key] = docsTypes.join(' | ');
|
||||
}
|
||||
|
||||
return {
|
||||
docs: docs,
|
||||
check: createNodeStructureChecker(name, fields)
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getStructureFromConfig: function(config) {
|
||||
var structure = {};
|
||||
|
||||
if (config.node) {
|
||||
for (var name in config.node) {
|
||||
if (hasOwnProperty.call(config.node, name)) {
|
||||
var nodeType = config.node[name];
|
||||
|
||||
if (nodeType.structure) {
|
||||
structure[name] = processStructure(name, nodeType);
|
||||
} else {
|
||||
throw new Error('Missed `structure` field in `' + name + '` node type definition');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return structure;
|
||||
}
|
||||
};
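As a sketch of how getStructureFromConfig behaves, here is a hypothetical single-node config; the Identifier shape below is an assumption for illustration, not the package's real node definition.
// Sketch only: derive docs and a structure checker for one node type.
var getStructureFromConfig = require('css-tree/lib/lexer/structure').getStructureFromConfig;

var structure = getStructureFromConfig({
    node: {
        Identifier: {               // hypothetical node type for illustration
            structure: {
                name: String
            }
        }
    }
});

console.log(structure.Identifier.docs);
// { type: '"Identifier"', name: 'String' }

structure.Identifier.check(
    { type: 'Identifier', loc: null, name: 42 },
    function warn(node, message) {
        console.log(message);       // 'Bad value for `Identifier.name`'
    }
);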
|
||||
79
node_modules/css-tree/lib/lexer/trace.js
generated
vendored
Normal file
79
node_modules/css-tree/lib/lexer/trace.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,79 @@
|
|||
function getTrace(node) {
|
||||
function shouldPutToTrace(syntax) {
|
||||
if (syntax === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return (
|
||||
syntax.type === 'Type' ||
|
||||
syntax.type === 'Property' ||
|
||||
syntax.type === 'Keyword'
|
||||
);
|
||||
}
|
||||
|
||||
function hasMatch(matchNode) {
|
||||
if (Array.isArray(matchNode.match)) {
|
||||
// use a for-loop for better performance
|
||||
for (var i = 0; i < matchNode.match.length; i++) {
|
||||
if (hasMatch(matchNode.match[i])) {
|
||||
if (shouldPutToTrace(matchNode.syntax)) {
|
||||
result.unshift(matchNode.syntax);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
} else if (matchNode.node === node) {
|
||||
result = shouldPutToTrace(matchNode.syntax)
|
||||
? [matchNode.syntax]
|
||||
: [];
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
var result = null;
|
||||
|
||||
if (this.matched !== null) {
|
||||
hasMatch(this.matched);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function testNode(match, node, fn) {
|
||||
var trace = getTrace.call(match, node);
|
||||
|
||||
if (trace === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return trace.some(fn);
|
||||
}
|
||||
|
||||
function isType(node, type) {
|
||||
return testNode(this, node, function(matchNode) {
|
||||
return matchNode.type === 'Type' && matchNode.name === type;
|
||||
});
|
||||
}
|
||||
|
||||
function isProperty(node, property) {
|
||||
return testNode(this, node, function(matchNode) {
|
||||
return matchNode.type === 'Property' && matchNode.name === property;
|
||||
});
|
||||
}
|
||||
|
||||
function isKeyword(node) {
|
||||
return testNode(this, node, function(matchNode) {
|
||||
return matchNode.type === 'Keyword';
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getTrace: getTrace,
|
||||
isType: isType,
|
||||
isProperty: isProperty,
|
||||
isKeyword: isKeyword
|
||||
};
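These helpers expect `this` to be bound to a match result that exposes a `matched` tree, as produced by matchAsTree elsewhere in the lexer. The tree below is hand-built purely for illustration.
// Sketch only: a hand-built `matched` tree mimicking a real match result.
var trace = require('css-tree/lib/lexer/trace');

var node = { type: 'Identifier', name: 'auto' };
var matchResult = {
    matched: {
        syntax: { type: 'Property', name: 'width' },
        match: [
            { syntax: { type: 'Keyword', name: 'auto' }, node: node }
        ]
    }
};

console.log(trace.getTrace.call(matchResult, node));
// [ { type: 'Property', name: 'width' }, { type: 'Keyword', name: 'auto' } ]
console.log(trace.isKeyword.call(matchResult, node));        // true
console.log(trace.isType.call(matchResult, node, 'length')); // false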
|
||||
304
node_modules/css-tree/lib/parser/create.js
generated
vendored
Normal file
304
node_modules/css-tree/lib/parser/create.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,304 @@
|
|||
var OffsetToLocation = require('../common/OffsetToLocation');
|
||||
var SyntaxError = require('../common/SyntaxError');
|
||||
var TokenStream = require('../common/TokenStream');
|
||||
var List = require('../common/List');
|
||||
var tokenize = require('../tokenizer');
|
||||
var constants = require('../tokenizer/const');
|
||||
var { findWhiteSpaceStart, cmpStr } = require('../tokenizer/utils');
|
||||
var sequence = require('./sequence');
|
||||
var noop = function() {};
|
||||
|
||||
var TYPE = constants.TYPE;
|
||||
var NAME = constants.NAME;
|
||||
var WHITESPACE = TYPE.WhiteSpace;
|
||||
var COMMENT = TYPE.Comment;
|
||||
var IDENT = TYPE.Ident;
|
||||
var FUNCTION = TYPE.Function;
|
||||
var URL = TYPE.Url;
|
||||
var HASH = TYPE.Hash;
|
||||
var PERCENTAGE = TYPE.Percentage;
|
||||
var NUMBER = TYPE.Number;
|
||||
var NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
|
||||
var NULL = 0;
|
||||
|
||||
function createParseContext(name) {
|
||||
return function() {
|
||||
return this[name]();
|
||||
};
|
||||
}
|
||||
|
||||
function processConfig(config) {
|
||||
var parserConfig = {
|
||||
context: {},
|
||||
scope: {},
|
||||
atrule: {},
|
||||
pseudo: {}
|
||||
};
|
||||
|
||||
if (config.parseContext) {
|
||||
for (var name in config.parseContext) {
|
||||
switch (typeof config.parseContext[name]) {
|
||||
case 'function':
|
||||
parserConfig.context[name] = config.parseContext[name];
|
||||
break;
|
||||
|
||||
case 'string':
|
||||
parserConfig.context[name] = createParseContext(config.parseContext[name]);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (config.scope) {
|
||||
for (var name in config.scope) {
|
||||
parserConfig.scope[name] = config.scope[name];
|
||||
}
|
||||
}
|
||||
|
||||
if (config.atrule) {
|
||||
for (var name in config.atrule) {
|
||||
var atrule = config.atrule[name];
|
||||
|
||||
if (atrule.parse) {
|
||||
parserConfig.atrule[name] = atrule.parse;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (config.pseudo) {
|
||||
for (var name in config.pseudo) {
|
||||
var pseudo = config.pseudo[name];
|
||||
|
||||
if (pseudo.parse) {
|
||||
parserConfig.pseudo[name] = pseudo.parse;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (config.node) {
|
||||
for (var name in config.node) {
|
||||
parserConfig[name] = config.node[name].parse;
|
||||
}
|
||||
}
|
||||
|
||||
return parserConfig;
|
||||
}
|
||||
|
||||
module.exports = function createParser(config) {
|
||||
var parser = {
|
||||
scanner: new TokenStream(),
|
||||
locationMap: new OffsetToLocation(),
|
||||
|
||||
filename: '<unknown>',
|
||||
needPositions: false,
|
||||
onParseError: noop,
|
||||
onParseErrorThrow: false,
|
||||
parseAtrulePrelude: true,
|
||||
parseRulePrelude: true,
|
||||
parseValue: true,
|
||||
parseCustomProperty: false,
|
||||
|
||||
readSequence: sequence,
|
||||
|
||||
createList: function() {
|
||||
return new List();
|
||||
},
|
||||
createSingleNodeList: function(node) {
|
||||
return new List().appendData(node);
|
||||
},
|
||||
getFirstListNode: function(list) {
|
||||
return list && list.first();
|
||||
},
|
||||
getLastListNode: function(list) {
|
||||
return list.last();
|
||||
},
|
||||
|
||||
parseWithFallback: function(consumer, fallback) {
|
||||
var startToken = this.scanner.tokenIndex;
|
||||
|
||||
try {
|
||||
return consumer.call(this);
|
||||
} catch (e) {
|
||||
if (this.onParseErrorThrow) {
|
||||
throw e;
|
||||
}
|
||||
|
||||
var fallbackNode = fallback.call(this, startToken);
|
||||
|
||||
this.onParseErrorThrow = true;
|
||||
this.onParseError(e, fallbackNode);
|
||||
this.onParseErrorThrow = false;
|
||||
|
||||
return fallbackNode;
|
||||
}
|
||||
},
|
||||
|
||||
lookupNonWSType: function(offset) {
|
||||
do {
|
||||
var type = this.scanner.lookupType(offset++);
|
||||
if (type !== WHITESPACE) {
|
||||
return type;
|
||||
}
|
||||
} while (type !== NULL);
|
||||
|
||||
return NULL;
|
||||
},
|
||||
|
||||
eat: function(tokenType) {
|
||||
if (this.scanner.tokenType !== tokenType) {
|
||||
var offset = this.scanner.tokenStart;
|
||||
var message = NAME[tokenType] + ' is expected';
|
||||
|
||||
// tweak message and offset
|
||||
switch (tokenType) {
|
||||
case IDENT:
|
||||
// when identifier is expected but there is a function or url
|
||||
if (this.scanner.tokenType === FUNCTION || this.scanner.tokenType === URL) {
|
||||
offset = this.scanner.tokenEnd - 1;
|
||||
message = 'Identifier is expected but function found';
|
||||
} else {
|
||||
message = 'Identifier is expected';
|
||||
}
|
||||
break;
|
||||
|
||||
case HASH:
|
||||
if (this.scanner.isDelim(NUMBERSIGN)) {
|
||||
this.scanner.next();
|
||||
offset++;
|
||||
message = 'Name is expected';
|
||||
}
|
||||
break;
|
||||
|
||||
case PERCENTAGE:
|
||||
if (this.scanner.tokenType === NUMBER) {
|
||||
offset = this.scanner.tokenEnd;
|
||||
message = 'Percent sign is expected';
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
// when the tested type is part of another token, show the error at current position + 1
|
||||
// e.g. eat(HYPHENMINUS) will fail on "-foo", but pointing on "-" is odd
|
||||
if (this.scanner.source.charCodeAt(this.scanner.tokenStart) === tokenType) {
|
||||
offset = offset + 1;
|
||||
}
|
||||
}
|
||||
|
||||
this.error(message, offset);
|
||||
}
|
||||
|
||||
this.scanner.next();
|
||||
},
|
||||
|
||||
consume: function(tokenType) {
|
||||
var value = this.scanner.getTokenValue();
|
||||
|
||||
this.eat(tokenType);
|
||||
|
||||
return value;
|
||||
},
|
||||
consumeFunctionName: function() {
|
||||
var name = this.scanner.source.substring(this.scanner.tokenStart, this.scanner.tokenEnd - 1);
|
||||
|
||||
this.eat(FUNCTION);
|
||||
|
||||
return name;
|
||||
},
|
||||
|
||||
getLocation: function(start, end) {
|
||||
if (this.needPositions) {
|
||||
return this.locationMap.getLocationRange(
|
||||
start,
|
||||
end,
|
||||
this.filename
|
||||
);
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
getLocationFromList: function(list) {
|
||||
if (this.needPositions) {
|
||||
var head = this.getFirstListNode(list);
|
||||
var tail = this.getLastListNode(list);
|
||||
return this.locationMap.getLocationRange(
|
||||
head !== null ? head.loc.start.offset - this.locationMap.startOffset : this.scanner.tokenStart,
|
||||
tail !== null ? tail.loc.end.offset - this.locationMap.startOffset : this.scanner.tokenStart,
|
||||
this.filename
|
||||
);
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
|
||||
error: function(message, offset) {
|
||||
var location = typeof offset !== 'undefined' && offset < this.scanner.source.length
|
||||
? this.locationMap.getLocation(offset)
|
||||
: this.scanner.eof
|
||||
? this.locationMap.getLocation(findWhiteSpaceStart(this.scanner.source, this.scanner.source.length - 1))
|
||||
: this.locationMap.getLocation(this.scanner.tokenStart);
|
||||
|
||||
throw new SyntaxError(
|
||||
message || 'Unexpected input',
|
||||
this.scanner.source,
|
||||
location.offset,
|
||||
location.line,
|
||||
location.column
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
config = processConfig(config || {});
|
||||
for (var key in config) {
|
||||
parser[key] = config[key];
|
||||
}
|
||||
|
||||
return function(source, options) {
|
||||
options = options || {};
|
||||
|
||||
var context = options.context || 'default';
|
||||
var onComment = options.onComment;
|
||||
var ast;
|
||||
|
||||
tokenize(source, parser.scanner);
|
||||
parser.locationMap.setSource(
|
||||
source,
|
||||
options.offset,
|
||||
options.line,
|
||||
options.column
|
||||
);
|
||||
|
||||
parser.filename = options.filename || '<unknown>';
|
||||
parser.needPositions = Boolean(options.positions);
|
||||
parser.onParseError = typeof options.onParseError === 'function' ? options.onParseError : noop;
|
||||
parser.onParseErrorThrow = false;
|
||||
parser.parseAtrulePrelude = 'parseAtrulePrelude' in options ? Boolean(options.parseAtrulePrelude) : true;
|
||||
parser.parseRulePrelude = 'parseRulePrelude' in options ? Boolean(options.parseRulePrelude) : true;
|
||||
parser.parseValue = 'parseValue' in options ? Boolean(options.parseValue) : true;
|
||||
parser.parseCustomProperty = 'parseCustomProperty' in options ? Boolean(options.parseCustomProperty) : false;
|
||||
|
||||
if (!parser.context.hasOwnProperty(context)) {
|
||||
throw new Error('Unknown context `' + context + '`');
|
||||
}
|
||||
|
||||
if (typeof onComment === 'function') {
|
||||
parser.scanner.forEachToken((type, start, end) => {
|
||||
if (type === COMMENT) {
|
||||
const loc = parser.getLocation(start, end);
|
||||
const value = cmpStr(source, end - 2, end, '*/')
|
||||
? source.slice(start + 2, end - 2)
|
||||
: source.slice(start + 2, end);
|
||||
|
||||
onComment(value, loc);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
ast = parser.context[context].call(parser, options);
|
||||
|
||||
if (!parser.scanner.eof) {
|
||||
parser.error();
|
||||
}
|
||||
|
||||
return ast;
|
||||
};
|
||||
};
|
||||
4
node_modules/css-tree/lib/parser/index.js
generated
vendored
Normal file
4
node_modules/css-tree/lib/parser/index.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,4 @@
|
|||
var createParser = require('./create');
|
||||
var config = require('../syntax/config/parser');
|
||||
|
||||
module.exports = createParser(config);
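The exported function is the configured parser produced by create.js above. A hedged usage sketch follows; requiring the lib path directly is just for illustration, and the logged value is the expected node type for the default parse context.
// Sketch only: parse a stylesheet with positions and a lenient error handler.
var parse = require('css-tree/lib/parser');

var ast = parse('.a { color: red }', {
    positions: true,                        // fill node.loc via OffsetToLocation
    onParseError: function(e, fallbackNode) {
        // called instead of throwing; the fallback (typically a Raw node) is passed in
        console.error(e.message);
    }
});

console.log(ast.type); // 'StyleSheet' for the default context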
|
||||
54
node_modules/css-tree/lib/parser/sequence.js
generated
vendored
Normal file
54
node_modules/css-tree/lib/parser/sequence.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,54 @@
|
|||
var TYPE = require('../tokenizer').TYPE;
|
||||
var WHITESPACE = TYPE.WhiteSpace;
|
||||
var COMMENT = TYPE.Comment;
|
||||
|
||||
module.exports = function readSequence(recognizer) {
|
||||
var children = this.createList();
|
||||
var child = null;
|
||||
var context = {
|
||||
recognizer: recognizer,
|
||||
space: null,
|
||||
ignoreWS: false,
|
||||
ignoreWSAfter: false
|
||||
};
|
||||
|
||||
this.scanner.skipSC();
|
||||
|
||||
while (!this.scanner.eof) {
|
||||
switch (this.scanner.tokenType) {
|
||||
case COMMENT:
|
||||
this.scanner.next();
|
||||
continue;
|
||||
|
||||
case WHITESPACE:
|
||||
if (context.ignoreWS) {
|
||||
this.scanner.next();
|
||||
} else {
|
||||
context.space = this.WhiteSpace();
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
child = recognizer.getNode.call(this, context);
|
||||
|
||||
if (child === undefined) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (context.space !== null) {
|
||||
children.push(context.space);
|
||||
context.space = null;
|
||||
}
|
||||
|
||||
children.push(child);
|
||||
|
||||
if (context.ignoreWSAfter) {
|
||||
context.ignoreWSAfter = false;
|
||||
context.ignoreWS = true;
|
||||
} else {
|
||||
context.ignoreWS = false;
|
||||
}
|
||||
}
|
||||
|
||||
return children;
|
||||
};
|
||||
8
node_modules/css-tree/lib/syntax/atrule/font-face.js
generated
vendored
Normal file
8
node_modules/css-tree/lib/syntax/atrule/font-face.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,8 @@
|
|||
module.exports = {
|
||||
parse: {
|
||||
prelude: null,
|
||||
block: function() {
|
||||
return this.Block(true);
|
||||
}
|
||||
}
|
||||
};
|
||||
40
node_modules/css-tree/lib/syntax/atrule/import.js
generated
vendored
Normal file
40
node_modules/css-tree/lib/syntax/atrule/import.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,40 @@
|
|||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var STRING = TYPE.String;
|
||||
var IDENT = TYPE.Ident;
|
||||
var URL = TYPE.Url;
|
||||
var FUNCTION = TYPE.Function;
|
||||
var LEFTPARENTHESIS = TYPE.LeftParenthesis;
|
||||
|
||||
module.exports = {
|
||||
parse: {
|
||||
prelude: function() {
|
||||
var children = this.createList();
|
||||
|
||||
this.scanner.skipSC();
|
||||
|
||||
switch (this.scanner.tokenType) {
|
||||
case STRING:
|
||||
children.push(this.String());
|
||||
break;
|
||||
|
||||
case URL:
|
||||
case FUNCTION:
|
||||
children.push(this.Url());
|
||||
break;
|
||||
|
||||
default:
|
||||
this.error('String or url() is expected');
|
||||
}
|
||||
|
||||
if (this.lookupNonWSType(0) === IDENT ||
|
||||
this.lookupNonWSType(0) === LEFTPARENTHESIS) {
|
||||
children.push(this.WhiteSpace());
|
||||
children.push(this.MediaQueryList());
|
||||
}
|
||||
|
||||
return children;
|
||||
},
|
||||
block: null
|
||||
}
|
||||
};
|
||||
7
node_modules/css-tree/lib/syntax/atrule/index.js
generated
vendored
Normal file
7
node_modules/css-tree/lib/syntax/atrule/index.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,7 @@
|
|||
module.exports = {
|
||||
'font-face': require('./font-face'),
|
||||
'import': require('./import'),
|
||||
'media': require('./media'),
|
||||
'page': require('./page'),
|
||||
'supports': require('./supports')
|
||||
};
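With these at-rule configs registered, the default parser builds structured preludes instead of raw text. A sketch using the @import config shown above; the output shapes are stated from the node types involved and should be read as expected values, not captured output.
// Sketch only: the @import prelude becomes Url + MediaQueryList nodes.
var parse = require('css-tree/lib/parser');

var ast = parse('@import url(base.css) screen;');
var atrule = ast.children.first();

console.log(atrule.type);         // 'Atrule'
console.log(atrule.name);         // 'import'
console.log(atrule.prelude.type); // 'AtrulePrelude'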
|
||||
12
node_modules/css-tree/lib/syntax/atrule/media.js
generated
vendored
Normal file
12
node_modules/css-tree/lib/syntax/atrule/media.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,12 @@
|
|||
module.exports = {
|
||||
parse: {
|
||||
prelude: function() {
|
||||
return this.createSingleNodeList(
|
||||
this.MediaQueryList()
|
||||
);
|
||||
},
|
||||
block: function() {
|
||||
return this.Block(false);
|
||||
}
|
||||
}
|
||||
};
|
||||
12
node_modules/css-tree/lib/syntax/atrule/page.js
generated
vendored
Normal file
12
node_modules/css-tree/lib/syntax/atrule/page.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,12 @@
|
|||
module.exports = {
|
||||
parse: {
|
||||
prelude: function() {
|
||||
return this.createSingleNodeList(
|
||||
this.SelectorList()
|
||||
);
|
||||
},
|
||||
block: function() {
|
||||
return this.Block(true);
|
||||
}
|
||||
}
|
||||
};
|
||||
89
node_modules/css-tree/lib/syntax/atrule/supports.js
generated
vendored
Normal file
89
node_modules/css-tree/lib/syntax/atrule/supports.js
generated
vendored
Normal file
|
|
@@ -0,0 +1,89 @@
|
|||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var WHITESPACE = TYPE.WhiteSpace;
|
||||
var COMMENT = TYPE.Comment;
|
||||
var IDENT = TYPE.Ident;
|
||||
var FUNCTION = TYPE.Function;
|
||||
var COLON = TYPE.Colon;
|
||||
var LEFTPARENTHESIS = TYPE.LeftParenthesis;
|
||||
|
||||
function consumeRaw() {
|
||||
return this.createSingleNodeList(
|
||||
this.Raw(this.scanner.tokenIndex, null, false)
|
||||
);
|
||||
}
|
||||
|
||||
function parentheses() {
|
||||
this.scanner.skipSC();
|
||||
|
||||
if (this.scanner.tokenType === IDENT &&
|
||||
this.lookupNonWSType(1) === COLON) {
|
||||
return this.createSingleNodeList(
|
||||
this.Declaration()
|
||||
);
|
||||
}
|
||||
|
||||
return readSequence.call(this);
|
||||
}
|
||||
|
||||
function readSequence() {
|
||||
var children = this.createList();
|
||||
var space = null;
|
||||
var child;
|
||||
|
||||
this.scanner.skipSC();
|
||||
|
||||
scan:
|
||||
while (!this.scanner.eof) {
|
||||
switch (this.scanner.tokenType) {
|
||||
case WHITESPACE:
|
||||
space = this.WhiteSpace();
|
||||
continue;
|
||||
|
||||
case COMMENT:
|
||||
this.scanner.next();
|
||||
continue;
|
||||
|
||||
case FUNCTION:
|
||||
child = this.Function(consumeRaw, this.scope.AtrulePrelude);
|
||||
break;
|
||||
|
||||
case IDENT:
|
||||
child = this.Identifier();
|
||||
break;
|
||||
|
||||
case LEFTPARENTHESIS:
|
||||
child = this.Parentheses(parentheses, this.scope.AtrulePrelude);
|
||||
break;
|
||||
|
||||
default:
|
||||
break scan;
|
||||
}
|
||||
|
||||
if (space !== null) {
|
||||
children.push(space);
|
||||
space = null;
|
||||
}
|
||||
|
||||
children.push(child);
|
||||
}
|
||||
|
||||
return children;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
parse: {
|
||||
prelude: function() {
|
||||
var children = readSequence.call(this);
|
||||
|
||||
if (this.getFirstListNode(children) === null) {
|
||||
this.error('Condition is expected');
|
||||
}
|
||||
|
||||
return children;
|
||||
},
|
||||
block: function() {
|
||||
return this.Block(false);
|
||||
}
|
||||
}
|
||||
};
|
||||
Some files were not shown because too many files have changed in this diff.