diff --git a/index.html b/index.html
index ed2d0ea..dfe6ddd 100644
--- a/index.html
+++ b/index.html
@@ -1,36 +1,29 @@
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+    Ouroboros
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/mybulma/css/mystyles.css b/mybulma/css/mystyles.css
new file mode 100644
index 0000000..cb11790
--- /dev/null
+++ b/mybulma/css/mystyles.css
@@ -0,0 +1,3415 @@
+@import url("https://fonts.googleapis.com/css?family=Nunito:400,700");
+/* Bulma Utilities */
+.button, .input, .textarea, .select select, .file-cta,
+.file-name {
+  -moz-appearance: none;
+  -webkit-appearance: none;
+  align-items: center;
+  border: 2px solid transparent;
+  border-radius: 4px;
+  box-shadow: none;
+  display: inline-flex;
+  font-size: 1rem;
+  height: 2.5em;
+  justify-content: flex-start;
+  line-height: 1.5;
+  padding-bottom: calc(0.5em - 2px);
+  padding-left: calc(0.75em - 2px);
+  padding-right: calc(0.75em - 2px);
+  padding-top: calc(0.5em - 2px);
+  position: relative;
+  vertical-align: top; }
+  .button:focus, .input:focus, .textarea:focus, .select select:focus, .file-cta:focus,
+  .file-name:focus, .is-focused.button, .is-focused.input, .is-focused.textarea, .select select.is-focused, .is-focused.file-cta,
+  .is-focused.file-name, .button:active, .input:active, .textarea:active, .select select:active, .file-cta:active,
+  .file-name:active, .is-active.button, .is-active.input, .is-active.textarea, .select select.is-active, .is-active.file-cta,
+  .is-active.file-name {
+    outline: none; }
+  .button[disabled], .input[disabled], .textarea[disabled], .select select[disabled], .file-cta[disabled],
+  .file-name[disabled],
+  fieldset[disabled] .button,
+  fieldset[disabled] .input,
+  fieldset[disabled] .textarea,
+  fieldset[disabled] .select select,
+  .select fieldset[disabled] select,
+  fieldset[disabled] .file-cta,
+  fieldset[disabled] .file-name {
+    cursor: not-allowed; }
+
+.button, .file {
+  -webkit-touch-callout: none;
+  -webkit-user-select: none;
+  -moz-user-select: none;
+  -ms-user-select: none;
+  user-select: none; }
+
+.select:not(.is-multiple):not(.is-loading)::after, .navbar-link:not(.is-arrowless)::after {
+  border: 3px solid transparent;
+  border-radius: 2px;
+  border-right: 0;
+  border-top: 0;
+  content: " ";
+  display: block;
+  height: 0.625em;
+  margin-top: -0.4375em;
+  pointer-events: none;
+  position: absolute;
+  top: 50%;
+  transform: rotate(-45deg);
+  transform-origin: center;
+  width: 0.625em; }
+
+.title:not(:last-child),
+.subtitle:not(:last-child) {
+  margin-bottom: 1.5rem; }
+
+.button.is-loading::after, .select.is-loading::after, .control.is-loading::after {
+  animation: spinAround 500ms infinite linear;
+  border: 2px solid #dbdbdb;
+  border-radius: 9999px;
+  border-right-color: transparent;
+  border-top-color: transparent;
+  content: "";
+  display: block;
+  height: 1em;
+  position: relative;
+  width: 1em; }
+
+.hero-video {
+  bottom: 0;
+  left: 0;
+  position: absolute;
+  right: 0;
+  top: 0; }
+
+.navbar-burger {
+  -moz-appearance: none;
+  -webkit-appearance: none;
+  appearance: none;
+  background: none;
+  border: none;
+  color: currentColor;
+  font-family: inherit;
+  font-size: 1em;
+  margin: 0;
+  padding: 0; }
+
+/* Bulma Base */
+/*!
minireset.css v0.0.6 | MIT License | github.com/jgthms/minireset.css */ +html, +body, +p, +ol, +ul, +li, +dl, +dt, +dd, +blockquote, +figure, +fieldset, +legend, +textarea, +pre, +iframe, +hr, +h1, +h2, +h3, +h4, +h5, +h6 { + margin: 0; + padding: 0; } + +h1, +h2, +h3, +h4, +h5, +h6 { + font-size: 100%; + font-weight: normal; } + +ul { + list-style: none; } + +button, +input, +select, +textarea { + margin: 0; } + +html { + box-sizing: border-box; } + +*, *::before, *::after { + box-sizing: inherit; } + +img, +video { + height: auto; + max-width: 100%; } + +iframe { + border: 0; } + +table { + border-collapse: collapse; + border-spacing: 0; } + +td, +th { + padding: 0; } + td:not([align]), + th:not([align]) { + text-align: inherit; } + +html { + background-color: #303446; + font-size: 16px; + -moz-osx-font-smoothing: grayscale; + -webkit-font-smoothing: antialiased; + min-width: 300px; + overflow-x: hidden; + overflow-y: scroll; + text-rendering: optimizeLegibility; + text-size-adjust: 100%; } + +article, +aside, +figure, +footer, +header, +hgroup, +section { + display: block; } + +body, +button, +input, +optgroup, +select, +textarea { + font-family: "Nunito", sans-serif; } + +code, +pre { + -moz-osx-font-smoothing: auto; + -webkit-font-smoothing: auto; + font-family: monospace; } + +body { + color: #c6d0f5; + font-size: 1em; + font-weight: 400; + line-height: 1.5; } + +a { + color: #81c8be; + cursor: pointer; + text-decoration: none; } + a strong { + color: currentColor; } + a:hover { + color: #363636; } + +code { + background-color: whitesmoke; + color: #da1039; + font-size: 0.875em; + font-weight: normal; + padding: 0.25em 0.5em 0.25em; } + +hr { + background-color: whitesmoke; + border: none; + display: block; + height: 2px; + margin: 1.5rem 0; } + +img { + height: auto; + max-width: 100%; } + +input[type="checkbox"], +input[type="radio"] { + vertical-align: baseline; } + +small { + font-size: 0.875em; } + +span { + font-style: inherit; + font-weight: inherit; } + +strong { + color: #b5bfe2; + font-weight: 700; } + +fieldset { + border: none; } + +pre { + -webkit-overflow-scrolling: touch; + background-color: whitesmoke; + color: #c6d0f5; + font-size: 0.875em; + overflow-x: auto; + padding: 1.25rem 1.5rem; + white-space: pre; + word-wrap: normal; } + pre code { + background-color: transparent; + color: currentColor; + font-size: 1em; + padding: 0; } + +table td, +table th { + vertical-align: top; } + table td:not([align]), + table th:not([align]) { + text-align: inherit; } + +table th { + color: #b5bfe2; } + +@keyframes spinAround { + from { + transform: rotate(0deg); } + to { + transform: rotate(359deg); } } + +.button { + background-color: white; + border-color: #dbdbdb; + border-width: 2px; + color: #b5bfe2; + cursor: pointer; + justify-content: center; + padding-bottom: calc(0.5em - 2px); + padding-left: 1em; + padding-right: 1em; + padding-top: calc(0.5em - 2px); + text-align: center; + white-space: nowrap; } + .button strong { + color: inherit; } + .button .icon, .button .icon.is-small, .button .icon.is-medium, .button .icon.is-large { + height: 1.5em; + width: 1.5em; } + .button .icon:first-child:not(:last-child) { + margin-left: calc(-0.5em - 2px); + margin-right: 0.25em; } + .button .icon:last-child:not(:first-child) { + margin-left: 0.25em; + margin-right: calc(-0.5em - 2px); } + .button .icon:first-child:last-child { + margin-left: calc(-0.5em - 2px); + margin-right: calc(-0.5em - 2px); } + .button:hover, .button.is-hovered { + border-color: #51576d; + color: #363636; } + 
.button:focus, .button.is-focused { + border-color: #485fc7; + color: #363636; } + .button:focus:not(:active), .button.is-focused:not(:active) { + box-shadow: 0 0 0 0.125em rgba(129, 200, 190, 0.25); } + .button:active, .button.is-active { + border-color: #414559; + color: #363636; } + .button.is-text { + background-color: transparent; + border-color: transparent; + color: #c6d0f5; + text-decoration: underline; } + .button.is-text:hover, .button.is-text.is-hovered, .button.is-text:focus, .button.is-text.is-focused { + background-color: whitesmoke; + color: #b5bfe2; } + .button.is-text:active, .button.is-text.is-active { + background-color: #e8e8e8; + color: #b5bfe2; } + .button.is-text[disabled], + fieldset[disabled] .button.is-text { + background-color: transparent; + border-color: transparent; + box-shadow: none; } + .button.is-ghost { + background: none; + border-color: transparent; + color: #81c8be; + text-decoration: none; } + .button.is-ghost:hover, .button.is-ghost.is-hovered { + color: #81c8be; + text-decoration: underline; } + .button.is-white { + background-color: white; + border-color: transparent; + color: #0a0a0a; } + .button.is-white:hover, .button.is-white.is-hovered { + background-color: #f9f9f9; + border-color: transparent; + color: #0a0a0a; } + .button.is-white:focus, .button.is-white.is-focused { + border-color: transparent; + color: #0a0a0a; } + .button.is-white:focus:not(:active), .button.is-white.is-focused:not(:active) { + box-shadow: 0 0 0 0.125em rgba(255, 255, 255, 0.25); } + .button.is-white:active, .button.is-white.is-active { + background-color: #f2f2f2; + border-color: transparent; + color: #0a0a0a; } + .button.is-white[disabled], + fieldset[disabled] .button.is-white { + background-color: white; + border-color: white; + box-shadow: none; } + .button.is-white.is-inverted { + background-color: #0a0a0a; + color: white; } + .button.is-white.is-inverted:hover, .button.is-white.is-inverted.is-hovered { + background-color: black; } + .button.is-white.is-inverted[disabled], + fieldset[disabled] .button.is-white.is-inverted { + background-color: #0a0a0a; + border-color: transparent; + box-shadow: none; + color: white; } + .button.is-white.is-loading::after { + border-color: transparent transparent #0a0a0a #0a0a0a !important; } + .button.is-white.is-outlined { + background-color: transparent; + border-color: white; + color: white; } + .button.is-white.is-outlined:hover, .button.is-white.is-outlined.is-hovered, .button.is-white.is-outlined:focus, .button.is-white.is-outlined.is-focused { + background-color: white; + border-color: white; + color: #0a0a0a; } + .button.is-white.is-outlined.is-loading::after { + border-color: transparent transparent white white !important; } + .button.is-white.is-outlined.is-loading:hover::after, .button.is-white.is-outlined.is-loading.is-hovered::after, .button.is-white.is-outlined.is-loading:focus::after, .button.is-white.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent #0a0a0a #0a0a0a !important; } + .button.is-white.is-outlined[disabled], + fieldset[disabled] .button.is-white.is-outlined { + background-color: transparent; + border-color: white; + box-shadow: none; + color: white; } + .button.is-white.is-inverted.is-outlined { + background-color: transparent; + border-color: #0a0a0a; + color: #0a0a0a; } + .button.is-white.is-inverted.is-outlined:hover, .button.is-white.is-inverted.is-outlined.is-hovered, .button.is-white.is-inverted.is-outlined:focus, .button.is-white.is-inverted.is-outlined.is-focused { 
+ background-color: #0a0a0a; + color: white; } + .button.is-white.is-inverted.is-outlined.is-loading:hover::after, .button.is-white.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-white.is-inverted.is-outlined.is-loading:focus::after, .button.is-white.is-inverted.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent white white !important; } + .button.is-white.is-inverted.is-outlined[disabled], + fieldset[disabled] .button.is-white.is-inverted.is-outlined { + background-color: transparent; + border-color: #0a0a0a; + box-shadow: none; + color: #0a0a0a; } + .button.is-black { + background-color: #0a0a0a; + border-color: transparent; + color: white; } + .button.is-black:hover, .button.is-black.is-hovered { + background-color: #040404; + border-color: transparent; + color: white; } + .button.is-black:focus, .button.is-black.is-focused { + border-color: transparent; + color: white; } + .button.is-black:focus:not(:active), .button.is-black.is-focused:not(:active) { + box-shadow: 0 0 0 0.125em rgba(10, 10, 10, 0.25); } + .button.is-black:active, .button.is-black.is-active { + background-color: black; + border-color: transparent; + color: white; } + .button.is-black[disabled], + fieldset[disabled] .button.is-black { + background-color: #0a0a0a; + border-color: #0a0a0a; + box-shadow: none; } + .button.is-black.is-inverted { + background-color: white; + color: #0a0a0a; } + .button.is-black.is-inverted:hover, .button.is-black.is-inverted.is-hovered { + background-color: #f2f2f2; } + .button.is-black.is-inverted[disabled], + fieldset[disabled] .button.is-black.is-inverted { + background-color: white; + border-color: transparent; + box-shadow: none; + color: #0a0a0a; } + .button.is-black.is-loading::after { + border-color: transparent transparent white white !important; } + .button.is-black.is-outlined { + background-color: transparent; + border-color: #0a0a0a; + color: #0a0a0a; } + .button.is-black.is-outlined:hover, .button.is-black.is-outlined.is-hovered, .button.is-black.is-outlined:focus, .button.is-black.is-outlined.is-focused { + background-color: #0a0a0a; + border-color: #0a0a0a; + color: white; } + .button.is-black.is-outlined.is-loading::after { + border-color: transparent transparent #0a0a0a #0a0a0a !important; } + .button.is-black.is-outlined.is-loading:hover::after, .button.is-black.is-outlined.is-loading.is-hovered::after, .button.is-black.is-outlined.is-loading:focus::after, .button.is-black.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent white white !important; } + .button.is-black.is-outlined[disabled], + fieldset[disabled] .button.is-black.is-outlined { + background-color: transparent; + border-color: #0a0a0a; + box-shadow: none; + color: #0a0a0a; } + .button.is-black.is-inverted.is-outlined { + background-color: transparent; + border-color: white; + color: white; } + .button.is-black.is-inverted.is-outlined:hover, .button.is-black.is-inverted.is-outlined.is-hovered, .button.is-black.is-inverted.is-outlined:focus, .button.is-black.is-inverted.is-outlined.is-focused { + background-color: white; + color: #0a0a0a; } + .button.is-black.is-inverted.is-outlined.is-loading:hover::after, .button.is-black.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-black.is-inverted.is-outlined.is-loading:focus::after, .button.is-black.is-inverted.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent #0a0a0a #0a0a0a !important; } + 
.button.is-black.is-inverted.is-outlined[disabled], + fieldset[disabled] .button.is-black.is-inverted.is-outlined { + background-color: transparent; + border-color: white; + box-shadow: none; + color: white; } + .button.is-light { + background-color: whitesmoke; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-light:hover, .button.is-light.is-hovered { + background-color: #eeeeee; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-light:focus, .button.is-light.is-focused { + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-light:focus:not(:active), .button.is-light.is-focused:not(:active) { + box-shadow: 0 0 0 0.125em rgba(245, 245, 245, 0.25); } + .button.is-light:active, .button.is-light.is-active { + background-color: #e8e8e8; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-light[disabled], + fieldset[disabled] .button.is-light { + background-color: whitesmoke; + border-color: whitesmoke; + box-shadow: none; } + .button.is-light.is-inverted { + background-color: rgba(0, 0, 0, 0.7); + color: whitesmoke; } + .button.is-light.is-inverted:hover, .button.is-light.is-inverted.is-hovered { + background-color: rgba(0, 0, 0, 0.7); } + .button.is-light.is-inverted[disabled], + fieldset[disabled] .button.is-light.is-inverted { + background-color: rgba(0, 0, 0, 0.7); + border-color: transparent; + box-shadow: none; + color: whitesmoke; } + .button.is-light.is-loading::after { + border-color: transparent transparent rgba(0, 0, 0, 0.7) rgba(0, 0, 0, 0.7) !important; } + .button.is-light.is-outlined { + background-color: transparent; + border-color: whitesmoke; + color: whitesmoke; } + .button.is-light.is-outlined:hover, .button.is-light.is-outlined.is-hovered, .button.is-light.is-outlined:focus, .button.is-light.is-outlined.is-focused { + background-color: whitesmoke; + border-color: whitesmoke; + color: rgba(0, 0, 0, 0.7); } + .button.is-light.is-outlined.is-loading::after { + border-color: transparent transparent whitesmoke whitesmoke !important; } + .button.is-light.is-outlined.is-loading:hover::after, .button.is-light.is-outlined.is-loading.is-hovered::after, .button.is-light.is-outlined.is-loading:focus::after, .button.is-light.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent rgba(0, 0, 0, 0.7) rgba(0, 0, 0, 0.7) !important; } + .button.is-light.is-outlined[disabled], + fieldset[disabled] .button.is-light.is-outlined { + background-color: transparent; + border-color: whitesmoke; + box-shadow: none; + color: whitesmoke; } + .button.is-light.is-inverted.is-outlined { + background-color: transparent; + border-color: rgba(0, 0, 0, 0.7); + color: rgba(0, 0, 0, 0.7); } + .button.is-light.is-inverted.is-outlined:hover, .button.is-light.is-inverted.is-outlined.is-hovered, .button.is-light.is-inverted.is-outlined:focus, .button.is-light.is-inverted.is-outlined.is-focused { + background-color: rgba(0, 0, 0, 0.7); + color: whitesmoke; } + .button.is-light.is-inverted.is-outlined.is-loading:hover::after, .button.is-light.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-light.is-inverted.is-outlined.is-loading:focus::after, .button.is-light.is-inverted.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent whitesmoke whitesmoke !important; } + .button.is-light.is-inverted.is-outlined[disabled], + fieldset[disabled] .button.is-light.is-inverted.is-outlined { + background-color: transparent; + border-color: rgba(0, 0, 0, 0.7); + box-shadow: 
none; + color: rgba(0, 0, 0, 0.7); } + .button.is-dark { + background-color: #363636; + border-color: transparent; + color: #fff; } + .button.is-dark:hover, .button.is-dark.is-hovered { + background-color: #2f2f2f; + border-color: transparent; + color: #fff; } + .button.is-dark:focus, .button.is-dark.is-focused { + border-color: transparent; + color: #fff; } + .button.is-dark:focus:not(:active), .button.is-dark.is-focused:not(:active) { + box-shadow: 0 0 0 0.125em rgba(54, 54, 54, 0.25); } + .button.is-dark:active, .button.is-dark.is-active { + background-color: #292929; + border-color: transparent; + color: #fff; } + .button.is-dark[disabled], + fieldset[disabled] .button.is-dark { + background-color: #363636; + border-color: #363636; + box-shadow: none; } + .button.is-dark.is-inverted { + background-color: #fff; + color: #363636; } + .button.is-dark.is-inverted:hover, .button.is-dark.is-inverted.is-hovered { + background-color: #f2f2f2; } + .button.is-dark.is-inverted[disabled], + fieldset[disabled] .button.is-dark.is-inverted { + background-color: #fff; + border-color: transparent; + box-shadow: none; + color: #363636; } + .button.is-dark.is-loading::after { + border-color: transparent transparent #fff #fff !important; } + .button.is-dark.is-outlined { + background-color: transparent; + border-color: #363636; + color: #363636; } + .button.is-dark.is-outlined:hover, .button.is-dark.is-outlined.is-hovered, .button.is-dark.is-outlined:focus, .button.is-dark.is-outlined.is-focused { + background-color: #363636; + border-color: #363636; + color: #fff; } + .button.is-dark.is-outlined.is-loading::after { + border-color: transparent transparent #363636 #363636 !important; } + .button.is-dark.is-outlined.is-loading:hover::after, .button.is-dark.is-outlined.is-loading.is-hovered::after, .button.is-dark.is-outlined.is-loading:focus::after, .button.is-dark.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent #fff #fff !important; } + .button.is-dark.is-outlined[disabled], + fieldset[disabled] .button.is-dark.is-outlined { + background-color: transparent; + border-color: #363636; + box-shadow: none; + color: #363636; } + .button.is-dark.is-inverted.is-outlined { + background-color: transparent; + border-color: #fff; + color: #fff; } + .button.is-dark.is-inverted.is-outlined:hover, .button.is-dark.is-inverted.is-outlined.is-hovered, .button.is-dark.is-inverted.is-outlined:focus, .button.is-dark.is-inverted.is-outlined.is-focused { + background-color: #fff; + color: #363636; } + .button.is-dark.is-inverted.is-outlined.is-loading:hover::after, .button.is-dark.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-dark.is-inverted.is-outlined.is-loading:focus::after, .button.is-dark.is-inverted.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent #363636 #363636 !important; } + .button.is-dark.is-inverted.is-outlined[disabled], + fieldset[disabled] .button.is-dark.is-inverted.is-outlined { + background-color: transparent; + border-color: #fff; + box-shadow: none; + color: #fff; } + .button.is-primary { + background-color: #c6d0f5; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-primary:hover, .button.is-primary.is-hovered { + background-color: #bbc7f3; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-primary:focus, .button.is-primary.is-focused { + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-primary:focus:not(:active), 
.button.is-primary.is-focused:not(:active) { + box-shadow: 0 0 0 0.125em rgba(198, 208, 245, 0.25); } + .button.is-primary:active, .button.is-primary.is-active { + background-color: #b0bef1; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-primary[disabled], + fieldset[disabled] .button.is-primary { + background-color: #c6d0f5; + border-color: #c6d0f5; + box-shadow: none; } + .button.is-primary.is-inverted { + background-color: rgba(0, 0, 0, 0.7); + color: #c6d0f5; } + .button.is-primary.is-inverted:hover, .button.is-primary.is-inverted.is-hovered { + background-color: rgba(0, 0, 0, 0.7); } + .button.is-primary.is-inverted[disabled], + fieldset[disabled] .button.is-primary.is-inverted { + background-color: rgba(0, 0, 0, 0.7); + border-color: transparent; + box-shadow: none; + color: #c6d0f5; } + .button.is-primary.is-loading::after { + border-color: transparent transparent rgba(0, 0, 0, 0.7) rgba(0, 0, 0, 0.7) !important; } + .button.is-primary.is-outlined { + background-color: transparent; + border-color: #c6d0f5; + color: #c6d0f5; } + .button.is-primary.is-outlined:hover, .button.is-primary.is-outlined.is-hovered, .button.is-primary.is-outlined:focus, .button.is-primary.is-outlined.is-focused { + background-color: #c6d0f5; + border-color: #c6d0f5; + color: rgba(0, 0, 0, 0.7); } + .button.is-primary.is-outlined.is-loading::after { + border-color: transparent transparent #c6d0f5 #c6d0f5 !important; } + .button.is-primary.is-outlined.is-loading:hover::after, .button.is-primary.is-outlined.is-loading.is-hovered::after, .button.is-primary.is-outlined.is-loading:focus::after, .button.is-primary.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent rgba(0, 0, 0, 0.7) rgba(0, 0, 0, 0.7) !important; } + .button.is-primary.is-outlined[disabled], + fieldset[disabled] .button.is-primary.is-outlined { + background-color: transparent; + border-color: #c6d0f5; + box-shadow: none; + color: #c6d0f5; } + .button.is-primary.is-inverted.is-outlined { + background-color: transparent; + border-color: rgba(0, 0, 0, 0.7); + color: rgba(0, 0, 0, 0.7); } + .button.is-primary.is-inverted.is-outlined:hover, .button.is-primary.is-inverted.is-outlined.is-hovered, .button.is-primary.is-inverted.is-outlined:focus, .button.is-primary.is-inverted.is-outlined.is-focused { + background-color: rgba(0, 0, 0, 0.7); + color: #c6d0f5; } + .button.is-primary.is-inverted.is-outlined.is-loading:hover::after, .button.is-primary.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-primary.is-inverted.is-outlined.is-loading:focus::after, .button.is-primary.is-inverted.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent #c6d0f5 #c6d0f5 !important; } + .button.is-primary.is-inverted.is-outlined[disabled], + fieldset[disabled] .button.is-primary.is-inverted.is-outlined { + background-color: transparent; + border-color: rgba(0, 0, 0, 0.7); + box-shadow: none; + color: rgba(0, 0, 0, 0.7); } + .button.is-primary.is-light { + background-color: #eef1fc; + color: #162c7e; } + .button.is-primary.is-light:hover, .button.is-primary.is-light.is-hovered { + background-color: #e3e8fa; + border-color: transparent; + color: #162c7e; } + .button.is-primary.is-light:active, .button.is-primary.is-light.is-active { + background-color: #d8dff8; + border-color: transparent; + color: #162c7e; } + .button.is-link { + background-color: #81c8be; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-link:hover, .button.is-link.is-hovered { + 
background-color: #78c4b9; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-link:focus, .button.is-link.is-focused { + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-link:focus:not(:active), .button.is-link.is-focused:not(:active) { + box-shadow: 0 0 0 0.125em rgba(129, 200, 190, 0.25); } + .button.is-link:active, .button.is-link.is-active { + background-color: #6fc0b5; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-link[disabled], + fieldset[disabled] .button.is-link { + background-color: #81c8be; + border-color: #81c8be; + box-shadow: none; } + .button.is-link.is-inverted { + background-color: rgba(0, 0, 0, 0.7); + color: #81c8be; } + .button.is-link.is-inverted:hover, .button.is-link.is-inverted.is-hovered { + background-color: rgba(0, 0, 0, 0.7); } + .button.is-link.is-inverted[disabled], + fieldset[disabled] .button.is-link.is-inverted { + background-color: rgba(0, 0, 0, 0.7); + border-color: transparent; + box-shadow: none; + color: #81c8be; } + .button.is-link.is-loading::after { + border-color: transparent transparent rgba(0, 0, 0, 0.7) rgba(0, 0, 0, 0.7) !important; } + .button.is-link.is-outlined { + background-color: transparent; + border-color: #81c8be; + color: #81c8be; } + .button.is-link.is-outlined:hover, .button.is-link.is-outlined.is-hovered, .button.is-link.is-outlined:focus, .button.is-link.is-outlined.is-focused { + background-color: #81c8be; + border-color: #81c8be; + color: rgba(0, 0, 0, 0.7); } + .button.is-link.is-outlined.is-loading::after { + border-color: transparent transparent #81c8be #81c8be !important; } + .button.is-link.is-outlined.is-loading:hover::after, .button.is-link.is-outlined.is-loading.is-hovered::after, .button.is-link.is-outlined.is-loading:focus::after, .button.is-link.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent rgba(0, 0, 0, 0.7) rgba(0, 0, 0, 0.7) !important; } + .button.is-link.is-outlined[disabled], + fieldset[disabled] .button.is-link.is-outlined { + background-color: transparent; + border-color: #81c8be; + box-shadow: none; + color: #81c8be; } + .button.is-link.is-inverted.is-outlined { + background-color: transparent; + border-color: rgba(0, 0, 0, 0.7); + color: rgba(0, 0, 0, 0.7); } + .button.is-link.is-inverted.is-outlined:hover, .button.is-link.is-inverted.is-outlined.is-hovered, .button.is-link.is-inverted.is-outlined:focus, .button.is-link.is-inverted.is-outlined.is-focused { + background-color: rgba(0, 0, 0, 0.7); + color: #81c8be; } + .button.is-link.is-inverted.is-outlined.is-loading:hover::after, .button.is-link.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-link.is-inverted.is-outlined.is-loading:focus::after, .button.is-link.is-inverted.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent #81c8be #81c8be !important; } + .button.is-link.is-inverted.is-outlined[disabled], + fieldset[disabled] .button.is-link.is-inverted.is-outlined { + background-color: transparent; + border-color: rgba(0, 0, 0, 0.7); + box-shadow: none; + color: rgba(0, 0, 0, 0.7); } + .button.is-link.is-light { + background-color: #f1f9f8; + color: #2d675f; } + .button.is-link.is-light:hover, .button.is-link.is-light.is-hovered { + background-color: #e8f5f3; + border-color: transparent; + color: #2d675f; } + .button.is-link.is-light:active, .button.is-link.is-light.is-active { + background-color: #dff1ef; + border-color: transparent; + color: #2d675f; } + .button.is-info { + background-color: #3e8ed0; 
+ border-color: transparent; + color: #fff; } + .button.is-info:hover, .button.is-info.is-hovered { + background-color: #3488ce; + border-color: transparent; + color: #fff; } + .button.is-info:focus, .button.is-info.is-focused { + border-color: transparent; + color: #fff; } + .button.is-info:focus:not(:active), .button.is-info.is-focused:not(:active) { + box-shadow: 0 0 0 0.125em rgba(62, 142, 208, 0.25); } + .button.is-info:active, .button.is-info.is-active { + background-color: #3082c5; + border-color: transparent; + color: #fff; } + .button.is-info[disabled], + fieldset[disabled] .button.is-info { + background-color: #3e8ed0; + border-color: #3e8ed0; + box-shadow: none; } + .button.is-info.is-inverted { + background-color: #fff; + color: #3e8ed0; } + .button.is-info.is-inverted:hover, .button.is-info.is-inverted.is-hovered { + background-color: #f2f2f2; } + .button.is-info.is-inverted[disabled], + fieldset[disabled] .button.is-info.is-inverted { + background-color: #fff; + border-color: transparent; + box-shadow: none; + color: #3e8ed0; } + .button.is-info.is-loading::after { + border-color: transparent transparent #fff #fff !important; } + .button.is-info.is-outlined { + background-color: transparent; + border-color: #3e8ed0; + color: #3e8ed0; } + .button.is-info.is-outlined:hover, .button.is-info.is-outlined.is-hovered, .button.is-info.is-outlined:focus, .button.is-info.is-outlined.is-focused { + background-color: #3e8ed0; + border-color: #3e8ed0; + color: #fff; } + .button.is-info.is-outlined.is-loading::after { + border-color: transparent transparent #3e8ed0 #3e8ed0 !important; } + .button.is-info.is-outlined.is-loading:hover::after, .button.is-info.is-outlined.is-loading.is-hovered::after, .button.is-info.is-outlined.is-loading:focus::after, .button.is-info.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent #fff #fff !important; } + .button.is-info.is-outlined[disabled], + fieldset[disabled] .button.is-info.is-outlined { + background-color: transparent; + border-color: #3e8ed0; + box-shadow: none; + color: #3e8ed0; } + .button.is-info.is-inverted.is-outlined { + background-color: transparent; + border-color: #fff; + color: #fff; } + .button.is-info.is-inverted.is-outlined:hover, .button.is-info.is-inverted.is-outlined.is-hovered, .button.is-info.is-inverted.is-outlined:focus, .button.is-info.is-inverted.is-outlined.is-focused { + background-color: #fff; + color: #3e8ed0; } + .button.is-info.is-inverted.is-outlined.is-loading:hover::after, .button.is-info.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-info.is-inverted.is-outlined.is-loading:focus::after, .button.is-info.is-inverted.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent #3e8ed0 #3e8ed0 !important; } + .button.is-info.is-inverted.is-outlined[disabled], + fieldset[disabled] .button.is-info.is-inverted.is-outlined { + background-color: transparent; + border-color: #fff; + box-shadow: none; + color: #fff; } + .button.is-info.is-light { + background-color: #eff5fb; + color: #296fa8; } + .button.is-info.is-light:hover, .button.is-info.is-light.is-hovered { + background-color: #e4eff9; + border-color: transparent; + color: #296fa8; } + .button.is-info.is-light:active, .button.is-info.is-light.is-active { + background-color: #dae9f6; + border-color: transparent; + color: #296fa8; } + .button.is-success { + background-color: #48c78e; + border-color: transparent; + color: #fff; } + .button.is-success:hover, .button.is-success.is-hovered { + 
background-color: #3ec487; + border-color: transparent; + color: #fff; } + .button.is-success:focus, .button.is-success.is-focused { + border-color: transparent; + color: #fff; } + .button.is-success:focus:not(:active), .button.is-success.is-focused:not(:active) { + box-shadow: 0 0 0 0.125em rgba(72, 199, 142, 0.25); } + .button.is-success:active, .button.is-success.is-active { + background-color: #3abb81; + border-color: transparent; + color: #fff; } + .button.is-success[disabled], + fieldset[disabled] .button.is-success { + background-color: #48c78e; + border-color: #48c78e; + box-shadow: none; } + .button.is-success.is-inverted { + background-color: #fff; + color: #48c78e; } + .button.is-success.is-inverted:hover, .button.is-success.is-inverted.is-hovered { + background-color: #f2f2f2; } + .button.is-success.is-inverted[disabled], + fieldset[disabled] .button.is-success.is-inverted { + background-color: #fff; + border-color: transparent; + box-shadow: none; + color: #48c78e; } + .button.is-success.is-loading::after { + border-color: transparent transparent #fff #fff !important; } + .button.is-success.is-outlined { + background-color: transparent; + border-color: #48c78e; + color: #48c78e; } + .button.is-success.is-outlined:hover, .button.is-success.is-outlined.is-hovered, .button.is-success.is-outlined:focus, .button.is-success.is-outlined.is-focused { + background-color: #48c78e; + border-color: #48c78e; + color: #fff; } + .button.is-success.is-outlined.is-loading::after { + border-color: transparent transparent #48c78e #48c78e !important; } + .button.is-success.is-outlined.is-loading:hover::after, .button.is-success.is-outlined.is-loading.is-hovered::after, .button.is-success.is-outlined.is-loading:focus::after, .button.is-success.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent #fff #fff !important; } + .button.is-success.is-outlined[disabled], + fieldset[disabled] .button.is-success.is-outlined { + background-color: transparent; + border-color: #48c78e; + box-shadow: none; + color: #48c78e; } + .button.is-success.is-inverted.is-outlined { + background-color: transparent; + border-color: #fff; + color: #fff; } + .button.is-success.is-inverted.is-outlined:hover, .button.is-success.is-inverted.is-outlined.is-hovered, .button.is-success.is-inverted.is-outlined:focus, .button.is-success.is-inverted.is-outlined.is-focused { + background-color: #fff; + color: #48c78e; } + .button.is-success.is-inverted.is-outlined.is-loading:hover::after, .button.is-success.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-success.is-inverted.is-outlined.is-loading:focus::after, .button.is-success.is-inverted.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent #48c78e #48c78e !important; } + .button.is-success.is-inverted.is-outlined[disabled], + fieldset[disabled] .button.is-success.is-inverted.is-outlined { + background-color: transparent; + border-color: #fff; + box-shadow: none; + color: #fff; } + .button.is-success.is-light { + background-color: #effaf5; + color: #257953; } + .button.is-success.is-light:hover, .button.is-success.is-light.is-hovered { + background-color: #e6f7ef; + border-color: transparent; + color: #257953; } + .button.is-success.is-light:active, .button.is-success.is-light.is-active { + background-color: #dcf4e9; + border-color: transparent; + color: #257953; } + .button.is-warning { + background-color: #ffe08a; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-warning:hover, 
.button.is-warning.is-hovered { + background-color: #ffdc7d; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-warning:focus, .button.is-warning.is-focused { + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-warning:focus:not(:active), .button.is-warning.is-focused:not(:active) { + box-shadow: 0 0 0 0.125em rgba(255, 224, 138, 0.25); } + .button.is-warning:active, .button.is-warning.is-active { + background-color: #ffd970; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .button.is-warning[disabled], + fieldset[disabled] .button.is-warning { + background-color: #ffe08a; + border-color: #ffe08a; + box-shadow: none; } + .button.is-warning.is-inverted { + background-color: rgba(0, 0, 0, 0.7); + color: #ffe08a; } + .button.is-warning.is-inverted:hover, .button.is-warning.is-inverted.is-hovered { + background-color: rgba(0, 0, 0, 0.7); } + .button.is-warning.is-inverted[disabled], + fieldset[disabled] .button.is-warning.is-inverted { + background-color: rgba(0, 0, 0, 0.7); + border-color: transparent; + box-shadow: none; + color: #ffe08a; } + .button.is-warning.is-loading::after { + border-color: transparent transparent rgba(0, 0, 0, 0.7) rgba(0, 0, 0, 0.7) !important; } + .button.is-warning.is-outlined { + background-color: transparent; + border-color: #ffe08a; + color: #ffe08a; } + .button.is-warning.is-outlined:hover, .button.is-warning.is-outlined.is-hovered, .button.is-warning.is-outlined:focus, .button.is-warning.is-outlined.is-focused { + background-color: #ffe08a; + border-color: #ffe08a; + color: rgba(0, 0, 0, 0.7); } + .button.is-warning.is-outlined.is-loading::after { + border-color: transparent transparent #ffe08a #ffe08a !important; } + .button.is-warning.is-outlined.is-loading:hover::after, .button.is-warning.is-outlined.is-loading.is-hovered::after, .button.is-warning.is-outlined.is-loading:focus::after, .button.is-warning.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent rgba(0, 0, 0, 0.7) rgba(0, 0, 0, 0.7) !important; } + .button.is-warning.is-outlined[disabled], + fieldset[disabled] .button.is-warning.is-outlined { + background-color: transparent; + border-color: #ffe08a; + box-shadow: none; + color: #ffe08a; } + .button.is-warning.is-inverted.is-outlined { + background-color: transparent; + border-color: rgba(0, 0, 0, 0.7); + color: rgba(0, 0, 0, 0.7); } + .button.is-warning.is-inverted.is-outlined:hover, .button.is-warning.is-inverted.is-outlined.is-hovered, .button.is-warning.is-inverted.is-outlined:focus, .button.is-warning.is-inverted.is-outlined.is-focused { + background-color: rgba(0, 0, 0, 0.7); + color: #ffe08a; } + .button.is-warning.is-inverted.is-outlined.is-loading:hover::after, .button.is-warning.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-warning.is-inverted.is-outlined.is-loading:focus::after, .button.is-warning.is-inverted.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent #ffe08a #ffe08a !important; } + .button.is-warning.is-inverted.is-outlined[disabled], + fieldset[disabled] .button.is-warning.is-inverted.is-outlined { + background-color: transparent; + border-color: rgba(0, 0, 0, 0.7); + box-shadow: none; + color: rgba(0, 0, 0, 0.7); } + .button.is-warning.is-light { + background-color: #fffaeb; + color: #946c00; } + .button.is-warning.is-light:hover, .button.is-warning.is-light.is-hovered { + background-color: #fff6de; + border-color: transparent; + color: #946c00; } + .button.is-warning.is-light:active, 
.button.is-warning.is-light.is-active { + background-color: #fff3d1; + border-color: transparent; + color: #946c00; } + .button.is-danger { + background-color: #f14668; + border-color: transparent; + color: #fff; } + .button.is-danger:hover, .button.is-danger.is-hovered { + background-color: #f03a5f; + border-color: transparent; + color: #fff; } + .button.is-danger:focus, .button.is-danger.is-focused { + border-color: transparent; + color: #fff; } + .button.is-danger:focus:not(:active), .button.is-danger.is-focused:not(:active) { + box-shadow: 0 0 0 0.125em rgba(241, 70, 104, 0.25); } + .button.is-danger:active, .button.is-danger.is-active { + background-color: #ef2e55; + border-color: transparent; + color: #fff; } + .button.is-danger[disabled], + fieldset[disabled] .button.is-danger { + background-color: #f14668; + border-color: #f14668; + box-shadow: none; } + .button.is-danger.is-inverted { + background-color: #fff; + color: #f14668; } + .button.is-danger.is-inverted:hover, .button.is-danger.is-inverted.is-hovered { + background-color: #f2f2f2; } + .button.is-danger.is-inverted[disabled], + fieldset[disabled] .button.is-danger.is-inverted { + background-color: #fff; + border-color: transparent; + box-shadow: none; + color: #f14668; } + .button.is-danger.is-loading::after { + border-color: transparent transparent #fff #fff !important; } + .button.is-danger.is-outlined { + background-color: transparent; + border-color: #f14668; + color: #f14668; } + .button.is-danger.is-outlined:hover, .button.is-danger.is-outlined.is-hovered, .button.is-danger.is-outlined:focus, .button.is-danger.is-outlined.is-focused { + background-color: #f14668; + border-color: #f14668; + color: #fff; } + .button.is-danger.is-outlined.is-loading::after { + border-color: transparent transparent #f14668 #f14668 !important; } + .button.is-danger.is-outlined.is-loading:hover::after, .button.is-danger.is-outlined.is-loading.is-hovered::after, .button.is-danger.is-outlined.is-loading:focus::after, .button.is-danger.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent #fff #fff !important; } + .button.is-danger.is-outlined[disabled], + fieldset[disabled] .button.is-danger.is-outlined { + background-color: transparent; + border-color: #f14668; + box-shadow: none; + color: #f14668; } + .button.is-danger.is-inverted.is-outlined { + background-color: transparent; + border-color: #fff; + color: #fff; } + .button.is-danger.is-inverted.is-outlined:hover, .button.is-danger.is-inverted.is-outlined.is-hovered, .button.is-danger.is-inverted.is-outlined:focus, .button.is-danger.is-inverted.is-outlined.is-focused { + background-color: #fff; + color: #f14668; } + .button.is-danger.is-inverted.is-outlined.is-loading:hover::after, .button.is-danger.is-inverted.is-outlined.is-loading.is-hovered::after, .button.is-danger.is-inverted.is-outlined.is-loading:focus::after, .button.is-danger.is-inverted.is-outlined.is-loading.is-focused::after { + border-color: transparent transparent #f14668 #f14668 !important; } + .button.is-danger.is-inverted.is-outlined[disabled], + fieldset[disabled] .button.is-danger.is-inverted.is-outlined { + background-color: transparent; + border-color: #fff; + box-shadow: none; + color: #fff; } + .button.is-danger.is-light { + background-color: #feecf0; + color: #cc0f35; } + .button.is-danger.is-light:hover, .button.is-danger.is-light.is-hovered { + background-color: #fde0e6; + border-color: transparent; + color: #cc0f35; } + .button.is-danger.is-light:active, 
.button.is-danger.is-light.is-active { + background-color: #fcd4dc; + border-color: transparent; + color: #cc0f35; } + .button.is-small { + font-size: 0.75rem; } + .button.is-small:not(.is-rounded) { + border-radius: 2px; } + .button.is-normal { + font-size: 1rem; } + .button.is-medium { + font-size: 1.25rem; } + .button.is-large { + font-size: 1.5rem; } + .button[disabled], + fieldset[disabled] .button { + background-color: white; + border-color: #dbdbdb; + box-shadow: none; + opacity: 0.5; } + .button.is-fullwidth { + display: flex; + width: 100%; } + .button.is-loading { + color: transparent !important; + pointer-events: none; } + .button.is-loading::after { + position: absolute; + left: calc(50% - (1em * 0.5)); + top: calc(50% - (1em * 0.5)); + position: absolute !important; } + .button.is-static { + background-color: whitesmoke; + border-color: #dbdbdb; + color: #7a7a7a; + box-shadow: none; + pointer-events: none; } + .button.is-rounded { + border-radius: 9999px; + padding-left: calc(1em + 0.25em); + padding-right: calc(1em + 0.25em); } + +.buttons { + align-items: center; + display: flex; + flex-wrap: wrap; + justify-content: flex-start; } + .buttons .button { + margin-bottom: 0.5rem; } + .buttons .button:not(:last-child):not(.is-fullwidth) { + margin-right: 0.5rem; } + .buttons:last-child { + margin-bottom: -0.5rem; } + .buttons:not(:last-child) { + margin-bottom: 1rem; } + .buttons.are-small .button:not(.is-normal):not(.is-medium):not(.is-large) { + font-size: 0.75rem; } + .buttons.are-small .button:not(.is-normal):not(.is-medium):not(.is-large):not(.is-rounded) { + border-radius: 2px; } + .buttons.are-medium .button:not(.is-small):not(.is-normal):not(.is-large) { + font-size: 1.25rem; } + .buttons.are-large .button:not(.is-small):not(.is-normal):not(.is-medium) { + font-size: 1.5rem; } + .buttons.has-addons .button:not(:first-child) { + border-bottom-left-radius: 0; + border-top-left-radius: 0; } + .buttons.has-addons .button:not(:last-child) { + border-bottom-right-radius: 0; + border-top-right-radius: 0; + margin-right: -1px; } + .buttons.has-addons .button:last-child { + margin-right: 0; } + .buttons.has-addons .button:hover, .buttons.has-addons .button.is-hovered { + z-index: 2; } + .buttons.has-addons .button:focus, .buttons.has-addons .button.is-focused, .buttons.has-addons .button:active, .buttons.has-addons .button.is-active, .buttons.has-addons .button.is-selected { + z-index: 3; } + .buttons.has-addons .button:focus:hover, .buttons.has-addons .button.is-focused:hover, .buttons.has-addons .button:active:hover, .buttons.has-addons .button.is-active:hover, .buttons.has-addons .button.is-selected:hover { + z-index: 4; } + .buttons.has-addons .button.is-expanded { + flex-grow: 1; + flex-shrink: 1; } + .buttons.is-centered { + justify-content: center; } + .buttons.is-centered:not(.has-addons) .button:not(.is-fullwidth) { + margin-left: 0.25rem; + margin-right: 0.25rem; } + .buttons.is-right { + justify-content: flex-end; } + .buttons.is-right:not(.has-addons) .button:not(.is-fullwidth) { + margin-left: 0.25rem; + margin-right: 0.25rem; } + +@media screen and (max-width: 768px) { + .button.is-responsive.is-small { + font-size: 0.5625rem; } + .button.is-responsive, + .button.is-responsive.is-normal { + font-size: 0.65625rem; } + .button.is-responsive.is-medium { + font-size: 0.75rem; } + .button.is-responsive.is-large { + font-size: 1rem; } } + +@media screen and (min-width: 769px) and (max-width: 1023px) { + .button.is-responsive.is-small { + font-size: 0.65625rem; } + 
.button.is-responsive, + .button.is-responsive.is-normal { + font-size: 0.75rem; } + .button.is-responsive.is-medium { + font-size: 1rem; } + .button.is-responsive.is-large { + font-size: 1.25rem; } } + +.container { + flex-grow: 1; + margin: 0 auto; + position: relative; + width: auto; } + .container.is-fluid { + max-width: none !important; + padding-left: 32px; + padding-right: 32px; + width: 100%; } + @media screen and (min-width: 1024px) { + .container { + max-width: 960px; } } + +.title, +.subtitle { + word-break: break-word; } + .title em, + .title span, + .subtitle em, + .subtitle span { + font-weight: inherit; } + .title sub, + .subtitle sub { + font-size: 0.75em; } + .title sup, + .subtitle sup { + font-size: 0.75em; } + .title .tag, + .subtitle .tag { + vertical-align: middle; } + +.title { + color: #81c8be; + font-size: 2rem; + font-weight: 600; + line-height: 1.125; } + .title strong { + color: inherit; + font-weight: inherit; } + .title:not(.is-spaced) + .subtitle { + margin-top: -1.25rem; } + .title.is-1 { + font-size: 3rem; } + .title.is-2 { + font-size: 2.5rem; } + .title.is-3 { + font-size: 2rem; } + .title.is-4 { + font-size: 1.5rem; } + .title.is-5 { + font-size: 1.25rem; } + .title.is-6 { + font-size: 1rem; } + .title.is-7 { + font-size: 0.75rem; } + +.subtitle { + color: #c6d0f5; + font-size: 1.25rem; + font-weight: 400; + line-height: 1.25; } + .subtitle strong { + color: #b5bfe2; + font-weight: 600; } + .subtitle:not(.is-spaced) + .title { + margin-top: -1.25rem; } + .subtitle.is-1 { + font-size: 3rem; } + .subtitle.is-2 { + font-size: 2.5rem; } + .subtitle.is-3 { + font-size: 2rem; } + .subtitle.is-4 { + font-size: 1.5rem; } + .subtitle.is-5 { + font-size: 1.25rem; } + .subtitle.is-6 { + font-size: 1rem; } + .subtitle.is-7 { + font-size: 0.75rem; } + +/* Bulma Form */ +.input, .textarea, .select select { + background-color: white; + border-color: transparent; + border-radius: 4px; + color: #b5bfe2; } + .input::-moz-placeholder, .textarea::-moz-placeholder, .select select::-moz-placeholder { + color: rgba(181, 191, 226, 0.3); } + .input::-webkit-input-placeholder, .textarea::-webkit-input-placeholder, .select select::-webkit-input-placeholder { + color: rgba(181, 191, 226, 0.3); } + .input:-moz-placeholder, .textarea:-moz-placeholder, .select select:-moz-placeholder { + color: rgba(181, 191, 226, 0.3); } + .input:-ms-input-placeholder, .textarea:-ms-input-placeholder, .select select:-ms-input-placeholder { + color: rgba(181, 191, 226, 0.3); } + .input:hover, .textarea:hover, .select select:hover, .is-hovered.input, .is-hovered.textarea, .select select.is-hovered { + border-color: #51576d; } + .input:focus, .textarea:focus, .select select:focus, .is-focused.input, .is-focused.textarea, .select select.is-focused, .input:active, .textarea:active, .select select:active, .is-active.input, .is-active.textarea, .select select.is-active { + border-color: #81c8be; + box-shadow: 0 0 0 0.125em rgba(129, 200, 190, 0.25); } + .input[disabled], .textarea[disabled], .select select[disabled], + fieldset[disabled] .input, + fieldset[disabled] .textarea, + fieldset[disabled] .select select, + .select fieldset[disabled] select { + background-color: whitesmoke; + border-color: whitesmoke; + box-shadow: none; + color: #7a7a7a; } + .input[disabled]::-moz-placeholder, .textarea[disabled]::-moz-placeholder, .select select[disabled]::-moz-placeholder, + fieldset[disabled] .input::-moz-placeholder, + fieldset[disabled] .textarea::-moz-placeholder, + fieldset[disabled] .select 
select::-moz-placeholder, + .select fieldset[disabled] select::-moz-placeholder { + color: rgba(122, 122, 122, 0.3); } + .input[disabled]::-webkit-input-placeholder, .textarea[disabled]::-webkit-input-placeholder, .select select[disabled]::-webkit-input-placeholder, + fieldset[disabled] .input::-webkit-input-placeholder, + fieldset[disabled] .textarea::-webkit-input-placeholder, + fieldset[disabled] .select select::-webkit-input-placeholder, + .select fieldset[disabled] select::-webkit-input-placeholder { + color: rgba(122, 122, 122, 0.3); } + .input[disabled]:-moz-placeholder, .textarea[disabled]:-moz-placeholder, .select select[disabled]:-moz-placeholder, + fieldset[disabled] .input:-moz-placeholder, + fieldset[disabled] .textarea:-moz-placeholder, + fieldset[disabled] .select select:-moz-placeholder, + .select fieldset[disabled] select:-moz-placeholder { + color: rgba(122, 122, 122, 0.3); } + .input[disabled]:-ms-input-placeholder, .textarea[disabled]:-ms-input-placeholder, .select select[disabled]:-ms-input-placeholder, + fieldset[disabled] .input:-ms-input-placeholder, + fieldset[disabled] .textarea:-ms-input-placeholder, + fieldset[disabled] .select select:-ms-input-placeholder, + .select fieldset[disabled] select:-ms-input-placeholder { + color: rgba(122, 122, 122, 0.3); } + +.input, .textarea { + box-shadow: none; + max-width: 100%; + width: 100%; } + .input[readonly], .textarea[readonly] { + box-shadow: none; } + .is-white.input, .is-white.textarea { + border-color: white; } + .is-white.input:focus, .is-white.textarea:focus, .is-white.is-focused.input, .is-white.is-focused.textarea, .is-white.input:active, .is-white.textarea:active, .is-white.is-active.input, .is-white.is-active.textarea { + box-shadow: 0 0 0 0.125em rgba(255, 255, 255, 0.25); } + .is-black.input, .is-black.textarea { + border-color: #0a0a0a; } + .is-black.input:focus, .is-black.textarea:focus, .is-black.is-focused.input, .is-black.is-focused.textarea, .is-black.input:active, .is-black.textarea:active, .is-black.is-active.input, .is-black.is-active.textarea { + box-shadow: 0 0 0 0.125em rgba(10, 10, 10, 0.25); } + .is-light.input, .is-light.textarea { + border-color: whitesmoke; } + .is-light.input:focus, .is-light.textarea:focus, .is-light.is-focused.input, .is-light.is-focused.textarea, .is-light.input:active, .is-light.textarea:active, .is-light.is-active.input, .is-light.is-active.textarea { + box-shadow: 0 0 0 0.125em rgba(245, 245, 245, 0.25); } + .is-dark.input, .is-dark.textarea { + border-color: #363636; } + .is-dark.input:focus, .is-dark.textarea:focus, .is-dark.is-focused.input, .is-dark.is-focused.textarea, .is-dark.input:active, .is-dark.textarea:active, .is-dark.is-active.input, .is-dark.is-active.textarea { + box-shadow: 0 0 0 0.125em rgba(54, 54, 54, 0.25); } + .is-primary.input, .is-primary.textarea { + border-color: #c6d0f5; } + .is-primary.input:focus, .is-primary.textarea:focus, .is-primary.is-focused.input, .is-primary.is-focused.textarea, .is-primary.input:active, .is-primary.textarea:active, .is-primary.is-active.input, .is-primary.is-active.textarea { + box-shadow: 0 0 0 0.125em rgba(198, 208, 245, 0.25); } + .is-link.input, .is-link.textarea { + border-color: #81c8be; } + .is-link.input:focus, .is-link.textarea:focus, .is-link.is-focused.input, .is-link.is-focused.textarea, .is-link.input:active, .is-link.textarea:active, .is-link.is-active.input, .is-link.is-active.textarea { + box-shadow: 0 0 0 0.125em rgba(129, 200, 190, 0.25); } + .is-info.input, .is-info.textarea { + border-color: 
#3e8ed0; } + .is-info.input:focus, .is-info.textarea:focus, .is-info.is-focused.input, .is-info.is-focused.textarea, .is-info.input:active, .is-info.textarea:active, .is-info.is-active.input, .is-info.is-active.textarea { + box-shadow: 0 0 0 0.125em rgba(62, 142, 208, 0.25); } + .is-success.input, .is-success.textarea { + border-color: #48c78e; } + .is-success.input:focus, .is-success.textarea:focus, .is-success.is-focused.input, .is-success.is-focused.textarea, .is-success.input:active, .is-success.textarea:active, .is-success.is-active.input, .is-success.is-active.textarea { + box-shadow: 0 0 0 0.125em rgba(72, 199, 142, 0.25); } + .is-warning.input, .is-warning.textarea { + border-color: #ffe08a; } + .is-warning.input:focus, .is-warning.textarea:focus, .is-warning.is-focused.input, .is-warning.is-focused.textarea, .is-warning.input:active, .is-warning.textarea:active, .is-warning.is-active.input, .is-warning.is-active.textarea { + box-shadow: 0 0 0 0.125em rgba(255, 224, 138, 0.25); } + .is-danger.input, .is-danger.textarea { + border-color: #f14668; } + .is-danger.input:focus, .is-danger.textarea:focus, .is-danger.is-focused.input, .is-danger.is-focused.textarea, .is-danger.input:active, .is-danger.textarea:active, .is-danger.is-active.input, .is-danger.is-active.textarea { + box-shadow: 0 0 0 0.125em rgba(241, 70, 104, 0.25); } + .is-small.input, .is-small.textarea { + border-radius: 2px; + font-size: 0.75rem; } + .is-medium.input, .is-medium.textarea { + font-size: 1.25rem; } + .is-large.input, .is-large.textarea { + font-size: 1.5rem; } + .is-fullwidth.input, .is-fullwidth.textarea { + display: block; + width: 100%; } + .is-inline.input, .is-inline.textarea { + display: inline; + width: auto; } + +.input.is-rounded { + border-radius: 9999px; + padding-left: calc(calc(0.75em - 2px) + 0.375em); + padding-right: calc(calc(0.75em - 2px) + 0.375em); } + +.input.is-static { + background-color: transparent; + border-color: transparent; + box-shadow: none; + padding-left: 0; + padding-right: 0; } + +.textarea { + display: block; + max-width: 100%; + min-width: 100%; + padding: calc(0.75em - 2px); + resize: vertical; } + .textarea:not([rows]) { + max-height: 40em; + min-height: 8em; } + .textarea[rows] { + height: initial; } + .textarea.has-fixed-size { + resize: none; } + +.checkbox, .radio { + cursor: pointer; + display: inline-block; + line-height: 1.25; + position: relative; } + .checkbox input, .radio input { + cursor: pointer; } + .checkbox:hover, .radio:hover { + color: #b5bfe2; } + .checkbox[disabled], .radio[disabled], + fieldset[disabled] .checkbox, + fieldset[disabled] .radio, + .checkbox input[disabled], + .radio input[disabled] { + color: #7a7a7a; + cursor: not-allowed; } + +.radio + .radio { + margin-left: 0.5em; } + +.select { + display: inline-block; + max-width: 100%; + position: relative; + vertical-align: top; } + .select:not(.is-multiple) { + height: 2.5em; } + .select:not(.is-multiple):not(.is-loading)::after { + border-color: #81c8be; + right: 1.125em; + z-index: 4; } + .select.is-rounded select { + border-radius: 9999px; + padding-left: 1em; } + .select select { + cursor: pointer; + display: block; + font-size: 1em; + max-width: 100%; + outline: none; } + .select select::-ms-expand { + display: none; } + .select select[disabled]:hover, + fieldset[disabled] .select select:hover { + border-color: whitesmoke; } + .select select:not([multiple]) { + padding-right: 2.5em; } + .select select[multiple] { + height: auto; + padding: 0; } + .select select[multiple] option { + 
padding: 0.5em 1em; } + .select:not(.is-multiple):not(.is-loading):hover::after { + border-color: #b5bfe2; } + .select.is-white:not(:hover)::after { + border-color: white; } + .select.is-white select { + border-color: white; } + .select.is-white select:hover, .select.is-white select.is-hovered { + border-color: #f2f2f2; } + .select.is-white select:focus, .select.is-white select.is-focused, .select.is-white select:active, .select.is-white select.is-active { + box-shadow: 0 0 0 0.125em rgba(255, 255, 255, 0.25); } + .select.is-black:not(:hover)::after { + border-color: #0a0a0a; } + .select.is-black select { + border-color: #0a0a0a; } + .select.is-black select:hover, .select.is-black select.is-hovered { + border-color: black; } + .select.is-black select:focus, .select.is-black select.is-focused, .select.is-black select:active, .select.is-black select.is-active { + box-shadow: 0 0 0 0.125em rgba(10, 10, 10, 0.25); } + .select.is-light:not(:hover)::after { + border-color: whitesmoke; } + .select.is-light select { + border-color: whitesmoke; } + .select.is-light select:hover, .select.is-light select.is-hovered { + border-color: #e8e8e8; } + .select.is-light select:focus, .select.is-light select.is-focused, .select.is-light select:active, .select.is-light select.is-active { + box-shadow: 0 0 0 0.125em rgba(245, 245, 245, 0.25); } + .select.is-dark:not(:hover)::after { + border-color: #363636; } + .select.is-dark select { + border-color: #363636; } + .select.is-dark select:hover, .select.is-dark select.is-hovered { + border-color: #292929; } + .select.is-dark select:focus, .select.is-dark select.is-focused, .select.is-dark select:active, .select.is-dark select.is-active { + box-shadow: 0 0 0 0.125em rgba(54, 54, 54, 0.25); } + .select.is-primary:not(:hover)::after { + border-color: #c6d0f5; } + .select.is-primary select { + border-color: #c6d0f5; } + .select.is-primary select:hover, .select.is-primary select.is-hovered { + border-color: #b0bef1; } + .select.is-primary select:focus, .select.is-primary select.is-focused, .select.is-primary select:active, .select.is-primary select.is-active { + box-shadow: 0 0 0 0.125em rgba(198, 208, 245, 0.25); } + .select.is-link:not(:hover)::after { + border-color: #81c8be; } + .select.is-link select { + border-color: #81c8be; } + .select.is-link select:hover, .select.is-link select.is-hovered { + border-color: #6fc0b5; } + .select.is-link select:focus, .select.is-link select.is-focused, .select.is-link select:active, .select.is-link select.is-active { + box-shadow: 0 0 0 0.125em rgba(129, 200, 190, 0.25); } + .select.is-info:not(:hover)::after { + border-color: #3e8ed0; } + .select.is-info select { + border-color: #3e8ed0; } + .select.is-info select:hover, .select.is-info select.is-hovered { + border-color: #3082c5; } + .select.is-info select:focus, .select.is-info select.is-focused, .select.is-info select:active, .select.is-info select.is-active { + box-shadow: 0 0 0 0.125em rgba(62, 142, 208, 0.25); } + .select.is-success:not(:hover)::after { + border-color: #48c78e; } + .select.is-success select { + border-color: #48c78e; } + .select.is-success select:hover, .select.is-success select.is-hovered { + border-color: #3abb81; } + .select.is-success select:focus, .select.is-success select.is-focused, .select.is-success select:active, .select.is-success select.is-active { + box-shadow: 0 0 0 0.125em rgba(72, 199, 142, 0.25); } + .select.is-warning:not(:hover)::after { + border-color: #ffe08a; } + .select.is-warning select { + border-color: #ffe08a; } + 
.select.is-warning select:hover, .select.is-warning select.is-hovered { + border-color: #ffd970; } + .select.is-warning select:focus, .select.is-warning select.is-focused, .select.is-warning select:active, .select.is-warning select.is-active { + box-shadow: 0 0 0 0.125em rgba(255, 224, 138, 0.25); } + .select.is-danger:not(:hover)::after { + border-color: #f14668; } + .select.is-danger select { + border-color: #f14668; } + .select.is-danger select:hover, .select.is-danger select.is-hovered { + border-color: #ef2e55; } + .select.is-danger select:focus, .select.is-danger select.is-focused, .select.is-danger select:active, .select.is-danger select.is-active { + box-shadow: 0 0 0 0.125em rgba(241, 70, 104, 0.25); } + .select.is-small { + border-radius: 2px; + font-size: 0.75rem; } + .select.is-medium { + font-size: 1.25rem; } + .select.is-large { + font-size: 1.5rem; } + .select.is-disabled::after { + border-color: #7a7a7a !important; + opacity: 0.5; } + .select.is-fullwidth { + width: 100%; } + .select.is-fullwidth select { + width: 100%; } + .select.is-loading::after { + margin-top: 0; + position: absolute; + right: 0.625em; + top: 0.625em; + transform: none; } + .select.is-loading.is-small:after { + font-size: 0.75rem; } + .select.is-loading.is-medium:after { + font-size: 1.25rem; } + .select.is-loading.is-large:after { + font-size: 1.5rem; } + +.file { + align-items: stretch; + display: flex; + justify-content: flex-start; + position: relative; } + .file.is-white .file-cta { + background-color: white; + border-color: transparent; + color: #0a0a0a; } + .file.is-white:hover .file-cta, .file.is-white.is-hovered .file-cta { + background-color: #f9f9f9; + border-color: transparent; + color: #0a0a0a; } + .file.is-white:focus .file-cta, .file.is-white.is-focused .file-cta { + border-color: transparent; + box-shadow: 0 0 0.5em rgba(255, 255, 255, 0.25); + color: #0a0a0a; } + .file.is-white:active .file-cta, .file.is-white.is-active .file-cta { + background-color: #f2f2f2; + border-color: transparent; + color: #0a0a0a; } + .file.is-black .file-cta { + background-color: #0a0a0a; + border-color: transparent; + color: white; } + .file.is-black:hover .file-cta, .file.is-black.is-hovered .file-cta { + background-color: #040404; + border-color: transparent; + color: white; } + .file.is-black:focus .file-cta, .file.is-black.is-focused .file-cta { + border-color: transparent; + box-shadow: 0 0 0.5em rgba(10, 10, 10, 0.25); + color: white; } + .file.is-black:active .file-cta, .file.is-black.is-active .file-cta { + background-color: black; + border-color: transparent; + color: white; } + .file.is-light .file-cta { + background-color: whitesmoke; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .file.is-light:hover .file-cta, .file.is-light.is-hovered .file-cta { + background-color: #eeeeee; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .file.is-light:focus .file-cta, .file.is-light.is-focused .file-cta { + border-color: transparent; + box-shadow: 0 0 0.5em rgba(245, 245, 245, 0.25); + color: rgba(0, 0, 0, 0.7); } + .file.is-light:active .file-cta, .file.is-light.is-active .file-cta { + background-color: #e8e8e8; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .file.is-dark .file-cta { + background-color: #363636; + border-color: transparent; + color: #fff; } + .file.is-dark:hover .file-cta, .file.is-dark.is-hovered .file-cta { + background-color: #2f2f2f; + border-color: transparent; + color: #fff; } + .file.is-dark:focus .file-cta, .file.is-dark.is-focused 
.file-cta { + border-color: transparent; + box-shadow: 0 0 0.5em rgba(54, 54, 54, 0.25); + color: #fff; } + .file.is-dark:active .file-cta, .file.is-dark.is-active .file-cta { + background-color: #292929; + border-color: transparent; + color: #fff; } + .file.is-primary .file-cta { + background-color: #c6d0f5; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .file.is-primary:hover .file-cta, .file.is-primary.is-hovered .file-cta { + background-color: #bbc7f3; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .file.is-primary:focus .file-cta, .file.is-primary.is-focused .file-cta { + border-color: transparent; + box-shadow: 0 0 0.5em rgba(198, 208, 245, 0.25); + color: rgba(0, 0, 0, 0.7); } + .file.is-primary:active .file-cta, .file.is-primary.is-active .file-cta { + background-color: #b0bef1; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .file.is-link .file-cta { + background-color: #81c8be; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .file.is-link:hover .file-cta, .file.is-link.is-hovered .file-cta { + background-color: #78c4b9; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .file.is-link:focus .file-cta, .file.is-link.is-focused .file-cta { + border-color: transparent; + box-shadow: 0 0 0.5em rgba(129, 200, 190, 0.25); + color: rgba(0, 0, 0, 0.7); } + .file.is-link:active .file-cta, .file.is-link.is-active .file-cta { + background-color: #6fc0b5; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .file.is-info .file-cta { + background-color: #3e8ed0; + border-color: transparent; + color: #fff; } + .file.is-info:hover .file-cta, .file.is-info.is-hovered .file-cta { + background-color: #3488ce; + border-color: transparent; + color: #fff; } + .file.is-info:focus .file-cta, .file.is-info.is-focused .file-cta { + border-color: transparent; + box-shadow: 0 0 0.5em rgba(62, 142, 208, 0.25); + color: #fff; } + .file.is-info:active .file-cta, .file.is-info.is-active .file-cta { + background-color: #3082c5; + border-color: transparent; + color: #fff; } + .file.is-success .file-cta { + background-color: #48c78e; + border-color: transparent; + color: #fff; } + .file.is-success:hover .file-cta, .file.is-success.is-hovered .file-cta { + background-color: #3ec487; + border-color: transparent; + color: #fff; } + .file.is-success:focus .file-cta, .file.is-success.is-focused .file-cta { + border-color: transparent; + box-shadow: 0 0 0.5em rgba(72, 199, 142, 0.25); + color: #fff; } + .file.is-success:active .file-cta, .file.is-success.is-active .file-cta { + background-color: #3abb81; + border-color: transparent; + color: #fff; } + .file.is-warning .file-cta { + background-color: #ffe08a; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .file.is-warning:hover .file-cta, .file.is-warning.is-hovered .file-cta { + background-color: #ffdc7d; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .file.is-warning:focus .file-cta, .file.is-warning.is-focused .file-cta { + border-color: transparent; + box-shadow: 0 0 0.5em rgba(255, 224, 138, 0.25); + color: rgba(0, 0, 0, 0.7); } + .file.is-warning:active .file-cta, .file.is-warning.is-active .file-cta { + background-color: #ffd970; + border-color: transparent; + color: rgba(0, 0, 0, 0.7); } + .file.is-danger .file-cta { + background-color: #f14668; + border-color: transparent; + color: #fff; } + .file.is-danger:hover .file-cta, .file.is-danger.is-hovered .file-cta { + background-color: #f03a5f; + border-color: transparent; + color: #fff; } + 
.file.is-danger:focus .file-cta, .file.is-danger.is-focused .file-cta { + border-color: transparent; + box-shadow: 0 0 0.5em rgba(241, 70, 104, 0.25); + color: #fff; } + .file.is-danger:active .file-cta, .file.is-danger.is-active .file-cta { + background-color: #ef2e55; + border-color: transparent; + color: #fff; } + .file.is-small { + font-size: 0.75rem; } + .file.is-normal { + font-size: 1rem; } + .file.is-medium { + font-size: 1.25rem; } + .file.is-medium .file-icon .fa { + font-size: 21px; } + .file.is-large { + font-size: 1.5rem; } + .file.is-large .file-icon .fa { + font-size: 28px; } + .file.has-name .file-cta { + border-bottom-right-radius: 0; + border-top-right-radius: 0; } + .file.has-name .file-name { + border-bottom-left-radius: 0; + border-top-left-radius: 0; } + .file.has-name.is-empty .file-cta { + border-radius: 4px; } + .file.has-name.is-empty .file-name { + display: none; } + .file.is-boxed .file-label { + flex-direction: column; } + .file.is-boxed .file-cta { + flex-direction: column; + height: auto; + padding: 1em 3em; } + .file.is-boxed .file-name { + border-width: 0 1px 1px; } + .file.is-boxed .file-icon { + height: 1.5em; + width: 1.5em; } + .file.is-boxed .file-icon .fa { + font-size: 21px; } + .file.is-boxed.is-small .file-icon .fa { + font-size: 14px; } + .file.is-boxed.is-medium .file-icon .fa { + font-size: 28px; } + .file.is-boxed.is-large .file-icon .fa { + font-size: 35px; } + .file.is-boxed.has-name .file-cta { + border-radius: 4px 4px 0 0; } + .file.is-boxed.has-name .file-name { + border-radius: 0 0 4px 4px; + border-width: 0 1px 1px; } + .file.is-centered { + justify-content: center; } + .file.is-fullwidth .file-label { + width: 100%; } + .file.is-fullwidth .file-name { + flex-grow: 1; + max-width: none; } + .file.is-right { + justify-content: flex-end; } + .file.is-right .file-cta { + border-radius: 0 4px 4px 0; } + .file.is-right .file-name { + border-radius: 4px 0 0 4px; + border-width: 1px 0 1px 1px; + order: -1; } + +.file-label { + align-items: stretch; + display: flex; + cursor: pointer; + justify-content: flex-start; + overflow: hidden; + position: relative; } + .file-label:hover .file-cta { + background-color: #eeeeee; + color: #b5bfe2; } + .file-label:hover .file-name { + border-color: #d5d5d5; } + .file-label:active .file-cta { + background-color: #e8e8e8; + color: #b5bfe2; } + .file-label:active .file-name { + border-color: #cfcfcf; } + +.file-input { + height: 100%; + left: 0; + opacity: 0; + outline: none; + position: absolute; + top: 0; + width: 100%; } + +.file-cta, +.file-name { + border-color: #dbdbdb; + border-radius: 4px; + font-size: 1em; + padding-left: 1em; + padding-right: 1em; + white-space: nowrap; } + +.file-cta { + background-color: whitesmoke; + color: #c6d0f5; } + +.file-name { + border-color: #dbdbdb; + border-style: solid; + border-width: 1px 1px 1px 0; + display: block; + max-width: 16em; + overflow: hidden; + text-align: inherit; + text-overflow: ellipsis; } + +.file-icon { + align-items: center; + display: flex; + height: 1em; + justify-content: center; + margin-right: 0.5em; + width: 1em; } + .file-icon .fa { + font-size: 14px; } + +.label { + color: #b5bfe2; + display: block; + font-size: 1rem; + font-weight: 700; } + .label:not(:last-child) { + margin-bottom: 0.5em; } + .label.is-small { + font-size: 0.75rem; } + .label.is-medium { + font-size: 1.25rem; } + .label.is-large { + font-size: 1.5rem; } + +.help { + display: block; + font-size: 0.75rem; + margin-top: 0.25rem; } + .help.is-white { + color: white; } + 
.help.is-black { + color: #0a0a0a; } + .help.is-light { + color: whitesmoke; } + .help.is-dark { + color: #363636; } + .help.is-primary { + color: #c6d0f5; } + .help.is-link { + color: #81c8be; } + .help.is-info { + color: #3e8ed0; } + .help.is-success { + color: #48c78e; } + .help.is-warning { + color: #ffe08a; } + .help.is-danger { + color: #f14668; } + +.field:not(:last-child) { + margin-bottom: 0.75rem; } + +.field.has-addons { + display: flex; + justify-content: flex-start; } + .field.has-addons .control:not(:last-child) { + margin-right: -1px; } + .field.has-addons .control:not(:first-child):not(:last-child) .button, + .field.has-addons .control:not(:first-child):not(:last-child) .input, + .field.has-addons .control:not(:first-child):not(:last-child) .select select { + border-radius: 0; } + .field.has-addons .control:first-child:not(:only-child) .button, + .field.has-addons .control:first-child:not(:only-child) .input, + .field.has-addons .control:first-child:not(:only-child) .select select { + border-bottom-right-radius: 0; + border-top-right-radius: 0; } + .field.has-addons .control:last-child:not(:only-child) .button, + .field.has-addons .control:last-child:not(:only-child) .input, + .field.has-addons .control:last-child:not(:only-child) .select select { + border-bottom-left-radius: 0; + border-top-left-radius: 0; } + .field.has-addons .control .button:not([disabled]):hover, .field.has-addons .control .button:not([disabled]).is-hovered, + .field.has-addons .control .input:not([disabled]):hover, + .field.has-addons .control .input:not([disabled]).is-hovered, + .field.has-addons .control .select select:not([disabled]):hover, + .field.has-addons .control .select select:not([disabled]).is-hovered { + z-index: 2; } + .field.has-addons .control .button:not([disabled]):focus, .field.has-addons .control .button:not([disabled]).is-focused, .field.has-addons .control .button:not([disabled]):active, .field.has-addons .control .button:not([disabled]).is-active, + .field.has-addons .control .input:not([disabled]):focus, + .field.has-addons .control .input:not([disabled]).is-focused, + .field.has-addons .control .input:not([disabled]):active, + .field.has-addons .control .input:not([disabled]).is-active, + .field.has-addons .control .select select:not([disabled]):focus, + .field.has-addons .control .select select:not([disabled]).is-focused, + .field.has-addons .control .select select:not([disabled]):active, + .field.has-addons .control .select select:not([disabled]).is-active { + z-index: 3; } + .field.has-addons .control .button:not([disabled]):focus:hover, .field.has-addons .control .button:not([disabled]).is-focused:hover, .field.has-addons .control .button:not([disabled]):active:hover, .field.has-addons .control .button:not([disabled]).is-active:hover, + .field.has-addons .control .input:not([disabled]):focus:hover, + .field.has-addons .control .input:not([disabled]).is-focused:hover, + .field.has-addons .control .input:not([disabled]):active:hover, + .field.has-addons .control .input:not([disabled]).is-active:hover, + .field.has-addons .control .select select:not([disabled]):focus:hover, + .field.has-addons .control .select select:not([disabled]).is-focused:hover, + .field.has-addons .control .select select:not([disabled]):active:hover, + .field.has-addons .control .select select:not([disabled]).is-active:hover { + z-index: 4; } + .field.has-addons .control.is-expanded { + flex-grow: 1; + flex-shrink: 1; } + .field.has-addons.has-addons-centered { + justify-content: center; } + 
.field.has-addons.has-addons-right { + justify-content: flex-end; } + .field.has-addons.has-addons-fullwidth .control { + flex-grow: 1; + flex-shrink: 0; } + +.field.is-grouped { + display: flex; + justify-content: flex-start; } + .field.is-grouped > .control { + flex-shrink: 0; } + .field.is-grouped > .control:not(:last-child) { + margin-bottom: 0; + margin-right: 0.75rem; } + .field.is-grouped > .control.is-expanded { + flex-grow: 1; + flex-shrink: 1; } + .field.is-grouped.is-grouped-centered { + justify-content: center; } + .field.is-grouped.is-grouped-right { + justify-content: flex-end; } + .field.is-grouped.is-grouped-multiline { + flex-wrap: wrap; } + .field.is-grouped.is-grouped-multiline > .control:last-child, .field.is-grouped.is-grouped-multiline > .control:not(:last-child) { + margin-bottom: 0.75rem; } + .field.is-grouped.is-grouped-multiline:last-child { + margin-bottom: -0.75rem; } + .field.is-grouped.is-grouped-multiline:not(:last-child) { + margin-bottom: 0; } + +@media screen and (min-width: 769px), print { + .field.is-horizontal { + display: flex; } } + +.field-label .label { + font-size: inherit; } + +@media screen and (max-width: 768px) { + .field-label { + margin-bottom: 0.5rem; } } + +@media screen and (min-width: 769px), print { + .field-label { + flex-basis: 0; + flex-grow: 1; + flex-shrink: 0; + margin-right: 1.5rem; + text-align: right; } + .field-label.is-small { + font-size: 0.75rem; + padding-top: 0.375em; } + .field-label.is-normal { + padding-top: 0.375em; } + .field-label.is-medium { + font-size: 1.25rem; + padding-top: 0.375em; } + .field-label.is-large { + font-size: 1.5rem; + padding-top: 0.375em; } } + +.field-body .field .field { + margin-bottom: 0; } + +@media screen and (min-width: 769px), print { + .field-body { + display: flex; + flex-basis: 0; + flex-grow: 5; + flex-shrink: 1; } + .field-body .field { + margin-bottom: 0; } + .field-body > .field { + flex-shrink: 1; } + .field-body > .field:not(.is-narrow) { + flex-grow: 1; } + .field-body > .field:not(:last-child) { + margin-right: 0.75rem; } } + +.control { + box-sizing: border-box; + clear: both; + font-size: 1rem; + position: relative; + text-align: inherit; } + .control.has-icons-left .input:focus ~ .icon, + .control.has-icons-left .select:focus ~ .icon, .control.has-icons-right .input:focus ~ .icon, + .control.has-icons-right .select:focus ~ .icon { + color: #c6d0f5; } + .control.has-icons-left .input.is-small ~ .icon, + .control.has-icons-left .select.is-small ~ .icon, .control.has-icons-right .input.is-small ~ .icon, + .control.has-icons-right .select.is-small ~ .icon { + font-size: 0.75rem; } + .control.has-icons-left .input.is-medium ~ .icon, + .control.has-icons-left .select.is-medium ~ .icon, .control.has-icons-right .input.is-medium ~ .icon, + .control.has-icons-right .select.is-medium ~ .icon { + font-size: 1.25rem; } + .control.has-icons-left .input.is-large ~ .icon, + .control.has-icons-left .select.is-large ~ .icon, .control.has-icons-right .input.is-large ~ .icon, + .control.has-icons-right .select.is-large ~ .icon { + font-size: 1.5rem; } + .control.has-icons-left .icon, .control.has-icons-right .icon { + color: #dbdbdb; + height: 2.5em; + pointer-events: none; + position: absolute; + top: 0; + width: 2.5em; + z-index: 4; } + .control.has-icons-left .input, + .control.has-icons-left .select select { + padding-left: 2.5em; } + .control.has-icons-left .icon.is-left { + left: 0; } + .control.has-icons-right .input, + .control.has-icons-right .select select { + padding-right: 2.5em; } 
+ .control.has-icons-right .icon.is-right { + right: 0; } + .control.is-loading::after { + position: absolute !important; + right: 0.625em; + top: 0.625em; + z-index: 4; } + .control.is-loading.is-small:after { + font-size: 0.75rem; } + .control.is-loading.is-medium:after { + font-size: 1.25rem; } + .control.is-loading.is-large:after { + font-size: 1.5rem; } + +.navbar { + background-color: #303446; + min-height: 3.25rem; + position: relative; + z-index: 30; } + .navbar.is-white { + background-color: white; + color: #0a0a0a; } + .navbar.is-white .navbar-brand > .navbar-item, + .navbar.is-white .navbar-brand .navbar-link { + color: #0a0a0a; } + .navbar.is-white .navbar-brand > a.navbar-item:focus, .navbar.is-white .navbar-brand > a.navbar-item:hover, .navbar.is-white .navbar-brand > a.navbar-item.is-active, + .navbar.is-white .navbar-brand .navbar-link:focus, + .navbar.is-white .navbar-brand .navbar-link:hover, + .navbar.is-white .navbar-brand .navbar-link.is-active { + background-color: #f2f2f2; + color: #0a0a0a; } + .navbar.is-white .navbar-brand .navbar-link::after { + border-color: #0a0a0a; } + .navbar.is-white .navbar-burger { + color: #0a0a0a; } + @media screen and (min-width: 1024px) { + .navbar.is-white .navbar-start > .navbar-item, + .navbar.is-white .navbar-start .navbar-link, + .navbar.is-white .navbar-end > .navbar-item, + .navbar.is-white .navbar-end .navbar-link { + color: #0a0a0a; } + .navbar.is-white .navbar-start > a.navbar-item:focus, .navbar.is-white .navbar-start > a.navbar-item:hover, .navbar.is-white .navbar-start > a.navbar-item.is-active, + .navbar.is-white .navbar-start .navbar-link:focus, + .navbar.is-white .navbar-start .navbar-link:hover, + .navbar.is-white .navbar-start .navbar-link.is-active, + .navbar.is-white .navbar-end > a.navbar-item:focus, + .navbar.is-white .navbar-end > a.navbar-item:hover, + .navbar.is-white .navbar-end > a.navbar-item.is-active, + .navbar.is-white .navbar-end .navbar-link:focus, + .navbar.is-white .navbar-end .navbar-link:hover, + .navbar.is-white .navbar-end .navbar-link.is-active { + background-color: #f2f2f2; + color: #0a0a0a; } + .navbar.is-white .navbar-start .navbar-link::after, + .navbar.is-white .navbar-end .navbar-link::after { + border-color: #0a0a0a; } + .navbar.is-white .navbar-item.has-dropdown:focus .navbar-link, + .navbar.is-white .navbar-item.has-dropdown:hover .navbar-link, + .navbar.is-white .navbar-item.has-dropdown.is-active .navbar-link { + background-color: #f2f2f2; + color: #0a0a0a; } + .navbar.is-white .navbar-dropdown a.navbar-item.is-active { + background-color: white; + color: #0a0a0a; } } + .navbar.is-black { + background-color: #0a0a0a; + color: white; } + .navbar.is-black .navbar-brand > .navbar-item, + .navbar.is-black .navbar-brand .navbar-link { + color: white; } + .navbar.is-black .navbar-brand > a.navbar-item:focus, .navbar.is-black .navbar-brand > a.navbar-item:hover, .navbar.is-black .navbar-brand > a.navbar-item.is-active, + .navbar.is-black .navbar-brand .navbar-link:focus, + .navbar.is-black .navbar-brand .navbar-link:hover, + .navbar.is-black .navbar-brand .navbar-link.is-active { + background-color: black; + color: white; } + .navbar.is-black .navbar-brand .navbar-link::after { + border-color: white; } + .navbar.is-black .navbar-burger { + color: white; } + @media screen and (min-width: 1024px) { + .navbar.is-black .navbar-start > .navbar-item, + .navbar.is-black .navbar-start .navbar-link, + .navbar.is-black .navbar-end > .navbar-item, + .navbar.is-black .navbar-end .navbar-link { + color: 
white; } + .navbar.is-black .navbar-start > a.navbar-item:focus, .navbar.is-black .navbar-start > a.navbar-item:hover, .navbar.is-black .navbar-start > a.navbar-item.is-active, + .navbar.is-black .navbar-start .navbar-link:focus, + .navbar.is-black .navbar-start .navbar-link:hover, + .navbar.is-black .navbar-start .navbar-link.is-active, + .navbar.is-black .navbar-end > a.navbar-item:focus, + .navbar.is-black .navbar-end > a.navbar-item:hover, + .navbar.is-black .navbar-end > a.navbar-item.is-active, + .navbar.is-black .navbar-end .navbar-link:focus, + .navbar.is-black .navbar-end .navbar-link:hover, + .navbar.is-black .navbar-end .navbar-link.is-active { + background-color: black; + color: white; } + .navbar.is-black .navbar-start .navbar-link::after, + .navbar.is-black .navbar-end .navbar-link::after { + border-color: white; } + .navbar.is-black .navbar-item.has-dropdown:focus .navbar-link, + .navbar.is-black .navbar-item.has-dropdown:hover .navbar-link, + .navbar.is-black .navbar-item.has-dropdown.is-active .navbar-link { + background-color: black; + color: white; } + .navbar.is-black .navbar-dropdown a.navbar-item.is-active { + background-color: #0a0a0a; + color: white; } } + .navbar.is-light { + background-color: whitesmoke; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-light .navbar-brand > .navbar-item, + .navbar.is-light .navbar-brand .navbar-link { + color: rgba(0, 0, 0, 0.7); } + .navbar.is-light .navbar-brand > a.navbar-item:focus, .navbar.is-light .navbar-brand > a.navbar-item:hover, .navbar.is-light .navbar-brand > a.navbar-item.is-active, + .navbar.is-light .navbar-brand .navbar-link:focus, + .navbar.is-light .navbar-brand .navbar-link:hover, + .navbar.is-light .navbar-brand .navbar-link.is-active { + background-color: #e8e8e8; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-light .navbar-brand .navbar-link::after { + border-color: rgba(0, 0, 0, 0.7); } + .navbar.is-light .navbar-burger { + color: rgba(0, 0, 0, 0.7); } + @media screen and (min-width: 1024px) { + .navbar.is-light .navbar-start > .navbar-item, + .navbar.is-light .navbar-start .navbar-link, + .navbar.is-light .navbar-end > .navbar-item, + .navbar.is-light .navbar-end .navbar-link { + color: rgba(0, 0, 0, 0.7); } + .navbar.is-light .navbar-start > a.navbar-item:focus, .navbar.is-light .navbar-start > a.navbar-item:hover, .navbar.is-light .navbar-start > a.navbar-item.is-active, + .navbar.is-light .navbar-start .navbar-link:focus, + .navbar.is-light .navbar-start .navbar-link:hover, + .navbar.is-light .navbar-start .navbar-link.is-active, + .navbar.is-light .navbar-end > a.navbar-item:focus, + .navbar.is-light .navbar-end > a.navbar-item:hover, + .navbar.is-light .navbar-end > a.navbar-item.is-active, + .navbar.is-light .navbar-end .navbar-link:focus, + .navbar.is-light .navbar-end .navbar-link:hover, + .navbar.is-light .navbar-end .navbar-link.is-active { + background-color: #e8e8e8; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-light .navbar-start .navbar-link::after, + .navbar.is-light .navbar-end .navbar-link::after { + border-color: rgba(0, 0, 0, 0.7); } + .navbar.is-light .navbar-item.has-dropdown:focus .navbar-link, + .navbar.is-light .navbar-item.has-dropdown:hover .navbar-link, + .navbar.is-light .navbar-item.has-dropdown.is-active .navbar-link { + background-color: #e8e8e8; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-light .navbar-dropdown a.navbar-item.is-active { + background-color: whitesmoke; + color: rgba(0, 0, 0, 0.7); } } + .navbar.is-dark { + background-color: #363636; + color: #fff; } + .navbar.is-dark 
.navbar-brand > .navbar-item, + .navbar.is-dark .navbar-brand .navbar-link { + color: #fff; } + .navbar.is-dark .navbar-brand > a.navbar-item:focus, .navbar.is-dark .navbar-brand > a.navbar-item:hover, .navbar.is-dark .navbar-brand > a.navbar-item.is-active, + .navbar.is-dark .navbar-brand .navbar-link:focus, + .navbar.is-dark .navbar-brand .navbar-link:hover, + .navbar.is-dark .navbar-brand .navbar-link.is-active { + background-color: #292929; + color: #fff; } + .navbar.is-dark .navbar-brand .navbar-link::after { + border-color: #fff; } + .navbar.is-dark .navbar-burger { + color: #fff; } + @media screen and (min-width: 1024px) { + .navbar.is-dark .navbar-start > .navbar-item, + .navbar.is-dark .navbar-start .navbar-link, + .navbar.is-dark .navbar-end > .navbar-item, + .navbar.is-dark .navbar-end .navbar-link { + color: #fff; } + .navbar.is-dark .navbar-start > a.navbar-item:focus, .navbar.is-dark .navbar-start > a.navbar-item:hover, .navbar.is-dark .navbar-start > a.navbar-item.is-active, + .navbar.is-dark .navbar-start .navbar-link:focus, + .navbar.is-dark .navbar-start .navbar-link:hover, + .navbar.is-dark .navbar-start .navbar-link.is-active, + .navbar.is-dark .navbar-end > a.navbar-item:focus, + .navbar.is-dark .navbar-end > a.navbar-item:hover, + .navbar.is-dark .navbar-end > a.navbar-item.is-active, + .navbar.is-dark .navbar-end .navbar-link:focus, + .navbar.is-dark .navbar-end .navbar-link:hover, + .navbar.is-dark .navbar-end .navbar-link.is-active { + background-color: #292929; + color: #fff; } + .navbar.is-dark .navbar-start .navbar-link::after, + .navbar.is-dark .navbar-end .navbar-link::after { + border-color: #fff; } + .navbar.is-dark .navbar-item.has-dropdown:focus .navbar-link, + .navbar.is-dark .navbar-item.has-dropdown:hover .navbar-link, + .navbar.is-dark .navbar-item.has-dropdown.is-active .navbar-link { + background-color: #292929; + color: #fff; } + .navbar.is-dark .navbar-dropdown a.navbar-item.is-active { + background-color: #363636; + color: #fff; } } + .navbar.is-primary { + background-color: #c6d0f5; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-primary .navbar-brand > .navbar-item, + .navbar.is-primary .navbar-brand .navbar-link { + color: rgba(0, 0, 0, 0.7); } + .navbar.is-primary .navbar-brand > a.navbar-item:focus, .navbar.is-primary .navbar-brand > a.navbar-item:hover, .navbar.is-primary .navbar-brand > a.navbar-item.is-active, + .navbar.is-primary .navbar-brand .navbar-link:focus, + .navbar.is-primary .navbar-brand .navbar-link:hover, + .navbar.is-primary .navbar-brand .navbar-link.is-active { + background-color: #b0bef1; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-primary .navbar-brand .navbar-link::after { + border-color: rgba(0, 0, 0, 0.7); } + .navbar.is-primary .navbar-burger { + color: rgba(0, 0, 0, 0.7); } + @media screen and (min-width: 1024px) { + .navbar.is-primary .navbar-start > .navbar-item, + .navbar.is-primary .navbar-start .navbar-link, + .navbar.is-primary .navbar-end > .navbar-item, + .navbar.is-primary .navbar-end .navbar-link { + color: rgba(0, 0, 0, 0.7); } + .navbar.is-primary .navbar-start > a.navbar-item:focus, .navbar.is-primary .navbar-start > a.navbar-item:hover, .navbar.is-primary .navbar-start > a.navbar-item.is-active, + .navbar.is-primary .navbar-start .navbar-link:focus, + .navbar.is-primary .navbar-start .navbar-link:hover, + .navbar.is-primary .navbar-start .navbar-link.is-active, + .navbar.is-primary .navbar-end > a.navbar-item:focus, + .navbar.is-primary .navbar-end > a.navbar-item:hover, + .navbar.is-primary .navbar-end > 
a.navbar-item.is-active, + .navbar.is-primary .navbar-end .navbar-link:focus, + .navbar.is-primary .navbar-end .navbar-link:hover, + .navbar.is-primary .navbar-end .navbar-link.is-active { + background-color: #b0bef1; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-primary .navbar-start .navbar-link::after, + .navbar.is-primary .navbar-end .navbar-link::after { + border-color: rgba(0, 0, 0, 0.7); } + .navbar.is-primary .navbar-item.has-dropdown:focus .navbar-link, + .navbar.is-primary .navbar-item.has-dropdown:hover .navbar-link, + .navbar.is-primary .navbar-item.has-dropdown.is-active .navbar-link { + background-color: #b0bef1; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-primary .navbar-dropdown a.navbar-item.is-active { + background-color: #c6d0f5; + color: rgba(0, 0, 0, 0.7); } } + .navbar.is-link { + background-color: #81c8be; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-link .navbar-brand > .navbar-item, + .navbar.is-link .navbar-brand .navbar-link { + color: rgba(0, 0, 0, 0.7); } + .navbar.is-link .navbar-brand > a.navbar-item:focus, .navbar.is-link .navbar-brand > a.navbar-item:hover, .navbar.is-link .navbar-brand > a.navbar-item.is-active, + .navbar.is-link .navbar-brand .navbar-link:focus, + .navbar.is-link .navbar-brand .navbar-link:hover, + .navbar.is-link .navbar-brand .navbar-link.is-active { + background-color: #6fc0b5; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-link .navbar-brand .navbar-link::after { + border-color: rgba(0, 0, 0, 0.7); } + .navbar.is-link .navbar-burger { + color: rgba(0, 0, 0, 0.7); } + @media screen and (min-width: 1024px) { + .navbar.is-link .navbar-start > .navbar-item, + .navbar.is-link .navbar-start .navbar-link, + .navbar.is-link .navbar-end > .navbar-item, + .navbar.is-link .navbar-end .navbar-link { + color: rgba(0, 0, 0, 0.7); } + .navbar.is-link .navbar-start > a.navbar-item:focus, .navbar.is-link .navbar-start > a.navbar-item:hover, .navbar.is-link .navbar-start > a.navbar-item.is-active, + .navbar.is-link .navbar-start .navbar-link:focus, + .navbar.is-link .navbar-start .navbar-link:hover, + .navbar.is-link .navbar-start .navbar-link.is-active, + .navbar.is-link .navbar-end > a.navbar-item:focus, + .navbar.is-link .navbar-end > a.navbar-item:hover, + .navbar.is-link .navbar-end > a.navbar-item.is-active, + .navbar.is-link .navbar-end .navbar-link:focus, + .navbar.is-link .navbar-end .navbar-link:hover, + .navbar.is-link .navbar-end .navbar-link.is-active { + background-color: #6fc0b5; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-link .navbar-start .navbar-link::after, + .navbar.is-link .navbar-end .navbar-link::after { + border-color: rgba(0, 0, 0, 0.7); } + .navbar.is-link .navbar-item.has-dropdown:focus .navbar-link, + .navbar.is-link .navbar-item.has-dropdown:hover .navbar-link, + .navbar.is-link .navbar-item.has-dropdown.is-active .navbar-link { + background-color: #6fc0b5; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-link .navbar-dropdown a.navbar-item.is-active { + background-color: #81c8be; + color: rgba(0, 0, 0, 0.7); } } + .navbar.is-info { + background-color: #3e8ed0; + color: #fff; } + .navbar.is-info .navbar-brand > .navbar-item, + .navbar.is-info .navbar-brand .navbar-link { + color: #fff; } + .navbar.is-info .navbar-brand > a.navbar-item:focus, .navbar.is-info .navbar-brand > a.navbar-item:hover, .navbar.is-info .navbar-brand > a.navbar-item.is-active, + .navbar.is-info .navbar-brand .navbar-link:focus, + .navbar.is-info .navbar-brand .navbar-link:hover, + .navbar.is-info .navbar-brand .navbar-link.is-active { + background-color: 
#3082c5; + color: #fff; } + .navbar.is-info .navbar-brand .navbar-link::after { + border-color: #fff; } + .navbar.is-info .navbar-burger { + color: #fff; } + @media screen and (min-width: 1024px) { + .navbar.is-info .navbar-start > .navbar-item, + .navbar.is-info .navbar-start .navbar-link, + .navbar.is-info .navbar-end > .navbar-item, + .navbar.is-info .navbar-end .navbar-link { + color: #fff; } + .navbar.is-info .navbar-start > a.navbar-item:focus, .navbar.is-info .navbar-start > a.navbar-item:hover, .navbar.is-info .navbar-start > a.navbar-item.is-active, + .navbar.is-info .navbar-start .navbar-link:focus, + .navbar.is-info .navbar-start .navbar-link:hover, + .navbar.is-info .navbar-start .navbar-link.is-active, + .navbar.is-info .navbar-end > a.navbar-item:focus, + .navbar.is-info .navbar-end > a.navbar-item:hover, + .navbar.is-info .navbar-end > a.navbar-item.is-active, + .navbar.is-info .navbar-end .navbar-link:focus, + .navbar.is-info .navbar-end .navbar-link:hover, + .navbar.is-info .navbar-end .navbar-link.is-active { + background-color: #3082c5; + color: #fff; } + .navbar.is-info .navbar-start .navbar-link::after, + .navbar.is-info .navbar-end .navbar-link::after { + border-color: #fff; } + .navbar.is-info .navbar-item.has-dropdown:focus .navbar-link, + .navbar.is-info .navbar-item.has-dropdown:hover .navbar-link, + .navbar.is-info .navbar-item.has-dropdown.is-active .navbar-link { + background-color: #3082c5; + color: #fff; } + .navbar.is-info .navbar-dropdown a.navbar-item.is-active { + background-color: #3e8ed0; + color: #fff; } } + .navbar.is-success { + background-color: #48c78e; + color: #fff; } + .navbar.is-success .navbar-brand > .navbar-item, + .navbar.is-success .navbar-brand .navbar-link { + color: #fff; } + .navbar.is-success .navbar-brand > a.navbar-item:focus, .navbar.is-success .navbar-brand > a.navbar-item:hover, .navbar.is-success .navbar-brand > a.navbar-item.is-active, + .navbar.is-success .navbar-brand .navbar-link:focus, + .navbar.is-success .navbar-brand .navbar-link:hover, + .navbar.is-success .navbar-brand .navbar-link.is-active { + background-color: #3abb81; + color: #fff; } + .navbar.is-success .navbar-brand .navbar-link::after { + border-color: #fff; } + .navbar.is-success .navbar-burger { + color: #fff; } + @media screen and (min-width: 1024px) { + .navbar.is-success .navbar-start > .navbar-item, + .navbar.is-success .navbar-start .navbar-link, + .navbar.is-success .navbar-end > .navbar-item, + .navbar.is-success .navbar-end .navbar-link { + color: #fff; } + .navbar.is-success .navbar-start > a.navbar-item:focus, .navbar.is-success .navbar-start > a.navbar-item:hover, .navbar.is-success .navbar-start > a.navbar-item.is-active, + .navbar.is-success .navbar-start .navbar-link:focus, + .navbar.is-success .navbar-start .navbar-link:hover, + .navbar.is-success .navbar-start .navbar-link.is-active, + .navbar.is-success .navbar-end > a.navbar-item:focus, + .navbar.is-success .navbar-end > a.navbar-item:hover, + .navbar.is-success .navbar-end > a.navbar-item.is-active, + .navbar.is-success .navbar-end .navbar-link:focus, + .navbar.is-success .navbar-end .navbar-link:hover, + .navbar.is-success .navbar-end .navbar-link.is-active { + background-color: #3abb81; + color: #fff; } + .navbar.is-success .navbar-start .navbar-link::after, + .navbar.is-success .navbar-end .navbar-link::after { + border-color: #fff; } + .navbar.is-success .navbar-item.has-dropdown:focus .navbar-link, + .navbar.is-success .navbar-item.has-dropdown:hover .navbar-link, + .navbar.is-success 
.navbar-item.has-dropdown.is-active .navbar-link { + background-color: #3abb81; + color: #fff; } + .navbar.is-success .navbar-dropdown a.navbar-item.is-active { + background-color: #48c78e; + color: #fff; } } + .navbar.is-warning { + background-color: #ffe08a; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-warning .navbar-brand > .navbar-item, + .navbar.is-warning .navbar-brand .navbar-link { + color: rgba(0, 0, 0, 0.7); } + .navbar.is-warning .navbar-brand > a.navbar-item:focus, .navbar.is-warning .navbar-brand > a.navbar-item:hover, .navbar.is-warning .navbar-brand > a.navbar-item.is-active, + .navbar.is-warning .navbar-brand .navbar-link:focus, + .navbar.is-warning .navbar-brand .navbar-link:hover, + .navbar.is-warning .navbar-brand .navbar-link.is-active { + background-color: #ffd970; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-warning .navbar-brand .navbar-link::after { + border-color: rgba(0, 0, 0, 0.7); } + .navbar.is-warning .navbar-burger { + color: rgba(0, 0, 0, 0.7); } + @media screen and (min-width: 1024px) { + .navbar.is-warning .navbar-start > .navbar-item, + .navbar.is-warning .navbar-start .navbar-link, + .navbar.is-warning .navbar-end > .navbar-item, + .navbar.is-warning .navbar-end .navbar-link { + color: rgba(0, 0, 0, 0.7); } + .navbar.is-warning .navbar-start > a.navbar-item:focus, .navbar.is-warning .navbar-start > a.navbar-item:hover, .navbar.is-warning .navbar-start > a.navbar-item.is-active, + .navbar.is-warning .navbar-start .navbar-link:focus, + .navbar.is-warning .navbar-start .navbar-link:hover, + .navbar.is-warning .navbar-start .navbar-link.is-active, + .navbar.is-warning .navbar-end > a.navbar-item:focus, + .navbar.is-warning .navbar-end > a.navbar-item:hover, + .navbar.is-warning .navbar-end > a.navbar-item.is-active, + .navbar.is-warning .navbar-end .navbar-link:focus, + .navbar.is-warning .navbar-end .navbar-link:hover, + .navbar.is-warning .navbar-end .navbar-link.is-active { + background-color: #ffd970; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-warning .navbar-start .navbar-link::after, + .navbar.is-warning .navbar-end .navbar-link::after { + border-color: rgba(0, 0, 0, 0.7); } + .navbar.is-warning .navbar-item.has-dropdown:focus .navbar-link, + .navbar.is-warning .navbar-item.has-dropdown:hover .navbar-link, + .navbar.is-warning .navbar-item.has-dropdown.is-active .navbar-link { + background-color: #ffd970; + color: rgba(0, 0, 0, 0.7); } + .navbar.is-warning .navbar-dropdown a.navbar-item.is-active { + background-color: #ffe08a; + color: rgba(0, 0, 0, 0.7); } } + .navbar.is-danger { + background-color: #f14668; + color: #fff; } + .navbar.is-danger .navbar-brand > .navbar-item, + .navbar.is-danger .navbar-brand .navbar-link { + color: #fff; } + .navbar.is-danger .navbar-brand > a.navbar-item:focus, .navbar.is-danger .navbar-brand > a.navbar-item:hover, .navbar.is-danger .navbar-brand > a.navbar-item.is-active, + .navbar.is-danger .navbar-brand .navbar-link:focus, + .navbar.is-danger .navbar-brand .navbar-link:hover, + .navbar.is-danger .navbar-brand .navbar-link.is-active { + background-color: #ef2e55; + color: #fff; } + .navbar.is-danger .navbar-brand .navbar-link::after { + border-color: #fff; } + .navbar.is-danger .navbar-burger { + color: #fff; } + @media screen and (min-width: 1024px) { + .navbar.is-danger .navbar-start > .navbar-item, + .navbar.is-danger .navbar-start .navbar-link, + .navbar.is-danger .navbar-end > .navbar-item, + .navbar.is-danger .navbar-end .navbar-link { + color: #fff; } + .navbar.is-danger .navbar-start > a.navbar-item:focus, 
.navbar.is-danger .navbar-start > a.navbar-item:hover, .navbar.is-danger .navbar-start > a.navbar-item.is-active, + .navbar.is-danger .navbar-start .navbar-link:focus, + .navbar.is-danger .navbar-start .navbar-link:hover, + .navbar.is-danger .navbar-start .navbar-link.is-active, + .navbar.is-danger .navbar-end > a.navbar-item:focus, + .navbar.is-danger .navbar-end > a.navbar-item:hover, + .navbar.is-danger .navbar-end > a.navbar-item.is-active, + .navbar.is-danger .navbar-end .navbar-link:focus, + .navbar.is-danger .navbar-end .navbar-link:hover, + .navbar.is-danger .navbar-end .navbar-link.is-active { + background-color: #ef2e55; + color: #fff; } + .navbar.is-danger .navbar-start .navbar-link::after, + .navbar.is-danger .navbar-end .navbar-link::after { + border-color: #fff; } + .navbar.is-danger .navbar-item.has-dropdown:focus .navbar-link, + .navbar.is-danger .navbar-item.has-dropdown:hover .navbar-link, + .navbar.is-danger .navbar-item.has-dropdown.is-active .navbar-link { + background-color: #ef2e55; + color: #fff; } + .navbar.is-danger .navbar-dropdown a.navbar-item.is-active { + background-color: #f14668; + color: #fff; } } + .navbar > .container { + align-items: stretch; + display: flex; + min-height: 3.25rem; + width: 100%; } + .navbar.has-shadow { + box-shadow: 0 2px 0 0 whitesmoke; } + .navbar.is-fixed-bottom, .navbar.is-fixed-top { + left: 0; + position: fixed; + right: 0; + z-index: 30; } + .navbar.is-fixed-bottom { + bottom: 0; } + .navbar.is-fixed-bottom.has-shadow { + box-shadow: 0 -2px 0 0 whitesmoke; } + .navbar.is-fixed-top { + top: 0; } + +html.has-navbar-fixed-top, +body.has-navbar-fixed-top { + padding-top: 3.25rem; } + +html.has-navbar-fixed-bottom, +body.has-navbar-fixed-bottom { + padding-bottom: 3.25rem; } + +.navbar-brand, +.navbar-tabs { + align-items: stretch; + display: flex; + flex-shrink: 0; + min-height: 3.25rem; } + +.navbar-brand a.navbar-item:focus, .navbar-brand a.navbar-item:hover { + background-color: transparent; } + +.navbar-tabs { + -webkit-overflow-scrolling: touch; + max-width: 100vw; + overflow-x: auto; + overflow-y: hidden; } + +.navbar-burger { + color: #81c8be; + -moz-appearance: none; + -webkit-appearance: none; + appearance: none; + background: none; + border: none; + cursor: pointer; + display: block; + height: 3.25rem; + position: relative; + width: 3.25rem; + margin-left: auto; } + .navbar-burger span { + background-color: currentColor; + display: block; + height: 1px; + left: calc(50% - 8px); + position: absolute; + transform-origin: center; + transition-duration: 86ms; + transition-property: background-color, opacity, transform; + transition-timing-function: ease-out; + width: 16px; } + .navbar-burger span:nth-child(1) { + top: calc(50% - 6px); } + .navbar-burger span:nth-child(2) { + top: calc(50% - 1px); } + .navbar-burger span:nth-child(3) { + top: calc(50% + 4px); } + .navbar-burger:hover { + background-color: rgba(0, 0, 0, 0.05); } + .navbar-burger.is-active span:nth-child(1) { + transform: translateY(5px) rotate(45deg); } + .navbar-burger.is-active span:nth-child(2) { + opacity: 0; } + .navbar-burger.is-active span:nth-child(3) { + transform: translateY(-5px) rotate(-45deg); } + +.navbar-menu { + display: none; } + +.navbar-item, +.navbar-link { + color: #81c8be; + display: block; + line-height: 1.5; + padding: 0.5rem 0.75rem; + position: relative; } + .navbar-item .icon:only-child, + .navbar-link .icon:only-child { + margin-left: -0.25rem; + margin-right: -0.25rem; } + +a.navbar-item, +.navbar-link { + cursor: pointer; } + 
a.navbar-item:focus, a.navbar-item:focus-within, a.navbar-item:hover, a.navbar-item.is-active, + .navbar-link:focus, + .navbar-link:focus-within, + .navbar-link:hover, + .navbar-link.is-active { + background-color: #414559; + color: #81c8be; } + +.navbar-item { + flex-grow: 0; + flex-shrink: 0; } + .navbar-item img { + max-height: 1.75rem; } + .navbar-item.has-dropdown { + padding: 0; } + .navbar-item.is-expanded { + flex-grow: 1; + flex-shrink: 1; } + .navbar-item.is-tab { + border-bottom: 1px solid transparent; + min-height: 3.25rem; + padding-bottom: calc(0.5rem - 1px); } + .navbar-item.is-tab:focus, .navbar-item.is-tab:hover { + background-color: transparent; + border-bottom-color: #81c8be; } + .navbar-item.is-tab.is-active { + background-color: transparent; + border-bottom-color: #81c8be; + border-bottom-style: solid; + border-bottom-width: 3px; + color: #81c8be; + padding-bottom: calc(0.5rem - 3px); } + +.navbar-content { + flex-grow: 1; + flex-shrink: 1; } + +.navbar-link:not(.is-arrowless) { + padding-right: 2.5em; } + .navbar-link:not(.is-arrowless)::after { + border-color: #81c8be; + margin-top: -0.375em; + right: 1.125em; } + +.navbar-dropdown { + font-size: 0.875rem; + padding-bottom: 0.5rem; + padding-top: 0.5rem; } + .navbar-dropdown .navbar-item { + padding-left: 1.5rem; + padding-right: 1.5rem; } + +.navbar-divider { + background-color: #51576d; + border: none; + display: none; + height: 2px; + margin: 0.5rem 0; } + +@media screen and (max-width: 1023px) { + .navbar > .container { + display: block; } + .navbar-brand .navbar-item, + .navbar-tabs .navbar-item { + align-items: center; + display: flex; } + .navbar-link::after { + display: none; } + .navbar-menu { + background-color: #303446; + box-shadow: 0 8px 16px rgba(10, 10, 10, 0.1); + padding: 0.5rem 0; } + .navbar-menu.is-active { + display: block; } + .navbar.is-fixed-bottom-touch, .navbar.is-fixed-top-touch { + left: 0; + position: fixed; + right: 0; + z-index: 30; } + .navbar.is-fixed-bottom-touch { + bottom: 0; } + .navbar.is-fixed-bottom-touch.has-shadow { + box-shadow: 0 -2px 3px rgba(10, 10, 10, 0.1); } + .navbar.is-fixed-top-touch { + top: 0; } + .navbar.is-fixed-top .navbar-menu, .navbar.is-fixed-top-touch .navbar-menu { + -webkit-overflow-scrolling: touch; + max-height: calc(100vh - 3.25rem); + overflow: auto; } + html.has-navbar-fixed-top-touch, + body.has-navbar-fixed-top-touch { + padding-top: 3.25rem; } + html.has-navbar-fixed-bottom-touch, + body.has-navbar-fixed-bottom-touch { + padding-bottom: 3.25rem; } } + +@media screen and (min-width: 1024px) { + .navbar, + .navbar-menu, + .navbar-start, + .navbar-end { + align-items: stretch; + display: flex; } + .navbar { + min-height: 3.25rem; } + .navbar.is-spaced { + padding: 1rem 2rem; } + .navbar.is-spaced .navbar-start, + .navbar.is-spaced .navbar-end { + align-items: center; } + .navbar.is-spaced a.navbar-item, + .navbar.is-spaced .navbar-link { + border-radius: 4px; } + .navbar.is-transparent a.navbar-item:focus, .navbar.is-transparent a.navbar-item:hover, .navbar.is-transparent a.navbar-item.is-active, + .navbar.is-transparent .navbar-link:focus, + .navbar.is-transparent .navbar-link:hover, + .navbar.is-transparent .navbar-link.is-active { + background-color: transparent !important; } + .navbar.is-transparent .navbar-item.has-dropdown.is-active .navbar-link, .navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:focus .navbar-link, .navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:focus-within .navbar-link, .navbar.is-transparent 
.navbar-item.has-dropdown.is-hoverable:hover .navbar-link { + background-color: transparent !important; } + .navbar.is-transparent .navbar-dropdown a.navbar-item:focus, .navbar.is-transparent .navbar-dropdown a.navbar-item:hover { + background-color: #414559; + color: #81c8be; } + .navbar.is-transparent .navbar-dropdown a.navbar-item.is-active { + background-color: whitesmoke; + color: #81c8be; } + .navbar-burger { + display: none; } + .navbar-item, + .navbar-link { + align-items: center; + display: flex; } + .navbar-item.has-dropdown { + align-items: stretch; } + .navbar-item.has-dropdown-up .navbar-link::after { + transform: rotate(135deg) translate(0.25em, -0.25em); } + .navbar-item.has-dropdown-up .navbar-dropdown { + border-bottom: 2px solid #dbdbdb; + border-radius: 6px 6px 0 0; + border-top: none; + bottom: 100%; + box-shadow: 0 -8px 8px rgba(10, 10, 10, 0.1); + top: auto; } + .navbar-item.is-active .navbar-dropdown, .navbar-item.is-hoverable:focus .navbar-dropdown, .navbar-item.is-hoverable:focus-within .navbar-dropdown, .navbar-item.is-hoverable:hover .navbar-dropdown { + display: block; } + .navbar.is-spaced .navbar-item.is-active .navbar-dropdown, .navbar-item.is-active .navbar-dropdown.is-boxed, .navbar.is-spaced .navbar-item.is-hoverable:focus .navbar-dropdown, .navbar-item.is-hoverable:focus .navbar-dropdown.is-boxed, .navbar.is-spaced .navbar-item.is-hoverable:focus-within .navbar-dropdown, .navbar-item.is-hoverable:focus-within .navbar-dropdown.is-boxed, .navbar.is-spaced .navbar-item.is-hoverable:hover .navbar-dropdown, .navbar-item.is-hoverable:hover .navbar-dropdown.is-boxed { + opacity: 1; + pointer-events: auto; + transform: translateY(0); } + .navbar-menu { + flex-grow: 1; + flex-shrink: 0; } + .navbar-start { + justify-content: flex-start; + margin-right: auto; } + .navbar-end { + justify-content: flex-end; + margin-left: auto; } + .navbar-dropdown { + background-color: #414559; + border-bottom-left-radius: 6px; + border-bottom-right-radius: 6px; + border-top: 2px solid #dbdbdb; + box-shadow: 0 8px 8px rgba(10, 10, 10, 0.1); + display: none; + font-size: 0.875rem; + left: 0; + min-width: 100%; + position: absolute; + top: 100%; + z-index: 20; } + .navbar-dropdown .navbar-item { + padding: 0.375rem 1rem; + white-space: nowrap; } + .navbar-dropdown a.navbar-item { + padding-right: 3rem; } + .navbar-dropdown a.navbar-item:focus, .navbar-dropdown a.navbar-item:hover { + background-color: #414559; + color: #81c8be; } + .navbar-dropdown a.navbar-item.is-active { + background-color: whitesmoke; + color: #81c8be; } + .navbar.is-spaced .navbar-dropdown, .navbar-dropdown.is-boxed { + border-radius: 6px; + border-top: none; + box-shadow: 0 8px 8px rgba(10, 10, 10, 0.1), 0 0 0 1px rgba(10, 10, 10, 0.1); + display: block; + opacity: 0; + pointer-events: none; + top: calc(100% + (-4px)); + transform: translateY(-5px); + transition-duration: 86ms; + transition-property: opacity, transform; } + .navbar-dropdown.is-right { + left: auto; + right: 0; } + .navbar-divider { + display: block; } + .navbar > .container .navbar-brand, + .container > .navbar .navbar-brand { + margin-left: -0.75rem; } + .navbar > .container .navbar-menu, + .container > .navbar .navbar-menu { + margin-right: -0.75rem; } + .navbar.is-fixed-bottom-desktop, .navbar.is-fixed-top-desktop { + left: 0; + position: fixed; + right: 0; + z-index: 30; } + .navbar.is-fixed-bottom-desktop { + bottom: 0; } + .navbar.is-fixed-bottom-desktop.has-shadow { + box-shadow: 0 -2px 3px rgba(10, 10, 10, 0.1); } + 
.navbar.is-fixed-top-desktop { + top: 0; } + html.has-navbar-fixed-top-desktop, + body.has-navbar-fixed-top-desktop { + padding-top: 3.25rem; } + html.has-navbar-fixed-bottom-desktop, + body.has-navbar-fixed-bottom-desktop { + padding-bottom: 3.25rem; } + html.has-spaced-navbar-fixed-top, + body.has-spaced-navbar-fixed-top { + padding-top: 5.25rem; } + html.has-spaced-navbar-fixed-bottom, + body.has-spaced-navbar-fixed-bottom { + padding-bottom: 5.25rem; } + a.navbar-item.is-active, + .navbar-link.is-active { + color: #0a0a0a; } + a.navbar-item.is-active:not(:focus):not(:hover), + .navbar-link.is-active:not(:focus):not(:hover) { + background-color: transparent; } + .navbar-item.has-dropdown:focus .navbar-link, .navbar-item.has-dropdown:hover .navbar-link, .navbar-item.has-dropdown.is-active .navbar-link { + background-color: #414559; } } + +.hero.is-fullheight-with-navbar { + min-height: calc(100vh - 3.25rem); } + +.hero { + align-items: stretch; + display: flex; + flex-direction: column; + justify-content: space-between; } + .hero .navbar { + background: none; } + .hero .tabs ul { + border-bottom: none; } + .hero.is-white { + background-color: white; + color: #0a0a0a; } + .hero.is-white a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current), + .hero.is-white strong { + color: inherit; } + .hero.is-white .title { + color: #0a0a0a; } + .hero.is-white .subtitle { + color: rgba(10, 10, 10, 0.9); } + .hero.is-white .subtitle a:not(.button), + .hero.is-white .subtitle strong { + color: #0a0a0a; } + @media screen and (max-width: 1023px) { + .hero.is-white .navbar-menu { + background-color: white; } } + .hero.is-white .navbar-item, + .hero.is-white .navbar-link { + color: rgba(10, 10, 10, 0.7); } + .hero.is-white a.navbar-item:hover, .hero.is-white a.navbar-item.is-active, + .hero.is-white .navbar-link:hover, + .hero.is-white .navbar-link.is-active { + background-color: #f2f2f2; + color: #0a0a0a; } + .hero.is-white .tabs a { + color: #0a0a0a; + opacity: 0.9; } + .hero.is-white .tabs a:hover { + opacity: 1; } + .hero.is-white .tabs li.is-active a { + color: white !important; + opacity: 1; } + .hero.is-white .tabs.is-boxed a, .hero.is-white .tabs.is-toggle a { + color: #0a0a0a; } + .hero.is-white .tabs.is-boxed a:hover, .hero.is-white .tabs.is-toggle a:hover { + background-color: rgba(10, 10, 10, 0.1); } + .hero.is-white .tabs.is-boxed li.is-active a, .hero.is-white .tabs.is-boxed li.is-active a:hover, .hero.is-white .tabs.is-toggle li.is-active a, .hero.is-white .tabs.is-toggle li.is-active a:hover { + background-color: #0a0a0a; + border-color: #0a0a0a; + color: white; } + .hero.is-white.is-bold { + background-image: linear-gradient(141deg, #e6e6e6 0%, white 71%, white 100%); } + @media screen and (max-width: 768px) { + .hero.is-white.is-bold .navbar-menu { + background-image: linear-gradient(141deg, #e6e6e6 0%, white 71%, white 100%); } } + .hero.is-black { + background-color: #0a0a0a; + color: white; } + .hero.is-black a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current), + .hero.is-black strong { + color: inherit; } + .hero.is-black .title { + color: white; } + .hero.is-black .subtitle { + color: rgba(255, 255, 255, 0.9); } + .hero.is-black .subtitle a:not(.button), + .hero.is-black .subtitle strong { + color: white; } + @media screen and (max-width: 1023px) { + .hero.is-black .navbar-menu { + background-color: #0a0a0a; } } + .hero.is-black .navbar-item, + .hero.is-black .navbar-link { + color: rgba(255, 255, 255, 0.7); } + .hero.is-black 
a.navbar-item:hover, .hero.is-black a.navbar-item.is-active, + .hero.is-black .navbar-link:hover, + .hero.is-black .navbar-link.is-active { + background-color: black; + color: white; } + .hero.is-black .tabs a { + color: white; + opacity: 0.9; } + .hero.is-black .tabs a:hover { + opacity: 1; } + .hero.is-black .tabs li.is-active a { + color: #0a0a0a !important; + opacity: 1; } + .hero.is-black .tabs.is-boxed a, .hero.is-black .tabs.is-toggle a { + color: white; } + .hero.is-black .tabs.is-boxed a:hover, .hero.is-black .tabs.is-toggle a:hover { + background-color: rgba(10, 10, 10, 0.1); } + .hero.is-black .tabs.is-boxed li.is-active a, .hero.is-black .tabs.is-boxed li.is-active a:hover, .hero.is-black .tabs.is-toggle li.is-active a, .hero.is-black .tabs.is-toggle li.is-active a:hover { + background-color: white; + border-color: white; + color: #0a0a0a; } + .hero.is-black.is-bold { + background-image: linear-gradient(141deg, black 0%, #0a0a0a 71%, #181616 100%); } + @media screen and (max-width: 768px) { + .hero.is-black.is-bold .navbar-menu { + background-image: linear-gradient(141deg, black 0%, #0a0a0a 71%, #181616 100%); } } + .hero.is-light { + background-color: whitesmoke; + color: rgba(0, 0, 0, 0.7); } + .hero.is-light a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current), + .hero.is-light strong { + color: inherit; } + .hero.is-light .title { + color: rgba(0, 0, 0, 0.7); } + .hero.is-light .subtitle { + color: rgba(0, 0, 0, 0.9); } + .hero.is-light .subtitle a:not(.button), + .hero.is-light .subtitle strong { + color: rgba(0, 0, 0, 0.7); } + @media screen and (max-width: 1023px) { + .hero.is-light .navbar-menu { + background-color: whitesmoke; } } + .hero.is-light .navbar-item, + .hero.is-light .navbar-link { + color: rgba(0, 0, 0, 0.7); } + .hero.is-light a.navbar-item:hover, .hero.is-light a.navbar-item.is-active, + .hero.is-light .navbar-link:hover, + .hero.is-light .navbar-link.is-active { + background-color: #e8e8e8; + color: rgba(0, 0, 0, 0.7); } + .hero.is-light .tabs a { + color: rgba(0, 0, 0, 0.7); + opacity: 0.9; } + .hero.is-light .tabs a:hover { + opacity: 1; } + .hero.is-light .tabs li.is-active a { + color: whitesmoke !important; + opacity: 1; } + .hero.is-light .tabs.is-boxed a, .hero.is-light .tabs.is-toggle a { + color: rgba(0, 0, 0, 0.7); } + .hero.is-light .tabs.is-boxed a:hover, .hero.is-light .tabs.is-toggle a:hover { + background-color: rgba(10, 10, 10, 0.1); } + .hero.is-light .tabs.is-boxed li.is-active a, .hero.is-light .tabs.is-boxed li.is-active a:hover, .hero.is-light .tabs.is-toggle li.is-active a, .hero.is-light .tabs.is-toggle li.is-active a:hover { + background-color: rgba(0, 0, 0, 0.7); + border-color: rgba(0, 0, 0, 0.7); + color: whitesmoke; } + .hero.is-light.is-bold { + background-image: linear-gradient(141deg, #dfd8d9 0%, whitesmoke 71%, white 100%); } + @media screen and (max-width: 768px) { + .hero.is-light.is-bold .navbar-menu { + background-image: linear-gradient(141deg, #dfd8d9 0%, whitesmoke 71%, white 100%); } } + .hero.is-dark { + background-color: #363636; + color: #fff; } + .hero.is-dark a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current), + .hero.is-dark strong { + color: inherit; } + .hero.is-dark .title { + color: #fff; } + .hero.is-dark .subtitle { + color: rgba(255, 255, 255, 0.9); } + .hero.is-dark .subtitle a:not(.button), + .hero.is-dark .subtitle strong { + color: #fff; } + @media screen and (max-width: 1023px) { + .hero.is-dark .navbar-menu { + background-color: #363636; } } + 
.hero.is-dark .navbar-item, + .hero.is-dark .navbar-link { + color: rgba(255, 255, 255, 0.7); } + .hero.is-dark a.navbar-item:hover, .hero.is-dark a.navbar-item.is-active, + .hero.is-dark .navbar-link:hover, + .hero.is-dark .navbar-link.is-active { + background-color: #292929; + color: #fff; } + .hero.is-dark .tabs a { + color: #fff; + opacity: 0.9; } + .hero.is-dark .tabs a:hover { + opacity: 1; } + .hero.is-dark .tabs li.is-active a { + color: #363636 !important; + opacity: 1; } + .hero.is-dark .tabs.is-boxed a, .hero.is-dark .tabs.is-toggle a { + color: #fff; } + .hero.is-dark .tabs.is-boxed a:hover, .hero.is-dark .tabs.is-toggle a:hover { + background-color: rgba(10, 10, 10, 0.1); } + .hero.is-dark .tabs.is-boxed li.is-active a, .hero.is-dark .tabs.is-boxed li.is-active a:hover, .hero.is-dark .tabs.is-toggle li.is-active a, .hero.is-dark .tabs.is-toggle li.is-active a:hover { + background-color: #fff; + border-color: #fff; + color: #363636; } + .hero.is-dark.is-bold { + background-image: linear-gradient(141deg, #1f191a 0%, #363636 71%, #46403f 100%); } + @media screen and (max-width: 768px) { + .hero.is-dark.is-bold .navbar-menu { + background-image: linear-gradient(141deg, #1f191a 0%, #363636 71%, #46403f 100%); } } + .hero.is-primary { + background-color: #c6d0f5; + color: rgba(0, 0, 0, 0.7); } + .hero.is-primary a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current), + .hero.is-primary strong { + color: inherit; } + .hero.is-primary .title { + color: rgba(0, 0, 0, 0.7); } + .hero.is-primary .subtitle { + color: rgba(0, 0, 0, 0.9); } + .hero.is-primary .subtitle a:not(.button), + .hero.is-primary .subtitle strong { + color: rgba(0, 0, 0, 0.7); } + @media screen and (max-width: 1023px) { + .hero.is-primary .navbar-menu { + background-color: #c6d0f5; } } + .hero.is-primary .navbar-item, + .hero.is-primary .navbar-link { + color: rgba(0, 0, 0, 0.7); } + .hero.is-primary a.navbar-item:hover, .hero.is-primary a.navbar-item.is-active, + .hero.is-primary .navbar-link:hover, + .hero.is-primary .navbar-link.is-active { + background-color: #b0bef1; + color: rgba(0, 0, 0, 0.7); } + .hero.is-primary .tabs a { + color: rgba(0, 0, 0, 0.7); + opacity: 0.9; } + .hero.is-primary .tabs a:hover { + opacity: 1; } + .hero.is-primary .tabs li.is-active a { + color: #c6d0f5 !important; + opacity: 1; } + .hero.is-primary .tabs.is-boxed a, .hero.is-primary .tabs.is-toggle a { + color: rgba(0, 0, 0, 0.7); } + .hero.is-primary .tabs.is-boxed a:hover, .hero.is-primary .tabs.is-toggle a:hover { + background-color: rgba(10, 10, 10, 0.1); } + .hero.is-primary .tabs.is-boxed li.is-active a, .hero.is-primary .tabs.is-boxed li.is-active a:hover, .hero.is-primary .tabs.is-toggle li.is-active a, .hero.is-primary .tabs.is-toggle li.is-active a:hover { + background-color: rgba(0, 0, 0, 0.7); + border-color: rgba(0, 0, 0, 0.7); + color: #c6d0f5; } + .hero.is-primary.is-bold { + background-image: linear-gradient(141deg, #95b9f3 0%, #c6d0f5 71%, #dbdcfa 100%); } + @media screen and (max-width: 768px) { + .hero.is-primary.is-bold .navbar-menu { + background-image: linear-gradient(141deg, #95b9f3 0%, #c6d0f5 71%, #dbdcfa 100%); } } + .hero.is-link { + background-color: #81c8be; + color: rgba(0, 0, 0, 0.7); } + .hero.is-link a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current), + .hero.is-link strong { + color: inherit; } + .hero.is-link .title { + color: rgba(0, 0, 0, 0.7); } + .hero.is-link .subtitle { + color: rgba(0, 0, 0, 0.9); } + .hero.is-link .subtitle a:not(.button), + 
.hero.is-link .subtitle strong { + color: rgba(0, 0, 0, 0.7); } + @media screen and (max-width: 1023px) { + .hero.is-link .navbar-menu { + background-color: #81c8be; } } + .hero.is-link .navbar-item, + .hero.is-link .navbar-link { + color: rgba(0, 0, 0, 0.7); } + .hero.is-link a.navbar-item:hover, .hero.is-link a.navbar-item.is-active, + .hero.is-link .navbar-link:hover, + .hero.is-link .navbar-link.is-active { + background-color: #6fc0b5; + color: rgba(0, 0, 0, 0.7); } + .hero.is-link .tabs a { + color: rgba(0, 0, 0, 0.7); + opacity: 0.9; } + .hero.is-link .tabs a:hover { + opacity: 1; } + .hero.is-link .tabs li.is-active a { + color: #81c8be !important; + opacity: 1; } + .hero.is-link .tabs.is-boxed a, .hero.is-link .tabs.is-toggle a { + color: rgba(0, 0, 0, 0.7); } + .hero.is-link .tabs.is-boxed a:hover, .hero.is-link .tabs.is-toggle a:hover { + background-color: rgba(10, 10, 10, 0.1); } + .hero.is-link .tabs.is-boxed li.is-active a, .hero.is-link .tabs.is-boxed li.is-active a:hover, .hero.is-link .tabs.is-toggle li.is-active a, .hero.is-link .tabs.is-toggle li.is-active a:hover { + background-color: rgba(0, 0, 0, 0.7); + border-color: rgba(0, 0, 0, 0.7); + color: #81c8be; } + .hero.is-link.is-bold { + background-image: linear-gradient(141deg, #52c4a1 0%, #81c8be 71%, #8fd2d4 100%); } + @media screen and (max-width: 768px) { + .hero.is-link.is-bold .navbar-menu { + background-image: linear-gradient(141deg, #52c4a1 0%, #81c8be 71%, #8fd2d4 100%); } } + .hero.is-info { + background-color: #3e8ed0; + color: #fff; } + .hero.is-info a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current), + .hero.is-info strong { + color: inherit; } + .hero.is-info .title { + color: #fff; } + .hero.is-info .subtitle { + color: rgba(255, 255, 255, 0.9); } + .hero.is-info .subtitle a:not(.button), + .hero.is-info .subtitle strong { + color: #fff; } + @media screen and (max-width: 1023px) { + .hero.is-info .navbar-menu { + background-color: #3e8ed0; } } + .hero.is-info .navbar-item, + .hero.is-info .navbar-link { + color: rgba(255, 255, 255, 0.7); } + .hero.is-info a.navbar-item:hover, .hero.is-info a.navbar-item.is-active, + .hero.is-info .navbar-link:hover, + .hero.is-info .navbar-link.is-active { + background-color: #3082c5; + color: #fff; } + .hero.is-info .tabs a { + color: #fff; + opacity: 0.9; } + .hero.is-info .tabs a:hover { + opacity: 1; } + .hero.is-info .tabs li.is-active a { + color: #3e8ed0 !important; + opacity: 1; } + .hero.is-info .tabs.is-boxed a, .hero.is-info .tabs.is-toggle a { + color: #fff; } + .hero.is-info .tabs.is-boxed a:hover, .hero.is-info .tabs.is-toggle a:hover { + background-color: rgba(10, 10, 10, 0.1); } + .hero.is-info .tabs.is-boxed li.is-active a, .hero.is-info .tabs.is-boxed li.is-active a:hover, .hero.is-info .tabs.is-toggle li.is-active a, .hero.is-info .tabs.is-toggle li.is-active a:hover { + background-color: #fff; + border-color: #fff; + color: #3e8ed0; } + .hero.is-info.is-bold { + background-image: linear-gradient(141deg, #208fbc 0%, #3e8ed0 71%, #4d83db 100%); } + @media screen and (max-width: 768px) { + .hero.is-info.is-bold .navbar-menu { + background-image: linear-gradient(141deg, #208fbc 0%, #3e8ed0 71%, #4d83db 100%); } } + .hero.is-success { + background-color: #48c78e; + color: #fff; } + .hero.is-success a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current), + .hero.is-success strong { + color: inherit; } + .hero.is-success .title { + color: #fff; } + .hero.is-success .subtitle { + color: rgba(255, 255, 255, 0.9); 
} + .hero.is-success .subtitle a:not(.button), + .hero.is-success .subtitle strong { + color: #fff; } + @media screen and (max-width: 1023px) { + .hero.is-success .navbar-menu { + background-color: #48c78e; } } + .hero.is-success .navbar-item, + .hero.is-success .navbar-link { + color: rgba(255, 255, 255, 0.7); } + .hero.is-success a.navbar-item:hover, .hero.is-success a.navbar-item.is-active, + .hero.is-success .navbar-link:hover, + .hero.is-success .navbar-link.is-active { + background-color: #3abb81; + color: #fff; } + .hero.is-success .tabs a { + color: #fff; + opacity: 0.9; } + .hero.is-success .tabs a:hover { + opacity: 1; } + .hero.is-success .tabs li.is-active a { + color: #48c78e !important; + opacity: 1; } + .hero.is-success .tabs.is-boxed a, .hero.is-success .tabs.is-toggle a { + color: #fff; } + .hero.is-success .tabs.is-boxed a:hover, .hero.is-success .tabs.is-toggle a:hover { + background-color: rgba(10, 10, 10, 0.1); } + .hero.is-success .tabs.is-boxed li.is-active a, .hero.is-success .tabs.is-boxed li.is-active a:hover, .hero.is-success .tabs.is-toggle li.is-active a, .hero.is-success .tabs.is-toggle li.is-active a:hover { + background-color: #fff; + border-color: #fff; + color: #48c78e; } + .hero.is-success.is-bold { + background-image: linear-gradient(141deg, #29b35e 0%, #48c78e 71%, #56d2af 100%); } + @media screen and (max-width: 768px) { + .hero.is-success.is-bold .navbar-menu { + background-image: linear-gradient(141deg, #29b35e 0%, #48c78e 71%, #56d2af 100%); } } + .hero.is-warning { + background-color: #ffe08a; + color: rgba(0, 0, 0, 0.7); } + .hero.is-warning a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current), + .hero.is-warning strong { + color: inherit; } + .hero.is-warning .title { + color: rgba(0, 0, 0, 0.7); } + .hero.is-warning .subtitle { + color: rgba(0, 0, 0, 0.9); } + .hero.is-warning .subtitle a:not(.button), + .hero.is-warning .subtitle strong { + color: rgba(0, 0, 0, 0.7); } + @media screen and (max-width: 1023px) { + .hero.is-warning .navbar-menu { + background-color: #ffe08a; } } + .hero.is-warning .navbar-item, + .hero.is-warning .navbar-link { + color: rgba(0, 0, 0, 0.7); } + .hero.is-warning a.navbar-item:hover, .hero.is-warning a.navbar-item.is-active, + .hero.is-warning .navbar-link:hover, + .hero.is-warning .navbar-link.is-active { + background-color: #ffd970; + color: rgba(0, 0, 0, 0.7); } + .hero.is-warning .tabs a { + color: rgba(0, 0, 0, 0.7); + opacity: 0.9; } + .hero.is-warning .tabs a:hover { + opacity: 1; } + .hero.is-warning .tabs li.is-active a { + color: #ffe08a !important; + opacity: 1; } + .hero.is-warning .tabs.is-boxed a, .hero.is-warning .tabs.is-toggle a { + color: rgba(0, 0, 0, 0.7); } + .hero.is-warning .tabs.is-boxed a:hover, .hero.is-warning .tabs.is-toggle a:hover { + background-color: rgba(10, 10, 10, 0.1); } + .hero.is-warning .tabs.is-boxed li.is-active a, .hero.is-warning .tabs.is-boxed li.is-active a:hover, .hero.is-warning .tabs.is-toggle li.is-active a, .hero.is-warning .tabs.is-toggle li.is-active a:hover { + background-color: rgba(0, 0, 0, 0.7); + border-color: rgba(0, 0, 0, 0.7); + color: #ffe08a; } + .hero.is-warning.is-bold { + background-image: linear-gradient(141deg, #ffb657 0%, #ffe08a 71%, #fff6a3 100%); } + @media screen and (max-width: 768px) { + .hero.is-warning.is-bold .navbar-menu { + background-image: linear-gradient(141deg, #ffb657 0%, #ffe08a 71%, #fff6a3 100%); } } + .hero.is-danger { + background-color: #f14668; + color: #fff; } + .hero.is-danger 
a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current), + .hero.is-danger strong { + color: inherit; } + .hero.is-danger .title { + color: #fff; } + .hero.is-danger .subtitle { + color: rgba(255, 255, 255, 0.9); } + .hero.is-danger .subtitle a:not(.button), + .hero.is-danger .subtitle strong { + color: #fff; } + @media screen and (max-width: 1023px) { + .hero.is-danger .navbar-menu { + background-color: #f14668; } } + .hero.is-danger .navbar-item, + .hero.is-danger .navbar-link { + color: rgba(255, 255, 255, 0.7); } + .hero.is-danger a.navbar-item:hover, .hero.is-danger a.navbar-item.is-active, + .hero.is-danger .navbar-link:hover, + .hero.is-danger .navbar-link.is-active { + background-color: #ef2e55; + color: #fff; } + .hero.is-danger .tabs a { + color: #fff; + opacity: 0.9; } + .hero.is-danger .tabs a:hover { + opacity: 1; } + .hero.is-danger .tabs li.is-active a { + color: #f14668 !important; + opacity: 1; } + .hero.is-danger .tabs.is-boxed a, .hero.is-danger .tabs.is-toggle a { + color: #fff; } + .hero.is-danger .tabs.is-boxed a:hover, .hero.is-danger .tabs.is-toggle a:hover { + background-color: rgba(10, 10, 10, 0.1); } + .hero.is-danger .tabs.is-boxed li.is-active a, .hero.is-danger .tabs.is-boxed li.is-active a:hover, .hero.is-danger .tabs.is-toggle li.is-active a, .hero.is-danger .tabs.is-toggle li.is-active a:hover { + background-color: #fff; + border-color: #fff; + color: #f14668; } + .hero.is-danger.is-bold { + background-image: linear-gradient(141deg, #fa0a62 0%, #f14668 71%, #f7595f 100%); } + @media screen and (max-width: 768px) { + .hero.is-danger.is-bold .navbar-menu { + background-image: linear-gradient(141deg, #fa0a62 0%, #f14668 71%, #f7595f 100%); } } + .hero.is-small .hero-body { + padding: 1.5rem; } + @media screen and (min-width: 769px), print { + .hero.is-medium .hero-body { + padding: 9rem 4.5rem; } } + @media screen and (min-width: 769px), print { + .hero.is-large .hero-body { + padding: 18rem 6rem; } } + .hero.is-halfheight .hero-body, .hero.is-fullheight .hero-body, .hero.is-fullheight-with-navbar .hero-body { + align-items: center; + display: flex; } + .hero.is-halfheight .hero-body > .container, .hero.is-fullheight .hero-body > .container, .hero.is-fullheight-with-navbar .hero-body > .container { + flex-grow: 1; + flex-shrink: 1; } + .hero.is-halfheight { + min-height: 50vh; } + .hero.is-fullheight { + min-height: 100vh; } + +.hero-video { + overflow: hidden; } + .hero-video video { + left: 50%; + min-height: 100%; + min-width: 100%; + position: absolute; + top: 50%; + transform: translate3d(-50%, -50%, 0); } + .hero-video.is-transparent { + opacity: 0.3; } + @media screen and (max-width: 768px) { + .hero-video { + display: none; } } + +.hero-buttons { + margin-top: 1.5rem; } + @media screen and (max-width: 768px) { + .hero-buttons .button { + display: flex; } + .hero-buttons .button:not(:last-child) { + margin-bottom: 0.75rem; } } + @media screen and (min-width: 769px), print { + .hero-buttons { + display: flex; + justify-content: center; } + .hero-buttons .button:not(:last-child) { + margin-right: 1.5rem; } } + +.hero-head, +.hero-foot { + flex-grow: 0; + flex-shrink: 0; } + +.hero-body { + flex-grow: 1; + flex-shrink: 0; + padding: 3rem 1.5rem; } + @media screen and (min-width: 769px), print { + .hero-body { + padding: 3rem 3rem; } } + +.section { + padding: 3rem 1.5rem; } + @media screen and (min-width: 1024px) { + .section { + padding: 3rem 3rem; } + .section.is-medium { + padding: 9rem 4.5rem; } + .section.is-large { + padding: 
18rem 6rem; } } diff --git a/mybulma/node_modules/.bin/color-support b/mybulma/node_modules/.bin/color-support new file mode 100644 index 0000000..3c0a967 --- /dev/null +++ b/mybulma/node_modules/.bin/color-support @@ -0,0 +1,3 @@ +#!/usr/bin/env node +var colorSupport = require('./')({alwaysReturn: true }) +console.log(JSON.stringify(colorSupport, null, 2)) diff --git a/mybulma/node_modules/.bin/mkdirp b/mybulma/node_modules/.bin/mkdirp new file mode 100644 index 0000000..6e0aa8d --- /dev/null +++ b/mybulma/node_modules/.bin/mkdirp @@ -0,0 +1,68 @@ +#!/usr/bin/env node + +const usage = () => ` +usage: mkdirp [DIR1,DIR2..] {OPTIONS} + + Create each supplied directory including any necessary parent directories + that don't yet exist. + + If the directory already exists, do nothing. + +OPTIONS are: + + -m If a directory needs to be created, set the mode as an octal + --mode= permission string. + + -v --version Print the mkdirp version number + + -h --help Print this helpful banner + + -p --print Print the first directories created for each path provided + + --manual Use manual implementation, even if native is available +` + +const dirs = [] +const opts = {} +let print = false +let dashdash = false +let manual = false +for (const arg of process.argv.slice(2)) { + if (dashdash) + dirs.push(arg) + else if (arg === '--') + dashdash = true + else if (arg === '--manual') + manual = true + else if (/^-h/.test(arg) || /^--help/.test(arg)) { + console.log(usage()) + process.exit(0) + } else if (arg === '-v' || arg === '--version') { + console.log(require('../package.json').version) + process.exit(0) + } else if (arg === '-p' || arg === '--print') { + print = true + } else if (/^-m/.test(arg) || /^--mode=/.test(arg)) { + const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8) + if (isNaN(mode)) { + console.error(`invalid mode argument: ${arg}\nMust be an octal number.`) + process.exit(1) + } + opts.mode = mode + } else + dirs.push(arg) +} + +const mkdirp = require('../') +const impl = manual ? mkdirp.manual : mkdirp +if (dirs.length === 0) + console.error(usage()) + +Promise.all(dirs.map(dir => impl(dir, opts))) + .then(made => print ? made.forEach(m => m && console.log(m)) : null) + .catch(er => { + console.error(er.message) + if (er.code) + console.error(' code: ' + er.code) + process.exit(1) + }) diff --git a/mybulma/node_modules/.bin/node-gyp b/mybulma/node_modules/.bin/node-gyp new file mode 100644 index 0000000..8652ea2 --- /dev/null +++ b/mybulma/node_modules/.bin/node-gyp @@ -0,0 +1,140 @@ +#!/usr/bin/env node + +'use strict' + +process.title = 'node-gyp' + +const envPaths = require('env-paths') +const gyp = require('../') +const log = require('npmlog') +const os = require('os') + +/** + * Process and execute the selected commands. + */ + +const prog = gyp() +var completed = false +prog.parseArgv(process.argv) +prog.devDir = prog.opts.devdir + +var homeDir = os.homedir() +if (prog.devDir) { + prog.devDir = prog.devDir.replace(/^~/, homeDir) +} else if (homeDir) { + prog.devDir = envPaths('node-gyp', { suffix: '' }).cache +} else { + throw new Error( + "node-gyp requires that the user's home directory is specified " + + 'in either of the environmental variables HOME or USERPROFILE. 
' + + 'Overide with: --devdir /path/to/.node-gyp') +} + +if (prog.todo.length === 0) { + if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) { + console.log('v%s', prog.version) + } else { + console.log('%s', prog.usage()) + } + process.exit(0) +} + +log.info('it worked if it ends with', 'ok') +log.verbose('cli', process.argv) +log.info('using', 'node-gyp@%s', prog.version) +log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch) + +/** + * Change dir if -C/--directory was passed. + */ + +var dir = prog.opts.directory +if (dir) { + var fs = require('fs') + try { + var stat = fs.statSync(dir) + if (stat.isDirectory()) { + log.info('chdir', dir) + process.chdir(dir) + } else { + log.warn('chdir', dir + ' is not a directory') + } + } catch (e) { + if (e.code === 'ENOENT') { + log.warn('chdir', dir + ' is not a directory') + } else { + log.warn('chdir', 'error during chdir() "%s"', e.message) + } + } +} + +function run () { + var command = prog.todo.shift() + if (!command) { + // done! + completed = true + log.info('ok') + return + } + + prog.commands[command.name](command.args, function (err) { + if (err) { + log.error(command.name + ' error') + log.error('stack', err.stack) + errorMessage() + log.error('not ok') + return process.exit(1) + } + if (command.name === 'list') { + var versions = arguments[1] + if (versions.length > 0) { + versions.forEach(function (version) { + console.log(version) + }) + } else { + console.log('No node development files installed. Use `node-gyp install` to install a version.') + } + } else if (arguments.length >= 2) { + console.log.apply(console, [].slice.call(arguments, 1)) + } + + // now run the next command in the queue + process.nextTick(run) + }) +} + +process.on('exit', function (code) { + if (!completed && !code) { + log.error('Completion callback never invoked!') + issueMessage() + process.exit(6) + } +}) + +process.on('uncaughtException', function (err) { + log.error('UNCAUGHT EXCEPTION') + log.error('stack', err.stack) + issueMessage() + process.exit(7) +}) + +function errorMessage () { + // copied from npm's lib/utils/error-handler.js + var os = require('os') + log.error('System', os.type() + ' ' + os.release()) + log.error('command', process.argv + .map(JSON.stringify).join(' ')) + log.error('cwd', process.cwd()) + log.error('node -v', process.version) + log.error('node-gyp -v', 'v' + prog.package.version) +} + +function issueMessage () { + errorMessage() + log.error('', ['Node-gyp failed to build your package.', + 'Try to update npm and/or node-gyp and if it does not help file an issue with the package author.' + ].join('\n')) +} + +// start running the given commands! 
+run() diff --git a/mybulma/node_modules/.bin/node-sass b/mybulma/node_modules/.bin/node-sass new file mode 100644 index 0000000..7645ecb --- /dev/null +++ b/mybulma/node_modules/.bin/node-sass @@ -0,0 +1,444 @@ +#!/usr/bin/env node + +var Emitter = require('events').EventEmitter, + forEach = require('async-foreach').forEach, + Gaze = require('gaze'), + meow = require('meow'), + util = require('util'), + path = require('path'), + glob = require('glob'), + sass = require('../lib'), + render = require('../lib/render'), + watcher = require('../lib/watcher'), + stdout = require('stdout-stream'), + stdin = require('get-stdin'), + fs = require('fs'); + +/** + * Initialize CLI + */ + +var cli = meow(` + Usage: + node-sass [options] + cat | node-sass [options] > output.css + + Example: Compile foobar.scss to foobar.css + node-sass --output-style compressed foobar.scss > foobar.css + cat foobar.scss | node-sass --output-style compressed > foobar.css + + Example: Watch the sass directory for changes, compile with sourcemaps to the css directory + node-sass --watch --recursive --output css + --source-map true --source-map-contents sass + + Options + -w, --watch Watch a directory or file + -r, --recursive Recursively watch directories or files + -o, --output Output directory + -x, --omit-source-map-url Omit source map URL comment from output + -i, --indented-syntax Treat data from stdin as sass code (versus scss) + -q, --quiet Suppress log output except on error + -v, --version Prints version info + --output-style CSS output style (nested | expanded | compact | compressed) + --indent-type Indent type for output CSS (space | tab) + --indent-width Indent width; number of spaces or tabs (maximum value: 10) + --linefeed Linefeed style (cr | crlf | lf | lfcr) + --source-comments Include debug info in output + --source-map Emit source map (boolean, or path to output .map file) + --source-map-contents Embed include contents in map + --source-map-embed Embed sourceMappingUrl as data URI + --source-map-root Base path, will be emitted in source-map as is + --include-path Path to look for imported files + --follow Follow symlinked directories + --precision The amount of precision allowed in decimal numbers + --error-bell Output a bell character on errors + --importer Path to .js file containing custom importer + --functions Path to .js file containing custom functions + --help Print usage info +`, { + version: sass.info, + flags: { + errorBell: { + type: 'boolean', + }, + functions: { + type: 'string', + }, + follow: { + type: 'boolean', + }, + importer: { + type: 'string', + }, + includePath: { + type: 'string', + default: [process.cwd()], + isMultiple: true, + }, + indentType: { + type: 'string', + default: 'space', + }, + indentWidth: { + type: 'number', + default: 2, + }, + indentedSyntax: { + type: 'boolean', + alias: 'i', + }, + linefeed: { + type: 'string', + default: 'lf', + }, + omitSourceMapUrl: { + type: 'boolean', + alias: 'x', + }, + output: { + type: 'string', + alias: 'o', + }, + outputStyle: { + type: 'string', + default: 'nested', + }, + precision: { + type: 'number', + default: 5, + }, + quiet: { + type: 'boolean', + default: false, + alias: 'q', + }, + recursive: { + type: 'boolean', + default: true, + alias: 'r', + }, + sourceMapContents: { + type: 'boolean', + }, + sourceMapEmbed: { + type: 'boolean', + }, + sourceMapRoot: { + type: 'string', + }, + sourceComments: { + type: 'boolean', + alias: 'c', + }, + version: { + type: 'boolean', + alias: 'v', + }, + watch: { + type: 'boolean', + 
alias: 'w', + }, + }, +}); + +/** + * Is a Directory + * + * @param {String} filePath + * @returns {Boolean} + * @api private + */ + +function isDirectory(filePath) { + var isDir = false; + try { + var absolutePath = path.resolve(filePath); + isDir = fs.statSync(absolutePath).isDirectory(); + } catch (e) { + isDir = e.code === 'ENOENT'; + } + return isDir; +} + +/** + * Get correct glob pattern + * + * @param {Object} options + * @returns {String} + * @api private + */ + +function globPattern(options) { + return options.recursive ? '**/*.{sass,scss}' : '*.{sass,scss}'; +} + +/** + * Create emitter + * + * @api private + */ + +function getEmitter() { + var emitter = new Emitter(); + + emitter.on('error', function(err) { + if (options.errorBell) { + err += '\x07'; + } + console.error(err); + if (!options.watch) { + process.exit(1); + } + }); + + emitter.on('warn', function(data) { + if (!options.quiet) { + console.warn(data); + } + }); + + emitter.on('info', function(data) { + if (!options.quiet) { + console.info(data); + } + }); + + emitter.on('log', stdout.write.bind(stdout)); + + return emitter; +} + +/** + * Construct options + * + * @param {Array} arguments + * @param {Object} options + * @api private + */ + +function getOptions(args, options) { + var cssDir, sassDir, file, mapDir; + options.src = args[0]; + + if (args[1]) { + options.dest = path.resolve(args[1]); + } else if (options.output) { + options.dest = path.join( + path.resolve(options.output), + [path.basename(options.src, path.extname(options.src)), '.css'].join('')); // replace ext. + } + + if (options.directory) { + sassDir = path.resolve(options.directory); + file = path.relative(sassDir, args[0]); + cssDir = path.resolve(options.output); + options.dest = path.join(cssDir, file).replace(path.extname(file), '.css'); + } + + if (options.sourceMap) { + if(!options.sourceMapOriginal) { + options.sourceMapOriginal = options.sourceMap; + } + + if (options.sourceMapOriginal === 'true') { + options.sourceMap = options.dest + '.map'; + } else { + // check if sourceMap path ends with .map to avoid isDirectory false-positive + var sourceMapIsDirectory = options.sourceMapOriginal.indexOf('.map', options.sourceMapOriginal.length - 4) === -1 && isDirectory(options.sourceMapOriginal); + + if (!sourceMapIsDirectory) { + options.sourceMap = path.resolve(options.sourceMapOriginal); + } else if (!options.directory) { + options.sourceMap = path.resolve(options.sourceMapOriginal, path.basename(options.dest) + '.map'); + } else { + sassDir = path.resolve(options.directory); + file = path.relative(sassDir, args[0]); + mapDir = path.resolve(options.sourceMapOriginal); + options.sourceMap = path.join(mapDir, file).replace(path.extname(file), '.css.map'); + } + } + } + + return options; +} + +/** + * Watch + * + * @param {Object} options + * @param {Object} emitter + * @api private + */ + +function watch(options, emitter) { + var handler = function(files) { + files.added.forEach(function(file) { + var watch = gaze.watched(); + Object.keys(watch).forEach(function (dir) { + if (watch[dir].indexOf(file) !== -1) { + gaze.add(file); + } + }); + }); + + files.changed.forEach(function(file) { + if (path.basename(file)[0] !== '_') { + renderFile(file, options, emitter); + } + }); + + files.removed.forEach(function(file) { + gaze.remove(file); + }); + }; + + var gaze = new Gaze(); + gaze.add(watcher.reset(options)); + gaze.on('error', emitter.emit.bind(emitter, 'error')); + + gaze.on('changed', function(file) { + handler(watcher.changed(file)); + }); + + 
gaze.on('added', function(file) { + handler(watcher.added(file)); + }); + + gaze.on('deleted', function(file) { + handler(watcher.removed(file)); + }); +} + +/** + * Run + * + * @param {Object} options + * @param {Object} emitter + * @api private + */ + +function run(options, emitter) { + if (options.directory) { + if (!options.output) { + emitter.emit('error', 'An output directory must be specified when compiling a directory'); + } + if (!isDirectory(options.output)) { + emitter.emit('error', 'An output directory must be specified when compiling a directory'); + } + } + + if (options.sourceMapOriginal && options.directory && !isDirectory(options.sourceMapOriginal) && options.sourceMapOriginal !== 'true') { + emitter.emit('error', 'The --source-map option must be either a boolean or directory when compiling a directory'); + } + + if (options.importer) { + if ((path.resolve(options.importer) === path.normalize(options.importer).replace(/(.+)([/|\\])$/, '$1'))) { + options.importer = require(options.importer); + } else { + options.importer = require(path.resolve(options.importer)); + } + } + + if (options.functions) { + if ((path.resolve(options.functions) === path.normalize(options.functions).replace(/(.+)([/|\\])$/, '$1'))) { + options.functions = require(options.functions); + } else { + options.functions = require(path.resolve(options.functions)); + } + } + + if (options.watch) { + watch(options, emitter); + } else if (options.directory) { + renderDir(options, emitter); + } else { + render(options, emitter); + } +} + +/** + * Render a file + * + * @param {String} file + * @param {Object} options + * @param {Object} emitter + * @api private + */ +function renderFile(file, options, emitter) { + options = getOptions([path.resolve(file)], options); + if (options.watch && !options.quiet) { + emitter.emit('info', util.format('=> changed: %s', file)); + } + render(options, emitter); +} + +/** + * Render all sass files in a directory + * + * @param {Object} options + * @param {Object} emitter + * @api private + */ +function renderDir(options, emitter) { + var globPath = path.resolve(options.directory, globPattern(options)); + glob(globPath, { ignore: '**/_*', follow: options.follow }, function(err, files) { + if (err) { + return emitter.emit('error', util.format('You do not have permission to access this path: %s.', err.path)); + } else if (!files.length) { + return emitter.emit('error', 'No input file was found.'); + } + + forEach(files, function(subject) { + emitter.once('done', this.async()); + renderFile(subject, options, emitter); + }, function(successful, arr) { + var outputDir = path.join(process.cwd(), options.output); + if (!options.quiet) { + emitter.emit('info', util.format('Wrote %s CSS files to %s', arr.length, outputDir)); + } + process.exit(); + }); + }); +} + +/** + * Arguments and options + */ + +var options = getOptions(cli.input, cli.flags); +var emitter = getEmitter(); + +/** + * Show usage if no arguments are supplied + */ + +if (!options.src && process.stdin.isTTY) { + emitter.emit('error', [ + 'Provide a Sass file to render', + '', + 'Example: Compile foobar.scss to foobar.css', + ' node-sass --output-style compressed foobar.scss > foobar.css', + ' cat foobar.scss | node-sass --output-style compressed > foobar.css', + '', + 'Example: Watch the sass directory for changes, compile with sourcemaps to the css directory', + ' node-sass --watch --recursive --output css', + ' --source-map true --source-map-contents sass', + ].join('\n')); +} + +/** + * Apply arguments + */ + +if 
(options.src) { + if (isDirectory(options.src)) { + options.directory = options.src; + } + run(options, emitter); +} else if (!process.stdin.isTTY) { + stdin(function(data) { + options.data = data; + options.stdin = true; + run(options, emitter); + }); +} diff --git a/mybulma/node_modules/.bin/node-which b/mybulma/node_modules/.bin/node-which new file mode 100644 index 0000000..7cee372 --- /dev/null +++ b/mybulma/node_modules/.bin/node-which @@ -0,0 +1,52 @@ +#!/usr/bin/env node +var which = require("../") +if (process.argv.length < 3) + usage() + +function usage () { + console.error('usage: which [-as] program ...') + process.exit(1) +} + +var all = false +var silent = false +var dashdash = false +var args = process.argv.slice(2).filter(function (arg) { + if (dashdash || !/^-/.test(arg)) + return true + + if (arg === '--') { + dashdash = true + return false + } + + var flags = arg.substr(1).split('') + for (var f = 0; f < flags.length; f++) { + var flag = flags[f] + switch (flag) { + case 's': + silent = true + break + case 'a': + all = true + break + default: + console.error('which: illegal option -- ' + flag) + usage() + } + } + return false +}) + +process.exit(args.reduce(function (pv, current) { + try { + var f = which.sync(current, { all: all }) + if (all) + f = f.join('\n') + if (!silent) + console.log(f) + return pv; + } catch (e) { + return 1; + } +}, 0)) diff --git a/mybulma/node_modules/.bin/nopt b/mybulma/node_modules/.bin/nopt new file mode 100644 index 0000000..3232d4c --- /dev/null +++ b/mybulma/node_modules/.bin/nopt @@ -0,0 +1,54 @@ +#!/usr/bin/env node +var nopt = require("../lib/nopt") + , path = require("path") + , types = { num: Number + , bool: Boolean + , help: Boolean + , list: Array + , "num-list": [Number, Array] + , "str-list": [String, Array] + , "bool-list": [Boolean, Array] + , str: String + , clear: Boolean + , config: Boolean + , length: Number + , file: path + } + , shorthands = { s: [ "--str", "astring" ] + , b: [ "--bool" ] + , nb: [ "--no-bool" ] + , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ] + , "?": ["--help"] + , h: ["--help"] + , H: ["--help"] + , n: [ "--num", "125" ] + , c: ["--config"] + , l: ["--length"] + , f: ["--file"] + } + , parsed = nopt( types + , shorthands + , process.argv + , 2 ) + +console.log("parsed", parsed) + +if (parsed.help) { + console.log("") + console.log("nopt cli tester") + console.log("") + console.log("types") + console.log(Object.keys(types).map(function M (t) { + var type = types[t] + if (Array.isArray(type)) { + return [t, type.map(function (type) { return type.name })] + } + return [t, type && type.name] + }).reduce(function (s, i) { + s[i[0]] = i[1] + return s + }, {})) + console.log("") + console.log("shorthands") + console.log(shorthands) +} diff --git a/mybulma/node_modules/.bin/resolve b/mybulma/node_modules/.bin/resolve new file mode 100644 index 0000000..5ee329a --- /dev/null +++ b/mybulma/node_modules/.bin/resolve @@ -0,0 +1,50 @@ +#!/usr/bin/env node + +'use strict'; + +var path = require('path'); +var fs = require('fs'); + +if ( + String(process.env.npm_lifecycle_script).slice(0, 8) !== 'resolve ' + && ( + !process.argv + || process.argv.length < 2 + || (process.argv[1] !== __filename && fs.statSync(process.argv[1]).ino !== fs.statSync(__filename).ino) + || (process.env._ && path.resolve(process.env._) !== __filename) + ) +) { + console.error('Error: `resolve` must be run directly as an executable'); + process.exit(1); +} + +var supportsPreserveSymlinkFlag = 
require('supports-preserve-symlinks-flag'); + +var preserveSymlinks = false; +for (var i = 2; i < process.argv.length; i += 1) { + if (process.argv[i].slice(0, 2) === '--') { + if (supportsPreserveSymlinkFlag && process.argv[i] === '--preserve-symlinks') { + preserveSymlinks = true; + } else if (process.argv[i].length > 2) { + console.error('Unknown argument ' + process.argv[i].replace(/[=].*$/, '')); + process.exit(2); + } + process.argv.splice(i, 1); + i -= 1; + if (process.argv[i] === '--') { break; } // eslint-disable-line no-restricted-syntax + } +} + +if (process.argv.length < 3) { + console.error('Error: `resolve` expects a specifier'); + process.exit(2); +} + +var resolve = require('../'); + +var result = resolve.sync(process.argv[2], { + basedir: process.cwd(), + preserveSymlinks: preserveSymlinks +}); + +console.log(result); diff --git a/mybulma/node_modules/.bin/rimraf b/mybulma/node_modules/.bin/rimraf new file mode 100644 index 0000000..023814c --- /dev/null +++ b/mybulma/node_modules/.bin/rimraf @@ -0,0 +1,68 @@ +#!/usr/bin/env node + +const rimraf = require('./') + +const path = require('path') + +const isRoot = arg => /^(\/|[a-zA-Z]:\\)$/.test(path.resolve(arg)) +const filterOutRoot = arg => { + const ok = preserveRoot === false || !isRoot(arg) + if (!ok) { + console.error(`refusing to remove ${arg}`) + console.error('Set --no-preserve-root to allow this') + } + return ok +} + +let help = false +let dashdash = false +let noglob = false +let preserveRoot = true +const args = process.argv.slice(2).filter(arg => { + if (dashdash) + return !!arg + else if (arg === '--') + dashdash = true + else if (arg === '--no-glob' || arg === '-G') + noglob = true + else if (arg === '--glob' || arg === '-g') + noglob = false + else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/)) + help = true + else if (arg === '--preserve-root') + preserveRoot = true + else if (arg === '--no-preserve-root') + preserveRoot = false + else + return !!arg +}).filter(arg => !preserveRoot || filterOutRoot(arg)) + +const go = n => { + if (n >= args.length) + return + const options = noglob ? { glob: false } : {} + rimraf(args[n], options, er => { + if (er) + throw er + go(n+1) + }) +} + +if (help || args.length === 0) { + // If they didn't ask for help, then this is not a "success" + const log = help ? console.log : console.error + log('Usage: rimraf <path> [<path> ...]') + log('') + log(' Deletes all files and folders at "path" recursively.') + log('') + log('Options:') + log('') + log(' -h, --help Display this usage info') + log(' -G, --no-glob Do not expand glob patterns in arguments') + log(' -g, --glob Expand glob patterns in arguments (default)') + log(' --preserve-root Do not remove \'/\' (default)') + log(' --no-preserve-root Do not treat \'/\' specially') + log(' -- Stop parsing flags') + process.exit(help ?
0 : 1) +} else + go(0) diff --git a/mybulma/node_modules/.bin/sassgraph b/mybulma/node_modules/.bin/sassgraph new file mode 100644 index 0000000..0d6f842 --- /dev/null +++ b/mybulma/node_modules/.bin/sassgraph @@ -0,0 +1,122 @@ +#!/usr/bin/env node +var fs = require('fs'); +var path = require('path'); + +var command, directory, file; + +var yargs = require('yargs') + .usage('Usage: $0 [options] [file]') + // .demand(1) + + .command('ancestors', 'Output the ancestors') + .command('descendents', 'Output the descendents') + + .example('$0 ancestors -I src src/ src/_footer.scss', 'outputs the ancestors of src/_footer.scss') + + .option('I', { + alias: 'load-path', + default: [process.cwd()], + describe: 'Add directories to the sass load path', + type: 'array', + }) + + .option('e', { + alias: 'extensions', + default: ['scss', 'sass'], + describe: 'File extensions to include in the graph', + type: 'array', + }) + + .option('f', { + alias: 'follow', + default: false, + describe: 'Follow symbolic links', + type: 'bool', + }) + + .option('j', { + alias: 'json', + default: false, + describe: 'Output the index in json', + type: 'bool', + }) + + .version() + .alias('v', 'version') + + .help('h') + .alias('h', 'help'); + +var argv = yargs.argv; + +if (argv._.length === 0) { + yargs.showHelp(); + process.exit(1); +} + +if (['ancestors', 'descendents'].indexOf(argv._[0]) !== -1) { + command = argv._.shift(); +} + +if (argv._.length && path.extname(argv._[0]) === '') { + directory = argv._.shift(); +} + +if (argv._.length && path.extname(argv._[0])) { + file = argv._.shift(); +} + + +try { + if (!directory) { + throw new Error('Missing directory'); + } + + if (!command && !argv.json) { + throw new Error('Missing command'); + } + + if (!file && (command === 'ancestors' || command === 'descendents')) { + throw new Error(command + ' command requires a file'); + } + + var loadPaths = argv.loadPath; + if(process.env.SASS_PATH) { + loadPaths = loadPaths.concat(process.env.SASS_PATH.split(/:/).map(function(f) { + return path.resolve(f); + })); + } + + var graph = require('../').parseDir(directory, { + extensions: argv.extensions, + loadPaths: loadPaths, + follow: argv.follow, + }); + + if(argv.json) { + console.log(JSON.stringify(graph.index, null, 4)); + process.exit(0); + } + + if (command === 'ancestors') { + graph.visitAncestors(path.resolve(file), function(f) { + console.log(f); + }); + } + + if (command === 'descendents') { + graph.visitDescendents(path.resolve(file), function(f) { + console.log(f); + }); + } +} catch(e) { + if (e.code === 'ENOENT') { + console.error('Error: no such file or directory "' + e.path + '"'); + } + else { + console.log('Error: ' + e.message); + } + + // console.log(e.stack); + process.exit(1); +} diff --git a/mybulma/node_modules/.bin/semver b/mybulma/node_modules/.bin/semver new file mode 100644 index 0000000..8d1b557 --- /dev/null +++ b/mybulma/node_modules/.bin/semver @@ -0,0 +1,183 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. 
+ +const argv = process.argv.slice(2) + +let versions = [] + +const range = [] + +let inc = null + +const version = require('../package.json').version + +let loose = false + +let includePrerelease = false + +let coerce = false + +let rtl = false + +let identifier + +const semver = require('../') + +let reverse = false + +let options = {} + +const main = () => { + if (!argv.length) { + return help() + } + while (argv.length) { + let a = argv.shift() + const indexOfEqualSign = a.indexOf('=') + if (indexOfEqualSign !== -1) { + const value = a.slice(indexOfEqualSign + 1) + a = a.slice(0, indexOfEqualSign) + argv.unshift(value) + } + switch (a) { + case '-rv': case '-rev': case '--rev': case '--reverse': + reverse = true + break + case '-l': case '--loose': + loose = true + break + case '-p': case '--include-prerelease': + includePrerelease = true + break + case '-v': case '--version': + versions.push(argv.shift()) + break + case '-i': case '--inc': case '--increment': + switch (argv[0]) { + case 'major': case 'minor': case 'patch': case 'prerelease': + case 'premajor': case 'preminor': case 'prepatch': + inc = argv.shift() + break + default: + inc = 'patch' + break + } + break + case '--preid': + identifier = argv.shift() + break + case '-r': case '--range': + range.push(argv.shift()) + break + case '-c': case '--coerce': + coerce = true + break + case '--rtl': + rtl = true + break + case '--ltr': + rtl = false + break + case '-h': case '--help': case '-?': + return help() + default: + versions.push(a) + break + } + } + + options = { loose: loose, includePrerelease: includePrerelease, rtl: rtl } + + versions = versions.map((v) => { + return coerce ? (semver.coerce(v, options) || { version: v }).version : v + }).filter((v) => { + return semver.valid(v) + }) + if (!versions.length) { + return fail() + } + if (inc && (versions.length !== 1 || range.length)) { + return failInc() + } + + for (let i = 0, l = range.length; i < l; i++) { + versions = versions.filter((v) => { + return semver.satisfies(v, range[i], options) + }) + if (!versions.length) { + return fail() + } + } + return success(versions) +} + +const failInc = () => { + console.error('--inc can only be used on a single version with no range') + fail() +} + +const fail = () => process.exit(1) + +const success = () => { + const compare = reverse ? 'rcompare' : 'compare' + versions.sort((a, b) => { + return semver[compare](a, b, options) + }).map((v) => { + return semver.clean(v, options) + }).map((v) => { + return inc ? semver.inc(v, inc, options, identifier) : v + }).forEach((v, i, _) => { + console.log(v) + }) +} + +const help = () => console.log( +`SemVer ${version} + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] <version> [<version> [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range <range> + Print versions that match the specified range. + +-i --increment [<level>] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. + +--preid <identifier> + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments.
+ +-l --loose + Interpret versions and ranges loosely + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them.`) + +main() diff --git a/mybulma/node_modules/.package-lock.json b/mybulma/node_modules/.package-lock.json new file mode 100644 index 0000000..1d48317 --- /dev/null +++ b/mybulma/node_modules/.package-lock.json @@ -0,0 +1,2357 @@ +{ + "name": "mybulma", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "node_modules/@babel/code-frame": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", + "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", + "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", + "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.18.6", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": 
"sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/@babel/highlight/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@gar/promisify": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", + "integrity": "sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==", + "dev": true + }, + "node_modules/@npmcli/fs": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-2.1.2.tgz", + "integrity": "sha512-yOJKRvohFOaLqipNtwYB9WugyZKhC/DZC4VYPmpaCzDBrA8YpK3qHZ8/HGscMnE4GqbkLNuVcCnxkeQEdGt6LQ==", + "dev": true, + "dependencies": { + "@gar/promisify": "^1.1.3", + "semver": "^7.3.5" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/@npmcli/move-file": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-2.0.1.tgz", + "integrity": "sha512-mJd2Z5TjYWq/ttPLLGqArdtnC74J6bOzg4rMDnN+p1xTacZ2yPRCk2y0oSWQtygLR9YVQXgOcONrwtnk3JupxQ==", + "deprecated": "This functionality has been moved to @npmcli/fs", + "dev": true, + "dependencies": { + "mkdirp": "^1.0.4", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "dev": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/@types/minimist": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.2.tgz", + "integrity": "sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==", + "dev": true + }, + "node_modules/@types/normalize-package-data": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz", + "integrity": "sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==", + "dev": true + }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "dev": true + }, + "node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/agentkeepalive": { + "version": "4.2.1", + "resolved": 
"https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.2.1.tgz", + "integrity": "sha512-Zn4cw2NEqd+9fiSVWMscnjyQ1a8Yfoc5oBajLeo5w+YBHgDUcEBY2hS4YpTz6iN5f/2zQiktcuM6tS8x1p9dpA==", + "dev": true, + "dependencies": { + "debug": "^4.1.0", + "depd": "^1.1.2", + "humanize-ms": "^1.2.1" + }, + "engines": { + "node": ">= 8.0.0" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/aproba": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", + "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==", + "dev": true + }, + "node_modules/are-we-there-yet": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", + "integrity": "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==", + "dev": true, + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/async-foreach": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/async-foreach/-/async-foreach-0.1.3.tgz", + "integrity": "sha512-VUeSMD8nEGBWaZK4lizI1sf3yEC7pnAQ/mrI7pC2fBz2s/tq5jWWEngTwaf0Gruu/OoXRGLGg1XFqpYBiGTYJA==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/bulma": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/bulma/-/bulma-0.9.4.tgz", + "integrity": 
"sha512-86FlT5+1GrsgKbPLRRY7cGDg8fsJiP/jzTqXXVqiUZZ2aZT8uemEOHlU1CDU+TxklPEZ11HZNNWclRBBecP4CQ==", + "dev": true + }, + "node_modules/bulma-pageloader": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/bulma-pageloader/-/bulma-pageloader-0.3.0.tgz", + "integrity": "sha512-lbahiqhBCov5AYdziHFnC5/JOhCrJWFTpdRiAkwW49IM/mf0whCWHVe8MuejZFu2PEs1mtH8Gnz8exEks4Q+7g==" + }, + "node_modules/cacache": { + "version": "16.1.3", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.1.3.tgz", + "integrity": "sha512-/+Emcj9DAXxX4cwlLmRI9c166RuL3w30zp4R7Joiv2cQTtTtA+jeuCAjH3ZlGnYS3tKENSrKhAzVVP9GVyzeYQ==", + "dev": true, + "dependencies": { + "@npmcli/fs": "^2.1.0", + "@npmcli/move-file": "^2.0.0", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^8.0.1", + "infer-owner": "^1.0.4", + "lru-cache": "^7.7.1", + "minipass": "^3.1.6", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^9.0.0", + "tar": "^6.1.11", + "unique-filename": "^2.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/cacache/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/cacache/node_modules/glob": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.0.3.tgz", + "integrity": "sha512-ull455NHSHI/Y1FqGaaYFaLGkNMMJbavMrEGFXG/PGrg6y7sutWHUHrz6gy6WEBH6akM1M414dWKCNs+IhKdiQ==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/cacache/node_modules/minimatch": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.2.tgz", + "integrity": "sha512-bNH9mmM9qsJ2X4r2Nat1B//1dJVcn3+iBLa3IgqJ7EbGaDNepL9QSHOxN4ng33s52VMMhhIfgCYDk3C4ZmlDAg==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase-keys": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz", + "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", + "dev": true, + "dependencies": { + "camelcase": "^5.3.1", + "map-obj": "^4.0.0", + "quick-lru": "^4.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { 
+ "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "dev": true, + "bin": { + "color-support": "bin.js" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", + "dev": true + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + 
"engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decamelize-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.1.tgz", + "integrity": "sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==", + "dev": true, + "dependencies": { + "decamelize": "^1.1.0", + "map-obj": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/decamelize-keys/node_modules/map-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", + "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", + "dev": true + }, + "node_modules/depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/encoding": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", + "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "dev": true, + "optional": true, + "dependencies": { + "iconv-lite": "^0.6.2" + } + }, + "node_modules/env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/err-code": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", + "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", + "dev": true + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { 
+ "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/gauge": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz", + "integrity": "sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==", + "dev": true, + "dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.3", + "console-control-strings": "^1.1.0", + "has-unicode": "^2.0.1", + "signal-exit": "^3.0.7", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.5" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/gaze": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/gaze/-/gaze-1.1.3.tgz", + "integrity": "sha512-BRdNm8hbWzFzWHERTrejLqwHDfS4GibPoq5wjTPIoJHoBtKGPg3xAFfxmM+9ztbXelxcf2hwQcaz1PtmFeue8g==", + "dev": true, + "dependencies": { + "globule": "^1.0.0" + }, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-stdin": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz", + "integrity": "sha512-F5aQMywwJ2n85s4hJPTT9RPxGmubonuB10MNYo17/xph174n2MIR33HRguhzVag10O/npM7SPk73LMZNP+FaWw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": 
{ + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globule": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/globule/-/globule-1.3.4.tgz", + "integrity": "sha512-OPTIfhMBh7JbBYDpa5b+Q5ptmMWKwcNcFSR/0c6t8V4f3ZAVBEsKNY37QdVqmLRYSMhOUGYrY0QhSoEpzGr/Eg==", + "dev": true, + "dependencies": { + "glob": "~7.1.1", + "lodash": "^4.17.21", + "minimatch": "~3.0.2" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/globule/node_modules/glob": { + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globule/node_modules/minimatch": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.8.tgz", + "integrity": "sha512-6FsRAQsxQ61mw+qP1ZzbL9Bc78x2p5OqNgNpnoAFLTrX8n5Kxph0CsnhmKKNXTWjXqU5L0pGPR7hYk+XWZr60Q==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "dev": true + }, + "node_modules/hard-rejection": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", + "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", + "dev": true + }, + "node_modules/hosted-git-info": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/hosted-git-info/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + 
"node": ">=10" + } + }, + "node_modules/http-cache-semantics": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", + "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", + "dev": true + }, + "node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dev": true, + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/humanize-ms": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", + "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", + "dev": true, + "dependencies": { + "ms": "^2.0.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "optional": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/infer-owner": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz", + "integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==", + "dev": true + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/ip": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz", + "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==", + "dev": true + }, + "node_modules/is-arrayish": { + "version": 
"0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true + }, + "node_modules/is-core-module": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz", + "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-lambda": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz", + "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==", + "dev": true + }, + "node_modules/is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/js-base64": { + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/js-base64/-/js-base64-2.6.4.tgz", + "integrity": "sha512-pZe//GGmwJndub7ZghVHz7vjb2LgC1m8B07Au3eYqeqv9emhESByMXxaEgkUkEqJe87oBbSniGYoQNIBklc7IQ==", + "dev": true + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "node_modules/lru-cache": { + "version": "7.14.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.14.1.tgz", + "integrity": "sha512-ysxwsnTKdAx96aTRdhDOCQfDgbHnt8SK0KY8SEjO0wHinhWOFTESbjVCMPbU1uGXg/ch4lifqx0wfjOawU2+WA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/make-fetch-happen": { + "version": "10.2.1", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.2.1.tgz", + "integrity": "sha512-NgOPbRiaQM10DYXvN3/hhGVI2M5MtITFryzBGxHM5p4wnFxsVCbxkrBrDsk+EZ5OB4jEOT7AjDxtdF+KVEFT7w==", + "dev": true, + "dependencies": { + "agentkeepalive": "^4.2.1", + "cacache": "^16.1.0", + "http-cache-semantics": "^4.1.0", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^3.1.6", + "minipass-collect": "^1.0.2", + "minipass-fetch": "^2.0.3", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + "ssri": "^9.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/map-obj": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz", + "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/meow": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-9.0.0.tgz", + "integrity": "sha512-+obSblOQmRhcyBt62furQqRAQpNyWXo8BuQ5bN7dG8wmwQ+vwHKp/rCFD4CrTP8CsDQD1sjoZ94K417XEUk8IQ==", + "dev": true, + "dependencies": { + "@types/minimist": "^1.2.0", + "camelcase-keys": "^6.2.2", + "decamelize": "^1.2.0", + "decamelize-keys": "^1.1.0", + "hard-rejection": "^2.1.0", + "minimist-options": "4.1.0", + "normalize-package-data": "^3.0.0", + "read-pkg-up": "^7.0.1", + "redent": "^3.0.0", + "trim-newlines": "^3.0.0", + "type-fest": "^0.18.0", + "yargs-parser": "^20.2.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist-options": { + "version": "4.1.0", + "resolved": 
"https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz", + "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", + "dev": true, + "dependencies": { + "arrify": "^1.0.1", + "is-plain-obj": "^1.1.0", + "kind-of": "^6.0.3" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-collect": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-1.0.2.tgz", + "integrity": "sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minipass-fetch": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-2.1.2.tgz", + "integrity": "sha512-LT49Zi2/WMROHYoqGgdlQIZh8mLPZmOrN2NdJjMXxYe4nkN6FUyuPuOAOedNJDrx0IRGg9+4guZewtp8hE6TxA==", + "dev": true, + "dependencies": { + "minipass": "^3.1.6", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + } + }, + "node_modules/minipass-flush": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", + "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minipass-pipeline": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", + "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-sized": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", + "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true, + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": 
"sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/nan": { + "version": "2.17.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.17.0.tgz", + "integrity": "sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ==", + "dev": true + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-gyp": { + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-8.4.1.tgz", + "integrity": "sha512-olTJRgUtAb/hOXG0E93wZDs5YiJlgbXxTwQAFHyNlRsXQnYzUaF2aGgujZbw+hR8aF4ZG/rST57bWMWD16jr9w==", + "dev": true, + "dependencies": { + "env-paths": "^2.2.0", + "glob": "^7.1.4", + "graceful-fs": "^4.2.6", + "make-fetch-happen": "^9.1.0", + "nopt": "^5.0.0", + "npmlog": "^6.0.0", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "tar": "^6.1.2", + "which": "^2.0.2" + }, + "bin": { + "node-gyp": "bin/node-gyp.js" + }, + "engines": { + "node": ">= 10.12.0" + } + }, + "node_modules/node-gyp/node_modules/@npmcli/fs": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-1.1.1.tgz", + "integrity": "sha512-8KG5RD0GVP4ydEzRn/I4BNDuxDtqVbOdm8675T49OIG/NGhaK0pjPX7ZcDlvKYbA+ulvVK3ztfcF4uBdOxuJbQ==", + "dev": true, + "dependencies": { + "@gar/promisify": "^1.0.1", + "semver": "^7.3.5" + } + }, + "node_modules/node-gyp/node_modules/@npmcli/move-file": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-1.1.2.tgz", + "integrity": "sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==", + "deprecated": "This functionality has been moved to @npmcli/fs", + "dev": true, + "dependencies": { + "mkdirp": "^1.0.4", + "rimraf": "^3.0.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-gyp/node_modules/@tootallnate/once": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/node-gyp/node_modules/cacache": { + "version": "15.3.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.3.0.tgz", + "integrity": "sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==", + "dev": true, + "dependencies": { + "@npmcli/fs": "^1.0.0", + "@npmcli/move-file": "^1.0.1", + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "glob": "^7.1.4", + "infer-owner": "^1.0.4", + "lru-cache": "^6.0.0", + "minipass": "^3.1.1", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.2", + "mkdirp": "^1.0.3", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^8.0.1", + "tar": "^6.0.2", + "unique-filename": "^1.1.1" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/node-gyp/node_modules/http-proxy-agent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "dev": true, + "dependencies": { + 
"@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/node-gyp/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-gyp/node_modules/make-fetch-happen": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz", + "integrity": "sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==", + "dev": true, + "dependencies": { + "agentkeepalive": "^4.1.3", + "cacache": "^15.2.0", + "http-cache-semantics": "^4.1.0", + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^6.0.0", + "minipass": "^3.1.3", + "minipass-collect": "^1.0.2", + "minipass-fetch": "^1.3.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.2", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^6.0.0", + "ssri": "^8.0.0" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/node-gyp/node_modules/minipass-fetch": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.4.1.tgz", + "integrity": "sha512-CGH1eblLq26Y15+Azk7ey4xh0J/XfJfrCox5LDJiKqI2Q2iwOLOKrlmIaODiSQS8d18jalF6y2K2ePUm0CmShw==", + "dev": true, + "dependencies": { + "minipass": "^3.1.0", + "minipass-sized": "^1.0.3", + "minizlib": "^2.0.0" + }, + "engines": { + "node": ">=8" + }, + "optionalDependencies": { + "encoding": "^0.1.12" + } + }, + "node_modules/node-gyp/node_modules/socks-proxy-agent": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-6.2.1.tgz", + "integrity": "sha512-a6KW9G+6B3nWZ1yB8G7pJwL3ggLy1uTzKAgCb7ttblwqdz9fMGJUuTy3uFzEP48FAs9FLILlmzDlE2JJhVQaXQ==", + "dev": true, + "dependencies": { + "agent-base": "^6.0.2", + "debug": "^4.3.3", + "socks": "^2.6.2" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/node-gyp/node_modules/ssri": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-8.0.1.tgz", + "integrity": "sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==", + "dev": true, + "dependencies": { + "minipass": "^3.1.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/node-gyp/node_modules/unique-filename": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-1.1.1.tgz", + "integrity": "sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==", + "dev": true, + "dependencies": { + "unique-slug": "^2.0.0" + } + }, + "node_modules/node-gyp/node_modules/unique-slug": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-2.0.2.tgz", + "integrity": "sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4" + } + }, + "node_modules/node-sass": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/node-sass/-/node-sass-8.0.0.tgz", + "integrity": "sha512-jPzqCF2/e6JXw6r3VxfIqYc8tKQdkj5Z/BDATYyG6FL6b/LuYBNFGFVhus0mthcWifHm/JzBpKAd+3eXsWeK/A==", + "dev": true, + 
"hasInstallScript": true, + "dependencies": { + "async-foreach": "^0.1.3", + "chalk": "^4.1.2", + "cross-spawn": "^7.0.3", + "gaze": "^1.0.0", + "get-stdin": "^4.0.1", + "glob": "^7.0.3", + "lodash": "^4.17.15", + "make-fetch-happen": "^10.0.4", + "meow": "^9.0.0", + "nan": "^2.17.0", + "node-gyp": "^8.4.1", + "sass-graph": "^4.0.1", + "stdout-stream": "^1.4.0", + "true-case-path": "^2.2.1" + }, + "bin": { + "node-sass": "bin/node-sass" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/nopt": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", + "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", + "dev": true, + "dependencies": { + "abbrev": "1" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/normalize-package-data": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", + "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^4.0.1", + "is-core-module": "^2.5.0", + "semver": "^7.3.4", + "validate-npm-package-license": "^3.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npmlog": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz", + "integrity": "sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==", + "dev": true, + "dependencies": { + "are-we-there-yet": "^3.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^4.0.3", + "set-blocking": "^2.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-map": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dev": true, + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + 
"node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "node_modules/promise-inflight": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", + "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==", + "dev": true + }, + "node_modules/promise-retry": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", + "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", + "dev": true, + "dependencies": { + "err-code": "^2.0.2", + "retry": "^0.12.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/quick-lru": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", + "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", + "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", + "dev": true, + "dependencies": { + "@types/normalize-package-data": "^2.4.0", + "normalize-package-data": "^2.5.0", + "parse-json": "^5.0.0", + "type-fest": "^0.6.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", + 
"integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", + "dev": true, + "dependencies": { + "find-up": "^4.1.0", + "read-pkg": "^5.2.0", + "type-fest": "^0.8.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg-up/node_modules/type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg/node_modules/hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true + }, + "node_modules/read-pkg/node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/read-pkg/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/read-pkg/node_modules/type-fest": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", + "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, + "dependencies": { + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", + "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.9.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": 
"^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/retry": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", + "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true, + "optional": true + }, + "node_modules/sass-graph": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/sass-graph/-/sass-graph-4.0.1.tgz", + "integrity": "sha512-5YCfmGBmxoIRYHnKK2AKzrAkCoQ8ozO+iumT8K4tXJXRVCPf+7s1/9KxTSW3Rbvf+7Y7b4FR3mWyLnQr3PHocA==", + "dev": true, + "dependencies": { + "glob": "^7.0.0", + "lodash": "^4.17.11", + "scss-tokenizer": "^0.4.3", + "yargs": "^17.2.1" + }, + "bin": { + "sassgraph": "bin/sassgraph" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/scss-tokenizer": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/scss-tokenizer/-/scss-tokenizer-0.4.3.tgz", + "integrity": "sha512-raKLgf1LI5QMQnG+RxHz6oK0sL3x3I4FN2UDLqgLOGO8hodECNnNh5BXn7fAyBxrA8zVzdQizQ6XjNJQ+uBwMw==", + "dev": true, + "dependencies": { + "js-base64": "^2.4.9", + "source-map": "^0.7.3" + } + }, + "node_modules/semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/semver/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "dev": true + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/smart-buffer": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", + "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", + "dev": true, + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.7.1.tgz", + "integrity": "sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==", + "dev": true, + "dependencies": { + "ip": "^2.0.0", + "smart-buffer": "^4.2.0" + }, + "engines": { + "node": ">= 10.13.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks-proxy-agent": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-7.0.0.tgz", + "integrity": "sha512-Fgl0YPZ902wEsAyiQ+idGd1A7rSFx/ayC1CQVMw5P+EQx2V0SgpGtf6OKFhVjPflPUl9YMmEOnmfjCdMUsygww==", + "dev": true, + "dependencies": { + "agent-base": "^6.0.2", + "debug": "^4.3.3", + "socks": "^2.6.2" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/source-map": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/spdx-correct": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", + "dev": true, + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", + "dev": true + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.12.tgz", + "integrity": 
"sha512-rr+VVSXtRhO4OHbXUiAF7xW3Bo9DuuF6C5jH+q/x15j2jniycgKbxU09Hr0WqlSLUs4i4ltHGXqTe7VHclYWyA==", + "dev": true + }, + "node_modules/ssri": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-9.0.1.tgz", + "integrity": "sha512-o57Wcn66jMQvfHG1FlYbWeZWW/dHZhJXjpIcTfXldXEk5nz5lStPo3mK0OJQfGR3RbZUlbISexbljkJzuEj/8Q==", + "dev": true, + "dependencies": { + "minipass": "^3.1.1" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/stdout-stream": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/stdout-stream/-/stdout-stream-1.4.1.tgz", + "integrity": "sha512-j4emi03KXqJWcIeF8eIXkjMFN1Cmb8gUlDYGeBALLPo5qdyTfA9bOtl8m33lRoC+vFMkP3gl0WsDr6+gzxbbTA==", + "dev": true, + "dependencies": { + "readable-stream": "^2.0.1" + } + }, + "node_modules/stdout-stream/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/stdout-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/stdout-stream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/tar": { + "version": "6.1.13", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.13.tgz", + "integrity": "sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==", + "dev": true, + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^4.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tar/node_modules/minipass": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-4.0.0.tgz", + "integrity": "sha512-g2Uuh2jEKoht+zvO6vJqXmYpflPqzRBT+Th2h01DKh5z7wbY/AZ2gCQ78cP70YoHPyFdY30YBV5WxgLOEwOykw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/trim-newlines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz", + "integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/true-case-path": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/true-case-path/-/true-case-path-2.2.1.tgz", + "integrity": "sha512-0z3j8R7MCjy10kc/g+qg7Ln3alJTodw9aDuVWZa3uiWqfuBMKeAeP2ocWcxoyM3D73yz3Jt/Pu4qPr4wHSdB/Q==", + "dev": true + }, + "node_modules/type-fest": { + "version": "0.18.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz", + "integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/unique-filename": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-2.0.1.tgz", + "integrity": "sha512-ODWHtkkdx3IAR+veKxFV+VBkUMcN+FaqzUUd7IZzt+0zhDZFPFxhlqwPF3YQvMHx1TD0tdgYl+kuPnJ8E6ql7A==", + "dev": true, + "dependencies": { + "unique-slug": "^3.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/unique-slug": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-3.0.0.tgz", + "integrity": "sha512-8EyMynh679x/0gqE9fT9oilG+qEt+ibFyqjuVTsZn1+CMxH+XLlpvr2UZx4nVcCwTpx81nICr2JQFkM+HPLq4w==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + 
"node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "dev": true, + "dependencies": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/yargs": { + "version": "17.6.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.2.tgz", + "integrity": "sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==", + "dev": true, + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + } + } +} diff --git 
a/mybulma/node_modules/@babel/code-frame/LICENSE b/mybulma/node_modules/@babel/code-frame/LICENSE new file mode 100644 index 0000000..f31575e --- /dev/null +++ b/mybulma/node_modules/@babel/code-frame/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2014-present Sebastian McKenzie and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/mybulma/node_modules/@babel/code-frame/README.md b/mybulma/node_modules/@babel/code-frame/README.md new file mode 100644 index 0000000..08cacb0 --- /dev/null +++ b/mybulma/node_modules/@babel/code-frame/README.md @@ -0,0 +1,19 @@ +# @babel/code-frame + +> Generate errors that contain a code frame that point to source locations. + +See our website [@babel/code-frame](https://babeljs.io/docs/en/babel-code-frame) for more information. 
+ +## Install + +Using npm: + +```sh +npm install --save-dev @babel/code-frame +``` + +or using yarn: + +```sh +yarn add @babel/code-frame --dev +``` diff --git a/mybulma/node_modules/@babel/code-frame/lib/index.js b/mybulma/node_modules/@babel/code-frame/lib/index.js new file mode 100644 index 0000000..cba3f83 --- /dev/null +++ b/mybulma/node_modules/@babel/code-frame/lib/index.js @@ -0,0 +1,163 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.codeFrameColumns = codeFrameColumns; +exports.default = _default; + +var _highlight = require("@babel/highlight"); + +let deprecationWarningShown = false; + +function getDefs(chalk) { + return { + gutter: chalk.grey, + marker: chalk.red.bold, + message: chalk.red.bold + }; +} + +const NEWLINE = /\r\n|[\n\r\u2028\u2029]/; + +function getMarkerLines(loc, source, opts) { + const startLoc = Object.assign({ + column: 0, + line: -1 + }, loc.start); + const endLoc = Object.assign({}, startLoc, loc.end); + const { + linesAbove = 2, + linesBelow = 3 + } = opts || {}; + const startLine = startLoc.line; + const startColumn = startLoc.column; + const endLine = endLoc.line; + const endColumn = endLoc.column; + let start = Math.max(startLine - (linesAbove + 1), 0); + let end = Math.min(source.length, endLine + linesBelow); + + if (startLine === -1) { + start = 0; + } + + if (endLine === -1) { + end = source.length; + } + + const lineDiff = endLine - startLine; + const markerLines = {}; + + if (lineDiff) { + for (let i = 0; i <= lineDiff; i++) { + const lineNumber = i + startLine; + + if (!startColumn) { + markerLines[lineNumber] = true; + } else if (i === 0) { + const sourceLength = source[lineNumber - 1].length; + markerLines[lineNumber] = [startColumn, sourceLength - startColumn + 1]; + } else if (i === lineDiff) { + markerLines[lineNumber] = [0, endColumn]; + } else { + const sourceLength = source[lineNumber - i].length; + markerLines[lineNumber] = [0, sourceLength]; + } + } + } else { + if (startColumn === endColumn) { + if (startColumn) { + markerLines[startLine] = [startColumn, 0]; + } else { + markerLines[startLine] = true; + } + } else { + markerLines[startLine] = [startColumn, endColumn - startColumn]; + } + } + + return { + start, + end, + markerLines + }; +} + +function codeFrameColumns(rawLines, loc, opts = {}) { + const highlighted = (opts.highlightCode || opts.forceColor) && (0, _highlight.shouldHighlight)(opts); + const chalk = (0, _highlight.getChalk)(opts); + const defs = getDefs(chalk); + + const maybeHighlight = (chalkFn, string) => { + return highlighted ? chalkFn(string) : string; + }; + + const lines = rawLines.split(NEWLINE); + const { + start, + end, + markerLines + } = getMarkerLines(loc, lines, opts); + const hasColumns = loc.start && typeof loc.start.column === "number"; + const numberMaxWidth = String(end).length; + const highlightedLines = highlighted ? 
(0, _highlight.default)(rawLines, opts) : rawLines; + let frame = highlightedLines.split(NEWLINE, end).slice(start, end).map((line, index) => { + const number = start + 1 + index; + const paddedNumber = ` ${number}`.slice(-numberMaxWidth); + const gutter = ` ${paddedNumber} |`; + const hasMarker = markerLines[number]; + const lastMarkerLine = !markerLines[number + 1]; + + if (hasMarker) { + let markerLine = ""; + + if (Array.isArray(hasMarker)) { + const markerSpacing = line.slice(0, Math.max(hasMarker[0] - 1, 0)).replace(/[^\t]/g, " "); + const numberOfMarkers = hasMarker[1] || 1; + markerLine = ["\n ", maybeHighlight(defs.gutter, gutter.replace(/\d/g, " ")), " ", markerSpacing, maybeHighlight(defs.marker, "^").repeat(numberOfMarkers)].join(""); + + if (lastMarkerLine && opts.message) { + markerLine += " " + maybeHighlight(defs.message, opts.message); + } + } + + return [maybeHighlight(defs.marker, ">"), maybeHighlight(defs.gutter, gutter), line.length > 0 ? ` ${line}` : "", markerLine].join(""); + } else { + return ` ${maybeHighlight(defs.gutter, gutter)}${line.length > 0 ? ` ${line}` : ""}`; + } + }).join("\n"); + + if (opts.message && !hasColumns) { + frame = `${" ".repeat(numberMaxWidth + 1)}${opts.message}\n${frame}`; + } + + if (highlighted) { + return chalk.reset(frame); + } else { + return frame; + } +} + +function _default(rawLines, lineNumber, colNumber, opts = {}) { + if (!deprecationWarningShown) { + deprecationWarningShown = true; + const message = "Passing lineNumber and colNumber is deprecated to @babel/code-frame. Please use `codeFrameColumns`."; + + if (process.emitWarning) { + process.emitWarning(message, "DeprecationWarning"); + } else { + const deprecationError = new Error(message); + deprecationError.name = "DeprecationWarning"; + console.warn(new Error(message)); + } + } + + colNumber = Math.max(colNumber, 0); + const location = { + start: { + column: colNumber, + line: lineNumber + } + }; + return codeFrameColumns(rawLines, location, opts); +} \ No newline at end of file diff --git a/mybulma/node_modules/@babel/code-frame/package.json b/mybulma/node_modules/@babel/code-frame/package.json new file mode 100644 index 0000000..18d8db1 --- /dev/null +++ b/mybulma/node_modules/@babel/code-frame/package.json @@ -0,0 +1,30 @@ +{ + "name": "@babel/code-frame", + "version": "7.18.6", + "description": "Generate errors that contain a code frame that point to source locations.", + "author": "The Babel Team (https://babel.dev/team)", + "homepage": "https://babel.dev/docs/en/next/babel-code-frame", + "bugs": "https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen", + "license": "MIT", + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "https://github.com/babel/babel.git", + "directory": "packages/babel-code-frame" + }, + "main": "./lib/index.js", + "dependencies": { + "@babel/highlight": "^7.18.6" + }, + "devDependencies": { + "@types/chalk": "^2.0.0", + "chalk": "^2.0.0", + "strip-ansi": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "type": "commonjs" +} \ No newline at end of file diff --git a/mybulma/node_modules/@babel/helper-validator-identifier/LICENSE b/mybulma/node_modules/@babel/helper-validator-identifier/LICENSE new file mode 100644 index 0000000..f31575e --- /dev/null +++ b/mybulma/node_modules/@babel/helper-validator-identifier/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2014-present Sebastian McKenzie and other contributors + +Permission is hereby granted, free of charge, to any person 
obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/mybulma/node_modules/@babel/helper-validator-identifier/README.md b/mybulma/node_modules/@babel/helper-validator-identifier/README.md new file mode 100644 index 0000000..4f704c4 --- /dev/null +++ b/mybulma/node_modules/@babel/helper-validator-identifier/README.md @@ -0,0 +1,19 @@ +# @babel/helper-validator-identifier + +> Validate identifier/keywords name + +See our website [@babel/helper-validator-identifier](https://babeljs.io/docs/en/babel-helper-validator-identifier) for more information. + +## Install + +Using npm: + +```sh +npm install --save @babel/helper-validator-identifier +``` + +or using yarn: + +```sh +yarn add @babel/helper-validator-identifier +``` diff --git a/mybulma/node_modules/@babel/helper-validator-identifier/lib/identifier.js b/mybulma/node_modules/@babel/helper-validator-identifier/lib/identifier.js new file mode 100644 index 0000000..3a9e335 --- /dev/null +++ b/mybulma/node_modules/@babel/helper-validator-identifier/lib/identifier.js @@ -0,0 +1,86 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.isIdentifierChar = isIdentifierChar; +exports.isIdentifierName = isIdentifierName; +exports.isIdentifierStart = isIdentifierStart; +let nonASCIIidentifierStartChars = 
"\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u0870-\u0887\u0889-\u088e\u08a0-\u08c9\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u09fc\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0af9\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c5d\u0c60\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cdd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d04-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e86-\u0e8a\u0e8c-\u0ea3\u0ea5\u0ea7-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u1711\u171f-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4c\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf3\u1cf5\u1cf6\u1cfa\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309b-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31bf\u31f0-\u31ff\u3400-\u4dbf\u4e00-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7ca\ua7d0\ua7d1\ua7d3\ua7d5-\ua7d9\ua7f2-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb
\ua8fd\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab69\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc"; +let nonASCIIidentifierChars = "\u200c\u200d\xb7\u0300-\u036f\u0387\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u0669\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u06f0-\u06f9\u0711\u0730-\u074a\u07a6-\u07b0\u07c0-\u07c9\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u0898-\u089f\u08ca-\u08e1\u08e3-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962\u0963\u0966-\u096f\u0981-\u0983\u09bc\u09be-\u09c4\u09c7\u09c8\u09cb-\u09cd\u09d7\u09e2\u09e3\u09e6-\u09ef\u09fe\u0a01-\u0a03\u0a3c\u0a3e-\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a66-\u0a71\u0a75\u0a81-\u0a83\u0abc\u0abe-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ae2\u0ae3\u0ae6-\u0aef\u0afa-\u0aff\u0b01-\u0b03\u0b3c\u0b3e-\u0b44\u0b47\u0b48\u0b4b-\u0b4d\u0b55-\u0b57\u0b62\u0b63\u0b66-\u0b6f\u0b82\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd7\u0be6-\u0bef\u0c00-\u0c04\u0c3c\u0c3e-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0c66-\u0c6f\u0c81-\u0c83\u0cbc\u0cbe-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0ce6-\u0cef\u0cf3\u0d00-\u0d03\u0d3b\u0d3c\u0d3e-\u0d44\u0d46-\u0d48\u0d4a-\u0d4d\u0d57\u0d62\u0d63\u0d66-\u0d6f\u0d81-\u0d83\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2\u0df3\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0e50-\u0e59\u0eb1\u0eb4-\u0ebc\u0ec8-\u0ece\u0ed0-\u0ed9\u0f18\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e\u0f3f\u0f71-\u0f84\u0f86\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102b-\u103e\u1040-\u1049\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u109d\u135d-\u135f\u1369-\u1371\u1712-\u1715\u1732-\u1734\u1752\u1753\u1772\u1773\u17b4-\u17d3\u17dd\u17e0-\u17e9\u180b-\u180d\u180f-\u1819\u18a9\u1920-\u192b\u1930-\u193b\u1946-\u194f\u19d0-\u19da\u1a17-\u1a1b\u1a55-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1ab0-\u1abd\u1abf-\u1ace\u1b00-\u1b04\u1b34-\u1b44\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bf3\u1c24-\u1c37\u1c40-\u1c49\u1c50-\u1c59\u1cd0-\u1cd2\u1cd4-\u1ce8\u1ced\u1cf4\u1cf7-\u1cf9\u1dc0-\u1dff\u203f\u2040\u2054\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua620-\ua629\ua66f\ua674-\ua67d\ua69e\ua69f\ua6f0\ua6f1\ua802\ua806\ua80b\ua823-\ua827\ua82c\ua880\ua881\ua8b4-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f1\ua8ff-\ua909\ua926-\ua92d\ua947-\ua953\ua980-\ua983\ua9b3-\ua9c0\ua9d0-\ua9d9\ua9e5\ua9f0-\ua9f9\uaa29-\uaa36\uaa43\uaa4c\uaa4d\uaa50-\uaa59\uaa7b-\uaa7d\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uaaeb-\uaaef\uaaf5\uaaf6\uabe3-\uabea\uabec\uabed\uabf0-\uabf9\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\ufe33\ufe34\ufe4d-\ufe4f\uff10-\uff19\uff3f"; +const nonASCIIidentifierStart = new RegExp("[" + nonASCIIidentifierStartChars + "]"); +const nonASCIIidentifier = new RegExp("[" + nonASCIIidentifierStartChars + 
nonASCIIidentifierChars + "]"); +nonASCIIidentifierStartChars = nonASCIIidentifierChars = null; +const astralIdentifierStartCodes = [0, 11, 2, 25, 2, 18, 2, 1, 2, 14, 3, 13, 35, 122, 70, 52, 268, 28, 4, 48, 48, 31, 14, 29, 6, 37, 11, 29, 3, 35, 5, 7, 2, 4, 43, 157, 19, 35, 5, 35, 5, 39, 9, 51, 13, 10, 2, 14, 2, 6, 2, 1, 2, 10, 2, 14, 2, 6, 2, 1, 68, 310, 10, 21, 11, 7, 25, 5, 2, 41, 2, 8, 70, 5, 3, 0, 2, 43, 2, 1, 4, 0, 3, 22, 11, 22, 10, 30, 66, 18, 2, 1, 11, 21, 11, 25, 71, 55, 7, 1, 65, 0, 16, 3, 2, 2, 2, 28, 43, 28, 4, 28, 36, 7, 2, 27, 28, 53, 11, 21, 11, 18, 14, 17, 111, 72, 56, 50, 14, 50, 14, 35, 349, 41, 7, 1, 79, 28, 11, 0, 9, 21, 43, 17, 47, 20, 28, 22, 13, 52, 58, 1, 3, 0, 14, 44, 33, 24, 27, 35, 30, 0, 3, 0, 9, 34, 4, 0, 13, 47, 15, 3, 22, 0, 2, 0, 36, 17, 2, 24, 20, 1, 64, 6, 2, 0, 2, 3, 2, 14, 2, 9, 8, 46, 39, 7, 3, 1, 3, 21, 2, 6, 2, 1, 2, 4, 4, 0, 19, 0, 13, 4, 159, 52, 19, 3, 21, 2, 31, 47, 21, 1, 2, 0, 185, 46, 42, 3, 37, 47, 21, 0, 60, 42, 14, 0, 72, 26, 38, 6, 186, 43, 117, 63, 32, 7, 3, 0, 3, 7, 2, 1, 2, 23, 16, 0, 2, 0, 95, 7, 3, 38, 17, 0, 2, 0, 29, 0, 11, 39, 8, 0, 22, 0, 12, 45, 20, 0, 19, 72, 264, 8, 2, 36, 18, 0, 50, 29, 113, 6, 2, 1, 2, 37, 22, 0, 26, 5, 2, 1, 2, 31, 15, 0, 328, 18, 16, 0, 2, 12, 2, 33, 125, 0, 80, 921, 103, 110, 18, 195, 2637, 96, 16, 1071, 18, 5, 4026, 582, 8634, 568, 8, 30, 18, 78, 18, 29, 19, 47, 17, 3, 32, 20, 6, 18, 689, 63, 129, 74, 6, 0, 67, 12, 65, 1, 2, 0, 29, 6135, 9, 1237, 43, 8, 8936, 3, 2, 6, 2, 1, 2, 290, 16, 0, 30, 2, 3, 0, 15, 3, 9, 395, 2309, 106, 6, 12, 4, 8, 8, 9, 5991, 84, 2, 70, 2, 1, 3, 0, 3, 1, 3, 3, 2, 11, 2, 0, 2, 6, 2, 64, 2, 3, 3, 7, 2, 6, 2, 27, 2, 3, 2, 4, 2, 0, 4, 6, 2, 339, 3, 24, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 7, 1845, 30, 7, 5, 262, 61, 147, 44, 11, 6, 17, 0, 322, 29, 19, 43, 485, 27, 757, 6, 2, 3, 2, 1, 2, 14, 2, 196, 60, 67, 8, 0, 1205, 3, 2, 26, 2, 1, 2, 0, 3, 0, 2, 9, 2, 3, 2, 0, 2, 0, 7, 0, 5, 0, 2, 0, 2, 0, 2, 2, 2, 1, 2, 0, 3, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 1, 2, 0, 3, 3, 2, 6, 2, 3, 2, 3, 2, 0, 2, 9, 2, 16, 6, 2, 2, 4, 2, 16, 4421, 42719, 33, 4153, 7, 221, 3, 5761, 15, 7472, 3104, 541, 1507, 4938, 6, 4191]; +const astralIdentifierCodes = [509, 0, 227, 0, 150, 4, 294, 9, 1368, 2, 2, 1, 6, 3, 41, 2, 5, 0, 166, 1, 574, 3, 9, 9, 370, 1, 81, 2, 71, 10, 50, 3, 123, 2, 54, 14, 32, 10, 3, 1, 11, 3, 46, 10, 8, 0, 46, 9, 7, 2, 37, 13, 2, 9, 6, 1, 45, 0, 13, 2, 49, 13, 9, 3, 2, 11, 83, 11, 7, 0, 3, 0, 158, 11, 6, 9, 7, 3, 56, 1, 2, 6, 3, 1, 3, 2, 10, 0, 11, 1, 3, 6, 4, 4, 193, 17, 10, 9, 5, 0, 82, 19, 13, 9, 214, 6, 3, 8, 28, 1, 83, 16, 16, 9, 82, 12, 9, 9, 84, 14, 5, 9, 243, 14, 166, 9, 71, 5, 2, 1, 3, 3, 2, 0, 2, 1, 13, 9, 120, 6, 3, 6, 4, 0, 29, 9, 41, 6, 2, 3, 9, 0, 10, 10, 47, 15, 406, 7, 2, 7, 17, 9, 57, 21, 2, 13, 123, 5, 4, 0, 2, 1, 2, 6, 2, 0, 9, 9, 49, 4, 2, 1, 2, 4, 9, 9, 330, 3, 10, 1, 2, 0, 49, 6, 4, 4, 14, 9, 5351, 0, 7, 14, 13835, 9, 87, 9, 39, 4, 60, 6, 26, 9, 1014, 0, 2, 54, 8, 3, 82, 0, 12, 1, 19628, 1, 4706, 45, 3, 22, 543, 4, 4, 5, 9, 7, 3, 6, 31, 3, 149, 2, 1418, 49, 513, 54, 5, 49, 9, 0, 15, 0, 23, 4, 2, 14, 1361, 6, 2, 16, 3, 6, 2, 1, 2, 4, 101, 0, 161, 6, 10, 9, 357, 0, 62, 13, 499, 13, 983, 6, 110, 6, 6, 9, 4759, 9, 787719, 239]; + +function isInAstralSet(code, set) { + let pos = 0x10000; + + for (let i = 0, length = set.length; i < length; i += 2) { + pos += set[i]; + if (pos > code) return false; + pos += set[i + 1]; + if (pos >= code) return true; + } + + return false; +} + +function isIdentifierStart(code) { + if (code < 65) return code === 36; + if 
(code <= 90) return true; + if (code < 97) return code === 95; + if (code <= 122) return true; + + if (code <= 0xffff) { + return code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code)); + } + + return isInAstralSet(code, astralIdentifierStartCodes); +} + +function isIdentifierChar(code) { + if (code < 48) return code === 36; + if (code < 58) return true; + if (code < 65) return false; + if (code <= 90) return true; + if (code < 97) return code === 95; + if (code <= 122) return true; + + if (code <= 0xffff) { + return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code)); + } + + return isInAstralSet(code, astralIdentifierStartCodes) || isInAstralSet(code, astralIdentifierCodes); +} + +function isIdentifierName(name) { + let isFirst = true; + + for (let i = 0; i < name.length; i++) { + let cp = name.charCodeAt(i); + + if ((cp & 0xfc00) === 0xd800 && i + 1 < name.length) { + const trail = name.charCodeAt(++i); + + if ((trail & 0xfc00) === 0xdc00) { + cp = 0x10000 + ((cp & 0x3ff) << 10) + (trail & 0x3ff); + } + } + + if (isFirst) { + isFirst = false; + + if (!isIdentifierStart(cp)) { + return false; + } + } else if (!isIdentifierChar(cp)) { + return false; + } + } + + return !isFirst; +} + +//# sourceMappingURL=identifier.js.map diff --git a/mybulma/node_modules/@babel/helper-validator-identifier/lib/identifier.js.map b/mybulma/node_modules/@babel/helper-validator-identifier/lib/identifier.js.map new file mode 100644 index 0000000..45c94ff --- /dev/null +++ b/mybulma/node_modules/@babel/helper-validator-identifier/lib/identifier.js.map @@ -0,0 +1 @@ +{"version":3,"names":["nonASCIIidentifierStartChars","nonASCIIidentifierChars","nonASCIIidentifierStart","RegExp","nonASCIIidentifier","astralIdentifierStartCodes","astralIdentifierCodes","isInAstralSet","code","set","pos","i","length","isIdentifierStart","test","String","fromCharCode","isIdentifierChar","isIdentifierName","name","isFirst","cp","charCodeAt","trail"],"sources":["../src/identifier.ts"],"sourcesContent":["import * as charCodes from \"charcodes\";\n\n// ## Character categories\n\n// Big ugly regular expressions that match characters in the\n// whitespace, identifier, and identifier-start categories. 
These\n// are only applied when a character is found to actually have a\n// code point between 0x80 and 0xffff.\n// Generated by `scripts/generate-identifier-regex.js`.\n\n/* prettier-ignore */\nlet nonASCIIidentifierStartChars = \"\\xaa\\xb5\\xba\\xc0-\\xd6\\xd8-\\xf6\\xf8-\\u02c1\\u02c6-\\u02d1\\u02e0-\\u02e4\\u02ec\\u02ee\\u0370-\\u0374\\u0376\\u0377\\u037a-\\u037d\\u037f\\u0386\\u0388-\\u038a\\u038c\\u038e-\\u03a1\\u03a3-\\u03f5\\u03f7-\\u0481\\u048a-\\u052f\\u0531-\\u0556\\u0559\\u0560-\\u0588\\u05d0-\\u05ea\\u05ef-\\u05f2\\u0620-\\u064a\\u066e\\u066f\\u0671-\\u06d3\\u06d5\\u06e5\\u06e6\\u06ee\\u06ef\\u06fa-\\u06fc\\u06ff\\u0710\\u0712-\\u072f\\u074d-\\u07a5\\u07b1\\u07ca-\\u07ea\\u07f4\\u07f5\\u07fa\\u0800-\\u0815\\u081a\\u0824\\u0828\\u0840-\\u0858\\u0860-\\u086a\\u0870-\\u0887\\u0889-\\u088e\\u08a0-\\u08c9\\u0904-\\u0939\\u093d\\u0950\\u0958-\\u0961\\u0971-\\u0980\\u0985-\\u098c\\u098f\\u0990\\u0993-\\u09a8\\u09aa-\\u09b0\\u09b2\\u09b6-\\u09b9\\u09bd\\u09ce\\u09dc\\u09dd\\u09df-\\u09e1\\u09f0\\u09f1\\u09fc\\u0a05-\\u0a0a\\u0a0f\\u0a10\\u0a13-\\u0a28\\u0a2a-\\u0a30\\u0a32\\u0a33\\u0a35\\u0a36\\u0a38\\u0a39\\u0a59-\\u0a5c\\u0a5e\\u0a72-\\u0a74\\u0a85-\\u0a8d\\u0a8f-\\u0a91\\u0a93-\\u0aa8\\u0aaa-\\u0ab0\\u0ab2\\u0ab3\\u0ab5-\\u0ab9\\u0abd\\u0ad0\\u0ae0\\u0ae1\\u0af9\\u0b05-\\u0b0c\\u0b0f\\u0b10\\u0b13-\\u0b28\\u0b2a-\\u0b30\\u0b32\\u0b33\\u0b35-\\u0b39\\u0b3d\\u0b5c\\u0b5d\\u0b5f-\\u0b61\\u0b71\\u0b83\\u0b85-\\u0b8a\\u0b8e-\\u0b90\\u0b92-\\u0b95\\u0b99\\u0b9a\\u0b9c\\u0b9e\\u0b9f\\u0ba3\\u0ba4\\u0ba8-\\u0baa\\u0bae-\\u0bb9\\u0bd0\\u0c05-\\u0c0c\\u0c0e-\\u0c10\\u0c12-\\u0c28\\u0c2a-\\u0c39\\u0c3d\\u0c58-\\u0c5a\\u0c5d\\u0c60\\u0c61\\u0c80\\u0c85-\\u0c8c\\u0c8e-\\u0c90\\u0c92-\\u0ca8\\u0caa-\\u0cb3\\u0cb5-\\u0cb9\\u0cbd\\u0cdd\\u0cde\\u0ce0\\u0ce1\\u0cf1\\u0cf2\\u0d04-\\u0d0c\\u0d0e-\\u0d10\\u0d12-\\u0d3a\\u0d3d\\u0d4e\\u0d54-\\u0d56\\u0d5f-\\u0d61\\u0d7a-\\u0d7f\\u0d85-\\u0d96\\u0d9a-\\u0db1\\u0db3-\\u0dbb\\u0dbd\\u0dc0-\\u0dc6\\u0e01-\\u0e30\\u0e32\\u0e33\\u0e40-\\u0e46\\u0e81\\u0e82\\u0e84\\u0e86-\\u0e8a\\u0e8c-\\u0ea3\\u0ea5\\u0ea7-\\u0eb0\\u0eb2\\u0eb3\\u0ebd\\u0ec0-\\u0ec4\\u0ec6\\u0edc-\\u0edf\\u0f00\\u0f40-\\u0f47\\u0f49-\\u0f6c\\u0f88-\\u0f8c\\u1000-\\u102a\\u103f\\u1050-\\u1055\\u105a-\\u105d\\u1061\\u1065\\u1066\\u106e-\\u1070\\u1075-\\u1081\\u108e\\u10a0-\\u10c5\\u10c7\\u10cd\\u10d0-\\u10fa\\u10fc-\\u1248\\u124a-\\u124d\\u1250-\\u1256\\u1258\\u125a-\\u125d\\u1260-\\u1288\\u128a-\\u128d\\u1290-\\u12b0\\u12b2-\\u12b5\\u12b8-\\u12be\\u12c0\\u12c2-\\u12c5\\u12c8-\\u12d6\\u12d8-\\u1310\\u1312-\\u1315\\u1318-\\u135a\\u1380-\\u138f\\u13a0-\\u13f5\\u13f8-\\u13fd\\u1401-\\u166c\\u166f-\\u167f\\u1681-\\u169a\\u16a0-\\u16ea\\u16ee-\\u16f8\\u1700-\\u1711\\u171f-\\u1731\\u1740-\\u1751\\u1760-\\u176c\\u176e-\\u1770\\u1780-\\u17b3\\u17d7\\u17dc\\u1820-\\u1878\\u1880-\\u18a8\\u18aa\\u18b0-\\u18f5\\u1900-\\u191e\\u1950-\\u196d\\u1970-\\u1974\\u1980-\\u19ab\\u19b0-\\u19c9\\u1a00-\\u1a16\\u1a20-\\u1a54\\u1aa7\\u1b05-\\u1b33\\u1b45-\\u1b4c\\u1b83-\\u1ba0\\u1bae\\u1baf\\u1bba-\\u1be5\\u1c00-\\u1c23\\u1c4d-\\u1c4f\\u1c5a-\\u1c7d\\u1c80-\\u1c88\\u1c90-\\u1cba\\u1cbd-\\u1cbf\\u1ce9-\\u1cec\\u1cee-\\u1cf3\\u1cf5\\u1cf6\\u1cfa\\u1d00-\\u1dbf\\u1e00-\\u1f15\\u1f18-\\u1f1d\\u1f20-\\u1f45\\u1f48-\\u1f4d\\u1f50-\\u1f57\\u1f59\\u1f5b\\u1f5d\\u1f5f-\\u1f7d\\u1f80-\\u1fb4\\u1fb6-\\u1fbc\\u1fbe\\u1fc2-\\u1fc4\\u1fc6-\\u1fcc\\u1fd0-\\u1fd3\\u1fd6-\\u1fdb\\u1fe0-\\u1fec\\u1ff2-\\u1ff4\\u1ff6-\\u1ffc\\u2071\\u207f\\u2090-\\u209c\\u2102\\u2107\\u210a-\\u2113\\u2115\\u2118-\\u211d\\u2124\\u2126\\u2128\\u212a-\\u21
39\\u213c-\\u213f\\u2145-\\u2149\\u214e\\u2160-\\u2188\\u2c00-\\u2ce4\\u2ceb-\\u2cee\\u2cf2\\u2cf3\\u2d00-\\u2d25\\u2d27\\u2d2d\\u2d30-\\u2d67\\u2d6f\\u2d80-\\u2d96\\u2da0-\\u2da6\\u2da8-\\u2dae\\u2db0-\\u2db6\\u2db8-\\u2dbe\\u2dc0-\\u2dc6\\u2dc8-\\u2dce\\u2dd0-\\u2dd6\\u2dd8-\\u2dde\\u3005-\\u3007\\u3021-\\u3029\\u3031-\\u3035\\u3038-\\u303c\\u3041-\\u3096\\u309b-\\u309f\\u30a1-\\u30fa\\u30fc-\\u30ff\\u3105-\\u312f\\u3131-\\u318e\\u31a0-\\u31bf\\u31f0-\\u31ff\\u3400-\\u4dbf\\u4e00-\\ua48c\\ua4d0-\\ua4fd\\ua500-\\ua60c\\ua610-\\ua61f\\ua62a\\ua62b\\ua640-\\ua66e\\ua67f-\\ua69d\\ua6a0-\\ua6ef\\ua717-\\ua71f\\ua722-\\ua788\\ua78b-\\ua7ca\\ua7d0\\ua7d1\\ua7d3\\ua7d5-\\ua7d9\\ua7f2-\\ua801\\ua803-\\ua805\\ua807-\\ua80a\\ua80c-\\ua822\\ua840-\\ua873\\ua882-\\ua8b3\\ua8f2-\\ua8f7\\ua8fb\\ua8fd\\ua8fe\\ua90a-\\ua925\\ua930-\\ua946\\ua960-\\ua97c\\ua984-\\ua9b2\\ua9cf\\ua9e0-\\ua9e4\\ua9e6-\\ua9ef\\ua9fa-\\ua9fe\\uaa00-\\uaa28\\uaa40-\\uaa42\\uaa44-\\uaa4b\\uaa60-\\uaa76\\uaa7a\\uaa7e-\\uaaaf\\uaab1\\uaab5\\uaab6\\uaab9-\\uaabd\\uaac0\\uaac2\\uaadb-\\uaadd\\uaae0-\\uaaea\\uaaf2-\\uaaf4\\uab01-\\uab06\\uab09-\\uab0e\\uab11-\\uab16\\uab20-\\uab26\\uab28-\\uab2e\\uab30-\\uab5a\\uab5c-\\uab69\\uab70-\\uabe2\\uac00-\\ud7a3\\ud7b0-\\ud7c6\\ud7cb-\\ud7fb\\uf900-\\ufa6d\\ufa70-\\ufad9\\ufb00-\\ufb06\\ufb13-\\ufb17\\ufb1d\\ufb1f-\\ufb28\\ufb2a-\\ufb36\\ufb38-\\ufb3c\\ufb3e\\ufb40\\ufb41\\ufb43\\ufb44\\ufb46-\\ufbb1\\ufbd3-\\ufd3d\\ufd50-\\ufd8f\\ufd92-\\ufdc7\\ufdf0-\\ufdfb\\ufe70-\\ufe74\\ufe76-\\ufefc\\uff21-\\uff3a\\uff41-\\uff5a\\uff66-\\uffbe\\uffc2-\\uffc7\\uffca-\\uffcf\\uffd2-\\uffd7\\uffda-\\uffdc\";\n/* prettier-ignore */\nlet nonASCIIidentifierChars = \"\\u200c\\u200d\\xb7\\u0300-\\u036f\\u0387\\u0483-\\u0487\\u0591-\\u05bd\\u05bf\\u05c1\\u05c2\\u05c4\\u05c5\\u05c7\\u0610-\\u061a\\u064b-\\u0669\\u0670\\u06d6-\\u06dc\\u06df-\\u06e4\\u06e7\\u06e8\\u06ea-\\u06ed\\u06f0-\\u06f9\\u0711\\u0730-\\u074a\\u07a6-\\u07b0\\u07c0-\\u07c9\\u07eb-\\u07f3\\u07fd\\u0816-\\u0819\\u081b-\\u0823\\u0825-\\u0827\\u0829-\\u082d\\u0859-\\u085b\\u0898-\\u089f\\u08ca-\\u08e1\\u08e3-\\u0903\\u093a-\\u093c\\u093e-\\u094f\\u0951-\\u0957\\u0962\\u0963\\u0966-\\u096f\\u0981-\\u0983\\u09bc\\u09be-\\u09c4\\u09c7\\u09c8\\u09cb-\\u09cd\\u09d7\\u09e2\\u09e3\\u09e6-\\u09ef\\u09fe\\u0a01-\\u0a03\\u0a3c\\u0a3e-\\u0a42\\u0a47\\u0a48\\u0a4b-\\u0a4d\\u0a51\\u0a66-\\u0a71\\u0a75\\u0a81-\\u0a83\\u0abc\\u0abe-\\u0ac5\\u0ac7-\\u0ac9\\u0acb-\\u0acd\\u0ae2\\u0ae3\\u0ae6-\\u0aef\\u0afa-\\u0aff\\u0b01-\\u0b03\\u0b3c\\u0b3e-\\u0b44\\u0b47\\u0b48\\u0b4b-\\u0b4d\\u0b55-\\u0b57\\u0b62\\u0b63\\u0b66-\\u0b6f\\u0b82\\u0bbe-\\u0bc2\\u0bc6-\\u0bc8\\u0bca-\\u0bcd\\u0bd7\\u0be6-\\u0bef\\u0c00-\\u0c04\\u0c3c\\u0c3e-\\u0c44\\u0c46-\\u0c48\\u0c4a-\\u0c4d\\u0c55\\u0c56\\u0c62\\u0c63\\u0c66-\\u0c6f\\u0c81-\\u0c83\\u0cbc\\u0cbe-\\u0cc4\\u0cc6-\\u0cc8\\u0cca-\\u0ccd\\u0cd5\\u0cd6\\u0ce2\\u0ce3\\u0ce6-\\u0cef\\u0cf3\\u0d00-\\u0d03\\u0d3b\\u0d3c\\u0d3e-\\u0d44\\u0d46-\\u0d48\\u0d4a-\\u0d4d\\u0d57\\u0d62\\u0d63\\u0d66-\\u0d6f\\u0d81-\\u0d83\\u0dca\\u0dcf-\\u0dd4\\u0dd6\\u0dd8-\\u0ddf\\u0de6-\\u0def\\u0df2\\u0df3\\u0e31\\u0e34-\\u0e3a\\u0e47-\\u0e4e\\u0e50-\\u0e59\\u0eb1\\u0eb4-\\u0ebc\\u0ec8-\\u0ece\\u0ed0-\\u0ed9\\u0f18\\u0f19\\u0f20-\\u0f29\\u0f35\\u0f37\\u0f39\\u0f3e\\u0f3f\\u0f71-\\u0f84\\u0f86\\u0f87\\u0f8d-\\u0f97\\u0f99-\\u0fbc\\u0fc6\\u102b-\\u103e\\u1040-\\u1049\\u1056-\\u1059\\u105e-\\u1060\\u1062-\\u1064\\u1067-\\u106d\\u1071-\\u1074\\u1082-\\u108d\\u108f-\\u109d\\u135d-\\u135f\\u1369-\\u1371\\u1712-\\u1715\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17b
4-\\u17d3\\u17dd\\u17e0-\\u17e9\\u180b-\\u180d\\u180f-\\u1819\\u18a9\\u1920-\\u192b\\u1930-\\u193b\\u1946-\\u194f\\u19d0-\\u19da\\u1a17-\\u1a1b\\u1a55-\\u1a5e\\u1a60-\\u1a7c\\u1a7f-\\u1a89\\u1a90-\\u1a99\\u1ab0-\\u1abd\\u1abf-\\u1ace\\u1b00-\\u1b04\\u1b34-\\u1b44\\u1b50-\\u1b59\\u1b6b-\\u1b73\\u1b80-\\u1b82\\u1ba1-\\u1bad\\u1bb0-\\u1bb9\\u1be6-\\u1bf3\\u1c24-\\u1c37\\u1c40-\\u1c49\\u1c50-\\u1c59\\u1cd0-\\u1cd2\\u1cd4-\\u1ce8\\u1ced\\u1cf4\\u1cf7-\\u1cf9\\u1dc0-\\u1dff\\u203f\\u2040\\u2054\\u20d0-\\u20dc\\u20e1\\u20e5-\\u20f0\\u2cef-\\u2cf1\\u2d7f\\u2de0-\\u2dff\\u302a-\\u302f\\u3099\\u309a\\ua620-\\ua629\\ua66f\\ua674-\\ua67d\\ua69e\\ua69f\\ua6f0\\ua6f1\\ua802\\ua806\\ua80b\\ua823-\\ua827\\ua82c\\ua880\\ua881\\ua8b4-\\ua8c5\\ua8d0-\\ua8d9\\ua8e0-\\ua8f1\\ua8ff-\\ua909\\ua926-\\ua92d\\ua947-\\ua953\\ua980-\\ua983\\ua9b3-\\ua9c0\\ua9d0-\\ua9d9\\ua9e5\\ua9f0-\\ua9f9\\uaa29-\\uaa36\\uaa43\\uaa4c\\uaa4d\\uaa50-\\uaa59\\uaa7b-\\uaa7d\\uaab0\\uaab2-\\uaab4\\uaab7\\uaab8\\uaabe\\uaabf\\uaac1\\uaaeb-\\uaaef\\uaaf5\\uaaf6\\uabe3-\\uabea\\uabec\\uabed\\uabf0-\\uabf9\\ufb1e\\ufe00-\\ufe0f\\ufe20-\\ufe2f\\ufe33\\ufe34\\ufe4d-\\ufe4f\\uff10-\\uff19\\uff3f\";\n\nconst nonASCIIidentifierStart = new RegExp(\n \"[\" + nonASCIIidentifierStartChars + \"]\",\n);\nconst nonASCIIidentifier = new RegExp(\n \"[\" + nonASCIIidentifierStartChars + nonASCIIidentifierChars + \"]\",\n);\n\nnonASCIIidentifierStartChars = nonASCIIidentifierChars = null;\n\n// These are a run-length and offset-encoded representation of the\n// >0xffff code points that are a valid part of identifiers. The\n// offset starts at 0x10000, and each pair of numbers represents an\n// offset to the next range, and then a size of the range. They were\n// generated by `scripts/generate-identifier-regex.js`.\n/* prettier-ignore */\nconst astralIdentifierStartCodes = [0,11,2,25,2,18,2,1,2,14,3,13,35,122,70,52,268,28,4,48,48,31,14,29,6,37,11,29,3,35,5,7,2,4,43,157,19,35,5,35,5,39,9,51,13,10,2,14,2,6,2,1,2,10,2,14,2,6,2,1,68,310,10,21,11,7,25,5,2,41,2,8,70,5,3,0,2,43,2,1,4,0,3,22,11,22,10,30,66,18,2,1,11,21,11,25,71,55,7,1,65,0,16,3,2,2,2,28,43,28,4,28,36,7,2,27,28,53,11,21,11,18,14,17,111,72,56,50,14,50,14,35,349,41,7,1,79,28,11,0,9,21,43,17,47,20,28,22,13,52,58,1,3,0,14,44,33,24,27,35,30,0,3,0,9,34,4,0,13,47,15,3,22,0,2,0,36,17,2,24,20,1,64,6,2,0,2,3,2,14,2,9,8,46,39,7,3,1,3,21,2,6,2,1,2,4,4,0,19,0,13,4,159,52,19,3,21,2,31,47,21,1,2,0,185,46,42,3,37,47,21,0,60,42,14,0,72,26,38,6,186,43,117,63,32,7,3,0,3,7,2,1,2,23,16,0,2,0,95,7,3,38,17,0,2,0,29,0,11,39,8,0,22,0,12,45,20,0,19,72,264,8,2,36,18,0,50,29,113,6,2,1,2,37,22,0,26,5,2,1,2,31,15,0,328,18,16,0,2,12,2,33,125,0,80,921,103,110,18,195,2637,96,16,1071,18,5,4026,582,8634,568,8,30,18,78,18,29,19,47,17,3,32,20,6,18,689,63,129,74,6,0,67,12,65,1,2,0,29,6135,9,1237,43,8,8936,3,2,6,2,1,2,290,16,0,30,2,3,0,15,3,9,395,2309,106,6,12,4,8,8,9,5991,84,2,70,2,1,3,0,3,1,3,3,2,11,2,0,2,6,2,64,2,3,3,7,2,6,2,27,2,3,2,4,2,0,4,6,2,339,3,24,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,7,1845,30,7,5,262,61,147,44,11,6,17,0,322,29,19,43,485,27,757,6,2,3,2,1,2,14,2,196,60,67,8,0,1205,3,2,26,2,1,2,0,3,0,2,9,2,3,2,0,2,0,7,0,5,0,2,0,2,0,2,2,2,1,2,0,3,0,2,0,2,0,2,0,2,0,2,1,2,0,3,3,2,6,2,3,2,3,2,0,2,9,2,16,6,2,2,4,2,16,4421,42719,33,4153,7,221,3,5761,15,7472,3104,541,1507,4938,6,4191];\n/* prettier-ignore */\nconst astralIdentifierCodes = 
[509,0,227,0,150,4,294,9,1368,2,2,1,6,3,41,2,5,0,166,1,574,3,9,9,370,1,81,2,71,10,50,3,123,2,54,14,32,10,3,1,11,3,46,10,8,0,46,9,7,2,37,13,2,9,6,1,45,0,13,2,49,13,9,3,2,11,83,11,7,0,3,0,158,11,6,9,7,3,56,1,2,6,3,1,3,2,10,0,11,1,3,6,4,4,193,17,10,9,5,0,82,19,13,9,214,6,3,8,28,1,83,16,16,9,82,12,9,9,84,14,5,9,243,14,166,9,71,5,2,1,3,3,2,0,2,1,13,9,120,6,3,6,4,0,29,9,41,6,2,3,9,0,10,10,47,15,406,7,2,7,17,9,57,21,2,13,123,5,4,0,2,1,2,6,2,0,9,9,49,4,2,1,2,4,9,9,330,3,10,1,2,0,49,6,4,4,14,9,5351,0,7,14,13835,9,87,9,39,4,60,6,26,9,1014,0,2,54,8,3,82,0,12,1,19628,1,4706,45,3,22,543,4,4,5,9,7,3,6,31,3,149,2,1418,49,513,54,5,49,9,0,15,0,23,4,2,14,1361,6,2,16,3,6,2,1,2,4,101,0,161,6,10,9,357,0,62,13,499,13,983,6,110,6,6,9,4759,9,787719,239];\n\n// This has a complexity linear to the value of the code. The\n// assumption is that looking up astral identifier characters is\n// rare.\nfunction isInAstralSet(code: number, set: readonly number[]): boolean {\n let pos = 0x10000;\n for (let i = 0, length = set.length; i < length; i += 2) {\n pos += set[i];\n if (pos > code) return false;\n\n pos += set[i + 1];\n if (pos >= code) return true;\n }\n return false;\n}\n\n// Test whether a given character code starts an identifier.\n\nexport function isIdentifierStart(code: number): boolean {\n if (code < charCodes.uppercaseA) return code === charCodes.dollarSign;\n if (code <= charCodes.uppercaseZ) return true;\n if (code < charCodes.lowercaseA) return code === charCodes.underscore;\n if (code <= charCodes.lowercaseZ) return true;\n if (code <= 0xffff) {\n return (\n code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code))\n );\n }\n return isInAstralSet(code, astralIdentifierStartCodes);\n}\n\n// Test whether a given character is part of an identifier.\n\nexport function isIdentifierChar(code: number): boolean {\n if (code < charCodes.digit0) return code === charCodes.dollarSign;\n if (code < charCodes.colon) return true;\n if (code < charCodes.uppercaseA) return false;\n if (code <= charCodes.uppercaseZ) return true;\n if (code < charCodes.lowercaseA) return code === charCodes.underscore;\n if (code <= charCodes.lowercaseZ) return true;\n if (code <= 0xffff) {\n return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code));\n }\n return (\n isInAstralSet(code, astralIdentifierStartCodes) ||\n isInAstralSet(code, astralIdentifierCodes)\n );\n}\n\n// Test whether a given string is a valid identifier name\n\nexport function isIdentifierName(name: string): boolean {\n let isFirst = true;\n for (let i = 0; i < name.length; i++) {\n // The implementation is based on\n // https://source.chromium.org/chromium/chromium/src/+/master:v8/src/builtins/builtins-string-gen.cc;l=1455;drc=221e331b49dfefadbc6fa40b0c68e6f97606d0b3;bpv=0;bpt=1\n // We reimplement `codePointAt` because `codePointAt` is a V8 builtin which is not inlined by TurboFan (as of M91)\n // since `name` is mostly ASCII, an inlined `charCodeAt` wins here\n let cp = name.charCodeAt(i);\n if ((cp & 0xfc00) === 0xd800 && i + 1 < name.length) {\n const trail = name.charCodeAt(++i);\n if ((trail & 0xfc00) === 0xdc00) {\n cp = 0x10000 + ((cp & 0x3ff) << 10) + (trail & 0x3ff);\n }\n }\n if (isFirst) {\n isFirst = false;\n if (!isIdentifierStart(cp)) {\n return false;\n }\n } else if (!isIdentifierChar(cp)) {\n return false;\n }\n }\n return 
!isFirst;\n}\n"],"mappings":";;;;;;;;AAWA,IAAIA,4BAA4B,GAAG,8qIAAnC;AAEA,IAAIC,uBAAuB,GAAG,mkFAA9B;AAEA,MAAMC,uBAAuB,GAAG,IAAIC,MAAJ,CAC9B,MAAMH,4BAAN,GAAqC,GADP,CAAhC;AAGA,MAAMI,kBAAkB,GAAG,IAAID,MAAJ,CACzB,MAAMH,4BAAN,GAAqCC,uBAArC,GAA+D,GADtC,CAA3B;AAIAD,4BAA4B,GAAGC,uBAAuB,GAAG,IAAzD;AAQA,MAAMI,0BAA0B,GAAG,CAAC,CAAD,EAAG,EAAH,EAAM,CAAN,EAAQ,EAAR,EAAW,CAAX,EAAa,EAAb,EAAgB,CAAhB,EAAkB,CAAlB,EAAoB,CAApB,EAAsB,EAAtB,EAAyB,CAAzB,EAA2B,EAA3B,EAA8B,EAA9B,EAAiC,GAAjC,EAAqC,EAArC,EAAwC,EAAxC,EAA2C,GAA3C,EAA+C,EAA/C,EAAkD,CAAlD,EAAoD,EAApD,EAAuD,EAAvD,EAA0D,EAA1D,EAA6D,EAA7D,EAAgE,EAAhE,EAAmE,CAAnE,EAAqE,EAArE,EAAwE,EAAxE,EAA2E,EAA3E,EAA8E,CAA9E,EAAgF,EAAhF,EAAmF,CAAnF,EAAqF,CAArF,EAAuF,CAAvF,EAAyF,CAAzF,EAA2F,EAA3F,EAA8F,GAA9F,EAAkG,EAAlG,EAAqG,EAArG,EAAwG,CAAxG,EAA0G,EAA1G,EAA6G,CAA7G,EAA+G,EAA/G,EAAkH,CAAlH,EAAoH,EAApH,EAAuH,EAAvH,EAA0H,EAA1H,EAA6H,CAA7H,EAA+H,EAA/H,EAAkI,CAAlI,EAAoI,CAApI,EAAsI,CAAtI,EAAwI,CAAxI,EAA0I,CAA1I,EAA4I,EAA5I,EAA+I,CAA/I,EAAiJ,EAAjJ,EAAoJ,CAApJ,EAAsJ,CAAtJ,EAAwJ,CAAxJ,EAA0J,CAA1J,EAA4J,EAA5J,EAA+J,GAA/J,EAAmK,EAAnK,EAAsK,EAAtK,EAAyK,EAAzK,EAA4K,CAA5K,EAA8K,EAA9K,EAAiL,CAAjL,EAAmL,CAAnL,EAAqL,EAArL,EAAwL,CAAxL,EAA0L,CAA1L,EAA4L,EAA5L,EAA+L,CAA/L,EAAiM,CAAjM,EAAmM,CAAnM,EAAqM,CAArM,EAAuM,EAAvM,EAA0M,CAA1M,EAA4M,CAA5M,EAA8M,CAA9M,EAAgN,CAAhN,EAAkN,CAAlN,EAAoN,EAApN,EAAuN,EAAvN,EAA0N,EAA1N,EAA6N,EAA7N,EAAgO,EAAhO,EAAmO,EAAnO,EAAsO,EAAtO,EAAyO,CAAzO,EAA2O,CAA3O,EAA6O,EAA7O,EAAgP,EAAhP,EAAmP,EAAnP,EAAsP,EAAtP,EAAyP,EAAzP,EAA4P,EAA5P,EAA+P,CAA/P,EAAiQ,CAAjQ,EAAmQ,EAAnQ,EAAsQ,CAAtQ,EAAwQ,EAAxQ,EAA2Q,CAA3Q,EAA6Q,CAA7Q,EAA+Q,CAA/Q,EAAiR,CAAjR,EAAmR,EAAnR,EAAsR,EAAtR,EAAyR,EAAzR,EAA4R,CAA5R,EAA8R,EAA9R,EAAiS,EAAjS,EAAoS,CAApS,EAAsS,CAAtS,EAAwS,EAAxS,EAA2S,EAA3S,EAA8S,EAA9S,EAAiT,EAAjT,EAAoT,EAApT,EAAuT,EAAvT,EAA0T,EAA1T,EAA6T,EAA7T,EAAgU,EAAhU,EAAmU,GAAnU,EAAuU,EAAvU,EAA0U,EAA1U,EAA6U,EAA7U,EAAgV,EAAhV,EAAmV,EAAnV,EAAsV,EAAtV,EAAyV,EAAzV,EAA4V,GAA5V,EAAgW,EAAhW,EAAmW,CAAnW,EAAqW,CAArW,EAAuW,EAAvW,EAA0W,EAA1W,EAA6W,EAA7W,EAAgX,CAAhX,EAAkX,CAAlX,EAAoX,EAApX,EAAuX,EAAvX,EAA0X,EAA1X,EAA6X,EAA7X,EAAgY,EAAhY,EAAmY,EAAnY,EAAsY,EAAtY,EAAyY,EAAzY,EAA4Y,EAA5Y,EAA+Y,EAA/Y,EAAkZ,CAAlZ,EAAoZ,CAApZ,EAAsZ,CAAtZ,EAAwZ,EAAxZ,EAA2Z,EAA3Z,EAA8Z,EAA9Z,EAAia,EAAja,EAAoa,EAApa,EAAua,EAAva,EAA0a,EAA1a,EAA6a,CAA7a,EAA+a,CAA/a,EAAib,CAAjb,EAAmb,CAAnb,EAAqb,EAArb,EAAwb,CAAxb,EAA0b,CAA1b,EAA4b,EAA5b,EAA+b,EAA/b,EAAkc,EAAlc,EAAqc,CAArc,EAAuc,EAAvc,EAA0c,CAA1c,EAA4c,CAA5c,EAA8c,CAA9c,EAAgd,EAAhd,EAAmd,EAAnd,EAAsd,CAAtd,EAAwd,EAAxd,EAA2d,EAA3d,EAA8d,CAA9d,EAAge,EAAhe,EAAme,CAAne,EAAqe,CAAre,EAAue,CAAve,EAAye,CAAze,EAA2e,CAA3e,EAA6e,CAA7e,EAA+e,EAA/e,EAAkf,CAAlf,EAAof,CAApf,EAAsf,CAAtf,EAAwf,EAAxf,EAA2f,EAA3f,EAA8f,CAA9f,EAAggB,CAAhgB,EAAkgB,CAAlgB,EAAogB,CAApgB,EAAsgB,EAAtgB,EAAygB,CAAzgB,EAA2gB,CAA3gB,EAA6gB,CAA7gB,EAA+gB,CAA/gB,EAAihB,CAAjhB,EAAmhB,CAAnhB,EAAqhB,CAArhB,EAAuhB,CAAvhB,EAAyhB,EAAzhB,EAA4hB,CAA5hB,EAA8hB,EAA9hB,EAAiiB,CAAjiB,EAAmiB,GAAniB,EAAuiB,EAAviB,EAA0iB,EAA1iB,EAA6iB,CAA7iB,EAA+iB,EAA/iB,EAAkjB,CAAljB,EAAojB,EAApjB,EAAujB,EAAvjB,EAA0jB,EAA1jB,EAA6jB,CAA7jB,EAA+jB,CAA/jB,EAAikB,CAAjkB,EAAmkB,GAAnkB,EAAukB,EAAvkB,EAA0kB,EAA1kB,EAA6kB,CAA7kB,EAA+kB,EAA/kB,EAAklB,EAAllB,EAAqlB,EAArlB,EAAwlB,CAAxlB,EAA0lB,EAA1lB,EAA6lB,EAA7lB,EAAgmB,EAAhmB,EAAmmB,CAAnmB,EAAqmB,EAArmB,EAAwmB,EAAxmB,EAA2mB,EAA3mB,EAA8mB,CAA9mB,EAAgnB,GAAhnB,EAAonB,EAApnB,EAAunB,GAAvnB,EAA2nB,EAA3nB,EAA8nB,EAA9nB,EAAioB,CAAjoB,EAAmoB,CAAnoB,EAAqoB,CAAroB,EAAuoB,CAAvoB,EAAyoB,CAAzoB,EAA2oB,CAA3oB,EAA6oB,CAA7oB,EAA+oB,CAA/oB,EAAipB,EAAjpB,EAAopB,EAAppB,EAAupB,CAAvpB,EAAypB,CAAzpB,EAA2pB,CAA3pB,EAA6pB,EAA7pB,EAAgqB,CAAhqB,EAAkqB,
CAAlqB,EAAoqB,EAApqB,EAAuqB,EAAvqB,EAA0qB,CAA1qB,EAA4qB,CAA5qB,EAA8qB,CAA9qB,EAAgrB,EAAhrB,EAAmrB,CAAnrB,EAAqrB,EAArrB,EAAwrB,EAAxrB,EAA2rB,CAA3rB,EAA6rB,CAA7rB,EAA+rB,EAA/rB,EAAksB,CAAlsB,EAAosB,EAApsB,EAAusB,EAAvsB,EAA0sB,EAA1sB,EAA6sB,CAA7sB,EAA+sB,EAA/sB,EAAktB,EAAltB,EAAqtB,GAArtB,EAAytB,CAAztB,EAA2tB,CAA3tB,EAA6tB,EAA7tB,EAAguB,EAAhuB,EAAmuB,CAAnuB,EAAquB,EAAruB,EAAwuB,EAAxuB,EAA2uB,GAA3uB,EAA+uB,CAA/uB,EAAivB,CAAjvB,EAAmvB,CAAnvB,EAAqvB,CAArvB,EAAuvB,EAAvvB,EAA0vB,EAA1vB,EAA6vB,CAA7vB,EAA+vB,EAA/vB,EAAkwB,CAAlwB,EAAowB,CAApwB,EAAswB,CAAtwB,EAAwwB,CAAxwB,EAA0wB,EAA1wB,EAA6wB,EAA7wB,EAAgxB,CAAhxB,EAAkxB,GAAlxB,EAAsxB,EAAtxB,EAAyxB,EAAzxB,EAA4xB,CAA5xB,EAA8xB,CAA9xB,EAAgyB,EAAhyB,EAAmyB,CAAnyB,EAAqyB,EAAryB,EAAwyB,GAAxyB,EAA4yB,CAA5yB,EAA8yB,EAA9yB,EAAizB,GAAjzB,EAAqzB,GAArzB,EAAyzB,GAAzzB,EAA6zB,EAA7zB,EAAg0B,GAAh0B,EAAo0B,IAAp0B,EAAy0B,EAAz0B,EAA40B,EAA50B,EAA+0B,IAA/0B,EAAo1B,EAAp1B,EAAu1B,CAAv1B,EAAy1B,IAAz1B,EAA81B,GAA91B,EAAk2B,IAAl2B,EAAu2B,GAAv2B,EAA22B,CAA32B,EAA62B,EAA72B,EAAg3B,EAAh3B,EAAm3B,EAAn3B,EAAs3B,EAAt3B,EAAy3B,EAAz3B,EAA43B,EAA53B,EAA+3B,EAA/3B,EAAk4B,EAAl4B,EAAq4B,CAAr4B,EAAu4B,EAAv4B,EAA04B,EAA14B,EAA64B,CAA74B,EAA+4B,EAA/4B,EAAk5B,GAAl5B,EAAs5B,EAAt5B,EAAy5B,GAAz5B,EAA65B,EAA75B,EAAg6B,CAAh6B,EAAk6B,CAAl6B,EAAo6B,EAAp6B,EAAu6B,EAAv6B,EAA06B,EAA16B,EAA66B,CAA76B,EAA+6B,CAA/6B,EAAi7B,CAAj7B,EAAm7B,EAAn7B,EAAs7B,IAAt7B,EAA27B,CAA37B,EAA67B,IAA77B,EAAk8B,EAAl8B,EAAq8B,CAAr8B,EAAu8B,IAAv8B,EAA48B,CAA58B,EAA88B,CAA98B,EAAg9B,CAAh9B,EAAk9B,CAAl9B,EAAo9B,CAAp9B,EAAs9B,CAAt9B,EAAw9B,GAAx9B,EAA49B,EAA59B,EAA+9B,CAA/9B,EAAi+B,EAAj+B,EAAo+B,CAAp+B,EAAs+B,CAAt+B,EAAw+B,CAAx+B,EAA0+B,EAA1+B,EAA6+B,CAA7+B,EAA++B,CAA/+B,EAAi/B,GAAj/B,EAAq/B,IAAr/B,EAA0/B,GAA1/B,EAA8/B,CAA9/B,EAAggC,EAAhgC,EAAmgC,CAAngC,EAAqgC,CAArgC,EAAugC,CAAvgC,EAAygC,CAAzgC,EAA2gC,IAA3gC,EAAghC,EAAhhC,EAAmhC,CAAnhC,EAAqhC,EAArhC,EAAwhC,CAAxhC,EAA0hC,CAA1hC,EAA4hC,CAA5hC,EAA8hC,CAA9hC,EAAgiC,CAAhiC,EAAkiC,CAAliC,EAAoiC,CAApiC,EAAsiC,CAAtiC,EAAwiC,CAAxiC,EAA0iC,EAA1iC,EAA6iC,CAA7iC,EAA+iC,CAA/iC,EAAijC,CAAjjC,EAAmjC,CAAnjC,EAAqjC,CAArjC,EAAujC,EAAvjC,EAA0jC,CAA1jC,EAA4jC,CAA5jC,EAA8jC,CAA9jC,EAAgkC,CAAhkC,EAAkkC,CAAlkC,EAAokC,CAApkC,EAAskC,CAAtkC,EAAwkC,EAAxkC,EAA2kC,CAA3kC,EAA6kC,CAA7kC,EAA+kC,CAA/kC,EAAilC,CAAjlC,EAAmlC,CAAnlC,EAAqlC,CAArlC,EAAulC,CAAvlC,EAAylC,CAAzlC,EAA2lC,CAA3lC,EAA6lC,GAA7lC,EAAimC,CAAjmC,EAAmmC,EAAnmC,EAAsmC,CAAtmC,EAAwmC,EAAxmC,EAA2mC,CAA3mC,EAA6mC,EAA7mC,EAAgnC,CAAhnC,EAAknC,EAAlnC,EAAqnC,CAArnC,EAAunC,EAAvnC,EAA0nC,CAA1nC,EAA4nC,EAA5nC,EAA+nC,CAA/nC,EAAioC,EAAjoC,EAAooC,CAApoC,EAAsoC,EAAtoC,EAAyoC,CAAzoC,EAA2oC,EAA3oC,EAA8oC,CAA9oC,EAAgpC,EAAhpC,EAAmpC,CAAnpC,EAAqpC,CAArpC,EAAupC,IAAvpC,EAA4pC,EAA5pC,EAA+pC,CAA/pC,EAAiqC,CAAjqC,EAAmqC,GAAnqC,EAAuqC,EAAvqC,EAA0qC,GAA1qC,EAA8qC,EAA9qC,EAAirC,EAAjrC,EAAorC,CAAprC,EAAsrC,EAAtrC,EAAyrC,CAAzrC,EAA2rC,GAA3rC,EAA+rC,EAA/rC,EAAksC,EAAlsC,EAAqsC,EAArsC,EAAwsC,GAAxsC,EAA4sC,EAA5sC,EAA+sC,GAA/sC,EAAmtC,CAAntC,EAAqtC,CAArtC,EAAutC,CAAvtC,EAAytC,CAAztC,EAA2tC,CAA3tC,EAA6tC,CAA7tC,EAA+tC,EAA/tC,EAAkuC,CAAluC,EAAouC,GAApuC,EAAwuC,EAAxuC,EAA2uC,EAA3uC,EAA8uC,CAA9uC,EAAgvC,CAAhvC,EAAkvC,IAAlvC,EAAuvC,CAAvvC,EAAyvC,CAAzvC,EAA2vC,EAA3vC,EAA8vC,CAA9vC,EAAgwC,CAAhwC,EAAkwC,CAAlwC,EAAowC,CAApwC,EAAswC,CAAtwC,EAAwwC,CAAxwC,EAA0wC,CAA1wC,EAA4wC,CAA5wC,EAA8wC,CAA9wC,EAAgxC,CAAhxC,EAAkxC,CAAlxC,EAAoxC,CAApxC,EAAsxC,CAAtxC,EAAwxC,CAAxxC,EAA0xC,CAA1xC,EAA4xC,CAA5xC,EAA8xC,CAA9xC,EAAgyC,CAAhyC,EAAkyC,CAAlyC,EAAoyC,CAApyC,EAAsyC,CAAtyC,EAAwyC,CAAxyC,EAA0yC,CAA1yC,EAA4yC,CAA5yC,EAA8yC,CAA9yC,EAAgzC,CAAhzC,EAAkzC,CAAlzC,EAAozC,CAApzC,EAAszC,CAAtzC,EAAwzC,CAAxzC,EAA0zC
,CAA1zC,EAA4zC,CAA5zC,EAA8zC,CAA9zC,EAAg0C,CAAh0C,EAAk0C,CAAl0C,EAAo0C,CAAp0C,EAAs0C,CAAt0C,EAAw0C,CAAx0C,EAA00C,CAA10C,EAA40C,CAA50C,EAA80C,CAA90C,EAAg1C,CAAh1C,EAAk1C,CAAl1C,EAAo1C,CAAp1C,EAAs1C,CAAt1C,EAAw1C,CAAx1C,EAA01C,CAA11C,EAA41C,CAA51C,EAA81C,CAA91C,EAAg2C,CAAh2C,EAAk2C,CAAl2C,EAAo2C,CAAp2C,EAAs2C,CAAt2C,EAAw2C,CAAx2C,EAA02C,CAA12C,EAA42C,EAA52C,EAA+2C,CAA/2C,EAAi3C,CAAj3C,EAAm3C,CAAn3C,EAAq3C,CAAr3C,EAAu3C,CAAv3C,EAAy3C,EAAz3C,EAA43C,IAA53C,EAAi4C,KAAj4C,EAAu4C,EAAv4C,EAA04C,IAA14C,EAA+4C,CAA/4C,EAAi5C,GAAj5C,EAAq5C,CAAr5C,EAAu5C,IAAv5C,EAA45C,EAA55C,EAA+5C,IAA/5C,EAAo6C,IAAp6C,EAAy6C,GAAz6C,EAA66C,IAA76C,EAAk7C,IAAl7C,EAAu7C,CAAv7C,EAAy7C,IAAz7C,CAAnC;AAEA,MAAMC,qBAAqB,GAAG,CAAC,GAAD,EAAK,CAAL,EAAO,GAAP,EAAW,CAAX,EAAa,GAAb,EAAiB,CAAjB,EAAmB,GAAnB,EAAuB,CAAvB,EAAyB,IAAzB,EAA8B,CAA9B,EAAgC,CAAhC,EAAkC,CAAlC,EAAoC,CAApC,EAAsC,CAAtC,EAAwC,EAAxC,EAA2C,CAA3C,EAA6C,CAA7C,EAA+C,CAA/C,EAAiD,GAAjD,EAAqD,CAArD,EAAuD,GAAvD,EAA2D,CAA3D,EAA6D,CAA7D,EAA+D,CAA/D,EAAiE,GAAjE,EAAqE,CAArE,EAAuE,EAAvE,EAA0E,CAA1E,EAA4E,EAA5E,EAA+E,EAA/E,EAAkF,EAAlF,EAAqF,CAArF,EAAuF,GAAvF,EAA2F,CAA3F,EAA6F,EAA7F,EAAgG,EAAhG,EAAmG,EAAnG,EAAsG,EAAtG,EAAyG,CAAzG,EAA2G,CAA3G,EAA6G,EAA7G,EAAgH,CAAhH,EAAkH,EAAlH,EAAqH,EAArH,EAAwH,CAAxH,EAA0H,CAA1H,EAA4H,EAA5H,EAA+H,CAA/H,EAAiI,CAAjI,EAAmI,CAAnI,EAAqI,EAArI,EAAwI,EAAxI,EAA2I,CAA3I,EAA6I,CAA7I,EAA+I,CAA/I,EAAiJ,CAAjJ,EAAmJ,EAAnJ,EAAsJ,CAAtJ,EAAwJ,EAAxJ,EAA2J,CAA3J,EAA6J,EAA7J,EAAgK,EAAhK,EAAmK,CAAnK,EAAqK,CAArK,EAAuK,CAAvK,EAAyK,EAAzK,EAA4K,EAA5K,EAA+K,EAA/K,EAAkL,CAAlL,EAAoL,CAApL,EAAsL,CAAtL,EAAwL,CAAxL,EAA0L,GAA1L,EAA8L,EAA9L,EAAiM,CAAjM,EAAmM,CAAnM,EAAqM,CAArM,EAAuM,CAAvM,EAAyM,EAAzM,EAA4M,CAA5M,EAA8M,CAA9M,EAAgN,CAAhN,EAAkN,CAAlN,EAAoN,CAApN,EAAsN,CAAtN,EAAwN,CAAxN,EAA0N,EAA1N,EAA6N,CAA7N,EAA+N,EAA/N,EAAkO,CAAlO,EAAoO,CAApO,EAAsO,CAAtO,EAAwO,CAAxO,EAA0O,CAA1O,EAA4O,GAA5O,EAAgP,EAAhP,EAAmP,EAAnP,EAAsP,CAAtP,EAAwP,CAAxP,EAA0P,CAA1P,EAA4P,EAA5P,EAA+P,EAA/P,EAAkQ,EAAlQ,EAAqQ,CAArQ,EAAuQ,GAAvQ,EAA2Q,CAA3Q,EAA6Q,CAA7Q,EAA+Q,CAA/Q,EAAiR,EAAjR,EAAoR,CAApR,EAAsR,EAAtR,EAAyR,EAAzR,EAA4R,EAA5R,EAA+R,CAA/R,EAAiS,EAAjS,EAAoS,EAApS,EAAuS,CAAvS,EAAyS,CAAzS,EAA2S,EAA3S,EAA8S,EAA9S,EAAiT,CAAjT,EAAmT,CAAnT,EAAqT,GAArT,EAAyT,EAAzT,EAA4T,GAA5T,EAAgU,CAAhU,EAAkU,EAAlU,EAAqU,CAArU,EAAuU,CAAvU,EAAyU,CAAzU,EAA2U,CAA3U,EAA6U,CAA7U,EAA+U,CAA/U,EAAiV,CAAjV,EAAmV,CAAnV,EAAqV,CAArV,EAAuV,EAAvV,EAA0V,CAA1V,EAA4V,GAA5V,EAAgW,CAAhW,EAAkW,CAAlW,EAAoW,CAApW,EAAsW,CAAtW,EAAwW,CAAxW,EAA0W,EAA1W,EAA6W,CAA7W,EAA+W,EAA/W,EAAkX,CAAlX,EAAoX,CAApX,EAAsX,CAAtX,EAAwX,CAAxX,EAA0X,CAA1X,EAA4X,EAA5X,EAA+X,EAA/X,EAAkY,EAAlY,EAAqY,EAArY,EAAwY,GAAxY,EAA4Y,CAA5Y,EAA8Y,CAA9Y,EAAgZ,CAAhZ,EAAkZ,EAAlZ,EAAqZ,CAArZ,EAAuZ,EAAvZ,EAA0Z,EAA1Z,EAA6Z,CAA7Z,EAA+Z,EAA/Z,EAAka,GAAla,EAAsa,CAAta,EAAwa,CAAxa,EAA0a,CAA1a,EAA4a,CAA5a,EAA8a,CAA9a,EAAgb,CAAhb,EAAkb,CAAlb,EAAob,CAApb,EAAsb,CAAtb,EAAwb,CAAxb,EAA0b,CAA1b,EAA4b,EAA5b,EAA+b,CAA/b,EAAic,CAAjc,EAAmc,CAAnc,EAAqc,CAArc,EAAuc,CAAvc,EAAyc,CAAzc,EAA2c,CAA3c,EAA6c,GAA7c,EAAid,CAAjd,EAAmd,EAAnd,EAAsd,CAAtd,EAAwd,CAAxd,EAA0d,CAA1d,EAA4d,EAA5d,EAA+d,CAA/d,EAAie,CAAje,EAAme,CAAne,EAAqe,EAAre,EAAwe,CAAxe,EAA0e,IAA1e,EAA+e,CAA/e,EAAif,CAAjf,EAAmf,EAAnf,EAAsf,KAAtf,EAA4f,CAA5f,EAA8f,EAA9f,EAAigB,CAAjgB,EAAmgB,EAAngB,EAAsgB,CAAtgB,EAAwgB,EAAxgB,EAA2gB,CAA3gB,EAA6gB,EAA7gB,EAAghB,CAAhhB,EAAkhB,IAAlhB,EAAuhB,CAAvhB,EAAyhB,CAAzhB,EAA2hB,EAA3hB,EAA8hB,CAA9hB,EAAgiB,CAAhiB,EAAkiB,EAAliB,EAAqiB,CAAriB,EAAuiB,EAAviB,EAA0iB,CAA1iB,EAA4iB,KAA5iB,EAAkjB,CAAljB,EAAojB,IAApjB,EAAyjB,EAAzjB,EAA4jB,CAA5jB,EAA8jB,EAA9jB,EAAikB,GAAjkB,EAAqkB,CAArkB,EAAukB,CAAvkB,EAAykB,CAAzkB,EAA2kB,CAA3kB,EAA6kB,CAA7kB,EAA+
kB,CAA/kB,EAAilB,CAAjlB,EAAmlB,EAAnlB,EAAslB,CAAtlB,EAAwlB,GAAxlB,EAA4lB,CAA5lB,EAA8lB,IAA9lB,EAAmmB,EAAnmB,EAAsmB,GAAtmB,EAA0mB,EAA1mB,EAA6mB,CAA7mB,EAA+mB,EAA/mB,EAAknB,CAAlnB,EAAonB,CAApnB,EAAsnB,EAAtnB,EAAynB,CAAznB,EAA2nB,EAA3nB,EAA8nB,CAA9nB,EAAgoB,CAAhoB,EAAkoB,EAAloB,EAAqoB,IAAroB,EAA0oB,CAA1oB,EAA4oB,CAA5oB,EAA8oB,EAA9oB,EAAipB,CAAjpB,EAAmpB,CAAnpB,EAAqpB,CAArpB,EAAupB,CAAvpB,EAAypB,CAAzpB,EAA2pB,CAA3pB,EAA6pB,GAA7pB,EAAiqB,CAAjqB,EAAmqB,GAAnqB,EAAuqB,CAAvqB,EAAyqB,EAAzqB,EAA4qB,CAA5qB,EAA8qB,GAA9qB,EAAkrB,CAAlrB,EAAorB,EAAprB,EAAurB,EAAvrB,EAA0rB,GAA1rB,EAA8rB,EAA9rB,EAAisB,GAAjsB,EAAqsB,CAArsB,EAAusB,GAAvsB,EAA2sB,CAA3sB,EAA6sB,CAA7sB,EAA+sB,CAA/sB,EAAitB,IAAjtB,EAAstB,CAAttB,EAAwtB,MAAxtB,EAA+tB,GAA/tB,CAA9B;;AAKA,SAASC,aAAT,CAAuBC,IAAvB,EAAqCC,GAArC,EAAsE;EACpE,IAAIC,GAAG,GAAG,OAAV;;EACA,KAAK,IAAIC,CAAC,GAAG,CAAR,EAAWC,MAAM,GAAGH,GAAG,CAACG,MAA7B,EAAqCD,CAAC,GAAGC,MAAzC,EAAiDD,CAAC,IAAI,CAAtD,EAAyD;IACvDD,GAAG,IAAID,GAAG,CAACE,CAAD,CAAV;IACA,IAAID,GAAG,GAAGF,IAAV,EAAgB,OAAO,KAAP;IAEhBE,GAAG,IAAID,GAAG,CAACE,CAAC,GAAG,CAAL,CAAV;IACA,IAAID,GAAG,IAAIF,IAAX,EAAiB,OAAO,IAAP;EAClB;;EACD,OAAO,KAAP;AACD;;AAIM,SAASK,iBAAT,CAA2BL,IAA3B,EAAkD;EACvD,IAAIA,IAAI,KAAR,EAAiC,OAAOA,IAAI,OAAX;EACjC,IAAIA,IAAI,MAAR,EAAkC,OAAO,IAAP;EAClC,IAAIA,IAAI,KAAR,EAAiC,OAAOA,IAAI,OAAX;EACjC,IAAIA,IAAI,OAAR,EAAkC,OAAO,IAAP;;EAClC,IAAIA,IAAI,IAAI,MAAZ,EAAoB;IAClB,OACEA,IAAI,IAAI,IAAR,IAAgBN,uBAAuB,CAACY,IAAxB,CAA6BC,MAAM,CAACC,YAAP,CAAoBR,IAApB,CAA7B,CADlB;EAGD;;EACD,OAAOD,aAAa,CAACC,IAAD,EAAOH,0BAAP,CAApB;AACD;;AAIM,SAASY,gBAAT,CAA0BT,IAA1B,EAAiD;EACtD,IAAIA,IAAI,KAAR,EAA6B,OAAOA,IAAI,OAAX;EAC7B,IAAIA,IAAI,KAAR,EAA4B,OAAO,IAAP;EAC5B,IAAIA,IAAI,KAAR,EAAiC,OAAO,KAAP;EACjC,IAAIA,IAAI,MAAR,EAAkC,OAAO,IAAP;EAClC,IAAIA,IAAI,KAAR,EAAiC,OAAOA,IAAI,OAAX;EACjC,IAAIA,IAAI,OAAR,EAAkC,OAAO,IAAP;;EAClC,IAAIA,IAAI,IAAI,MAAZ,EAAoB;IAClB,OAAOA,IAAI,IAAI,IAAR,IAAgBJ,kBAAkB,CAACU,IAAnB,CAAwBC,MAAM,CAACC,YAAP,CAAoBR,IAApB,CAAxB,CAAvB;EACD;;EACD,OACED,aAAa,CAACC,IAAD,EAAOH,0BAAP,CAAb,IACAE,aAAa,CAACC,IAAD,EAAOF,qBAAP,CAFf;AAID;;AAIM,SAASY,gBAAT,CAA0BC,IAA1B,EAAiD;EACtD,IAAIC,OAAO,GAAG,IAAd;;EACA,KAAK,IAAIT,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGQ,IAAI,CAACP,MAAzB,EAAiCD,CAAC,EAAlC,EAAsC;IAKpC,IAAIU,EAAE,GAAGF,IAAI,CAACG,UAAL,CAAgBX,CAAhB,CAAT;;IACA,IAAI,CAACU,EAAE,GAAG,MAAN,MAAkB,MAAlB,IAA4BV,CAAC,GAAG,CAAJ,GAAQQ,IAAI,CAACP,MAA7C,EAAqD;MACnD,MAAMW,KAAK,GAAGJ,IAAI,CAACG,UAAL,CAAgB,EAAEX,CAAlB,CAAd;;MACA,IAAI,CAACY,KAAK,GAAG,MAAT,MAAqB,MAAzB,EAAiC;QAC/BF,EAAE,GAAG,WAAW,CAACA,EAAE,GAAG,KAAN,KAAgB,EAA3B,KAAkCE,KAAK,GAAG,KAA1C,CAAL;MACD;IACF;;IACD,IAAIH,OAAJ,EAAa;MACXA,OAAO,GAAG,KAAV;;MACA,IAAI,CAACP,iBAAiB,CAACQ,EAAD,CAAtB,EAA4B;QAC1B,OAAO,KAAP;MACD;IACF,CALD,MAKO,IAAI,CAACJ,gBAAgB,CAACI,EAAD,CAArB,EAA2B;MAChC,OAAO,KAAP;IACD;EACF;;EACD,OAAO,CAACD,OAAR;AACD"} \ No newline at end of file diff --git a/mybulma/node_modules/@babel/helper-validator-identifier/lib/index.js b/mybulma/node_modules/@babel/helper-validator-identifier/lib/index.js new file mode 100644 index 0000000..076eafd --- /dev/null +++ b/mybulma/node_modules/@babel/helper-validator-identifier/lib/index.js @@ -0,0 +1,59 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "isIdentifierChar", { + enumerable: true, + get: function () { + return _identifier.isIdentifierChar; + } +}); +Object.defineProperty(exports, "isIdentifierName", { + enumerable: true, + get: function () { + return _identifier.isIdentifierName; + } +}); +Object.defineProperty(exports, "isIdentifierStart", { + enumerable: true, + get: function () { 
+ return _identifier.isIdentifierStart; + } +}); +Object.defineProperty(exports, "isKeyword", { + enumerable: true, + get: function () { + return _keyword.isKeyword; + } +}); +Object.defineProperty(exports, "isReservedWord", { + enumerable: true, + get: function () { + return _keyword.isReservedWord; + } +}); +Object.defineProperty(exports, "isStrictBindOnlyReservedWord", { + enumerable: true, + get: function () { + return _keyword.isStrictBindOnlyReservedWord; + } +}); +Object.defineProperty(exports, "isStrictBindReservedWord", { + enumerable: true, + get: function () { + return _keyword.isStrictBindReservedWord; + } +}); +Object.defineProperty(exports, "isStrictReservedWord", { + enumerable: true, + get: function () { + return _keyword.isStrictReservedWord; + } +}); + +var _identifier = require("./identifier"); + +var _keyword = require("./keyword"); + +//# sourceMappingURL=index.js.map diff --git a/mybulma/node_modules/@babel/helper-validator-identifier/lib/index.js.map b/mybulma/node_modules/@babel/helper-validator-identifier/lib/index.js.map new file mode 100644 index 0000000..13266b9 --- /dev/null +++ b/mybulma/node_modules/@babel/helper-validator-identifier/lib/index.js.map @@ -0,0 +1 @@ +{"version":3,"names":[],"sources":["../src/index.ts"],"sourcesContent":["export {\n isIdentifierName,\n isIdentifierChar,\n isIdentifierStart,\n} from \"./identifier\";\nexport {\n isReservedWord,\n isStrictBindOnlyReservedWord,\n isStrictBindReservedWord,\n isStrictReservedWord,\n isKeyword,\n} from \"./keyword\";\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;AAKA"} \ No newline at end of file diff --git a/mybulma/node_modules/@babel/helper-validator-identifier/lib/keyword.js b/mybulma/node_modules/@babel/helper-validator-identifier/lib/keyword.js new file mode 100644 index 0000000..ff6277b --- /dev/null +++ b/mybulma/node_modules/@babel/helper-validator-identifier/lib/keyword.js @@ -0,0 +1,40 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.isKeyword = isKeyword; +exports.isReservedWord = isReservedWord; +exports.isStrictBindOnlyReservedWord = isStrictBindOnlyReservedWord; +exports.isStrictBindReservedWord = isStrictBindReservedWord; +exports.isStrictReservedWord = isStrictReservedWord; +const reservedWords = { + keyword: ["break", "case", "catch", "continue", "debugger", "default", "do", "else", "finally", "for", "function", "if", "return", "switch", "throw", "try", "var", "const", "while", "with", "new", "this", "super", "class", "extends", "export", "import", "null", "true", "false", "in", "instanceof", "typeof", "void", "delete"], + strict: ["implements", "interface", "let", "package", "private", "protected", "public", "static", "yield"], + strictBind: ["eval", "arguments"] +}; +const keywords = new Set(reservedWords.keyword); +const reservedWordsStrictSet = new Set(reservedWords.strict); +const reservedWordsStrictBindSet = new Set(reservedWords.strictBind); + +function isReservedWord(word, inModule) { + return inModule && word === "await" || word === "enum"; +} + +function isStrictReservedWord(word, inModule) { + return isReservedWord(word, inModule) || reservedWordsStrictSet.has(word); +} + +function isStrictBindOnlyReservedWord(word) { + return reservedWordsStrictBindSet.has(word); +} + +function isStrictBindReservedWord(word, inModule) { + return isStrictReservedWord(word, inModule) || isStrictBindOnlyReservedWord(word); +} + +function isKeyword(word) { + return keywords.has(word); +} + +//# 
sourceMappingURL=keyword.js.map diff --git a/mybulma/node_modules/@babel/helper-validator-identifier/lib/keyword.js.map b/mybulma/node_modules/@babel/helper-validator-identifier/lib/keyword.js.map new file mode 100644 index 0000000..2de8c3e --- /dev/null +++ b/mybulma/node_modules/@babel/helper-validator-identifier/lib/keyword.js.map @@ -0,0 +1 @@ +{"version":3,"names":["reservedWords","keyword","strict","strictBind","keywords","Set","reservedWordsStrictSet","reservedWordsStrictBindSet","isReservedWord","word","inModule","isStrictReservedWord","has","isStrictBindOnlyReservedWord","isStrictBindReservedWord","isKeyword"],"sources":["../src/keyword.ts"],"sourcesContent":["const reservedWords = {\n keyword: [\n \"break\",\n \"case\",\n \"catch\",\n \"continue\",\n \"debugger\",\n \"default\",\n \"do\",\n \"else\",\n \"finally\",\n \"for\",\n \"function\",\n \"if\",\n \"return\",\n \"switch\",\n \"throw\",\n \"try\",\n \"var\",\n \"const\",\n \"while\",\n \"with\",\n \"new\",\n \"this\",\n \"super\",\n \"class\",\n \"extends\",\n \"export\",\n \"import\",\n \"null\",\n \"true\",\n \"false\",\n \"in\",\n \"instanceof\",\n \"typeof\",\n \"void\",\n \"delete\",\n ],\n strict: [\n \"implements\",\n \"interface\",\n \"let\",\n \"package\",\n \"private\",\n \"protected\",\n \"public\",\n \"static\",\n \"yield\",\n ],\n strictBind: [\"eval\", \"arguments\"],\n};\nconst keywords = new Set(reservedWords.keyword);\nconst reservedWordsStrictSet = new Set(reservedWords.strict);\nconst reservedWordsStrictBindSet = new Set(reservedWords.strictBind);\n\n/**\n * Checks if word is a reserved word in non-strict mode\n */\nexport function isReservedWord(word: string, inModule: boolean): boolean {\n return (inModule && word === \"await\") || word === \"enum\";\n}\n\n/**\n * Checks if word is a reserved word in non-binding strict mode\n *\n * Includes non-strict reserved words\n */\nexport function isStrictReservedWord(word: string, inModule: boolean): boolean {\n return isReservedWord(word, inModule) || reservedWordsStrictSet.has(word);\n}\n\n/**\n * Checks if word is a reserved word in binding strict mode, but it is allowed as\n * a normal identifier.\n */\nexport function isStrictBindOnlyReservedWord(word: string): boolean {\n return reservedWordsStrictBindSet.has(word);\n}\n\n/**\n * Checks if word is a reserved word in binding strict mode\n *\n * Includes non-strict reserved words and non-binding strict reserved words\n */\nexport function isStrictBindReservedWord(\n word: string,\n inModule: boolean,\n): boolean {\n return (\n isStrictReservedWord(word, inModule) || isStrictBindOnlyReservedWord(word)\n );\n}\n\nexport function isKeyword(word: string): boolean {\n return 
keywords.has(word);\n}\n"],"mappings":";;;;;;;;;;AAAA,MAAMA,aAAa,GAAG;EACpBC,OAAO,EAAE,CACP,OADO,EAEP,MAFO,EAGP,OAHO,EAIP,UAJO,EAKP,UALO,EAMP,SANO,EAOP,IAPO,EAQP,MARO,EASP,SATO,EAUP,KAVO,EAWP,UAXO,EAYP,IAZO,EAaP,QAbO,EAcP,QAdO,EAeP,OAfO,EAgBP,KAhBO,EAiBP,KAjBO,EAkBP,OAlBO,EAmBP,OAnBO,EAoBP,MApBO,EAqBP,KArBO,EAsBP,MAtBO,EAuBP,OAvBO,EAwBP,OAxBO,EAyBP,SAzBO,EA0BP,QA1BO,EA2BP,QA3BO,EA4BP,MA5BO,EA6BP,MA7BO,EA8BP,OA9BO,EA+BP,IA/BO,EAgCP,YAhCO,EAiCP,QAjCO,EAkCP,MAlCO,EAmCP,QAnCO,CADW;EAsCpBC,MAAM,EAAE,CACN,YADM,EAEN,WAFM,EAGN,KAHM,EAIN,SAJM,EAKN,SALM,EAMN,WANM,EAON,QAPM,EAQN,QARM,EASN,OATM,CAtCY;EAiDpBC,UAAU,EAAE,CAAC,MAAD,EAAS,WAAT;AAjDQ,CAAtB;AAmDA,MAAMC,QAAQ,GAAG,IAAIC,GAAJ,CAAQL,aAAa,CAACC,OAAtB,CAAjB;AACA,MAAMK,sBAAsB,GAAG,IAAID,GAAJ,CAAQL,aAAa,CAACE,MAAtB,CAA/B;AACA,MAAMK,0BAA0B,GAAG,IAAIF,GAAJ,CAAQL,aAAa,CAACG,UAAtB,CAAnC;;AAKO,SAASK,cAAT,CAAwBC,IAAxB,EAAsCC,QAAtC,EAAkE;EACvE,OAAQA,QAAQ,IAAID,IAAI,KAAK,OAAtB,IAAkCA,IAAI,KAAK,MAAlD;AACD;;AAOM,SAASE,oBAAT,CAA8BF,IAA9B,EAA4CC,QAA5C,EAAwE;EAC7E,OAAOF,cAAc,CAACC,IAAD,EAAOC,QAAP,CAAd,IAAkCJ,sBAAsB,CAACM,GAAvB,CAA2BH,IAA3B,CAAzC;AACD;;AAMM,SAASI,4BAAT,CAAsCJ,IAAtC,EAA6D;EAClE,OAAOF,0BAA0B,CAACK,GAA3B,CAA+BH,IAA/B,CAAP;AACD;;AAOM,SAASK,wBAAT,CACLL,IADK,EAELC,QAFK,EAGI;EACT,OACEC,oBAAoB,CAACF,IAAD,EAAOC,QAAP,CAApB,IAAwCG,4BAA4B,CAACJ,IAAD,CADtE;AAGD;;AAEM,SAASM,SAAT,CAAmBN,IAAnB,EAA0C;EAC/C,OAAOL,QAAQ,CAACQ,GAAT,CAAaH,IAAb,CAAP;AACD"} \ No newline at end of file diff --git a/mybulma/node_modules/@babel/helper-validator-identifier/package.json b/mybulma/node_modules/@babel/helper-validator-identifier/package.json new file mode 100644 index 0000000..6e7de1e --- /dev/null +++ b/mybulma/node_modules/@babel/helper-validator-identifier/package.json @@ -0,0 +1,28 @@ +{ + "name": "@babel/helper-validator-identifier", + "version": "7.19.1", + "description": "Validate identifier/keywords name", + "repository": { + "type": "git", + "url": "https://github.com/babel/babel.git", + "directory": "packages/babel-helper-validator-identifier" + }, + "license": "MIT", + "publishConfig": { + "access": "public" + }, + "main": "./lib/index.js", + "exports": { + ".": "./lib/index.js", + "./package.json": "./package.json" + }, + "devDependencies": { + "@unicode/unicode-15.0.0": "^1.3.1", + "charcodes": "^0.2.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "author": "The Babel Team (https://babel.dev/team)", + "type": "commonjs" +} \ No newline at end of file diff --git a/mybulma/node_modules/@babel/helper-validator-identifier/scripts/generate-identifier-regex.js b/mybulma/node_modules/@babel/helper-validator-identifier/scripts/generate-identifier-regex.js new file mode 100644 index 0000000..aca8710 --- /dev/null +++ b/mybulma/node_modules/@babel/helper-validator-identifier/scripts/generate-identifier-regex.js @@ -0,0 +1,75 @@ +"use strict"; + +// Always use the latest available version of Unicode! 
+// https://tc39.github.io/ecma262/#sec-conformance +const version = "15.0.0"; + +const start = require("@unicode/unicode-" + + version + + "/Binary_Property/ID_Start/code-points.js").filter(function (ch) { + return ch > 0x7f; +}); +let last = -1; +const cont = [0x200c, 0x200d].concat( + require("@unicode/unicode-" + + version + + "/Binary_Property/ID_Continue/code-points.js").filter(function (ch) { + return ch > 0x7f && search(start, ch, last + 1) == -1; + }) +); + +function search(arr, ch, starting) { + for (let i = starting; arr[i] <= ch && i < arr.length; last = i++) { + if (arr[i] === ch) return i; + } + return -1; +} + +function pad(str, width) { + while (str.length < width) str = "0" + str; + return str; +} + +function esc(code) { + const hex = code.toString(16); + if (hex.length <= 2) return "\\x" + pad(hex, 2); + else return "\\u" + pad(hex, 4); +} + +function generate(chars) { + const astral = []; + let re = ""; + for (let i = 0, at = 0x10000; i < chars.length; i++) { + const from = chars[i]; + let to = from; + while (i < chars.length - 1 && chars[i + 1] == to + 1) { + i++; + to++; + } + if (to <= 0xffff) { + if (from == to) re += esc(from); + else if (from + 1 == to) re += esc(from) + esc(to); + else re += esc(from) + "-" + esc(to); + } else { + astral.push(from - at, to - from); + at = to; + } + } + return { nonASCII: re, astral: astral }; +} + +const startData = generate(start); +const contData = generate(cont); + +console.log("/* prettier-ignore */"); +console.log('let nonASCIIidentifierStartChars = "' + startData.nonASCII + '";'); +console.log("/* prettier-ignore */"); +console.log('let nonASCIIidentifierChars = "' + contData.nonASCII + '";'); +console.log("/* prettier-ignore */"); +console.log( + "const astralIdentifierStartCodes = " + JSON.stringify(startData.astral) + ";" +); +console.log("/* prettier-ignore */"); +console.log( + "const astralIdentifierCodes = " + JSON.stringify(contData.astral) + ";" +); diff --git a/mybulma/node_modules/@babel/highlight/LICENSE b/mybulma/node_modules/@babel/highlight/LICENSE new file mode 100644 index 0000000..f31575e --- /dev/null +++ b/mybulma/node_modules/@babel/highlight/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2014-present Sebastian McKenzie and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
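The @babel/helper-validator-identifier sources vendored above (lib/index.js, lib/keyword.js, and the regex-generator script) export a small set of predicate functions. As a quick orientation only — this snippet is not part of any committed file — a minimal usage sketch of those documented exports might look like this:

```js
// Illustrative sketch only — not part of the vendored package files above.
const {
  isIdentifierName,
  isKeyword,
  isStrictReservedWord,
  isStrictBindReservedWord,
} = require("@babel/helper-validator-identifier");

console.log(isIdentifierName("fooBar"));              // true
console.log(isIdentifierName("foo bar"));             // false — a space is not a valid identifier char
console.log(isKeyword("typeof"));                     // true — listed in reservedWords.keyword
console.log(isStrictReservedWord("yield", false));    // true — reserved only in strict mode
console.log(isStrictBindReservedWord("eval", false)); // true — not usable as a binding name in strict mode
```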
diff --git a/mybulma/node_modules/@babel/highlight/README.md b/mybulma/node_modules/@babel/highlight/README.md new file mode 100644 index 0000000..f8887ad --- /dev/null +++ b/mybulma/node_modules/@babel/highlight/README.md @@ -0,0 +1,19 @@ +# @babel/highlight + +> Syntax highlight JavaScript strings for output in terminals. + +See our website [@babel/highlight](https://babeljs.io/docs/en/babel-highlight) for more information. + +## Install + +Using npm: + +```sh +npm install --save-dev @babel/highlight +``` + +or using yarn: + +```sh +yarn add @babel/highlight --dev +``` diff --git a/mybulma/node_modules/@babel/highlight/lib/index.js b/mybulma/node_modules/@babel/highlight/lib/index.js new file mode 100644 index 0000000..856dfd9 --- /dev/null +++ b/mybulma/node_modules/@babel/highlight/lib/index.js @@ -0,0 +1,116 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = highlight; +exports.getChalk = getChalk; +exports.shouldHighlight = shouldHighlight; + +var _jsTokens = require("js-tokens"); + +var _helperValidatorIdentifier = require("@babel/helper-validator-identifier"); + +var _chalk = require("chalk"); + +const sometimesKeywords = new Set(["as", "async", "from", "get", "of", "set"]); + +function getDefs(chalk) { + return { + keyword: chalk.cyan, + capitalized: chalk.yellow, + jsxIdentifier: chalk.yellow, + punctuator: chalk.yellow, + number: chalk.magenta, + string: chalk.green, + regex: chalk.magenta, + comment: chalk.grey, + invalid: chalk.white.bgRed.bold + }; +} + +const NEWLINE = /\r\n|[\n\r\u2028\u2029]/; +const BRACKET = /^[()[\]{}]$/; +let tokenize; +{ + const JSX_TAG = /^[a-z][\w-]*$/i; + + const getTokenType = function (token, offset, text) { + if (token.type === "name") { + if ((0, _helperValidatorIdentifier.isKeyword)(token.value) || (0, _helperValidatorIdentifier.isStrictReservedWord)(token.value, true) || sometimesKeywords.has(token.value)) { + return "keyword"; + } + + if (JSX_TAG.test(token.value) && (text[offset - 1] === "<" || text.slice(offset - 2, offset) == " colorize(str)).join("\n"); + } else { + highlighted += value; + } + } + + return highlighted; +} + +function shouldHighlight(options) { + return !!_chalk.supportsColor || options.forceColor; +} + +function getChalk(options) { + return options.forceColor ? 
new _chalk.constructor({ + enabled: true, + level: 1 + }) : _chalk; +} + +function highlight(code, options = {}) { + if (code !== "" && shouldHighlight(options)) { + const chalk = getChalk(options); + const defs = getDefs(chalk); + return highlightTokens(defs, code); + } else { + return code; + } +} \ No newline at end of file diff --git a/mybulma/node_modules/@babel/highlight/node_modules/ansi-styles/index.js b/mybulma/node_modules/@babel/highlight/node_modules/ansi-styles/index.js new file mode 100644 index 0000000..90a871c --- /dev/null +++ b/mybulma/node_modules/@babel/highlight/node_modules/ansi-styles/index.js @@ -0,0 +1,165 @@ +'use strict'; +const colorConvert = require('color-convert'); + +const wrapAnsi16 = (fn, offset) => function () { + const code = fn.apply(colorConvert, arguments); + return `\u001B[${code + offset}m`; +}; + +const wrapAnsi256 = (fn, offset) => function () { + const code = fn.apply(colorConvert, arguments); + return `\u001B[${38 + offset};5;${code}m`; +}; + +const wrapAnsi16m = (fn, offset) => function () { + const rgb = fn.apply(colorConvert, arguments); + return `\u001B[${38 + offset};2;${rgb[0]};${rgb[1]};${rgb[2]}m`; +}; + +function assembleStyles() { + const codes = new Map(); + const styles = { + modifier: { + reset: [0, 0], + // 21 isn't widely supported and 22 does the same thing + bold: [1, 22], + dim: [2, 22], + italic: [3, 23], + underline: [4, 24], + inverse: [7, 27], + hidden: [8, 28], + strikethrough: [9, 29] + }, + color: { + black: [30, 39], + red: [31, 39], + green: [32, 39], + yellow: [33, 39], + blue: [34, 39], + magenta: [35, 39], + cyan: [36, 39], + white: [37, 39], + gray: [90, 39], + + // Bright color + redBright: [91, 39], + greenBright: [92, 39], + yellowBright: [93, 39], + blueBright: [94, 39], + magentaBright: [95, 39], + cyanBright: [96, 39], + whiteBright: [97, 39] + }, + bgColor: { + bgBlack: [40, 49], + bgRed: [41, 49], + bgGreen: [42, 49], + bgYellow: [43, 49], + bgBlue: [44, 49], + bgMagenta: [45, 49], + bgCyan: [46, 49], + bgWhite: [47, 49], + + // Bright color + bgBlackBright: [100, 49], + bgRedBright: [101, 49], + bgGreenBright: [102, 49], + bgYellowBright: [103, 49], + bgBlueBright: [104, 49], + bgMagentaBright: [105, 49], + bgCyanBright: [106, 49], + bgWhiteBright: [107, 49] + } + }; + + // Fix humans + styles.color.grey = styles.color.gray; + + for (const groupName of Object.keys(styles)) { + const group = styles[groupName]; + + for (const styleName of Object.keys(group)) { + const style = group[styleName]; + + styles[styleName] = { + open: `\u001B[${style[0]}m`, + close: `\u001B[${style[1]}m` + }; + + group[styleName] = styles[styleName]; + + codes.set(style[0], style[1]); + } + + Object.defineProperty(styles, groupName, { + value: group, + enumerable: false + }); + + Object.defineProperty(styles, 'codes', { + value: codes, + enumerable: false + }); + } + + const ansi2ansi = n => n; + const rgb2rgb = (r, g, b) => [r, g, b]; + + styles.color.close = '\u001B[39m'; + styles.bgColor.close = '\u001B[49m'; + + styles.color.ansi = { + ansi: wrapAnsi16(ansi2ansi, 0) + }; + styles.color.ansi256 = { + ansi256: wrapAnsi256(ansi2ansi, 0) + }; + styles.color.ansi16m = { + rgb: wrapAnsi16m(rgb2rgb, 0) + }; + + styles.bgColor.ansi = { + ansi: wrapAnsi16(ansi2ansi, 10) + }; + styles.bgColor.ansi256 = { + ansi256: wrapAnsi256(ansi2ansi, 10) + }; + styles.bgColor.ansi16m = { + rgb: wrapAnsi16m(rgb2rgb, 10) + }; + + for (let key of Object.keys(colorConvert)) { + if (typeof colorConvert[key] !== 'object') { + continue; + } + + const suite = 
colorConvert[key]; + + if (key === 'ansi16') { + key = 'ansi'; + } + + if ('ansi16' in suite) { + styles.color.ansi[key] = wrapAnsi16(suite.ansi16, 0); + styles.bgColor.ansi[key] = wrapAnsi16(suite.ansi16, 10); + } + + if ('ansi256' in suite) { + styles.color.ansi256[key] = wrapAnsi256(suite.ansi256, 0); + styles.bgColor.ansi256[key] = wrapAnsi256(suite.ansi256, 10); + } + + if ('rgb' in suite) { + styles.color.ansi16m[key] = wrapAnsi16m(suite.rgb, 0); + styles.bgColor.ansi16m[key] = wrapAnsi16m(suite.rgb, 10); + } + } + + return styles; +} + +// Make the export immutable +Object.defineProperty(module, 'exports', { + enumerable: true, + get: assembleStyles +}); diff --git a/mybulma/node_modules/@babel/highlight/node_modules/ansi-styles/license b/mybulma/node_modules/@babel/highlight/node_modules/ansi-styles/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/mybulma/node_modules/@babel/highlight/node_modules/ansi-styles/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
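The @babel/highlight README vendored above only covers installation, so a brief usage sketch may help. It relies on `highlight` being the package's default export and `forceColor` being the option checked in its lib/index.js (both visible in the vendored source); the snippet itself is hypothetical and not part of any committed file:

```js
// Illustrative sketch only — not part of the vendored package files above.
const highlight = require("@babel/highlight").default;

const code = "const answer = 40 + 2; // highlighted for terminal output";

// Returns `code` with ANSI color codes applied when the terminal supports
// color (see shouldHighlight in lib/index.js); forceColor bypasses that check.
console.log(highlight(code, { forceColor: true }));
```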
diff --git a/mybulma/node_modules/@babel/highlight/node_modules/ansi-styles/package.json b/mybulma/node_modules/@babel/highlight/node_modules/ansi-styles/package.json new file mode 100644 index 0000000..65edb48 --- /dev/null +++ b/mybulma/node_modules/@babel/highlight/node_modules/ansi-styles/package.json @@ -0,0 +1,56 @@ +{ + "name": "ansi-styles", + "version": "3.2.1", + "description": "ANSI escape codes for styling strings in the terminal", + "license": "MIT", + "repository": "chalk/ansi-styles", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava", + "screenshot": "svg-term --command='node screenshot' --out=screenshot.svg --padding=3 --width=55 --height=3 --at=1000 --no-cursor" + }, + "files": [ + "index.js" + ], + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "color-convert": "^1.9.0" + }, + "devDependencies": { + "ava": "*", + "babel-polyfill": "^6.23.0", + "svg-term-cli": "^2.1.1", + "xo": "*" + }, + "ava": { + "require": "babel-polyfill" + } +} diff --git a/mybulma/node_modules/@babel/highlight/node_modules/ansi-styles/readme.md b/mybulma/node_modules/@babel/highlight/node_modules/ansi-styles/readme.md new file mode 100644 index 0000000..3158e2d --- /dev/null +++ b/mybulma/node_modules/@babel/highlight/node_modules/ansi-styles/readme.md @@ -0,0 +1,147 @@ +# ansi-styles [![Build Status](https://travis-ci.org/chalk/ansi-styles.svg?branch=master)](https://travis-ci.org/chalk/ansi-styles) + +> [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal + +You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings. + + + + +## Install + +``` +$ npm install ansi-styles +``` + + +## Usage + +```js +const style = require('ansi-styles'); + +console.log(`${style.green.open}Hello world!${style.green.close}`); + + +// Color conversion between 16/256/truecolor +// NOTE: If conversion goes to 16 colors or 256 colors, the original color +// may be degraded to fit that color palette. This means terminals +// that do not support 16 million colors will best-match the +// original color. +console.log(style.bgColor.ansi.hsl(120, 80, 72) + 'Hello world!' + style.bgColor.close); +console.log(style.color.ansi256.rgb(199, 20, 250) + 'Hello world!' + style.color.close); +console.log(style.color.ansi16m.hex('#ABCDEF') + 'Hello world!' + style.color.close); +``` + +## API + +Each style has an `open` and `close` property. 
+ + +## Styles + +### Modifiers + +- `reset` +- `bold` +- `dim` +- `italic` *(Not widely supported)* +- `underline` +- `inverse` +- `hidden` +- `strikethrough` *(Not widely supported)* + +### Colors + +- `black` +- `red` +- `green` +- `yellow` +- `blue` +- `magenta` +- `cyan` +- `white` +- `gray` ("bright black") +- `redBright` +- `greenBright` +- `yellowBright` +- `blueBright` +- `magentaBright` +- `cyanBright` +- `whiteBright` + +### Background colors + +- `bgBlack` +- `bgRed` +- `bgGreen` +- `bgYellow` +- `bgBlue` +- `bgMagenta` +- `bgCyan` +- `bgWhite` +- `bgBlackBright` +- `bgRedBright` +- `bgGreenBright` +- `bgYellowBright` +- `bgBlueBright` +- `bgMagentaBright` +- `bgCyanBright` +- `bgWhiteBright` + + +## Advanced usage + +By default, you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module. + +- `style.modifier` +- `style.color` +- `style.bgColor` + +###### Example + +```js +console.log(style.color.green.open); +``` + +Raw escape codes (i.e. without the CSI escape prefix `\u001B[` and render mode postfix `m`) are available under `style.codes`, which returns a `Map` with the open codes as keys and close codes as values. + +###### Example + +```js +console.log(style.codes.get(36)); +//=> 39 +``` + + +## [256 / 16 million (TrueColor) support](https://gist.github.com/XVilka/8346728) + +`ansi-styles` uses the [`color-convert`](https://github.com/Qix-/color-convert) package to allow for converting between various colors and ANSI escapes, with support for 256 and 16 million colors. + +To use these, call the associated conversion function with the intended output, for example: + +```js +style.color.ansi.rgb(100, 200, 15); // RGB to 16 color ansi foreground code +style.bgColor.ansi.rgb(100, 200, 15); // RGB to 16 color ansi background code + +style.color.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code +style.bgColor.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code + +style.color.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color foreground code +style.bgColor.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color background code +``` + + +## Related + +- [ansi-escapes](https://github.com/sindresorhus/ansi-escapes) - ANSI escape codes for manipulating the terminal + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +## License + +MIT diff --git a/mybulma/node_modules/@babel/highlight/node_modules/chalk/index.js b/mybulma/node_modules/@babel/highlight/node_modules/chalk/index.js new file mode 100644 index 0000000..1cc5fa8 --- /dev/null +++ b/mybulma/node_modules/@babel/highlight/node_modules/chalk/index.js @@ -0,0 +1,228 @@ +'use strict'; +const escapeStringRegexp = require('escape-string-regexp'); +const ansiStyles = require('ansi-styles'); +const stdoutColor = require('supports-color').stdout; + +const template = require('./templates.js'); + +const isSimpleWindowsTerm = process.platform === 'win32' && !(process.env.TERM || '').toLowerCase().startsWith('xterm'); + +// `supportsColor.level` → `ansiStyles.color[name]` mapping +const levelMapping = ['ansi', 'ansi', 'ansi256', 'ansi16m']; + +// `color-convert` models to exclude from the Chalk API due to conflicts and such +const skipModels = new Set(['gray']); + +const styles = Object.create(null); + +function applyOptions(obj, options) { + options = options || {}; + + // 
Detect level if not set manually + const scLevel = stdoutColor ? stdoutColor.level : 0; + obj.level = options.level === undefined ? scLevel : options.level; + obj.enabled = 'enabled' in options ? options.enabled : obj.level > 0; +} + +function Chalk(options) { + // We check for this.template here since calling `chalk.constructor()` + // by itself will have a `this` of a previously constructed chalk object + if (!this || !(this instanceof Chalk) || this.template) { + const chalk = {}; + applyOptions(chalk, options); + + chalk.template = function () { + const args = [].slice.call(arguments); + return chalkTag.apply(null, [chalk.template].concat(args)); + }; + + Object.setPrototypeOf(chalk, Chalk.prototype); + Object.setPrototypeOf(chalk.template, chalk); + + chalk.template.constructor = Chalk; + + return chalk.template; + } + + applyOptions(this, options); +} + +// Use bright blue on Windows as the normal blue color is illegible +if (isSimpleWindowsTerm) { + ansiStyles.blue.open = '\u001B[94m'; +} + +for (const key of Object.keys(ansiStyles)) { + ansiStyles[key].closeRe = new RegExp(escapeStringRegexp(ansiStyles[key].close), 'g'); + + styles[key] = { + get() { + const codes = ansiStyles[key]; + return build.call(this, this._styles ? this._styles.concat(codes) : [codes], this._empty, key); + } + }; +} + +styles.visible = { + get() { + return build.call(this, this._styles || [], true, 'visible'); + } +}; + +ansiStyles.color.closeRe = new RegExp(escapeStringRegexp(ansiStyles.color.close), 'g'); +for (const model of Object.keys(ansiStyles.color.ansi)) { + if (skipModels.has(model)) { + continue; + } + + styles[model] = { + get() { + const level = this.level; + return function () { + const open = ansiStyles.color[levelMapping[level]][model].apply(null, arguments); + const codes = { + open, + close: ansiStyles.color.close, + closeRe: ansiStyles.color.closeRe + }; + return build.call(this, this._styles ? this._styles.concat(codes) : [codes], this._empty, model); + }; + } + }; +} + +ansiStyles.bgColor.closeRe = new RegExp(escapeStringRegexp(ansiStyles.bgColor.close), 'g'); +for (const model of Object.keys(ansiStyles.bgColor.ansi)) { + if (skipModels.has(model)) { + continue; + } + + const bgModel = 'bg' + model[0].toUpperCase() + model.slice(1); + styles[bgModel] = { + get() { + const level = this.level; + return function () { + const open = ansiStyles.bgColor[levelMapping[level]][model].apply(null, arguments); + const codes = { + open, + close: ansiStyles.bgColor.close, + closeRe: ansiStyles.bgColor.closeRe + }; + return build.call(this, this._styles ? 
this._styles.concat(codes) : [codes], this._empty, model); + }; + } + }; +} + +const proto = Object.defineProperties(() => {}, styles); + +function build(_styles, _empty, key) { + const builder = function () { + return applyStyle.apply(builder, arguments); + }; + + builder._styles = _styles; + builder._empty = _empty; + + const self = this; + + Object.defineProperty(builder, 'level', { + enumerable: true, + get() { + return self.level; + }, + set(level) { + self.level = level; + } + }); + + Object.defineProperty(builder, 'enabled', { + enumerable: true, + get() { + return self.enabled; + }, + set(enabled) { + self.enabled = enabled; + } + }); + + // See below for fix regarding invisible grey/dim combination on Windows + builder.hasGrey = this.hasGrey || key === 'gray' || key === 'grey'; + + // `__proto__` is used because we must return a function, but there is + // no way to create a function with a different prototype + builder.__proto__ = proto; // eslint-disable-line no-proto + + return builder; +} + +function applyStyle() { + // Support varags, but simply cast to string in case there's only one arg + const args = arguments; + const argsLen = args.length; + let str = String(arguments[0]); + + if (argsLen === 0) { + return ''; + } + + if (argsLen > 1) { + // Don't slice `arguments`, it prevents V8 optimizations + for (let a = 1; a < argsLen; a++) { + str += ' ' + args[a]; + } + } + + if (!this.enabled || this.level <= 0 || !str) { + return this._empty ? '' : str; + } + + // Turns out that on Windows dimmed gray text becomes invisible in cmd.exe, + // see https://github.com/chalk/chalk/issues/58 + // If we're on Windows and we're dealing with a gray color, temporarily make 'dim' a noop. + const originalDim = ansiStyles.dim.open; + if (isSimpleWindowsTerm && this.hasGrey) { + ansiStyles.dim.open = ''; + } + + for (const code of this._styles.slice().reverse()) { + // Replace any instances already present with a re-opening code + // otherwise only the part of the string until said closing code + // will be colored, and the rest will simply be 'plain'. + str = code.open + str.replace(code.closeRe, code.open) + code.close; + + // Close the styling before a linebreak and reopen + // after next line to fix a bleed issue on macOS + // https://github.com/chalk/chalk/pull/92 + str = str.replace(/\r?\n/g, `${code.close}$&${code.open}`); + } + + // Reset the original `dim` if we changed it to work around the Windows dimmed gray issue + ansiStyles.dim.open = originalDim; + + return str; +} + +function chalkTag(chalk, strings) { + if (!Array.isArray(strings)) { + // If chalk() was called by itself or with a string, + // return the string itself as a string. 
+ return [].slice.call(arguments, 1).join(' '); + } + + const args = [].slice.call(arguments, 2); + const parts = [strings.raw[0]]; + + for (let i = 1; i < strings.length; i++) { + parts.push(String(args[i - 1]).replace(/[{}\\]/g, '\\$&')); + parts.push(String(strings.raw[i])); + } + + return template(chalk, parts.join('')); +} + +Object.defineProperties(Chalk.prototype, styles); + +module.exports = Chalk(); // eslint-disable-line new-cap +module.exports.supportsColor = stdoutColor; +module.exports.default = module.exports; // For TypeScript diff --git a/mybulma/node_modules/@babel/highlight/node_modules/chalk/index.js.flow b/mybulma/node_modules/@babel/highlight/node_modules/chalk/index.js.flow new file mode 100644 index 0000000..622caaa --- /dev/null +++ b/mybulma/node_modules/@babel/highlight/node_modules/chalk/index.js.flow @@ -0,0 +1,93 @@ +// @flow strict + +type TemplateStringsArray = $ReadOnlyArray; + +export type Level = $Values<{ + None: 0, + Basic: 1, + Ansi256: 2, + TrueColor: 3 +}>; + +export type ChalkOptions = {| + enabled?: boolean, + level?: Level +|}; + +export type ColorSupport = {| + level: Level, + hasBasic: boolean, + has256: boolean, + has16m: boolean +|}; + +export interface Chalk { + (...text: string[]): string, + (text: TemplateStringsArray, ...placeholders: string[]): string, + constructor(options?: ChalkOptions): Chalk, + enabled: boolean, + level: Level, + rgb(r: number, g: number, b: number): Chalk, + hsl(h: number, s: number, l: number): Chalk, + hsv(h: number, s: number, v: number): Chalk, + hwb(h: number, w: number, b: number): Chalk, + bgHex(color: string): Chalk, + bgKeyword(color: string): Chalk, + bgRgb(r: number, g: number, b: number): Chalk, + bgHsl(h: number, s: number, l: number): Chalk, + bgHsv(h: number, s: number, v: number): Chalk, + bgHwb(h: number, w: number, b: number): Chalk, + hex(color: string): Chalk, + keyword(color: string): Chalk, + + +reset: Chalk, + +bold: Chalk, + +dim: Chalk, + +italic: Chalk, + +underline: Chalk, + +inverse: Chalk, + +hidden: Chalk, + +strikethrough: Chalk, + + +visible: Chalk, + + +black: Chalk, + +red: Chalk, + +green: Chalk, + +yellow: Chalk, + +blue: Chalk, + +magenta: Chalk, + +cyan: Chalk, + +white: Chalk, + +gray: Chalk, + +grey: Chalk, + +blackBright: Chalk, + +redBright: Chalk, + +greenBright: Chalk, + +yellowBright: Chalk, + +blueBright: Chalk, + +magentaBright: Chalk, + +cyanBright: Chalk, + +whiteBright: Chalk, + + +bgBlack: Chalk, + +bgRed: Chalk, + +bgGreen: Chalk, + +bgYellow: Chalk, + +bgBlue: Chalk, + +bgMagenta: Chalk, + +bgCyan: Chalk, + +bgWhite: Chalk, + +bgBlackBright: Chalk, + +bgRedBright: Chalk, + +bgGreenBright: Chalk, + +bgYellowBright: Chalk, + +bgBlueBright: Chalk, + +bgMagentaBright: Chalk, + +bgCyanBright: Chalk, + +bgWhiteBrigh: Chalk, + + supportsColor: ColorSupport +}; + +declare module.exports: Chalk; diff --git a/mybulma/node_modules/@babel/highlight/node_modules/chalk/license b/mybulma/node_modules/@babel/highlight/node_modules/chalk/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/mybulma/node_modules/@babel/highlight/node_modules/chalk/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and 
to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/mybulma/node_modules/@babel/highlight/node_modules/chalk/package.json b/mybulma/node_modules/@babel/highlight/node_modules/chalk/package.json new file mode 100644 index 0000000..bc32468 --- /dev/null +++ b/mybulma/node_modules/@babel/highlight/node_modules/chalk/package.json @@ -0,0 +1,71 @@ +{ + "name": "chalk", + "version": "2.4.2", + "description": "Terminal string styling done right", + "license": "MIT", + "repository": "chalk/chalk", + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && tsc --project types && flow --max-warnings=0 && nyc ava", + "bench": "matcha benchmark.js", + "coveralls": "nyc report --reporter=text-lcov | coveralls" + }, + "files": [ + "index.js", + "templates.js", + "types/index.d.ts", + "index.js.flow" + ], + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "str", + "ansi", + "style", + "styles", + "tty", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "devDependencies": { + "ava": "*", + "coveralls": "^3.0.0", + "execa": "^0.9.0", + "flow-bin": "^0.68.0", + "import-fresh": "^2.0.0", + "matcha": "^0.7.0", + "nyc": "^11.0.2", + "resolve-from": "^4.0.0", + "typescript": "^2.5.3", + "xo": "*" + }, + "types": "types/index.d.ts", + "xo": { + "envs": [ + "node", + "mocha" + ], + "ignores": [ + "test/_flow.js" + ] + } +} diff --git a/mybulma/node_modules/@babel/highlight/node_modules/chalk/readme.md b/mybulma/node_modules/@babel/highlight/node_modules/chalk/readme.md new file mode 100644 index 0000000..d298e2c --- /dev/null +++ b/mybulma/node_modules/@babel/highlight/node_modules/chalk/readme.md @@ -0,0 +1,314 @@ +

+[Chalk logo banner (centered image) — HTML markup lost in extraction]

+ +> Terminal string styling done right + +[![Build Status](https://travis-ci.org/chalk/chalk.svg?branch=master)](https://travis-ci.org/chalk/chalk) [![Coverage Status](https://coveralls.io/repos/github/chalk/chalk/badge.svg?branch=master)](https://coveralls.io/github/chalk/chalk?branch=master) [![](https://img.shields.io/badge/unicorn-approved-ff69b4.svg)](https://www.youtube.com/watch?v=9auOCbH5Ns4) [![XO code style](https://img.shields.io/badge/code_style-XO-5ed9c7.svg)](https://github.com/xojs/xo) [![Mentioned in Awesome Node.js](https://awesome.re/mentioned-badge.svg)](https://github.com/sindresorhus/awesome-nodejs) + +### [See what's new in Chalk 2](https://github.com/chalk/chalk/releases/tag/v2.0.0) + + + + +## Highlights + +- Expressive API +- Highly performant +- Ability to nest styles +- [256/Truecolor color support](#256-and-truecolor-color-support) +- Auto-detects color support +- Doesn't extend `String.prototype` +- Clean and focused +- Actively maintained +- [Used by ~23,000 packages](https://www.npmjs.com/browse/depended/chalk) as of December 31, 2017 + + +## Install + +```console +$ npm install chalk +``` + + + + + + +## Usage + +```js +const chalk = require('chalk'); + +console.log(chalk.blue('Hello world!')); +``` + +Chalk comes with an easy to use composable API where you just chain and nest the styles you want. + +```js +const chalk = require('chalk'); +const log = console.log; + +// Combine styled and normal strings +log(chalk.blue('Hello') + ' World' + chalk.red('!')); + +// Compose multiple styles using the chainable API +log(chalk.blue.bgRed.bold('Hello world!')); + +// Pass in multiple arguments +log(chalk.blue('Hello', 'World!', 'Foo', 'bar', 'biz', 'baz')); + +// Nest styles +log(chalk.red('Hello', chalk.underline.bgBlue('world') + '!')); + +// Nest styles of the same type even (color, underline, background) +log(chalk.green( + 'I am a green line ' + + chalk.blue.underline.bold('with a blue substring') + + ' that becomes green again!' +)); + +// ES2015 template literal +log(` +CPU: ${chalk.red('90%')} +RAM: ${chalk.green('40%')} +DISK: ${chalk.yellow('70%')} +`); + +// ES2015 tagged template literal +log(chalk` +CPU: {red ${cpu.totalPercent}%} +RAM: {green ${ram.used / ram.total * 100}%} +DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%} +`); + +// Use RGB colors in terminal emulators that support it. +log(chalk.keyword('orange')('Yay for orange colored text!')); +log(chalk.rgb(123, 45, 67).underline('Underlined reddish color')); +log(chalk.hex('#DEADED').bold('Bold gray!')); +``` + +Easily define your own themes: + +```js +const chalk = require('chalk'); + +const error = chalk.bold.red; +const warning = chalk.keyword('orange'); + +console.log(error('Error!')); +console.log(warning('Warning!')); +``` + +Take advantage of console.log [string substitution](https://nodejs.org/docs/latest/api/console.html#console_console_log_data_args): + +```js +const name = 'Sindre'; +console.log(chalk.green('Hello %s'), name); +//=> 'Hello Sindre' +``` + + +## API + +### chalk.` + + +
[istanbul HTML coverage report, coverage/__root__/index.html (unique-filename): markup lost in extraction. Recoverable summary — "Code coverage report for __root__/", breadcrumb "All files » __root__/"; Statements 100% (4/4), Branches 100% (2/2), Functions 100% (1/1), Lines 100% (4/4), Ignored: none; the file table lists index.js at 100% for all four metrics.]
diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/__root__/index.js.html b/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/__root__/index.js.html
new file mode 100644
index 0000000..02e5768
--- /dev/null
+++ b/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/__root__/index.js.html
@@ -0,0 +1,69 @@
[istanbul HTML coverage report, coverage/__root__/index.js.html: markup lost in extraction. Recoverable summary — "Code coverage report for index.js", breadcrumb "All files » __root__/ » index.js"; Statements 100% (4/4), Branches 100% (2/2), Functions 100% (1/1), Lines 100% (4/4), Ignored: none; the annotated listing shows the nine lines of index.js (the unique-filename module) with hit counts 1/1/1/6 on its executable lines.]
+ + + + + + diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/base.css b/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/base.css new file mode 100644 index 0000000..a6a2f32 --- /dev/null +++ b/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/base.css @@ -0,0 +1,182 @@ +body, html { + margin:0; padding: 0; +} +body { + font-family: Helvetica Neue, Helvetica,Arial; + font-size: 10pt; +} +div.header, div.footer { + background: #eee; + padding: 1em; +} +div.header { + z-index: 100; + position: fixed; + top: 0; + border-bottom: 1px solid #666; + width: 100%; +} +div.footer { + border-top: 1px solid #666; +} +div.body { + margin-top: 10em; +} +div.meta { + font-size: 90%; + text-align: center; +} +h1, h2, h3 { + font-weight: normal; +} +h1 { + font-size: 12pt; +} +h2 { + font-size: 10pt; +} +pre { + font-family: Consolas, Menlo, Monaco, monospace; + margin: 0; + padding: 0; + line-height: 1.3; + font-size: 14px; + -moz-tab-size: 2; + -o-tab-size: 2; + tab-size: 2; +} + +div.path { font-size: 110%; } +div.path a:link, div.path a:visited { color: #000; } +table.coverage { border-collapse: collapse; margin:0; padding: 0 } + +table.coverage td { + margin: 0; + padding: 0; + color: #111; + vertical-align: top; +} +table.coverage td.line-count { + width: 50px; + text-align: right; + padding-right: 5px; +} +table.coverage td.line-coverage { + color: #777 !important; + text-align: right; + border-left: 1px solid #666; + border-right: 1px solid #666; +} + +table.coverage td.text { +} + +table.coverage td span.cline-any { + display: inline-block; + padding: 0 5px; + width: 40px; +} +table.coverage td span.cline-neutral { + background: #eee; +} +table.coverage td span.cline-yes { + background: #b5d592; + color: #999; +} +table.coverage td span.cline-no { + background: #fc8c84; +} + +.cstat-yes { color: #111; } +.cstat-no { background: #fc8c84; color: #111; } +.fstat-no { background: #ffc520; color: #111 !important; } +.cbranch-no { background: yellow !important; color: #111; } + +.cstat-skip { background: #ddd; color: #111; } +.fstat-skip { background: #ddd; color: #111 !important; } +.cbranch-skip { background: #ddd !important; color: #111; } + +.missing-if-branch { + display: inline-block; + margin-right: 10px; + position: relative; + padding: 0 4px; + background: black; + color: yellow; +} + +.skip-if-branch { + display: none; + margin-right: 10px; + position: relative; + padding: 0 4px; + background: #ccc; + color: white; +} + +.missing-if-branch .typ, .skip-if-branch .typ { + color: inherit !important; +} + +.entity, .metric { font-weight: bold; } +.metric { display: inline-block; border: 1px solid #333; padding: 0.3em; background: white; } +.metric small { font-size: 80%; font-weight: normal; color: #666; } + +div.coverage-summary table { border-collapse: collapse; margin: 3em; font-size: 110%; } +div.coverage-summary td, div.coverage-summary table th { margin: 0; padding: 0.25em 1em; border-top: 1px solid #666; border-bottom: 1px solid #666; } +div.coverage-summary th { text-align: left; border: 1px solid #666; background: #eee; font-weight: normal; } +div.coverage-summary th.file { border-right: none !important; } +div.coverage-summary th.pic { border-left: none !important; text-align: right; } +div.coverage-summary th.pct { border-right: none !important; } +div.coverage-summary th.abs { border-left: none !important; text-align: right; } +div.coverage-summary td.pct { text-align: right; border-left: 1px solid #666; } 
+div.coverage-summary td.abs { text-align: right; font-size: 90%; color: #444; border-right: 1px solid #666; } +div.coverage-summary td.file { border-left: 1px solid #666; white-space: nowrap; } +div.coverage-summary td.pic { min-width: 120px !important; } +div.coverage-summary a:link { text-decoration: none; color: #000; } +div.coverage-summary a:visited { text-decoration: none; color: #777; } +div.coverage-summary a:hover { text-decoration: underline; } +div.coverage-summary tfoot td { border-top: 1px solid #666; } + +div.coverage-summary .sorter { + height: 10px; + width: 7px; + display: inline-block; + margin-left: 0.5em; + background: url(sort-arrow-sprite.png) no-repeat scroll 0 0 transparent; +} +div.coverage-summary .sorted .sorter { + background-position: 0 -20px; +} +div.coverage-summary .sorted-desc .sorter { + background-position: 0 -10px; +} + +.high { background: #b5d592 !important; } +.medium { background: #ffe87c !important; } +.low { background: #fc8c84 !important; } + +span.cover-fill, span.cover-empty { + display:inline-block; + border:1px solid #444; + background: white; + height: 12px; +} +span.cover-fill { + background: #ccc; + border-right: 1px solid #444; +} +span.cover-empty { + background: white; + border-left: none; +} +span.cover-full { + border-right: none !important; +} +pre.prettyprint { + border: none !important; + padding: 0 !important; + margin: 0 !important; +} +.com { color: #999 !important; } +.ignore-none { color: #999; font-weight: normal; } diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/index.html b/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/index.html new file mode 100644 index 0000000..b10d186 --- /dev/null +++ b/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/index.html @@ -0,0 +1,73 @@ + + + + Code coverage report for All files + + + + + + +
[istanbul HTML coverage report, coverage/index.html: markup lost in extraction. Recoverable summary — "Code coverage report for All files"; Statements 100% (4/4), Branches 100% (2/2), Functions 100% (1/1), Lines 100% (4/4), Ignored: none; the file table lists __root__/ at 100% for all four metrics.]
+ + + + + + diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/prettify.css b/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/prettify.css new file mode 100644 index 0000000..b317a7c --- /dev/null +++ b/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/prettify.css @@ -0,0 +1 @@ +.pln{color:#000}@media screen{.str{color:#080}.kwd{color:#008}.com{color:#800}.typ{color:#606}.lit{color:#066}.pun,.opn,.clo{color:#660}.tag{color:#008}.atn{color:#606}.atv{color:#080}.dec,.var{color:#606}.fun{color:red}}@media print,projection{.str{color:#060}.kwd{color:#006;font-weight:bold}.com{color:#600;font-style:italic}.typ{color:#404;font-weight:bold}.lit{color:#044}.pun,.opn,.clo{color:#440}.tag{color:#006;font-weight:bold}.atn{color:#404}.atv{color:#060}}pre.prettyprint{padding:2px;border:1px solid #888}ol.linenums{margin-top:0;margin-bottom:0}li.L0,li.L1,li.L2,li.L3,li.L5,li.L6,li.L7,li.L8{list-style-type:none}li.L1,li.L3,li.L5,li.L7,li.L9{background:#eee} diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/prettify.js b/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/prettify.js new file mode 100644 index 0000000..ef51e03 --- /dev/null +++ b/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/prettify.js @@ -0,0 +1 @@ +window.PR_SHOULD_USE_CONTINUATION=true;(function(){var h=["break,continue,do,else,for,if,return,while"];var u=[h,"auto,case,char,const,default,double,enum,extern,float,goto,int,long,register,short,signed,sizeof,static,struct,switch,typedef,union,unsigned,void,volatile"];var p=[u,"catch,class,delete,false,import,new,operator,private,protected,public,this,throw,true,try,typeof"];var l=[p,"alignof,align_union,asm,axiom,bool,concept,concept_map,const_cast,constexpr,decltype,dynamic_cast,explicit,export,friend,inline,late_check,mutable,namespace,nullptr,reinterpret_cast,static_assert,static_cast,template,typeid,typename,using,virtual,where"];var x=[p,"abstract,boolean,byte,extends,final,finally,implements,import,instanceof,null,native,package,strictfp,super,synchronized,throws,transient"];var R=[x,"as,base,by,checked,decimal,delegate,descending,dynamic,event,fixed,foreach,from,group,implicit,in,interface,internal,into,is,lock,object,out,override,orderby,params,partial,readonly,ref,sbyte,sealed,stackalloc,string,select,uint,ulong,unchecked,unsafe,ushort,var"];var r="all,and,by,catch,class,else,extends,false,finally,for,if,in,is,isnt,loop,new,no,not,null,of,off,on,or,return,super,then,true,try,unless,until,when,while,yes";var w=[p,"debugger,eval,export,function,get,null,set,undefined,var,with,Infinity,NaN"];var s="caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END";var I=[h,"and,as,assert,class,def,del,elif,except,exec,finally,from,global,import,in,is,lambda,nonlocal,not,or,pass,print,raise,try,with,yield,False,True,None"];var f=[h,"alias,and,begin,case,class,def,defined,elsif,end,ensure,false,in,module,next,nil,not,or,redo,rescue,retry,self,super,then,true,undef,unless,until,when,yield,BEGIN,END"];var H=[h,"case,done,elif,esac,eval,fi,function,in,local,set,then,until"];var A=[l,R,w,s+I,f,H];var e=/^(DIR|FILE|vector|(de|priority_)?queue|list|stack|(const_)?iterator|(multi)?(set|map)|bitset|u?(int|float)\d*)/;var C="str";var z="kwd";var j="com";var O="typ";var G="lit";var L="pun";var F="pln";var m="tag";var E="dec";var J="src";var P="atn";var 
n="atv";var N="nocode";var M="(?:^^\\.?|[+-]|\\!|\\!=|\\!==|\\#|\\%|\\%=|&|&&|&&=|&=|\\(|\\*|\\*=|\\+=|\\,|\\-=|\\->|\\/|\\/=|:|::|\\;|<|<<|<<=|<=|=|==|===|>|>=|>>|>>=|>>>|>>>=|\\?|\\@|\\[|\\^|\\^=|\\^\\^|\\^\\^=|\\{|\\||\\|=|\\|\\||\\|\\|=|\\~|break|case|continue|delete|do|else|finally|instanceof|return|throw|try|typeof)\\s*";function k(Z){var ad=0;var S=false;var ac=false;for(var V=0,U=Z.length;V122)){if(!(al<65||ag>90)){af.push([Math.max(65,ag)|32,Math.min(al,90)|32])}if(!(al<97||ag>122)){af.push([Math.max(97,ag)&~32,Math.min(al,122)&~32])}}}}af.sort(function(av,au){return(av[0]-au[0])||(au[1]-av[1])});var ai=[];var ap=[NaN,NaN];for(var ar=0;arat[0]){if(at[1]+1>at[0]){an.push("-")}an.push(T(at[1]))}}an.push("]");return an.join("")}function W(al){var aj=al.source.match(new RegExp("(?:\\[(?:[^\\x5C\\x5D]|\\\\[\\s\\S])*\\]|\\\\u[A-Fa-f0-9]{4}|\\\\x[A-Fa-f0-9]{2}|\\\\[0-9]+|\\\\[^ux0-9]|\\(\\?[:!=]|[\\(\\)\\^]|[^\\x5B\\x5C\\(\\)\\^]+)","g"));var ah=aj.length;var an=[];for(var ak=0,am=0;ak=2&&ai==="["){aj[ak]=X(ag)}else{if(ai!=="\\"){aj[ak]=ag.replace(/[a-zA-Z]/g,function(ao){var ap=ao.charCodeAt(0);return"["+String.fromCharCode(ap&~32,ap|32)+"]"})}}}}return aj.join("")}var aa=[];for(var V=0,U=Z.length;V=0;){S[ac.charAt(ae)]=Y}}var af=Y[1];var aa=""+af;if(!ag.hasOwnProperty(aa)){ah.push(af);ag[aa]=null}}ah.push(/[\0-\uffff]/);V=k(ah)})();var X=T.length;var W=function(ah){var Z=ah.sourceCode,Y=ah.basePos;var ad=[Y,F];var af=0;var an=Z.match(V)||[];var aj={};for(var ae=0,aq=an.length;ae=5&&"lang-"===ap.substring(0,5);if(am&&!(ai&&typeof ai[1]==="string")){am=false;ap=J}if(!am){aj[ag]=ap}}var ab=af;af+=ag.length;if(!am){ad.push(Y+ab,ap)}else{var al=ai[1];var ak=ag.indexOf(al);var ac=ak+al.length;if(ai[2]){ac=ag.length-ai[2].length;ak=ac-al.length}var ar=ap.substring(5);B(Y+ab,ag.substring(0,ak),W,ad);B(Y+ab+ak,al,q(ar,al),ad);B(Y+ab+ac,ag.substring(ac),W,ad)}}ah.decorations=ad};return W}function i(T){var W=[],S=[];if(T.tripleQuotedStrings){W.push([C,/^(?:\'\'\'(?:[^\'\\]|\\[\s\S]|\'{1,2}(?=[^\']))*(?:\'\'\'|$)|\"\"\"(?:[^\"\\]|\\[\s\S]|\"{1,2}(?=[^\"]))*(?:\"\"\"|$)|\'(?:[^\\\']|\\[\s\S])*(?:\'|$)|\"(?:[^\\\"]|\\[\s\S])*(?:\"|$))/,null,"'\""])}else{if(T.multiLineStrings){W.push([C,/^(?:\'(?:[^\\\']|\\[\s\S])*(?:\'|$)|\"(?:[^\\\"]|\\[\s\S])*(?:\"|$)|\`(?:[^\\\`]|\\[\s\S])*(?:\`|$))/,null,"'\"`"])}else{W.push([C,/^(?:\'(?:[^\\\'\r\n]|\\.)*(?:\'|$)|\"(?:[^\\\"\r\n]|\\.)*(?:\"|$))/,null,"\"'"])}}if(T.verbatimStrings){S.push([C,/^@\"(?:[^\"]|\"\")*(?:\"|$)/,null])}var Y=T.hashComments;if(Y){if(T.cStyleComments){if(Y>1){W.push([j,/^#(?:##(?:[^#]|#(?!##))*(?:###|$)|.*)/,null,"#"])}else{W.push([j,/^#(?:(?:define|elif|else|endif|error|ifdef|include|ifndef|line|pragma|undef|warning)\b|[^\r\n]*)/,null,"#"])}S.push([C,/^<(?:(?:(?:\.\.\/)*|\/?)(?:[\w-]+(?:\/[\w-]+)+)?[\w-]+\.h|[a-z]\w*)>/,null])}else{W.push([j,/^#[^\r\n]*/,null,"#"])}}if(T.cStyleComments){S.push([j,/^\/\/[^\r\n]*/,null]);S.push([j,/^\/\*[\s\S]*?(?:\*\/|$)/,null])}if(T.regexLiterals){var X=("/(?=[^/*])(?:[^/\\x5B\\x5C]|\\x5C[\\s\\S]|\\x5B(?:[^\\x5C\\x5D]|\\x5C[\\s\\S])*(?:\\x5D|$))+/");S.push(["lang-regex",new RegExp("^"+M+"("+X+")")])}var V=T.types;if(V){S.push([O,V])}var U=(""+T.keywords).replace(/^ | $/g,"");if(U.length){S.push([z,new RegExp("^(?:"+U.replace(/[\s,]+/g,"|")+")\\b"),null])}W.push([F,/^\s+/,null," \r\n\t\xA0"]);S.push([G,/^@[a-z_$][a-z_$@0-9]*/i,null],[O,/^(?:[@_]?[A-Z]+[a-z][A-Za-z_$@0-9]*|\w+_t\b)/,null],[F,/^[a-z_$][a-z_$@0-9]*/i,null],[G,new 
RegExp("^(?:0x[a-f0-9]+|(?:\\d(?:_\\d+)*\\d*(?:\\.\\d*)?|\\.\\d\\+)(?:e[+\\-]?\\d+)?)[a-z]*","i"),null,"0123456789"],[F,/^\\[\s\S]?/,null],[L,/^.[^\s\w\.$@\'\"\`\/\#\\]*/,null]);return g(W,S)}var K=i({keywords:A,hashComments:true,cStyleComments:true,multiLineStrings:true,regexLiterals:true});function Q(V,ag){var U=/(?:^|\s)nocode(?:\s|$)/;var ab=/\r\n?|\n/;var ac=V.ownerDocument;var S;if(V.currentStyle){S=V.currentStyle.whiteSpace}else{if(window.getComputedStyle){S=ac.defaultView.getComputedStyle(V,null).getPropertyValue("white-space")}}var Z=S&&"pre"===S.substring(0,3);var af=ac.createElement("LI");while(V.firstChild){af.appendChild(V.firstChild)}var W=[af];function ae(al){switch(al.nodeType){case 1:if(U.test(al.className)){break}if("BR"===al.nodeName){ad(al);if(al.parentNode){al.parentNode.removeChild(al)}}else{for(var an=al.firstChild;an;an=an.nextSibling){ae(an)}}break;case 3:case 4:if(Z){var am=al.nodeValue;var aj=am.match(ab);if(aj){var ai=am.substring(0,aj.index);al.nodeValue=ai;var ah=am.substring(aj.index+aj[0].length);if(ah){var ak=al.parentNode;ak.insertBefore(ac.createTextNode(ah),al.nextSibling)}ad(al);if(!ai){al.parentNode.removeChild(al)}}}break}}function ad(ak){while(!ak.nextSibling){ak=ak.parentNode;if(!ak){return}}function ai(al,ar){var aq=ar?al.cloneNode(false):al;var ao=al.parentNode;if(ao){var ap=ai(ao,1);var an=al.nextSibling;ap.appendChild(aq);for(var am=an;am;am=an){an=am.nextSibling;ap.appendChild(am)}}return aq}var ah=ai(ak.nextSibling,0);for(var aj;(aj=ah.parentNode)&&aj.nodeType===1;){ah=aj}W.push(ah)}for(var Y=0;Y=S){ah+=2}if(V>=ap){Z+=2}}}var t={};function c(U,V){for(var S=V.length;--S>=0;){var T=V[S];if(!t.hasOwnProperty(T)){t[T]=U}else{if(window.console){console.warn("cannot override language handler %s",T)}}}}function q(T,S){if(!(T&&t.hasOwnProperty(T))){T=/^\s*]*(?:>|$)/],[j,/^<\!--[\s\S]*?(?:-\->|$)/],["lang-",/^<\?([\s\S]+?)(?:\?>|$)/],["lang-",/^<%([\s\S]+?)(?:%>|$)/],[L,/^(?:<[%?]|[%?]>)/],["lang-",/^]*>([\s\S]+?)<\/xmp\b[^>]*>/i],["lang-js",/^]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-css",/^]*>([\s\S]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i]]),["default-markup","htm","html","mxml","xhtml","xml","xsl"]);c(g([[F,/^[\s]+/,null," 
\t\r\n"],[n,/^(?:\"[^\"]*\"?|\'[^\']*\'?)/,null,"\"'"]],[[m,/^^<\/?[a-z](?:[\w.:-]*\w)?|\/?>$/i],[P,/^(?!style[\s=]|on)[a-z](?:[\w:-]*\w)?/i],["lang-uq.val",/^=\s*([^>\'\"\s]*(?:[^>\'\"\s\/]|\/(?=\s)))/],[L,/^[=<>\/]+/],["lang-js",/^on\w+\s*=\s*\"([^\"]+)\"/i],["lang-js",/^on\w+\s*=\s*\'([^\']+)\'/i],["lang-js",/^on\w+\s*=\s*([^\"\'>\s]+)/i],["lang-css",/^style\s*=\s*\"([^\"]+)\"/i],["lang-css",/^style\s*=\s*\'([^\']+)\'/i],["lang-css",/^style\s*=\s*([^\"\'>\s]+)/i]]),["in.tag"]);c(g([],[[n,/^[\s\S]+/]]),["uq.val"]);c(i({keywords:l,hashComments:true,cStyleComments:true,types:e}),["c","cc","cpp","cxx","cyc","m"]);c(i({keywords:"null,true,false"}),["json"]);c(i({keywords:R,hashComments:true,cStyleComments:true,verbatimStrings:true,types:e}),["cs"]);c(i({keywords:x,cStyleComments:true}),["java"]);c(i({keywords:H,hashComments:true,multiLineStrings:true}),["bsh","csh","sh"]);c(i({keywords:I,hashComments:true,multiLineStrings:true,tripleQuotedStrings:true}),["cv","py"]);c(i({keywords:s,hashComments:true,multiLineStrings:true,regexLiterals:true}),["perl","pl","pm"]);c(i({keywords:f,hashComments:true,multiLineStrings:true,regexLiterals:true}),["rb"]);c(i({keywords:w,cStyleComments:true,regexLiterals:true}),["js"]);c(i({keywords:r,hashComments:3,cStyleComments:true,multilineStrings:true,tripleQuotedStrings:true,regexLiterals:true}),["coffee"]);c(g([],[[C,/^[\s\S]+/]]),["regex"]);function d(V){var U=V.langExtension;try{var S=a(V.sourceNode);var T=S.sourceCode;V.sourceCode=T;V.spans=S.spans;V.basePos=0;q(U,T)(V);D(V)}catch(W){if("console" in window){console.log(W&&W.stack?W.stack:W)}}}function y(W,V,U){var S=document.createElement("PRE");S.innerHTML=W;if(U){Q(S,U)}var T={langExtension:V,numberLines:U,sourceNode:S};d(T);return S.innerHTML}function b(ad){function Y(af){return document.getElementsByTagName(af)}var ac=[Y("pre"),Y("code"),Y("xmp")];var T=[];for(var aa=0;aa=0){var ah=ai.match(ab);var am;if(!ah&&(am=o(aj))&&"CODE"===am.tagName){ah=am.className.match(ab)}if(ah){ah=ah[1]}var al=false;for(var ak=aj.parentNode;ak;ak=ak.parentNode){if((ak.tagName==="pre"||ak.tagName==="code"||ak.tagName==="xmp")&&ak.className&&ak.className.indexOf("prettyprint")>=0){al=true;break}}if(!al){var af=aj.className.match(/\blinenums\b(?::(\d+))?/);af=af?af[1]&&af[1].length?+af[1]:true:false;if(af){Q(aj,af)}S={langExtension:ah,sourceNode:aj,numberLines:af};d(S)}}}if(X]*(?:>|$)/],[PR.PR_COMMENT,/^<\!--[\s\S]*?(?:-\->|$)/],[PR.PR_PUNCTUATION,/^(?:<[%?]|[%?]>)/],["lang-",/^<\?([\s\S]+?)(?:\?>|$)/],["lang-",/^<%([\s\S]+?)(?:%>|$)/],["lang-",/^]*>([\s\S]+?)<\/xmp\b[^>]*>/i],["lang-handlebars",/^]*type\s*=\s*['"]?text\/x-handlebars-template['"]?\b[^>]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-js",/^]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-css",/^]*>([\s\S]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i],[PR.PR_DECLARATION,/^{{[#^>/]?\s*[\w.][^}]*}}/],[PR.PR_DECLARATION,/^{{&?\s*[\w.][^}]*}}/],[PR.PR_DECLARATION,/^{{{>?\s*[\w.][^}]*}}}/],[PR.PR_COMMENT,/^{{![^}]*}}/]]),["handlebars","hbs"]);PR.registerLangHandler(PR.createSimpleLexer([[PR.PR_PLAIN,/^[ \t\r\n\f]+/,null," \t\r\n\f"]],[[PR.PR_STRING,/^\"(?:[^\n\r\f\\\"]|\\(?:\r\n?|\n|\f)|\\[\s\S])*\"/,null],[PR.PR_STRING,/^\'(?:[^\n\r\f\\\']|\\(?:\r\n?|\n|\f)|\\[\s\S])*\'/,null],["lang-css-str",/^url\(([^\)\"\']*)\)/i],[PR.PR_KEYWORD,/^(?:url|rgb|\!important|@import|@page|@media|@charset|inherit)(?=[^\-\w]|$)/i,null],["lang-css-kw",/^(-?(?:[_a-z]|(?:\\[0-9a-f]+ ?))(?:[_a-z0-9\-]|\\(?:\\[0-9a-f]+ 
?))*)\s*:/i],[PR.PR_COMMENT,/^\/\*[^*]*\*+(?:[^\/*][^*]*\*+)*\//],[PR.PR_COMMENT,/^(?:)/],[PR.PR_LITERAL,/^(?:\d+|\d*\.\d+)(?:%|[a-z]+)?/i],[PR.PR_LITERAL,/^#(?:[0-9a-f]{3}){1,2}/i],[PR.PR_PLAIN,/^-?(?:[_a-z]|(?:\\[\da-f]+ ?))(?:[_a-z\d\-]|\\(?:\\[\da-f]+ ?))*/i],[PR.PR_PUNCTUATION,/^[^\s\w\'\"]+/]]),["css"]);PR.registerLangHandler(PR.createSimpleLexer([],[[PR.PR_KEYWORD,/^-?(?:[_a-z]|(?:\\[\da-f]+ ?))(?:[_a-z\d\-]|\\(?:\\[\da-f]+ ?))*/i]]),["css-kw"]);PR.registerLangHandler(PR.createSimpleLexer([],[[PR.PR_STRING,/^[^\)\"\']+/]]),["css-str"]); diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/sort-arrow-sprite.png b/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/sort-arrow-sprite.png new file mode 100644 index 0000000..03f704a Binary files /dev/null and b/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/sort-arrow-sprite.png differ diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/sorter.js b/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/sorter.js new file mode 100644 index 0000000..6afb736 --- /dev/null +++ b/mybulma/node_modules/node-gyp/node_modules/unique-filename/coverage/sorter.js @@ -0,0 +1,156 @@ +var addSorting = (function () { + "use strict"; + var cols, + currentSort = { + index: 0, + desc: false + }; + + // returns the summary table element + function getTable() { return document.querySelector('.coverage-summary table'); } + // returns the thead element of the summary table + function getTableHeader() { return getTable().querySelector('thead tr'); } + // returns the tbody element of the summary table + function getTableBody() { return getTable().querySelector('tbody'); } + // returns the th element for nth column + function getNthColumn(n) { return getTableHeader().querySelectorAll('th')[n]; } + + // loads all columns + function loadColumns() { + var colNodes = getTableHeader().querySelectorAll('th'), + colNode, + cols = [], + col, + i; + + for (i = 0; i < colNodes.length; i += 1) { + colNode = colNodes[i]; + col = { + key: colNode.getAttribute('data-col'), + sortable: !colNode.getAttribute('data-nosort'), + type: colNode.getAttribute('data-type') || 'string' + }; + cols.push(col); + if (col.sortable) { + col.defaultDescSort = col.type === 'number'; + colNode.innerHTML = colNode.innerHTML + ''; + } + } + return cols; + } + // attaches a data attribute to every tr element with an object + // of data values keyed by column name + function loadRowData(tableRow) { + var tableCols = tableRow.querySelectorAll('td'), + colNode, + col, + data = {}, + i, + val; + for (i = 0; i < tableCols.length; i += 1) { + colNode = tableCols[i]; + col = cols[i]; + val = colNode.getAttribute('data-value'); + if (col.type === 'number') { + val = Number(val); + } + data[col.key] = val; + } + return data; + } + // loads all row data + function loadData() { + var rows = getTableBody().querySelectorAll('tr'), + i; + + for (i = 0; i < rows.length; i += 1) { + rows[i].data = loadRowData(rows[i]); + } + } + // sorts the table using the data for the ith column + function sortByIndex(index, desc) { + var key = cols[index].key, + sorter = function (a, b) { + a = a.data[key]; + b = b.data[key]; + return a < b ? -1 : a > b ? 
1 : 0; + }, + finalSorter = sorter, + tableBody = document.querySelector('.coverage-summary tbody'), + rowNodes = tableBody.querySelectorAll('tr'), + rows = [], + i; + + if (desc) { + finalSorter = function (a, b) { + return -1 * sorter(a, b); + }; + } + + for (i = 0; i < rowNodes.length; i += 1) { + rows.push(rowNodes[i]); + tableBody.removeChild(rowNodes[i]); + } + + rows.sort(finalSorter); + + for (i = 0; i < rows.length; i += 1) { + tableBody.appendChild(rows[i]); + } + } + // removes sort indicators for current column being sorted + function removeSortIndicators() { + var col = getNthColumn(currentSort.index), + cls = col.className; + + cls = cls.replace(/ sorted$/, '').replace(/ sorted-desc$/, ''); + col.className = cls; + } + // adds sort indicators for current column being sorted + function addSortIndicators() { + getNthColumn(currentSort.index).className += currentSort.desc ? ' sorted-desc' : ' sorted'; + } + // adds event listeners for all sorter widgets + function enableUI() { + var i, + el, + ithSorter = function ithSorter(i) { + var col = cols[i]; + + return function () { + var desc = col.defaultDescSort; + + if (currentSort.index === i) { + desc = !currentSort.desc; + } + sortByIndex(i, desc); + removeSortIndicators(); + currentSort.index = i; + currentSort.desc = desc; + addSortIndicators(); + }; + }; + for (i =0 ; i < cols.length; i += 1) { + if (cols[i].sortable) { + el = getNthColumn(i).querySelector('.sorter'); + if (el.addEventListener) { + el.addEventListener('click', ithSorter(i)); + } else { + el.attachEvent('onclick', ithSorter(i)); + } + } + } + } + // adds sorting functionality to the UI + return function () { + if (!getTable()) { + return; + } + cols = loadColumns(); + loadData(cols); + addSortIndicators(); + enableUI(); + }; +})(); + +window.addEventListener('load', addSorting); diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-filename/index.js b/mybulma/node_modules/node-gyp/node_modules/unique-filename/index.js new file mode 100644 index 0000000..02bf1e2 --- /dev/null +++ b/mybulma/node_modules/node-gyp/node_modules/unique-filename/index.js @@ -0,0 +1,8 @@ +'use strict' +var path = require('path') + +var uniqueSlug = require('unique-slug') + +module.exports = function (filepath, prefix, uniq) { + return path.join(filepath, (prefix ? 
prefix + '-' : '') + uniqueSlug(uniq)) +} diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-filename/package.json b/mybulma/node_modules/node-gyp/node_modules/unique-filename/package.json new file mode 100644 index 0000000..bc429aa --- /dev/null +++ b/mybulma/node_modules/node-gyp/node_modules/unique-filename/package.json @@ -0,0 +1,27 @@ +{ + "name": "unique-filename", + "version": "1.1.1", + "description": "Generate a unique filename for use in temporary directories or caches.", + "main": "index.js", + "scripts": { + "test": "standard && tap test" + }, + "repository": { + "type": "git", + "url": "https://github.com/iarna/unique-filename.git" + }, + "keywords": [], + "author": "Rebecca Turner (http://re-becca.org/)", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/unique-filename/issues" + }, + "homepage": "https://github.com/iarna/unique-filename", + "devDependencies": { + "standard": "^5.4.1", + "tap": "^2.3.1" + }, + "dependencies": { + "unique-slug": "^2.0.0" + } +} diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-filename/test/index.js b/mybulma/node_modules/node-gyp/node_modules/unique-filename/test/index.js new file mode 100644 index 0000000..105b4e5 --- /dev/null +++ b/mybulma/node_modules/node-gyp/node_modules/unique-filename/test/index.js @@ -0,0 +1,23 @@ +'sue strict' +var t = require('tap') +var uniqueFilename = require('../index.js') + +t.plan(6) + +var randomTmpfile = uniqueFilename('tmp') +t.like(randomTmpfile, /^tmp.[a-f0-9]{8}$/, 'random tmp file') + +var randomAgain = uniqueFilename('tmp') +t.notEqual(randomAgain, randomTmpfile, 'random tmp files are not the same') + +var randomPrefixedTmpfile = uniqueFilename('tmp', 'my-test') +t.like(randomPrefixedTmpfile, /^tmp.my-test-[a-f0-9]{8}$/, 'random prefixed tmp file') + +var randomPrefixedAgain = uniqueFilename('tmp', 'my-test') +t.notEqual(randomPrefixedAgain, randomPrefixedTmpfile, 'random prefixed tmp files are not the same') + +var uniqueTmpfile = uniqueFilename('tmp', 'testing', '/my/thing/to/uniq/on') +t.like(uniqueTmpfile, /^tmp.testing-7ddd44c0$/, 'unique filename') + +var uniqueAgain = uniqueFilename('tmp', 'testing', '/my/thing/to/uniq/on') +t.is(uniqueTmpfile, uniqueAgain, 'same unique string component produces same filename') diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-slug/.travis.yml b/mybulma/node_modules/node-gyp/node_modules/unique-slug/.travis.yml new file mode 100644 index 0000000..5651fce --- /dev/null +++ b/mybulma/node_modules/node-gyp/node_modules/unique-slug/.travis.yml @@ -0,0 +1,10 @@ +language: node_js +sudo: false +before_install: + - "npm -g install npm" +node_js: + - "6" + - "8" + - "10" + - "lts/*" + - "node" diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-slug/LICENSE b/mybulma/node_modules/node-gyp/node_modules/unique-slug/LICENSE new file mode 100644 index 0000000..7953647 --- /dev/null +++ b/mybulma/node_modules/node-gyp/node_modules/unique-slug/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-slug/README.md b/mybulma/node_modules/node-gyp/node_modules/unique-slug/README.md new file mode 100644 index 0000000..87f92f1 --- /dev/null +++ b/mybulma/node_modules/node-gyp/node_modules/unique-slug/README.md @@ -0,0 +1,19 @@ +unique-slug +=========== + +Generate a unique character string suitible for use in files and URLs. + +``` +var uniqueSlug = require('unique-slug') + +var randomSlug = uniqueSlug() +var fileSlug = uniqueSlug('/etc/passwd') +``` + +### uniqueSlug(*str*) → String (8 chars) + +If *str* is passed in then the return value will be its murmur hash in +hex. + +If *str* is not passed in, it will be 4 randomly generated bytes +converted into 8 hexadecimal characters. diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-slug/index.js b/mybulma/node_modules/node-gyp/node_modules/unique-slug/index.js new file mode 100644 index 0000000..fa4761a --- /dev/null +++ b/mybulma/node_modules/node-gyp/node_modules/unique-slug/index.js @@ -0,0 +1,11 @@ +'use strict' +var MurmurHash3 = require('imurmurhash') + +module.exports = function (uniq) { + if (uniq) { + var hash = new MurmurHash3(uniq) + return ('00000000' + hash.result().toString(16)).substr(-8) + } else { + return (Math.random().toString(16) + '0000000').substr(2, 8) + } +} diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-slug/package.json b/mybulma/node_modules/node-gyp/node_modules/unique-slug/package.json new file mode 100644 index 0000000..2142e68 --- /dev/null +++ b/mybulma/node_modules/node-gyp/node_modules/unique-slug/package.json @@ -0,0 +1,23 @@ +{ + "name": "unique-slug", + "version": "2.0.2", + "description": "Generate a unique character string suitible for use in files and URLs.", + "main": "index.js", + "scripts": { + "test": "standard && tap --coverage test" + }, + "keywords": [], + "author": "Rebecca Turner (http://re-becca.org)", + "license": "ISC", + "devDependencies": { + "standard": "^12.0.1", + "tap": "^12.7.0" + }, + "repository": { + "type": "git", + "url": "git://github.com/iarna/unique-slug.git" + }, + "dependencies": { + "imurmurhash": "^0.1.4" + } +} diff --git a/mybulma/node_modules/node-gyp/node_modules/unique-slug/test/index.js b/mybulma/node_modules/node-gyp/node_modules/unique-slug/test/index.js new file mode 100644 index 0000000..0f4ccad --- /dev/null +++ b/mybulma/node_modules/node-gyp/node_modules/unique-slug/test/index.js @@ -0,0 +1,13 @@ +'use strict' +var t = require('tap') +var uniqueSlug = require('../index.js') + +t.plan(5) +var slugA = uniqueSlug() +t.is(slugA.length, 8, 'random slugs are 8 chars') +t.notEqual(slugA, uniqueSlug(), "two slugs aren't the same") +var base = '/path/to/thingy' +var slugB = uniqueSlug(base) +t.is(slugB.length, 8, 'string based slugs are 8 chars') +t.is(slugB, uniqueSlug(base), 'two string based slugs, from the same string are the same') +t.notEqual(slugB, uniqueSlug(slugA), 'two string based slongs, from diff strings are different') diff --git a/mybulma/node_modules/node-gyp/src/win_delay_load_hook.cc b/mybulma/node_modules/node-gyp/src/win_delay_load_hook.cc new file mode 100644 index 0000000..169f802 --- /dev/null +++ 
b/mybulma/node_modules/node-gyp/src/win_delay_load_hook.cc @@ -0,0 +1,39 @@ +/* + * When this file is linked to a DLL, it sets up a delay-load hook that + * intervenes when the DLL is trying to load the host executable + * dynamically. Instead of trying to locate the .exe file it'll just + * return a handle to the process image. + * + * This allows compiled addons to work when the host executable is renamed. + */ + +#ifdef _MSC_VER + +#pragma managed(push, off) + +#ifndef WIN32_LEAN_AND_MEAN +#define WIN32_LEAN_AND_MEAN +#endif + +#include + +#include +#include + +static FARPROC WINAPI load_exe_hook(unsigned int event, DelayLoadInfo* info) { + HMODULE m; + if (event != dliNotePreLoadLibrary) + return NULL; + + if (_stricmp(info->szDll, HOST_BINARY) != 0) + return NULL; + + m = GetModuleHandle(NULL); + return (FARPROC) m; +} + +decltype(__pfnDliNotifyHook2) __pfnDliNotifyHook2 = load_exe_hook; + +#pragma managed(pop) + +#endif diff --git a/mybulma/node_modules/node-gyp/test/common.js b/mybulma/node_modules/node-gyp/test/common.js new file mode 100644 index 0000000..b714ee2 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/common.js @@ -0,0 +1,3 @@ +const envPaths = require('env-paths') + +module.exports.devDir = () => envPaths('node-gyp', { suffix: '' }).cache diff --git a/mybulma/node_modules/node-gyp/test/fixtures/VS_2017_BuildTools_minimal.txt b/mybulma/node_modules/node-gyp/test/fixtures/VS_2017_BuildTools_minimal.txt new file mode 100644 index 0000000..244f6b0 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/fixtures/VS_2017_BuildTools_minimal.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildTools","version":"15.9.28307.665","packages":["Microsoft.VisualStudio.Product.BuildTools","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.JavaScript.LanguageService","Microsoft.VisualStudio.JavaScript.LanguageService.Resources","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudi
o.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.CodeAnalysis.VisualStudio.Setup.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.CodeAnalysis.ExpressionEvaluator.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.PackageGroup.CoreEditor","PortableFacades","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer","Microsoft.TeamFoundation.OfficeIntegration","Microsoft.TeamFoundation.OfficeIntegration.Resources","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Editors","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.Component.Windows10SDK.17134","Win10SDK_10.0.17134","Microsoft.VisualStudio.Component.VC.Tools.x8
6.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.Workload.MSBuildTools","Microsoft.VisualStudio.Component.CoreBuildTools","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.BuildTools.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Component.MSBuild","Microsoft.PythonTools.BuildCore.Vsix","Micro
soft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.VisualStudio.NativeImageSupport","Microsoft.Build"]}] diff --git a/mybulma/node_modules/node-gyp/test/fixtures/VS_2017_Community_workload.txt b/mybulma/node_modules/node-gyp/test/fixtures/VS_2017_Community_workload.txt new file mode 100644 index 0000000..dd5e77d --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/fixtures/VS_2017_Community_workload.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community","version":"15.9.28307.665","packages":["Microsoft.VisualStudio.Component.Windows10SDK.IpOverUsb","Win10SDK_IpOverUsb","Microsoft.VisualStudio.Component.VC.ATL.ARM64","Microsoft.VisualCpp.ATL.ARM64","Microsoft.VisualStudio.Component.VC.ATL.ARM","Microsoft.VisualCpp.ATL.ARM","Microsoft.VisualStudio.Component.VC.Tools.ARM","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualStudio.Graphics.Analyzer.Resources","Microsoft.Icecap.Analysis","Microsoft.VisualCpp.CRT.Redist.arm.OneCore.Desktop","Microsoft.VisualCpp.CRT.arm.Store","Microsoft.VisualCpp.CRT.arm.Desktop","Microsoft.VisualStudio.PackageGroup.VC.Tools.x64.ARM","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetarm","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetARM.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM.Resources","Microsoft.VisualCpp.Premium.Tools.ARM.Base","Microsoft.VisualCpp.Premium.Tools.ARM.Base.Resources","Microsoft.VisualCpp.PGO.ARM","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualStudio.Product.Community","Microsoft.VisualCpp.Tools.Hostx86.Targetarm","Microsoft.VisualStudio.Component.VC.Tools.ARM64","Microsoft.VisualStudio.VC.MSBuild.Arm64","Microsoft.VisualCpp.CRT.Redist.ARM64.OneCore.Desktop","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.CRT.ARM64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ARM64.Store","Microsoft.VisualCpp.CRT.ARM64.Desktop","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.Icecap.Analysis.Resources","Microsoft.VisualCpp.VCTip.hostX86.targetARM","Microsoft.VisualStudio.PackageGroup.VC.Tools.x64.ARM64","Microsoft.VisualCpp.Tools.Core","Microsoft.VisualCpp.PGO.ARM64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetarm64","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetARM64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM64.Resources","Microsoft.VisualCpp.Premium.Tools.ARM64.Base","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Tools.HostX86.TargetARM.Resources","Microsoft.VisualCpp.CRT.Redist.ARM64","Microsoft.VisualCpp.CRT.arm.OneCore.Desktop","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Component.WixToolset.VisualStudioExtension.Dev15","WixToolset.VisualStudioExtension.Dev15","Microsoft.VisualCpp.MFC.X64","Microsoft.VisualCpp.ATL.Headers","Microsoft.VisualStudio.Component.VC.CMake.Project","Microsoft.VisualStudio.VC.CMake","Microsoft.VisualStudio.VC.CMake.Project","Microsoft.VisualStudio.Component.Windows10SDK.17763","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","MLGen","Microsoft.VisualStudio.Graphics.Analyzer","Microsoft.VisualStudio.Component.TestTools.Core","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.CR
T.x86.OneCore.Desktop","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.NuGet.Licenses","SQLCommon","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualCpp.Tools.HostX64.TargetARM","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualCpp.HTMLHelpWorkshop.Msi","Microsoft.Icecap.Collection.Msi.Resources","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.VCTip.hostX64.targetARM","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.Graphics.Appid","Microsoft.VisualCpp.ATL.Source","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.Debugger.ImmersiveActivateHelper.Msi","Microsoft.VisualStudio.Debugger.JustInTime","Microsoft.DiagnosticsHub.CpuSampling","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualCpp.Tools.HostX64.TargetARM.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.Component.MSBuild","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.WebToolsExtensions","Microsoft.VisualCpp.Tools.Hostx86.Targetarm64","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualCpp.VCTip.hostX86.targetARM64","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualCpp.Tools.HostX86.TargetARM64.Resources","Microsoft.VisualStudio.RazorExtension","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualCpp.MFC.Source","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.Graphics.Viewers","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.MFC.Redist.X86","Microsoft.VisualStudio.WebToolsExtensions.Chip","Microsoft.DiagnosticsHub.Runtime.Resources","Microsoft.DiagnosticsHub.CpuSampling.Targeted","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.Component.VC.DiagnosticTools","Microsoft.VisualCpp.MFC.Redist.X64","Microsoft.VisualStudio.PackageGroup.TestTools.Native","Microsoft.VisualStudio.Graphics.Viewers.Resources","Microsoft.VisualCpp.MFC.MBCS","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Component.TextTemplating","Win10SDK_10.0.17763","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualCpp.MFC.MBCS.X64","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.Graphics.EnableTools","Microsoft.VisualStudio.Graphics.Appid.Resources","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualCpp.MFC.Headers","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.Graphics.Analyzer.Targeted","Microsoft.VisualCpp.CRT.Headers","Microsoft.DiagnosticsHub.Runtime.Targeted","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetARM64","Microsoft.VisualCpp.VC
Tip.hostX64.targetARM64","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.Icecap.Collection.Msi","Microsoft.VisualCpp.ATL.X86","Microsoft.VisualCpp.Tools.HostX64.TargetARM64.Resources","Microsoft.VisualStudio.Component.VC.ATLMFC","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.Icecap.Collection.Msi.Resources.Targeted","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualStudio.Component.Graphics.Tools","Microsoft.VisualStudio.WebTools.Resources","Microsoft.VisualCpp.ATL.X64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualStudio.Component.Graphics.Win81","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.Icecap.Analysis.Resources.Targeted","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.MFC.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.JavaScript.LanguageService","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.Component.VC.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.CredentialProvider","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.Component.NuGet","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualCpp.PGO.Headers","Microsoft.DiagnosticsHub.Collection","Microsoft.Icecap.Collection.Msi.Targeted","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualStudio.Branding.Community","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.WebToolsExtensions.Common","Microsoft.VisualStudio.WebTools.MSBuild","Microsoft.VisualStudio.NuGet.Core","Microsoft.DiagnosticsHub.Collection.Service","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.DiaSymReader.Native","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Component.Roslyn.LanguageServices","
Microsoft.DiagnosticsHub.Collection.StopService.Install","Microsoft.VisualStudio.InteractiveWindow","Microsoft.PackageGroup.DiagnosticsHub.Platform","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.CodeAnalysis.ExpressionEvaluator.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.Debugger.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","sqlsysclrtypes","Microsoft.VisualStudio.ProTools","Component.Microsoft.VisualStudio.RazorExtension","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.Build.Dependencies","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualStudio.VC.Ide.ATL.Resources","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.Debugger.Managed","Microsoft.VisualStudio.Workload.NativeDesktop","Microsoft.VisualStudio.Component.VC.TestAdapterForGoogleTest","Microsoft.VisualStudio.Debugger.JustInTime.Msi","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","sqlsysclrtypes","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.WebTools","Microsoft.VisualStudio.Component.VC.Redist.14.Latest","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.TeamFoundation.OfficeIntegration.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.JavaScript.LanguageS
ervice.Resources","Microsoft.VisualStudio.VC.Ide.TestAdapterForGoogleTest","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.Component.Windows10SDK.17134","Microsoft.VisualStudio.PackageGroup.Core","PortableFacades","Microsoft.DiaSymReader","Microsoft.DiagnosticsHub.Runtime","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.Community","Microsoft.TeamFoundation.OfficeIntegration","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualCpp.Tools.Common.UtilsPrereq","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.ServiceHub","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.TeamExplorer","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents.Resources","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.GraphProvider","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents","Microsoft.CodeAnalysis.VisualStudio.Setup.Interactive.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup.Resources","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.NuGet.Build.Tasks","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.Build","Microsoft.VisualStudio.VC.Ide.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.ATL","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.Workload.CoreEditor","Microsoft.VisualStudio.MinShell.Interop","Microsoft.Build.FileTracker.Msi","Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core","Microsoft.MSHtml","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.Devenv.Msi","Microsoft.VisualStudio.Component.VC.ATL","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.CoreEditor","Win10SDK_10.0.17134","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.Component.Debugger.JustInTime","Microsoft.VisualStudio.VC.Ide.MFC","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.PackageGroup.TeamExplorer","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.Devenv","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualStudio.Devenv.Resources","Microsoft.VisualStud
io.MinShell.Platform.Resources","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.MefHosting","Microsoft.DiagnosticsHub.Collection.StopService.Uninstall","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualCpp.MFC.X86","Microsoft.VisualStudio.Log.Resources","Microsoft.Icecap.Analysis.Targeted","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.Devenv.Config","Microsoft.VisualStudio.MinShell.Resources","Microsoft.VisualStudio.Initializer","Microsoft.Net.PackageGroup.4.6.Redist"]}] diff --git a/mybulma/node_modules/node-gyp/test/fixtures/VS_2017_Express.txt b/mybulma/node_modules/node-gyp/test/fixtures/VS_2017_Express.txt new file mode 100644 index 0000000..c4b3b5f --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/fixtures/VS_2017_Express.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\WDExpress","version":"15.9.28307.858","packages":["Microsoft.VisualStudio.Product.WDExpress","Microsoft.VisualStudio.Workload.WDExpress","Microsoft.VisualStudio.Component.Windows10SDK.17763","MLGen","Win10SDK_10.0.17763","Microsoft.VisualStudio.Component.Windows10SDK.14393","Win10SDK_10.0.14393.795","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.Component.VC.Tools.ARM64","Microsoft.VisualStudio.VC.MSBuild.Arm64","Microsoft.VisualCpp.CRT.Redist.ARM64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.ARM64","Microsoft.VisualCpp.CRT.ARM64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ARM64.Store","Microsoft.VisualCpp.CRT.ARM64.Desktop","Microsoft.VisualCpp.Tools.Hostx86.Targetarm64","Microsoft.VisualCpp.VCTip.hostX86.targetARM64","Microsoft.VisualCpp.Tools.HostX86.TargetARM64.Resources","Microsoft.VisualStudio.Component.VC.Tools.ARM","Microsoft.VisualCpp.Tools.Hostx86.Targetarm","Microsoft.VisualCpp.VCTip.hostX86.targetARM","Microsoft.VisualCpp.Tools.HostX86.TargetARM.Resources","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.arm.OneCore.Desktop","Microsoft.VisualCpp.CRT.arm.OneCore.Desktop","Microsoft.VisualCpp.CRT.arm.Store","Microsoft.VisualCpp.CRT.arm.Desktop","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.JavaScript.LanguageService","Microsoft.VisualStudio.JavaScript.LanguageService.Resources","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.V
S.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualStudio.Component.VC.CLI.Support","Microsoft.VisualCpp.CLI.X86","Microsoft.VisualCpp.CLI.X64","Microsoft.VisualCpp.CLI.Source","Microsoft.VisualCpp.CLI.ARM64","Microsoft.VisualCpp.CLI.ARM","Microsoft.VisualStudio.VC.Templates.CLR","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Templates.CLR.Resources","Microsoft.Component.VC.Runtime.OSSupport","Microsoft.Windows.UniversalCRT.Tools.Msi","Microsoft.Windows.UniversalCRT.Tools.Msi","Microsoft.Windows.UniversalCRT.ExtensionSDK.Msi","Microsoft.Windows.UniversalCRT.HeadersLibsSources.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.Component.HelpViewer","Microsoft.HelpViewer","Microsoft.VisualStudio.Help.Configuration.Msi","Microsoft.VisualStudio.Component.SQL.DataSources","Microsoft.VisualStudio.Component.SQL.SSDT","Microsoft.VisualStudio.Component.SQL.CMDUtils","sqlcmdlnutils","Microsoft.VisualStudio.Component.Common.Azure.Tools","Microsoft.VisualStudio.Azure.CommonAzureTools","SSDT","Microsoft.VisualStudio.Component.SQL.ADAL","sql_adalsql","Microsoft.VisualStudio.Component.NuGet","Microsoft.CredentialProvider","Microsoft.VisualStudio.NuGet.Licenses","Microsoft.VisualStudio.Component.SQL.LocalDB.Runtime","Microsoft.VisualStudio.Component.SQL.NCLI","sqllocaldb","sqlncli","Microsoft.VisualStudio.Component.EntityFramework","Microsoft.VisualStudio.PackageGroup.DslRuntime","Microsoft.VisualStudio.Dsl.Core","Microsoft.VisualStudio.Dsl.GraphObject","Microsoft.VisualStudio.Dsl.Core.Resources","Microsoft.VisualStudio.EntityFrameworkTools","Microsoft.VisualStudio.EntityFrameworkTools.Msi","Microsoft.VisualStudio.Component.Roslyn.LanguageServices","Microsoft.VisualStudio.InteractiveWindow","Microsoft.DiaSymReader.Native","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualCpp.Redist.14","Microsoft.Vi
sualCpp.Redist.14","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents.Resources","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents","Microsoft.CodeAnalysis.VisualStudio.Setup.Interactive.Resources","Microsoft.Net.ComponentGroup.TargetingPacks.Common","Microsoft.Net.Component.4.6.TargetingPack","Microsoft.Net.4.6.TargetingPack","Microsoft.Net.Component.4.5.2.TargetingPack","Microsoft.Net.4.5.2.TargetingPack","Microsoft.Net.Component.4.5.1.TargetingPack","Microsoft.Net.4.5.1.TargetingPack","Microsoft.Net.Component.4.5.TargetingPack","Microsoft.Net.4.5.TargetingPack","Microsoft.Net.Component.4.TargetingPack","Microsoft.Net.4.TargetingPack","Microsoft.Net.ComponentGroup.DevelopmentPrerequisites","Microsoft.Net.Component.4.6.1.TargetingPack","Microsoft.Net.4.6.1.TargetingPack","Microsoft.Net.Cumulative.TargetingPack.Resources","Microsoft.Net.Component.4.6.1.SDK","Microsoft.Net.4.6.1.SDK","Microsoft.VisualStudio.Component.TextTemplating","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualStudio.Component.VisualStudioData","Microsoft.VisualStudio.Component.SQL.CLR","Microsoft.VisualStudio.ProTools","sqlsysclrtypes","sqlsysclrtypes","SQLCommon","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.XamlDiagnostics","Microsoft.VisualStudio.XamlDiagnostics.Resources","Microsoft.VisualStudio.XamlDesigner","Microsoft.VisualStudio.XamlDesigner.Resources","Microsoft.VisualStudio.XamlDesigner.Executables","Microsoft.VisualStudio.XamlShared","Microsoft.VisualStudio.XamlShared.Resources","Microsoft.VisualStudio.PackageGroup.TestTools.Managed","Microsoft.VisualStudio.PackageGroup.IntelliTrace.Core","Microsoft.IntelliTrace.Core","Microsoft.IntelliTrace.Core.Targeted","Microsoft.IntelliTrace.ProfilerProxy.Msi.x64","Microsoft.IntelliTrace.ProfilerProxy.Msi","Microsoft.VisualStudio.NuGet.Core","Microsoft.VisualStudio.TestWindow.SourceBasedTestDiscovery","Microsoft.VisualStudio.TestWindow.Dotnet","Microsoft.VisualStudio.TestTools.TestGeneration","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.PackageGroup.TestTools.Enterprise","Microsoft.VisualStudio.PackageGroup.TestTools.MSTestV2.Managed","Microsoft.VisualStudio.TestTools.MSTestV2.WizardExtension.UnitTest","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TestToo
ls.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","Microsoft.Component.ClickOnce","Microsoft.VisualStudio.PackageGroup.ClickOnce.MSBuild","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.ClickOnce.SignTool.Msi","Microsoft.SQL.ClickOnceBootstrapper.Msi","Microsoft.Net.ClickOnceBootstrapper","Microsoft.ClickOnce.BootStrapper.Msi.Resources","Microsoft.ClickOnce.BootStrapper.Msi","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.CodeAnalysis.VisualStudio.Setup.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.CodeAnalysis.ExpressionEvaluator.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.ClickOnce.Resources","Microsoft.VisualStudio.ClickOnce","Microsoft.Component.MSBuild","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.VisualStudio.TemplateEngine","Microsoft.VisualStudio.WebToolsExtensions.Common","Microsoft.NET.Sd
k","Microsoft.VisualStudio.PackageGroup.TestTools.Templates.Managed","Microsoft.VisualStudio.TestTools.Templates.Managed","Microsoft.VisualStudio.TestTools.Templates.Managed.Resources","Microsoft.VisualStudio.Templates.VB.MSTestv2.Desktop.UnitTest","Microsoft.VisualStudio.Templates.CS.MSTestv2.Desktop.UnitTest","Microsoft.VisualStudio.Templates.VB.Wpf","Microsoft.VisualStudio.Templates.VB.Wpf.Resources","Microsoft.VisualStudio.Templates.VB.Winforms","Microsoft.VisualStudio.Templates.VB.ManagedCore","Microsoft.VisualStudio.Templates.VB.Shared","Microsoft.VisualStudio.Templates.VB.Shared.Resources","Microsoft.VisualStudio.Templates.VB.ManagedCore.Resources","Microsoft.VisualStudio.Templates.CS.GettingStarted.Desktop.Package","Microsoft.VisualStudio.Templates.GetStarted.Desktop.Setup","Microsoft.VisualStudio.Templates.CS.GettingStarted.Console.Package","Microsoft.VisualStudio.Templates.GetStarted.Resources","Microsoft.VisualStudio.Templates.GetStarted.Common.Setup","Microsoft.VisualStudio.Templates.GetStarted.Console.Setup","Microsoft.VisualStudio.Templates.CS.Wpf","Microsoft.VisualStudio.Templates.CS.Wpf.Resources","Microsoft.VisualStudio.Templates.CS.Winforms","Microsoft.VisualStudio.Templates.CS.ManagedCore","Microsoft.VisualStudio.Templates.CS.Shared","Microsoft.VisualStudio.Templates.Editorconfig.Wizard.Setup","Templates.Editorconfig.SolutionFile.Setup","Microsoft.VisualStudio.Templates.CS.Shared.Resources","Microsoft.VisualStudio.Templates.CS.ManagedCore.Resources","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.PackageGroup.CoreEditor","PortableFacades","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Build","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer","Microsoft.TeamFoundation.OfficeIntegration","Microsoft.TeamFoundation.OfficeIntegration.Resources","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.WDExpress","Microsoft.VisualStudio.WDExpress.Resources","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.La
nguageServer","Microsoft.VisualStudio.MinShell.Resources","Microsoft.Net.PackageGroup.4.6.Redist","Microsoft.VisualStudio.Branding.WDExpress"]}] diff --git a/mybulma/node_modules/node-gyp/test/fixtures/VS_2017_Unusable.txt b/mybulma/node_modules/node-gyp/test/fixtures/VS_2017_Unusable.txt new file mode 100644 index 0000000..fc0a257 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/fixtures/VS_2017_Unusable.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildToolsUnusable","version":"15.9.28307.665","packages":["Microsoft.VisualStudio.Product.BuildTools","Microsoft.VisualStudio.Component.Windows10SDK.17134","Win10SDK_10.0.17134","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.Workload.MSBuildTools","Microsoft.VisualStudio.Component.CoreBuildTools","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.
PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.BuildTools.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Component.MSBuild","Microsoft.PythonTools.BuildCore.Vsix","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.Net.4.6.1.FullRedist.NonThreshold","Microsoft.Windows.UniversalCRT.Msu.81","Microsoft.VisualStudio.NativeImageSupport","Microsoft.Build"]}] diff --git a/mybulma/node_modules/node-gyp/test/fixtures/VS_2019_BuildTools_minimal.txt b/mybulma/node_modules/node-gyp/test/fixtures/VS_2019_BuildTools_minimal.txt new file mode 100644 index 0000000..f07d254 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/fixtures/VS_2019_BuildTools_minimal.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools","version":"16.1.28922.388","packages":["Microsoft.VisualStudio.Product.BuildTools","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.CodeSense.Community","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Platform.CallHierarchy","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer.Common","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.OpenFolder.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStu
dio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Platform.NavigateTo","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VisualC.Logging","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.VCPkgDatabase","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Agent","Microsoft.VisualStudio.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Concord","Microsoft.VisualStudio.VC.Ide.Debugger.Concord.Resources","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.DiaSymReader","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Concord.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed.Resources","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.PerfLib","Microsoft.VisualStudio.Debugger.Package.DiagHub.Client.VSx86","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.VisualStudio.Editors","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudi
o.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Platform.Editor","Microsoft.VisualStudio.Debugger.Concord","Microsoft.VisualStudio.Debugger.Concord.Resources","Microsoft.VisualStudio.Debugger.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.VisualStudio.Component.Windows10SDK.17134","Win10SDK_10.0.17134","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.VC.MSBuild.x86.v142","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.X64.v142","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM.v142","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources"
,"Microsoft.VisualStudio.Workload.MSBuildTools","Microsoft.VisualStudio.Component.CoreBuildTools","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.BuildTools.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Component.MSBuild","Microsoft.PythonTools.BuildCore.Vsix","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.7.2.Redist","Microsoft.VisualStudio.NativeImageSupport","Microsoft.Build","Microsoft.VisualStudio.PackageGroup.NuGet","Microsoft.VisualStudio.NuGet.BuildTools"]}] diff --git a/mybulma/node_modules/node-gyp/test/fixtures/VS_2019_Community_workload.txt b/mybulma/node_modules/node-gyp/test/fixtures/VS_2019_Community_workload.txt new file mode 100644 index 0000000..50071c2 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/fixtures/VS_2019_Community_workload.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community","version":"16.1.28922.388","packages":["Microsoft.VisualStudio.Workload.NativeDesktop","Microsoft.VisualStudio.Component.VC.TestAdapterForGoogleTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForGoogleTest","Microsoft.VisualStudio.Component.VC.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForBoostTest","Microsoft.VisualStudio.Component.VC.ATL","Microsoft.VisualStudio.VC.Ide.ATL","Microsoft.VisualStudio.VC.Ide.ATL.Resources","Microsoft.VisualCpp.ATL.X86","Microsoft.VisualCpp.ATL.X64","Microsoft.VisualCpp.ATL.Source","Microsoft.VisualCpp.ATL.Headers","Microsoft.VisualStudio.Component.VC.CMake.Project","Microsoft.VisualStudio.VC.CMake","Microsoft.VisualStudio.VC.CMake.Project","Microsoft.VisualStudio.VC.ExternalBuildFramework","Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core","Microsoft.VisualStudio.PackageGroup.TestTools.Native","Microsoft.VisualStudio.Component.VC.Redist.14.Latest","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.VC.UnitTest.Desktop.Build.Core","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.Component.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.ImmersiveActivateHelper.Msi","Microsoft.VisualStudio.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.JustInTime.Msi","Microsoft.VisualStudio.Component.Windows10SDK.17763","Win10SDK_10.0.17763","Microsoft.VisualStudio.Component.VC.DiagnosticTools","Microsoft.VisualStudio.Component.Graphics.Tools","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Micr
osoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.Graphics.Viewers","Microsoft.VisualStudio.Graphics.Viewers.Resources","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Analyzer","Microsoft.VisualStudio.Graphics.Analyzer.Targeted","Microsoft.VisualStudio.Graphics.Analyzer.Resources","Microsoft.VisualStudio.Graphics.Appid","Microsoft.VisualStudio.Graphics.Appid.Resources","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.VC.MSBuild.X64.v142","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM.v142","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.x86.v142","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.VCPkgDatabase","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine
.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Component.Microsoft.VisualStudio.LiveShare","Microsoft.VisualStudio.LiveShare","Microsoft.Icecap.Analysis","Microsoft.Icecap.Analysis.Targeted","Microsoft.Icecap.Analysis.Resources","Microsoft.Icecap.Analysis.Resources.Targeted","Microsoft.Icecap.Collection.Msi","Microsoft.Icecap.Collection.Msi.Targeted","Microsoft.Icecap.Collection.Msi.Resources","Microsoft.Icecap.Collection.Msi.Resources.Targeted","Microsoft.DiagnosticsHub.Instrumentation","Microsoft.DiagnosticsHub.CpuSampling.ExternalDependencies","Microsoft.DiagnosticsHub.CpuSampling","Microsoft.DiagnosticsHub.CpuSampling.Targeted","Microsoft.PackageGroup.DiagnosticsHub.Platform","Microsoft.DiagnosticsHub.Runtime.ExternalDependencies","Microsoft.DiagnosticsHub.Runtime.ExternalDependencies.Targeted","Microsoft.DiagnosticsHub.Collection.ExternalDependencies.x64","Microsoft.DiagnosticsHub.Collection.StopService.Uninstall","Microsoft.DiagnosticsHub.Runtime","Microsoft.DiagnosticsHub.Runtime.Targeted","Microsoft.DiagnosticsHub.Collection","Microsoft.DiagnosticsHub.Collection.Service","Microsoft.DiagnosticsHub.Collection.StopService.Install","Microsoft.VisualStudio.Component.IntelliCode","Microsoft.VisualStudio.IntelliCode","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","Microsoft.VisualStudio.LiveShareApi","Microsoft.VisualStudio.Component.TextTemplating","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.Component.MSBuild","Microsoft.NuGet.Build.Tasks","Microsoft.DiagnosticsHub.KB2882822.Win7","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources
","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions","Microsoft.VisualStudio.ProTools","sqlsysclrtypes","sqlsysclrtypes","SQLCommon","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.WebToolsExtensions","Microsoft.VisualStudio.WebTools","Microsoft.VisualStudio.WebTools.Resources","Microsoft.VisualStudio.WebTools.MSBuild","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.VisualC.Logging","Microsoft.WebTools.Shared","Microsoft.WebTools.DotNet.Core.ItemTemplates","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.Windows.UniversalCRT.Msu.7","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers","Microsoft.VisualStudio.Component.NuGet","Microsoft.CredentialProvider","Microsoft.VisualStudio.NuGet.PowershellBindingRedirect","Microsoft.VisualStudio.NuGet.Licenses","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.CodeSense.Community","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Agent","Microsoft.VisualStudio.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Concord","Microsoft.VisualStudio.VC.Ide.Debugger.Concord.Resources","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Concord.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed.Resources","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.DbgHelp.Win8","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.DbgHelp.Win8","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.Debugger.Package.DiagHub.Client.VSx86","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.DbgHelp.Win8","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Concord","Microsoft.VisualStudio.Debugger.Concord.Resources","Mi
crosoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Platform.CallHierarchy","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.Devenv.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.Product.Community","Microsoft.VisualStudio.Workload.CoreEditor","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualCpp.Tools.Common.UtilsPrereq","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Build","Microsoft.VisualStudio.PackageGroup.NuGet","Microsoft.VisualStudio.NuGet.Core","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer.Common","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.OpenFolder.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.Devenv","Microsoft.VisualStudio.Devenv.Resources","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Platform.NavigateTo","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.PerfLib","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Platform.Editor","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.Devenv.Config","Microsoft.VisualStudio.MinShell.Resources","Microsoft.N
et.PackageGroup.4.7.2.Redist","Microsoft.Net.4.7.2.FullRedist","Microsoft.VisualStudio.Branding.Community"]}] diff --git a/mybulma/node_modules/node-gyp/test/fixtures/VS_2019_Preview.txt b/mybulma/node_modules/node-gyp/test/fixtures/VS_2019_Preview.txt new file mode 100644 index 0000000..806509e --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/fixtures/VS_2019_Preview.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Preview","version":"16.0.28608.199","packages":["Microsoft.VisualStudio.Product.Enterprise","Microsoft.VisualStudio.Workload.NativeDesktop","Microsoft.VisualStudio.Component.VC.TestAdapterForGoogleTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForGoogleTest","Microsoft.VisualStudio.Component.VC.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForBoostTest","Microsoft.VisualStudio.Component.VC.ATL","Microsoft.VisualStudio.VC.Ide.ATL","Microsoft.VisualStudio.VC.Ide.ATL.Resources","Microsoft.VisualCpp.ATL.X86","Microsoft.VisualCpp.ATL.X64","Microsoft.VisualCpp.ATL.Source","Microsoft.VisualCpp.ATL.Headers","Microsoft.VisualStudio.Component.VC.CMake.Project","Microsoft.VisualStudio.VC.CMake","Microsoft.VisualStudio.VC.CMake.Project","Microsoft.VisualStudio.VC.ExternalBuildFramework","Microsoft.VisualStudio.Component.VC.DiagnosticTools","Microsoft.VisualStudio.Component.Graphics.Tools","Microsoft.VisualStudio.Graphics.Viewers","Microsoft.VisualStudio.Graphics.Viewers.Resources","Microsoft.VisualStudio.Graphics.EnableTools","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Analyzer","Microsoft.VisualStudio.Graphics.Analyzer.Targeted","Microsoft.VisualStudio.Graphics.Analyzer.Resources","Microsoft.VisualStudio.Graphics.Appid","Microsoft.VisualStudio.Graphics.Appid.Resources","Microsoft.VisualStudio.Component.Windows10SDK.17763","Win10SDK_10.0.17763","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools
.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core","Microsoft.VisualStudio.PackageGroup.TestTools.Native","Microsoft.VisualStudio.ComponentGroup.ArchitectureTools.Native","Microsoft.VisualStudio.Component.ClassDesigner","Microsoft.VisualStudio.ClassDesigner","Microsoft.VisualStudio.ClassDesigner.Resources","Microsoft.VisualStudio.Component.VC.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.VC.UnitTest.Desktop.Build.Core","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.VC.MSBuild.X64.v142","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM.v142","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.x86.v142","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.VCPkgDatabase","Microsoft.VisualStudio.VC.Ide.Progression.Enterprise","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualStudio.Component.CodeMap","Microsoft.VisualStudio.Component.GraphDocument","Microsoft.VisualStudio.Vmp","Microsoft.VisualStudio.GraphDocument","Microsoft.VisualStudio.GraphDocument.Resources","Microsoft.VisualStudio.CodeMap","Microsoft.VisualStudio.Component.SQL.LocalDB.Runtime","Microsoft.VisualStudio.Component.SQL.NCLI","sqllocaldb","sqlncli","Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions","Microsoft.VisualStudio.WebToolsExtensions","Microsoft.VisualStudio.WebTools","Microsoft.VisualStudio.WebTools.Resources","Microsoft.VisualStudio.WebTools.MSBuild","Microsoft.VisualStudio.Pac
kageGroup.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.Component.NuGet","Microsoft.CredentialProvider","Component.Microsoft.VisualStudio.LiveShare","Microsoft.VisualStudio.LiveShare","Microsoft.VisualStudio.Component.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.ImmersiveActivateHelper.Msi","Microsoft.VisualStudio.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.JustInTime.Msi","Microsoft.VisualStudio.Component.IntelliTrace.FrontEnd","Microsoft.IntelliTrace.DiagnosticsHubAgent.Targeted","Microsoft.IntelliTrace.Debugger","Microsoft.IntelliTrace.Debugger.Targeted","Microsoft.IntelliTrace.FrontEnd","Microsoft.DiagnosticsHub.Instrumentation","Microsoft.DiagnosticsHub.CpuSampling","Microsoft.DiagnosticsHub.CpuSampling.Targeted","Microsoft.PackageGroup.DiagnosticsHub.Platform","Microsoft.DiagnosticsHub.Collection.StopService.Uninstall","Microsoft.DiagnosticsHub.Runtime","Microsoft.DiagnosticsHub.Runtime.Targeted","Microsoft.DiagnosticsHub.Runtime.Resources","Microsoft.DiagnosticsHub.Collection","Microsoft.DiagnosticsHub.Collection.Service","Microsoft.DiagnosticsHub.Collection.StopService.Install","Microsoft.VisualStudio.Dsl.GraphObject","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.NuGet.Licenses","Microsoft.WebTools.Shared","Microsoft.VisualStudio.WebToolsExtensions.DotNet.Core.ItemTemplates","Microsoft.VisualStudio.Component.TextTemplating","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualStudio.ProTools","sqlsysclrtypes","sqlsysclrtypes","SQLCommon","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.NuGet.Core","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.PackageGroup.IntelliTrace.Core","Microsoft.IntelliTrace.Core","Microsoft.IntelliTrace.Core.Concord","Microsoft.IntelliTrace.Core.Targeted","Microsoft.IntelliTrace.ProfilerProxy.Msi.x64","Microsoft.IntelliTrace.ProfilerProxy.Msi","Microsoft.VisualStudio.TestTools.DynamicCodeCoverage","Microsoft.VisualStudio.TestTools.CodeCoverage.Msi","Microsoft.VisualStudio.TestTools.CodeCoverage","Microsoft.Icecap.Analysis","Microsoft.Icecap.Analysis.Targeted","Microsoft.Icecap.Analysis.Resources","Microsoft.Icecap.Analysis.Resources.Targeted","Microsoft.Icecap.Collection.Msi","Microsoft.Icecap.Collection.Msi.Targeted","Microsoft.Icecap.Collection.Msi.Resources","Microsoft.Icecap.Collection.Msi.Resources.Targeted","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsof
t.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Remote","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Premium","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","Microsoft.VisualStudio.TestTools.NE.Msi.Targeted","Microsoft.VisualStudio.TestTools.NetworkEmulation","Microsoft.VisualStudio.TestTools.DataCollectors","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.CodeSense","Microsoft.VisualStudio.CodeSense.Community","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Agent","Microsoft.VisualStudio.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Concord","Microsoft.VisualStudio.VC.Ide.Debugger.Concord.Resources","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed.Resources","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.Debugger.Package.DiagHub.Client.VSx86","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Conco
rd","Microsoft.VisualStudio.Debugger.Concord.Resources","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.ProfessionalCore","Microsoft.VisualStudio.Professional","Microsoft.VisualStudio.Professional.Msi","Microsoft.VisualStudio.PackageGroup.EnterpriseCore","Microsoft.VisualStudio.Enterprise.Msi","Microsoft.VisualStudio.Enterprise","Microsoft.ShDocVw","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Platform.CallHierarchy","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.Devenv.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.Component.MSBuild","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers","Microsoft.VisualStudio.Workload.CoreEditor","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualCpp.Tools.Common.UtilsPrereq","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Build","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer.Common","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.Devenv","Microsoft.VisualStudio.Devenv.Resources","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Platform.NavigateTo","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.Platform.Search","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.V
isualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Platform.Editor","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.Devenv.Config","Microsoft.VisualStudio.MinShell.Resources","Microsoft.Net.PackageGroup.4.7.2.Redist","Microsoft.VisualStudio.Branding.Enterprise"]}] diff --git a/mybulma/node_modules/node-gyp/test/fixtures/ca-bundle.crt b/mybulma/node_modules/node-gyp/test/fixtures/ca-bundle.crt new file mode 100644 index 0000000..fb1dea9 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/fixtures/ca-bundle.crt @@ -0,0 +1,40 @@ +-----BEGIN CERTIFICATE----- +MIIDJjCCAg4CAhnOMA0GCSqGSIb3DQEBBQUAMH0xCzAJBgNVBAYTAlVTMQswCQYD +VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n +TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv +bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMBkxFzAV +BgNVBAMMDnN0cm9uZ2xvb3AuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAwOYI7OZ2FX/YjRgLZoDQlbPc5UZXU/j0e1wwiJNPtPEax9Y5Uoza0Pnt +Ikzkc2SfvQ+IJrhXo385tI0W5juuqbHnE7UrjUuPjUX6NHevkxcs/flmjan5wnZM +cPsGhH71WDuUEEflvZihf2Se2x+xgZtMhc5XGmVmRuZFYKvkgUhA2/w8/QrK+jPT +n9QRJxZjWNh2RBdC1B7u4jffSmOSUljYFH1I2eTeY+Rdi6YUIYSU9gEoZxsv3Tia +SomfMF5jt2Mouo6MzA+IhLvvFjcrcph1Qxgi9RkfdCMMd+Ipm9YWELkyG1bDRpQy +0iyHD4gvVsAqz1Y2KdRSdc3Kt+nTqwIDAQABoxkwFzAVBgNVHREEDjAMhwQAAAAA +hwR/AAABMA0GCSqGSIb3DQEBBQUAA4IBAQAhy4J0hML3NgmDRHdL5/iTucBe22Mf +jJjg2aifD1S187dHm+Il4qZNO2plWwAhN0h704f+8wpsaALxUvBIu6nvlvcMP5PH +jGN5JLe2Km3UaPvYOQU2SgacLilu+uBcIo2JSHLV6O7ziqUj5Gior6YxDLCtEZie +Ea8aX5/YjuACtEMJ1JjRqjgkM66XAoUe0E8onOK3FgTIO3tGoTJwRp0zS50pFuP0 +PsZtT04ck6mmXEXXknNoAyBCvPypfms9OHqcUIW9fiQnrGbS/Ri4QSQYj0DtFk/1 +na4fY1gf3zTHxH8259b/TOOaPfTnCEsOQtjUrWNR4xhmVZ+HJy4yytUW +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIDbzCCAlcCAmm6MA0GCSqGSIb3DQEBCwUAMH0xCzAJBgNVBAYTAlVTMQswCQYD +VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n +TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv +bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMH0xCzAJ +BgNVBAYTAlVTMQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZ +MBcGA1UECgwQU3Ryb25nTG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRow +GAYDVQQDDBFjYS5zdHJvbmdsb29wLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBANfj86jkvvYDjHBgiqWhk9Cj+bqiMq3MqnV0CBO4iuK33Fo6XssE +H+yVdXlIBFbFe6t655MdBVOR2Sfj7WqNh96vhu6PyDHiwcQlTaiLU6nhIed1J4Wv +lvnJHFmp8Wbtx5AgLT4UYu03ftvXEl2DLi3vhSL2tRM1ebXHB/KPbRWkb25DPX0P +foOHot3f2dgNe2x6kponf7E/QDmAu3s7Nlkfh+ryDhgGU7wocXEhXbprNqRqOGNo +xbXgUI+/9XDxYT/7Gn5LF/fPjtN+aB0SKMnTsDhprVlZie83mlqJ46fOOrR+vrsQ +mi/1m/TadrARtZoIExC/cQRdVM05EK4tUa8CAwEAATANBgkqhkiG9w0BAQsFAAOC +AQEAQ7k5WhyhDTIGYCNzRnrMHWSzGqa1y4tJMW06wafJNRqTm1cthq1ibc6Hfq5a +K10K0qMcgauRTfQ1MWrVCTW/KnJ1vkhiTOH+RvxapGn84gSaRmV6KZen0+gMsgae +KEGe/3Hn+PmDVV+PTamHgPACfpTww38WHIe/7Ce9gHfG7MZ8cKHNZhDy0IAYPln+ +YRwMLd7JNQffHAbWb2CE1mcea4H/12U8JZW5tHCF6y9V+7IuDzqwIrLKcW3lG17n +VUG6ODF/Ryqn3V5X+TL91YyXi6c34y34IpC7MQDV/67U7+5Bp5CfeDPWW2wVSrW+ +uGZtfEvhbNm6m2i4UNmpCXxUZQ== +-----END CERTIFICATE----- diff --git a/mybulma/node_modules/node-gyp/test/fixtures/ca.crt b/mybulma/node_modules/node-gyp/test/fixtures/ca.crt new file mode 100644 index 0000000..aaf9757 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/fixtures/ca.crt @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- 
+MIIDZDCCAkwCCQCAzfCLqrJvuTANBgkqhkiG9w0BAQsFADB0MQswCQYDVQQGEwJV +UzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsMCG5vZGUt +Z3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEGJ1aWxkQG5v +ZGVqcy5vcmcwHhcNMTkwNjIyMDYyMjMzWhcNMjIwNDExMDYyMjMzWjB0MQswCQYD +VQQGEwJVUzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsM +CG5vZGUtZ3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEGJ1 +aWxkQG5vZGVqcy5vcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDS +CHjvtVW4HdbbUwZ/ZV9s6U4x0KSoyNQrsCZjB8kRpFPe50DS5mfmu2SNBGYKRgzk +4QEEwFB9N2o8YTWsCefSRl6ti4ToPZqulU4hhRKYrEGtMJcRzi3IN7s200JaO3UH +01Su8ruO0NESb5zEU1Ykfh8Lub8TGEAINmgI61d/5d5Aq3kDjUHQJt1Ekw03Ylnu +juQyCGZxLxnngu0mIvwzyL/UeeUgsfQLzvppUk6In7tC1zzMjSPWo0c8qu6KvrW4 +bKYnkZkzdQifzbpO5ERMEsh5HWq0uHa6+dgcVHFvlhdqF4Uat87ygNplVf0txsZB +MNVqbz1k6xkZYMnzDoydAgMBAAEwDQYJKoZIhvcNAQELBQADggEBADspZGtKpWxy +J1W3FA1aeQhMvequQTcMRz4avkm4K4HfTdV1iVD4CbvdezBphouBlyLVLDFJP7RZ +m7dBJVgBwnxufoFLne8cR2MGqDRoySbFT1AtDJdxabE6Fg+QGUpgOQfeBJ6ANlSB ++qJ+HG4QA+Ouh5hxz9mgYwkIsMUABHiwENdZ/kT8Edw4xKgd3uH0YP4iiePMD66c +rzW3uXH5J1jnKgBlpxtog4P6dHCcoq+PZJ17W5bdXNyqC1LPzQqniZ2BNcEZ4ix3 +slAZAOWD1zLLGJhBPMV1fa0sHNBWc6oicr3YK/IDb0cp9kiLvnUu1pHy+LWQGqtC +rceJuGsnJEQ= +-----END CERTIFICATE----- diff --git a/mybulma/node_modules/node-gyp/test/fixtures/nodedir/include/node/config.gypi b/mybulma/node_modules/node-gyp/test/fixtures/nodedir/include/node/config.gypi new file mode 100644 index 0000000..e767534 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/fixtures/nodedir/include/node/config.gypi @@ -0,0 +1,6 @@ +# Test configuration +{ + 'variables': { + 'build_with_electron': true + } +} diff --git a/mybulma/node_modules/node-gyp/test/fixtures/server.crt b/mybulma/node_modules/node-gyp/test/fixtures/server.crt new file mode 100644 index 0000000..5d0c440 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/fixtures/server.crt @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDYjCCAkoCCQCSlmGR7KzZGTANBgkqhkiG9w0BAQsFADB0MQswCQYDVQQGEwJV +UzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsMCG5vZGUt +Z3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEGJ1aWxkQG5v +ZGVqcy5vcmcwHhcNMTkwNjIyMDYyNTU1WhcNMjkwNjE5MDYyNTU1WjByMQswCQYD +VQQGEwJVUzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsM +CG5vZGUtZ3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHTAbBgkqhkiG9w0BCQEWDmJ1 +aWxkQGlvanMub3JnMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6S1E +2WchgmbJYqCnpN7310ZgHjIOqeJe6MpSue2u6z6mTNd5izgvQNaANmn3xLFCS5zs +uZaTvdPYPkcmSQzb1YcZSUYnAxZifjYARc6kb5GSBl3q+O70ELyFrimXfZ4JI+bd +IG9KiHY17DlvZZZj/csGYVWWg0mkeH3O5LPX6/DXQVh/9+gZ02/cdIBCAtZHQwqx +7tF/qZA/kD4GZNFpU1DYHzf9H6g9htoCqmNHQWrV2T9yFybt6mbZp9kglBmyKYCc +7hmQnb7N/mHn1yIuwhBsirCJTfKH86gN81u8M3+SVHA2VUHDllcNhpDWlmInXA+I +tHdGZHCp95ohqpCPgQIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCdvYj6CD0ZLwT2 +3t1r+deC3TJuHlNVSeKeT7wIfFnh2FW5riGV0q/w6eXPLTHjuiS6YmpAAbdNUgX/ +sq64FqI2RLpX6pgY5yB0SKopMcJxMLKqmF4zHpIHxtYN5EmN3PR0vehneBR/nZ2T +3ikvWD5JeXlm7Dfw+tjijdxM/sEoDWErGup4mMKMd1s5s830p+ITJUa50d0DLFdH +mqPSbUZF8mMPwGJd+nu1Ht3gTLtK7+gYJgGtXMJmGC0Qg77EJHDB2NbotgDGNmSU +1H9BpAeFHHIcbh2Rr7kkTvnh/c03vFe+CsDZmezcmRpRzW1fKj3YbfqBxU4XwJrL +a5T/N9xU +-----END CERTIFICATE----- diff --git a/mybulma/node_modules/node-gyp/test/fixtures/server.key b/mybulma/node_modules/node-gyp/test/fixtures/server.key new file mode 100644 index 0000000..a844739 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/fixtures/server.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEA6S1E2WchgmbJYqCnpN7310ZgHjIOqeJe6MpSue2u6z6mTNd5 
+izgvQNaANmn3xLFCS5zsuZaTvdPYPkcmSQzb1YcZSUYnAxZifjYARc6kb5GSBl3q ++O70ELyFrimXfZ4JI+bdIG9KiHY17DlvZZZj/csGYVWWg0mkeH3O5LPX6/DXQVh/ +9+gZ02/cdIBCAtZHQwqx7tF/qZA/kD4GZNFpU1DYHzf9H6g9htoCqmNHQWrV2T9y +Fybt6mbZp9kglBmyKYCc7hmQnb7N/mHn1yIuwhBsirCJTfKH86gN81u8M3+SVHA2 +VUHDllcNhpDWlmInXA+ItHdGZHCp95ohqpCPgQIDAQABAoIBABW8R4ewalo6dJlB ++n6O3jFt+PW3mtBRLqGqgm2cb0q0a1IMX+MPWLBFjmwEErl+AH0F4rcmBx2Ryr17 +amEy1qcf0caXyHksNAApznqzWXag7iizxnxv4cZRnHBwphNqkNWM5p3oYd04j6w2 +amDg1O9KZozaKo6QZclpiMiezwjKG+PVZLT8p7afswjv+yDWPDByhlcGiye9QD1T +VuZ0QCoXp6N/8JxW0gdkLp9NqFvGeGFzJ5h6L+d7A6BWw8akXrBRHHcKkyvVYBfd +myhSzSK4FPFMaxaEY/65FlVSyAO6ezGm3Umx4g7mkFjLdwKWaIOjkBkPeFgl3Pp4 +7Lo5X3UCgYEA/FrrIwmEU5ayulBVScEMKeavu5eNY4r0Sqbpov2oyTdYe8G49Pzy +ryMXfunY43moLKpajGwgTKRGvdqFtK08AAkaCssiAPkP3rZuZvMTF4sLo/vlWrjP +3er+tUqj22BzXi5XV0BAvH8Y3TL8KQ3he/8JxDvkC811/DQ9Y/Da3U8CgYEA7Itw +UM37URma08Bj9VTMoL9ZCyURewX+ZLDb2+O8sXGXJs28i1RkE6PTBlnRmedn+Jjk +byzQ5Cs5wA5uMbhYTA7kgXOs1bvgQqmlLmyL6FfHkucoMhr2Di7VeGf4OxE26JZ8 +JdY4+1MOyI3A2rR8WU+GmHxy0ay4K2xe6W0vsi8CgYBoGLEKIPDe8jkDtgOYivOT +jT9MaLXALB+dc8DIpU4swpHTaxP6qyUIrbcReTEolJSU6Ci16BxiwRkVU8D3yMYJ +VbfSX/zE3fh37FUaToa/nXHN0SjJBZdpeXhcHE//PIgaf48zxKNvnhYJmPB/luQ+ +m/PRaMsnOzfCM2JniYEe7QKBgGwjnxhB4tgDtaWCue/pcZc3gzS2IJS2e8N6mzie +l6Ajhu+FdOHZldrotUuc+la61OxwsVYmDeWR4VftAPGYDj3PPSX1RRl9R5wSRGLB +2wBASQvew6CMdNqtDIh8N56BUzHnwh/mHKzBHuwO6hDSHFsUITtLAY7bwGKRq55Z +fUmfAoGBANOYFyoJoDLcl+Jd750lyqfCifcGtkRdmZMtrPXaYnD8ZGme9vz1vsK/ +4iUkV3mi7Z9s1LXMa/tPPfKdVhCM1PXost3/si0+u1Bz5yKqEPXlyy2ltpIVyGu8 +yiy7y75asp8Iii/1cgtwyp9+VeSif8wJ+MHQoGdGxvAQP80R3EjF +-----END RSA PRIVATE KEY----- diff --git a/mybulma/node_modules/node-gyp/test/fixtures/test-charmap.py b/mybulma/node_modules/node-gyp/test/fixtures/test-charmap.py new file mode 100644 index 0000000..63aa77b --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/fixtures/test-charmap.py @@ -0,0 +1,31 @@ +import sys +import locale + +try: + reload(sys) +except NameError: # Python 3 + pass + + +def main(): + encoding = locale.getdefaultlocale()[1] + if not encoding: + return False + + try: + sys.setdefaultencoding(encoding) + except AttributeError: # Python 3 + pass + + textmap = { + "cp936": "\u4e2d\u6587", + "cp1252": "Lat\u012Bna", + "cp932": "\u306b\u307b\u3093\u3054", + } + if encoding in textmap: + print(textmap[encoding]) + return True + + +if __name__ == "__main__": + print(main()) diff --git a/mybulma/node_modules/node-gyp/test/process-exec-sync.js b/mybulma/node_modules/node-gyp/test/process-exec-sync.js new file mode 100644 index 0000000..21763bc --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/process-exec-sync.js @@ -0,0 +1,140 @@ +'use strict' + +const fs = require('graceful-fs') +const childProcess = require('child_process') + +function startsWith (str, search, pos) { + if (String.prototype.startsWith) { + return str.startsWith(search, pos) + } + + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search
+}
+
+function processExecSync (file, args, options) {
+  var child, error, timeout, tmpdir, command
+  command = makeCommand(file, args)
+
+  /*
+  this function emulates child_process.execSync for legacy node <= 0.10.x
+  derived from https://github.com/gvarsanyi/sync-exec/blob/master/js/sync-exec.js
+  */
+
+  options = options || {}
+  // init timeout
+  timeout = Date.now() + options.timeout
+  // init tmpdir
+  var osTempBase = '/tmp'
+  var os = determineOS()
+  osTempBase = '/tmp'
+
+  if (process.env.TMP) {
+    osTempBase = process.env.TMP
+  }
+
+  if (osTempBase[osTempBase.length - 1] !== '/') {
+    osTempBase += '/'
+  }
+
+  tmpdir = osTempBase + 'processExecSync.' + Date.now() + Math.random()
+  fs.mkdirSync(tmpdir)
+
+  // init command
+  if (os === 'linux') {
+    command = '(' + command + ' > ' + tmpdir + '/stdout 2> ' + tmpdir +
+      '/stderr); echo $? > ' + tmpdir + '/status'
+  } else {
+    command = '(' + command + ' > ' + tmpdir + '/stdout 2> ' + tmpdir +
+      '/stderr) | echo %errorlevel% > ' + tmpdir + '/status | exit'
+  }
+
+  // init child
+  child = childProcess.exec(command, options)
+
+  var maxTry = 100000 // increases the test time by 6 seconds on win-2016-node-0.10
+  var tryCount = 0
+  while (tryCount < maxTry) {
+    try {
+      var x = fs.readFileSync(tmpdir + '/status')
+      if (x.toString() === '0') {
+        break
+      }
+    } catch (ignore) {}
+    tryCount++
+    if (Date.now() > timeout) {
+      error = child
+      break
+    }
+  }
+
+  ['stdout', 'stderr', 'status'].forEach(function (file) {
+    child[file] = fs.readFileSync(tmpdir + '/' + file, options.encoding)
+    setTimeout(unlinkFile, 500, tmpdir + '/' + file)
+  })
+
+  child.status = Number(child.status)
+  if (child.status !== 0) {
+    error = child
+  }
+
+  try {
+    fs.rmdirSync(tmpdir)
+  } catch (ignore) {}
+  if (error) {
+    throw error
+  }
+  return child.stdout
+}
+
+function makeCommand (file, args) {
+  var command, quote
+  command = file
+  if (args.length > 0) {
+    for (var i in args) {
+      command = command + ' '
+      if (args[i][0] === '-') {
+        command = command + args[i]
+      } else {
+        if (!quote) {
+          command = command + '"'
+          quote = true
+        }
+        command = command + args[i]
+        if (quote) {
+          if (args.length === (parseInt(i) + 1)) {
+            command = command + '"'
+          }
+        }
+      }
+    }
+  }
+  return command
+}
+
+function determineOS () {
+  var os = ''
+  var tmpVar = ''
+  if (process.env.OSTYPE) {
+    tmpVar = process.env.OSTYPE
+  } else if (process.env.OS) {
+    tmpVar = process.env.OS
+  } else {
+    // default is linux
+    tmpVar = 'linux'
+  }
+
+  if (startsWith(tmpVar, 'linux')) {
+    os = 'linux'
+  }
+  if (startsWith(tmpVar, 'win')) {
+    os = 'win'
+  }
+
+  return os
+}
+
+function unlinkFile (file) {
+  fs.unlinkSync(file)
+}
+
+module.exports = processExecSync
diff --git a/mybulma/node_modules/node-gyp/test/simple-proxy.js b/mybulma/node_modules/node-gyp/test/simple-proxy.js
new file mode 100644
index 0000000..cb0dfcf
--- /dev/null
+++ b/mybulma/node_modules/node-gyp/test/simple-proxy.js
@@ -0,0 +1,27 @@
+'use strict'
+
+const http = require('http')
+const https = require('https')
+const server = http.createServer(handler)
+const port = +process.argv[2]
+const prefix = process.argv[3]
+const upstream = process.argv[4]
+var calls = 0
+
+server.listen(port)
+
+function handler (req, res) {
+  if (req.url.indexOf(prefix) !== 0) {
+    throw new Error('request url [' + req.url + '] does not start with [' + prefix + ']')
+  }
+
+  var upstreamUrl = upstream + req.url.substring(prefix.length)
+  https.get(upstreamUrl, function (ures) {
+    ures.on('end', function () {
+      if (++calls === 2) {
+        server.close()
+      }
+    })
+    ures.pipe(res)
+  })
+}
diff --git a/mybulma/node_modules/node-gyp/test/test-addon.js b/mybulma/node_modules/node-gyp/test/test-addon.js
new file mode 100644
index 0000000..f79eff7
--- /dev/null
+++ b/mybulma/node_modules/node-gyp/test/test-addon.js
@@ -0,0 +1,150 @@
+'use strict'
+
+const test = require('tap').test
+const path = require('path')
+const fs = require('graceful-fs')
+const childProcess = require('child_process')
+const os = require('os')
+const addonPath = path.resolve(__dirname, 'node_modules', 'hello_world')
+const nodeGyp = path.resolve(__dirname, '..', 'bin', 'node-gyp.js')
+const execFileSync = childProcess.execFileSync || require('./process-exec-sync')
+const execFile = childProcess.execFile
+
+function runHello (hostProcess) {
+  if (!hostProcess) {
+    hostProcess = process.execPath
+  }
+  var testCode = "console.log(require('hello_world').hello())"
+  return execFileSync(hostProcess, ['-e', testCode], { cwd: __dirname }).toString()
+}
+
+function getEncoding () {
+  var code = 'import locale;print(locale.getdefaultlocale()[1])'
+  return execFileSync('python', ['-c', code]).toString().trim()
+}
+
+function checkCharmapValid () {
+  var data
+  try {
+    data = execFileSync('python', ['fixtures/test-charmap.py'],
+      { cwd: __dirname })
+  } catch (err) {
+    return false
+  }
+  var lines = data.toString().trim().split('\n')
+  return lines.pop() === 'True'
+}
+
+test('build simple addon', function (t) {
+  t.plan(3)
+
+  // Set the loglevel otherwise the output disappears when run via 'npm test'
+  var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose']
+  var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) {
+    var logLines = stderr.toString().trim().split(/\r?\n/)
+    var lastLine = logLines[logLines.length - 1]
+    t.strictEqual(err, null)
+    t.strictEqual(lastLine, 'gyp info ok', 'should end in ok')
+    t.strictEqual(runHello().trim(), 'world')
+  })
+  proc.stdout.setEncoding('utf-8')
+  proc.stderr.setEncoding('utf-8')
+})
+
+test('build simple addon in path with non-ascii characters', function (t) {
+  t.plan(1)
+
+  if (!checkCharmapValid()) {
+    return t.skip('python console app can\'t encode non-ascii character.')
+  }
+
+  var testDirNames = {
+    cp936: '文件夹',
+    cp1252: 'Latīna',
+    cp932: 'フォルダ'
+  }
+  // Select non-ascii characters by current encoding
+  var testDirName = testDirNames[getEncoding()]
+  // If encoding is UTF-8 or other then no need to test
+  if (!testDirName) {
+    return t.skip('no need to test')
+  }
+
+  t.plan(3)
+
+  var data
+  var configPath = path.join(addonPath, 'build', 'config.gypi')
+  try {
+    data = fs.readFileSync(configPath, 'utf8')
+  } catch (err) {
+    t.error(err)
+    return
+  }
+  var config = JSON.parse(data.replace(/#.+\n/, ''))
+  var nodeDir = config.variables.nodedir
+  var testNodeDir = path.join(addonPath, testDirName)
+  // Create symbol link to path with non-ascii characters
+  try {
+    fs.symlinkSync(nodeDir, testNodeDir, 'dir')
+  } catch (err) {
+    switch (err.code) {
+      case 'EEXIST': break
+      case 'EPERM':
+        t.error(err, 'Please try to running console as an administrator')
+        return
+      default:
+        t.error(err)
+        return
+    }
+  }
+
+  var cmd = [
+    nodeGyp,
+    'rebuild',
+    '-C',
+    addonPath,
+    '--loglevel=verbose',
+    '-nodedir=' + testNodeDir
+  ]
+  var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) {
+    try {
+      fs.unlink(testNodeDir)
+    } catch (err) {
+      t.error(err)
+    }
+
+    var logLines = stderr.toString().trim().split(/\r?\n/)
+    var lastLine = logLines[logLines.length - 1]
+    t.strictEqual(err, null)
+    t.strictEqual(lastLine, 'gyp info ok', 'should end in ok')
+    t.strictEqual(runHello().trim(), 'world')
+  })
+  proc.stdout.setEncoding('utf-8')
+  proc.stderr.setEncoding('utf-8')
+})
+
+test('addon works with renamed host executable', function (t) {
+  // No `fs.copyFileSync` before node8.
+  if (process.version.substr(1).split('.')[0] < 8) {
+    t.skip('skipping test for old node version')
+    t.end()
+    return
+  }
+
+  t.plan(3)
+
+  var notNodePath = path.join(os.tmpdir(), 'notnode' + path.extname(process.execPath))
+  fs.copyFileSync(process.execPath, notNodePath)
+
+  var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose']
+  var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) {
+    var logLines = stderr.toString().trim().split(/\r?\n/)
+    var lastLine = logLines[logLines.length - 1]
+    t.strictEqual(err, null)
+    t.strictEqual(lastLine, 'gyp info ok', 'should end in ok')
+    t.strictEqual(runHello(notNodePath).trim(), 'world')
+    fs.unlinkSync(notNodePath)
+  })
+  proc.stdout.setEncoding('utf-8')
+  proc.stderr.setEncoding('utf-8')
+})
diff --git a/mybulma/node_modules/node-gyp/test/test-configure-python.js b/mybulma/node_modules/node-gyp/test/test-configure-python.js
new file mode 100644
index 0000000..4290e7a
--- /dev/null
+++ b/mybulma/node_modules/node-gyp/test/test-configure-python.js
@@ -0,0 +1,82 @@
+'use strict'
+
+const test = require('tap').test
+const path = require('path')
+const devDir = require('./common').devDir()
+const gyp = require('../lib/node-gyp')
+const requireInject = require('require-inject')
+const configure = requireInject('../lib/configure', {
+  'graceful-fs': {
+    openSync: function () { return 0 },
+    closeSync: function () { },
+    writeFile: function (file, data, cb) { cb() },
+    stat: function (file, cb) { cb(null, {}) },
+    mkdir: function (dir, options, cb) { cb() },
+    promises: {
+      writeFile: function (file, data) { return Promise.resolve(null) }
+    }
+  }
+})
+
+const EXPECTED_PYPATH = path.join(__dirname, '..', 'gyp', 'pylib')
+const SEPARATOR = process.platform === 'win32' ?
';' : ':' +const SPAWN_RESULT = { on: function () { } } + +require('npmlog').level = 'warn' + +test('configure PYTHONPATH with no existing env', function (t) { + t.plan(1) + + delete process.env.PYTHONPATH + + var prog = gyp() + prog.parseArgv([]) + prog.spawn = function () { + t.equal(process.env.PYTHONPATH, EXPECTED_PYPATH) + return SPAWN_RESULT + } + prog.devDir = devDir + configure(prog, [], t.fail) +}) + +test('configure PYTHONPATH with existing env of one dir', function (t) { + t.plan(2) + + var existingPath = path.join('a', 'b') + process.env.PYTHONPATH = existingPath + + var prog = gyp() + prog.parseArgv([]) + prog.spawn = function () { + t.equal(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) + + var dirs = process.env.PYTHONPATH.split(SEPARATOR) + t.deepEqual(dirs, [EXPECTED_PYPATH, existingPath]) + + return SPAWN_RESULT + } + prog.devDir = devDir + configure(prog, [], t.fail) +}) + +test('configure PYTHONPATH with existing env of multiple dirs', function (t) { + t.plan(2) + + var pythonDir1 = path.join('a', 'b') + var pythonDir2 = path.join('b', 'c') + var existingPath = [pythonDir1, pythonDir2].join(SEPARATOR) + process.env.PYTHONPATH = existingPath + + var prog = gyp() + prog.parseArgv([]) + prog.spawn = function () { + t.equal(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) + + var dirs = process.env.PYTHONPATH.split(SEPARATOR) + t.deepEqual(dirs, [EXPECTED_PYPATH, pythonDir1, pythonDir2]) + + return SPAWN_RESULT + } + prog.devDir = devDir + configure(prog, [], t.fail) +}) diff --git a/mybulma/node_modules/node-gyp/test/test-create-config-gypi.js b/mybulma/node_modules/node-gyp/test/test-create-config-gypi.js new file mode 100644 index 0000000..eeac73f --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/test-create-config-gypi.js @@ -0,0 +1,70 @@ +'use strict' + +const path = require('path') +const { test } = require('tap') +const gyp = require('../lib/node-gyp') +const createConfigGypi = require('../lib/create-config-gypi') +const { parseConfigGypi, getCurrentConfigGypi } = createConfigGypi.test + +test('config.gypi with no options', async function (t) { + t.plan(2) + + const prog = gyp() + prog.parseArgv([]) + + const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) + t.equal(config.target_defaults.default_configuration, 'Release') + t.equal(config.variables.target_arch, process.arch) +}) + +test('config.gypi with --debug', async function (t) { + t.plan(1) + + const prog = gyp() + prog.parseArgv(['_', '_', '--debug']) + + const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) + t.equal(config.target_defaults.default_configuration, 'Debug') +}) + +test('config.gypi with custom options', async function (t) { + t.plan(1) + + const prog = gyp() + prog.parseArgv(['_', '_', '--shared-libxml2']) + + const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) + t.equal(config.variables.shared_libxml2, true) +}) + +test('config.gypi with nodedir', async function (t) { + t.plan(1) + + const nodeDir = path.join(__dirname, 'fixtures', 'nodedir') + + const prog = gyp() + prog.parseArgv(['_', '_', `--nodedir=${nodeDir}`]) + + const config = await getCurrentConfigGypi({ gyp: prog, nodeDir, vsInfo: {} }) + t.equal(config.variables.build_with_electron, true) +}) + +test('config.gypi with --force-process-config', async function (t) { + t.plan(1) + + const nodeDir = path.join(__dirname, 'fixtures', 'nodedir') + + const prog = gyp() + prog.parseArgv(['_', '_', '--force-process-config', 
`--nodedir=${nodeDir}`]) + + const config = await getCurrentConfigGypi({ gyp: prog, nodeDir, vsInfo: {} }) + t.equal(config.variables.build_with_electron, undefined) +}) + +test('config.gypi parsing', function (t) { + t.plan(1) + + const str = "# Some comments\n{'variables': {'multiline': 'A'\n'B'}}" + const config = parseConfigGypi(str) + t.deepEqual(config, { variables: { multiline: 'AB' } }) +}) diff --git a/mybulma/node_modules/node-gyp/test/test-download.js b/mybulma/node_modules/node-gyp/test/test-download.js new file mode 100644 index 0000000..71a3c0d --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/test-download.js @@ -0,0 +1,207 @@ +'use strict' + +const { test } = require('tap') +const fs = require('fs') +const path = require('path') +const util = require('util') +const http = require('http') +const https = require('https') +const install = require('../lib/install') +const semver = require('semver') +const devDir = require('./common').devDir() +const rimraf = require('rimraf') +const gyp = require('../lib/node-gyp') +const log = require('npmlog') + +log.level = 'warn' + +test('download over http', async (t) => { + t.plan(2) + + const server = http.createServer((req, res) => { + t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('ok') + }) + + t.tearDown(() => new Promise((resolve) => server.close(resolve))) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + const gyp = { + opts: {}, + version: '42' + } + const url = `http://${host}:${port}` + const res = await install.test.download(gyp, url) + t.strictEqual(await res.text(), 'ok') +}) + +test('download over https with custom ca', async (t) => { + t.plan(3) + + const cafile = path.join(__dirname, '/fixtures/ca.crt') + const [cert, key, ca] = await Promise.all([ + fs.promises.readFile(path.join(__dirname, 'fixtures/server.crt'), 'utf8'), + fs.promises.readFile(path.join(__dirname, 'fixtures/server.key'), 'utf8'), + install.test.readCAFile(cafile) + ]) + + t.strictEqual(ca.length, 1) + + const options = { ca: ca, cert: cert, key: key } + const server = https.createServer(options, (req, res) => { + t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('ok') + }) + + t.tearDown(() => new Promise((resolve) => server.close(resolve))) + + server.on('clientError', (err) => { throw err }) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + const gyp = { + opts: { cafile }, + version: '42' + } + const url = `https://${host}:${port}` + const res = await install.test.download(gyp, url) + t.strictEqual(await res.text(), 'ok') +}) + +test('download over http with proxy', async (t) => { + t.plan(2) + + const server = http.createServer((_, res) => { + res.end('ok') + }) + + const pserver = http.createServer((req, res) => { + t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('proxy ok') + }) + + t.tearDown(() => Promise.all([ + new Promise((resolve) => server.close(resolve)), + new Promise((resolve) => pserver.close(resolve)) + ])) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + await new Promise((resolve) => pserver.listen(port + 1, host, resolve)) + const gyp = { + opts: { + proxy: `http://${host}:${port + 1}`, + noproxy: 'bad' + }, + version: '42' + } + const url = 
`http://${host}:${port}` + const res = await install.test.download(gyp, url) + t.strictEqual(await res.text(), 'proxy ok') +}) + +test('download over http with noproxy', async (t) => { + t.plan(2) + + const server = http.createServer((req, res) => { + t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('ok') + }) + + const pserver = http.createServer((_, res) => { + res.end('proxy ok') + }) + + t.tearDown(() => Promise.all([ + new Promise((resolve) => server.close(resolve)), + new Promise((resolve) => pserver.close(resolve)) + ])) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + await new Promise((resolve) => pserver.listen(port + 1, host, resolve)) + const gyp = { + opts: { + proxy: `http://${host}:${port + 1}`, + noproxy: host + }, + version: '42' + } + const url = `http://${host}:${port}` + const res = await install.test.download(gyp, url) + t.strictEqual(await res.text(), 'ok') +}) + +test('download with missing cafile', async (t) => { + t.plan(1) + const gyp = { + opts: { cafile: 'no.such.file' } + } + try { + await install.test.download(gyp, {}, 'http://bad/') + } catch (e) { + t.ok(/no.such.file/.test(e.message)) + } +}) + +test('check certificate splitting', async (t) => { + const cas = await install.test.readCAFile(path.join(__dirname, 'fixtures/ca-bundle.crt')) + t.plan(2) + t.strictEqual(cas.length, 2) + t.notStrictEqual(cas[0], cas[1]) +}) + +// only run this test if we are running a version of Node with predictable version path behavior + +test('download headers (actual)', async (t) => { + if (process.env.FAST_TEST || + process.release.name !== 'node' || + semver.prerelease(process.version) !== null || + semver.satisfies(process.version, '<10')) { + return t.skip('Skipping actual download of headers due to test environment configuration') + } + + t.plan(12) + + const expectedDir = path.join(devDir, process.version.replace(/^v/, '')) + await util.promisify(rimraf)(expectedDir) + + const prog = gyp() + prog.parseArgv([]) + prog.devDir = devDir + log.level = 'warn' + await util.promisify(install)(prog, []) + + const data = await fs.promises.readFile(path.join(expectedDir, 'installVersion'), 'utf8') + t.strictEqual(data, '9\n', 'correct installVersion') + + const list = await fs.promises.readdir(path.join(expectedDir, 'include/node')) + t.ok(list.includes('common.gypi')) + t.ok(list.includes('config.gypi')) + t.ok(list.includes('node.h')) + t.ok(list.includes('node_version.h')) + t.ok(list.includes('openssl')) + t.ok(list.includes('uv')) + t.ok(list.includes('uv.h')) + t.ok(list.includes('v8-platform.h')) + t.ok(list.includes('v8.h')) + t.ok(list.includes('zlib.h')) + + const lines = (await fs.promises.readFile(path.join(expectedDir, 'include/node/node_version.h'), 'utf8')).split('\n') + + // extract the 3 version parts from the defines to build a valid version string and + // and check them against our current env version + const version = ['major', 'minor', 'patch'].reduce((version, type) => { + const re = new RegExp(`^#define\\sNODE_${type.toUpperCase()}_VERSION`) + const line = lines.find((l) => re.test(l)) + const i = line ? parseInt(line.replace(/^[^0-9]+([0-9]+).*$/, '$1'), 10) : 'ERROR' + return `${version}${type !== 'major' ? '.' 
: 'v'}${i}` + }, '') + + t.strictEqual(version, process.version) +}) diff --git a/mybulma/node_modules/node-gyp/test/test-find-accessible-sync.js b/mybulma/node_modules/node-gyp/test/test-find-accessible-sync.js new file mode 100644 index 0000000..0a2e584 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/test-find-accessible-sync.js @@ -0,0 +1,84 @@ +'use strict' + +const test = require('tap').test +const path = require('path') +const requireInject = require('require-inject') +const configure = requireInject('../lib/configure', { + 'graceful-fs': { + closeSync: function () { return undefined }, + openSync: function (path) { + if (readableFiles.some(function (f) { return f === path })) { + return 0 + } else { + var error = new Error('ENOENT - not found') + throw error + } + } + } +}) + +const dir = path.sep + 'testdir' +const readableFile = 'readable_file' +const anotherReadableFile = 'another_readable_file' +const readableFileInDir = 'somedir' + path.sep + readableFile +const readableFiles = [ + path.resolve(dir, readableFile), + path.resolve(dir, anotherReadableFile), + path.resolve(dir, readableFileInDir) +] + +test('find accessible - empty array', function (t) { + t.plan(1) + + var candidates = [] + var found = configure.test.findAccessibleSync('test', dir, candidates) + t.strictEqual(found, undefined) +}) + +test('find accessible - single item array, readable', function (t) { + t.plan(1) + + var candidates = [readableFile] + var found = configure.test.findAccessibleSync('test', dir, candidates) + t.strictEqual(found, path.resolve(dir, readableFile)) +}) + +test('find accessible - single item array, readable in subdir', function (t) { + t.plan(1) + + var candidates = [readableFileInDir] + var found = configure.test.findAccessibleSync('test', dir, candidates) + t.strictEqual(found, path.resolve(dir, readableFileInDir)) +}) + +test('find accessible - single item array, unreadable', function (t) { + t.plan(1) + + var candidates = ['unreadable_file'] + var found = configure.test.findAccessibleSync('test', dir, candidates) + t.strictEqual(found, undefined) +}) + +test('find accessible - multi item array, no matches', function (t) { + t.plan(1) + + var candidates = ['non_existent_file', 'unreadable_file'] + var found = configure.test.findAccessibleSync('test', dir, candidates) + t.strictEqual(found, undefined) +}) + +test('find accessible - multi item array, single match', function (t) { + t.plan(1) + + var candidates = ['non_existent_file', readableFile] + var found = configure.test.findAccessibleSync('test', dir, candidates) + t.strictEqual(found, path.resolve(dir, readableFile)) +}) + +test('find accessible - multi item array, return first match', function (t) { + t.plan(1) + + var candidates = ['non_existent_file', anotherReadableFile, readableFile] + var found = configure.test.findAccessibleSync('test', dir, candidates) + t.strictEqual(found, path.resolve(dir, anotherReadableFile)) +}) diff --git a/mybulma/node_modules/node-gyp/test/test-find-node-directory.js b/mybulma/node_modules/node-gyp/test/test-find-node-directory.js new file mode 100644 index 0000000..f1380d1 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/test-find-node-directory.js @@ -0,0 +1,119 @@ +'use strict' + +const test = require('tap').test +const path = require('path') +const findNodeDirectory = require('../lib/find-node-directory') + +const platforms = ['darwin', 'freebsd', 'linux', 'sunos', 'win32', 'aix'] + +// we should find the directory based on the directory +// the script is running in and it should 
match the layout +// in a build tree where npm is installed in +// .... /deps/npm +test('test find-node-directory - node install', function (t) { + t.plan(platforms.length) + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } + t.equal( + findNodeDirectory('/x/deps/npm/node_modules/node-gyp/lib', processObj), + path.join('/x')) + } +}) + +// we should find the directory based on the directory +// the script is running in and it should match the layout +// in an installed tree where npm is installed in +// .... /lib/node_modules/npm or .../node_modules/npm +// depending on the patform +test('test find-node-directory - node build', function (t) { + t.plan(platforms.length) + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } + if (platforms[next] === 'win32') { + t.equal( + findNodeDirectory('/y/node_modules/npm/node_modules/node-gyp/lib', + processObj), path.join('/y')) + } else { + t.equal( + findNodeDirectory('/y/lib/node_modules/npm/node_modules/node-gyp/lib', + processObj), path.join('/y')) + } + } +}) + +// we should find the directory based on the execPath +// for node and match because it was in the bin directory +test('test find-node-directory - node in bin directory', function (t) { + t.plan(platforms.length) + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } + t.equal( + findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), + path.join('/x/y')) + } +}) + +// we should find the directory based on the execPath +// for node and match because it was in the Release directory +test('test find-node-directory - node in build release dir', function (t) { + t.plan(platforms.length) + for (var next = 0; next < platforms.length; next++) { + var processObj + if (platforms[next] === 'win32') { + processObj = { execPath: '/x/y/Release/node', platform: platforms[next] } + } else { + processObj = { + execPath: '/x/y/out/Release/node', + platform: platforms[next] + } + } + + t.equal( + findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), + path.join('/x/y')) + } +}) + +// we should find the directory based on the execPath +// for node and match because it was in the Debug directory +test('test find-node-directory - node in Debug release dir', function (t) { + t.plan(platforms.length) + for (var next = 0; next < platforms.length; next++) { + var processObj + if (platforms[next] === 'win32') { + processObj = { execPath: '/a/b/Debug/node', platform: platforms[next] } + } else { + processObj = { execPath: '/a/b/out/Debug/node', platform: platforms[next] } + } + + t.equal( + findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), + path.join('/a/b')) + } +}) + +// we should not find it as it will not match based on the execPath nor +// the directory from which the script is running +test('test find-node-directory - not found', function (t) { + t.plan(platforms.length) + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/z/y', platform: next } + t.equal(findNodeDirectory('/a/b/c/d', processObj), '') + } +}) + +// we should find the directory based on the directory +// the script is running in and it should match the layout +// in a build tree where npm is installed in +// .... 
/deps/npm +// same test as above but make sure additional directory entries +// don't cause an issue +test('test find-node-directory - node install', function (t) { + t.plan(platforms.length) + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } + t.equal( + findNodeDirectory('/x/y/z/a/b/c/deps/npm/node_modules/node-gyp/lib', + processObj), path.join('/x/y/z/a/b/c')) + } +}) diff --git a/mybulma/node_modules/node-gyp/test/test-find-python.js b/mybulma/node_modules/node-gyp/test/test-find-python.js new file mode 100644 index 0000000..67d0b26 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/test-find-python.js @@ -0,0 +1,226 @@ +'use strict' + +delete process.env.PYTHON + +const test = require('tap').test +const findPython = require('../lib/find-python') +const execFile = require('child_process').execFile +const PythonFinder = findPython.test.PythonFinder + +require('npmlog').level = 'warn' + +test('find python', function (t) { + t.plan(4) + + findPython.test.findPython(null, function (err, found) { + t.strictEqual(err, null) + var proc = execFile(found, ['-V'], function (err, stdout, stderr) { + t.strictEqual(err, null) + t.ok(/Python 3/.test(stdout)) + t.strictEqual(stderr, '') + }) + proc.stdout.setEncoding('utf-8') + proc.stderr.setEncoding('utf-8') + }) +}) + +function poison (object, property) { + function fail () { + console.error(Error(`Property ${property} should not have been accessed.`)) + process.abort() + } + var descriptor = { + configurable: false, + enumerable: false, + get: fail, + set: fail + } + Object.defineProperty(object, property, descriptor) +} + +function TestPythonFinder () { + PythonFinder.apply(this, arguments) +} +TestPythonFinder.prototype = Object.create(PythonFinder.prototype) +// Silence npmlog - remove for debugging +TestPythonFinder.prototype.log = { + silly: () => {}, + verbose: () => {}, + info: () => {}, + warn: () => {}, + error: () => {} +} +delete TestPythonFinder.prototype.env.NODE_GYP_FORCE_PYTHON + +test('find python - python', function (t) { + t.plan(6) + + var f = new TestPythonFinder('python', done) + f.execFile = function (program, args, opts, cb) { + f.execFile = function (program, args, opts, cb) { + poison(f, 'execFile') + t.strictEqual(program, '/path/python') + t.ok(/sys\.version_info/.test(args[1])) + cb(null, '3.9.1') + } + t.strictEqual(program, + process.platform === 'win32' ? 
'"python"' : 'python') + t.ok(/sys\.executable/.test(args[1])) + cb(null, '/path/python') + } + f.findPython() + + function done (err, python) { + t.strictEqual(err, null) + t.strictEqual(python, '/path/python') + } +}) + +test('find python - python too old', function (t) { + t.plan(2) + + var f = new TestPythonFinder(null, done) + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(null, '/path/python') + } else if (/sys\.version_info/.test(args[args.length - 1])) { + cb(null, '2.3.4') + } else { + t.fail() + } + } + f.findPython() + + function done (err) { + t.ok(/Could not find any Python/.test(err)) + t.ok(/not supported/i.test(f.errorLog)) + } +}) + +test('find python - no python', function (t) { + t.plan(2) + + var f = new TestPythonFinder(null, done) + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 1])) { + cb(new Error('not a Python executable')) + } else { + t.fail() + } + } + f.findPython() + + function done (err) { + t.ok(/Could not find any Python/.test(err)) + t.ok(/not in PATH/.test(f.errorLog)) + } +}) + +test('find python - no python2, no python, unix', function (t) { + t.plan(2) + + var f = new TestPythonFinder(null, done) + f.checkPyLauncher = t.fail + f.win = false + + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else { + t.fail() + } + } + f.findPython() + + function done (err) { + t.ok(/Could not find any Python/.test(err)) + t.ok(/not in PATH/.test(f.errorLog)) + } +}) + +test('find python - no python, use python launcher', function (t) { + t.plan(4) + + var f = new TestPythonFinder(null, done) + f.win = true + + f.execFile = function (program, args, opts, cb) { + if (program === 'py.exe') { + t.notEqual(args.indexOf('-3'), -1) + t.notEqual(args.indexOf('-c'), -1) + return cb(null, 'Z:\\snake.exe') + } + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (f.winDefaultLocations.includes(program)) { + cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 1])) { + if (program === 'Z:\\snake.exe') { + cb(null, '3.9.0') + } else { + t.fail() + } + } else { + t.fail() + } + } + f.findPython() + + function done (err, python) { + t.strictEqual(err, null) + t.strictEqual(python, 'Z:\\snake.exe') + } +}) + +test('find python - no python, no python launcher, good guess', function (t) { + t.plan(2) + + var f = new TestPythonFinder(null, done) + f.win = true + const expectedProgram = f.winDefaultLocations[0] + + f.execFile = function (program, args, opts, cb) { + if (program === 'py.exe') { + return cb(new Error('not found')) + } + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (program === expectedProgram && + /sys\.version_info/.test(args[args.length - 1])) { + cb(null, '3.7.3') + } else { + t.fail() + } + } + f.findPython() + + function done (err, python) { + t.strictEqual(err, null) + t.ok(python === expectedProgram) + } +}) + +test('find python - no python, no python launcher, bad guess', function (t) { + t.plan(2) + + var f = new TestPythonFinder(null, done) + f.win = true + + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 
1])) { + cb(new Error('not a Python executable')) + } else { + t.fail() + } + } + f.findPython() + + function done (err) { + t.ok(/Could not find any Python/.test(err)) + t.ok(/not in PATH/.test(f.errorLog)) + } +}) diff --git a/mybulma/node_modules/node-gyp/test/test-find-visualstudio.js b/mybulma/node_modules/node-gyp/test/test-find-visualstudio.js new file mode 100644 index 0000000..1327cf8 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/test-find-visualstudio.js @@ -0,0 +1,676 @@ +'use strict' + +const test = require('tap').test +const fs = require('fs') +const path = require('path') +const findVisualStudio = require('../lib/find-visualstudio') +const VisualStudioFinder = findVisualStudio.test.VisualStudioFinder + +const semverV1 = { major: 1, minor: 0, patch: 0 } + +delete process.env.VCINSTALLDIR + +function poison (object, property) { + function fail () { + console.error(Error(`Property ${property} should not have been accessed.`)) + process.abort() + } + var descriptor = { + configurable: false, + enumerable: false, + get: fail, + set: fail + } + Object.defineProperty(object, property, descriptor) +} + +function TestVisualStudioFinder () { VisualStudioFinder.apply(this, arguments) } +TestVisualStudioFinder.prototype = Object.create(VisualStudioFinder.prototype) +// Silence npmlog - remove for debugging +TestVisualStudioFinder.prototype.log = { + silly: () => {}, + verbose: () => {}, + info: () => {}, + warn: () => {}, + error: () => {} +} + +test('VS2013', function (t) { + t.plan(4) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\MSBuild12\\MSBuild.exe', + path: 'C:\\VS2013', + sdk: null, + toolset: 'v120', + version: '12.0', + versionMajor: 12, + versionMinor: 0, + versionYear: 2013 + }) + }) + + finder.findVisualStudio2017OrNewer = (cb) => { + finder.parseData(new Error(), '', '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + continue + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': + t.pass(`expected search for registry value ${fullName}`) + return cb(null, 'C:\\VS2013\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath': + t.pass(`expected search for registry value ${fullName}`) + return cb(null, 'C:\\MSBuild12\\') + default: + t.fail(`unexpected search for registry value ${fullName}`) + } + } + return cb(new Error()) + } + finder.findVisualStudio() +}) + +test('VS2013 should not be found on new node versions', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder({ + major: 10, + minor: 0, + patch: 0 + }, null, (err, info) => { + t.ok(/find .* Visual Studio/i.test(err), 'expect error') + t.false(info, 'no data') + }) + + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + continue + 
default: + t.fail(`unexpected search for registry value ${fullName}`) + } + } + return cb(new Error()) + } + finder.findVisualStudio() +}) + +test('VS2015', function (t) { + t.plan(4) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\MSBuild14\\MSBuild.exe', + path: 'C:\\VS2015', + sdk: null, + toolset: 'v140', + version: '14.0', + versionMajor: 14, + versionMinor: 0, + versionYear: 2015 + }) + }) + + finder.findVisualStudio2017OrNewer = (cb) => { + finder.parseData(new Error(), '', '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + t.pass(`expected search for registry value ${fullName}`) + return cb(null, 'C:\\VS2015\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath': + t.pass(`expected search for registry value ${fullName}`) + return cb(null, 'C:\\MSBuild14\\') + default: + t.fail(`unexpected search for registry value ${fullName}`) + } + } + return cb(new Error()) + } + finder.findVisualStudio() +}) + +test('error from PowerShell', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(new Error(), '', '', (info) => { + t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('empty output from PowerShell', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, '', '', (info) => { + t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('output from PowerShell not JSON', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, 'AAAABBBB', '', (info) => { + t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('wrong JSON from PowerShell', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, '{}', '', (info) => { + t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('empty JSON from PowerShell', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, '[]', '', (info) => { + t.ok(/find .* Visual Studio/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('future version', function (t) { + t.plan(3) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, JSON.stringify([{ + packages: [ + 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64', + 'Microsoft.VisualStudio.Component.Windows10SDK.17763', + 'Microsoft.VisualStudio.VC.MSBuild.Base' + ], + path: 'C:\\VS', + version: '9999.9999.9999.9999' + }]), '', (info) => { + t.ok(/unknown version/i.test(finder.errorLog[0]), 'expect error') + t.ok(/find .* Visual Studio/i.test(finder.errorLog[1]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('single unusable VS2017', function (t) { + t.plan(3) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', 
(info) => { + t.ok(/checking/i.test(finder.errorLog[0]), 'expect error') + t.ok(/find .* Visual Studio/i.test(finder.errorLog[2]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('minimal VS2017 Build Tools', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + + 'BuildTools\\MSBuild\\15.0\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildTools', + sdk: '10.0.17134.0', + toolset: 'v141', + version: '15.9.28307.665', + versionMajor: 15, + versionMinor: 9, + versionYear: 2017 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2017_BuildTools_minimal.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +test('VS2017 Community with C++ workload', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + + 'Community\\MSBuild\\15.0\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community', + sdk: '10.0.17763.0', + toolset: 'v141', + version: '15.9.28307.665', + versionMajor: 15, + versionMinor: 9, + versionYear: 2017 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2017_Community_workload.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +test('VS2017 Express', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + + 'WDExpress\\MSBuild\\15.0\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\WDExpress', + sdk: '10.0.17763.0', + toolset: 'v141', + version: '15.9.28307.858', + versionMajor: 15, + versionMinor: 9, + versionYear: 2017 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', 'VS_2017_Express.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +test('VS2019 Preview with C++ workload', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + + 'Preview\\MSBuild\\Current\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Preview', + sdk: '10.0.17763.0', + toolset: 'v142', + version: '16.0.28608.199', + versionMajor: 16, + versionMinor: 0, + versionYear: 2019 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2019_Preview.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +test('minimal VS2019 Build Tools', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) 
=> { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + + 'BuildTools\\MSBuild\\Current\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools', + sdk: '10.0.17134.0', + toolset: 'v142', + version: '16.1.28922.388', + versionMajor: 16, + versionMinor: 1, + versionYear: 2019 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2019_BuildTools_minimal.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +test('VS2019 Community with C++ workload', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + + 'Community\\MSBuild\\Current\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community', + sdk: '10.0.17763.0', + toolset: 'v142', + version: '16.1.28922.388', + versionMajor: 16, + versionMinor: 1, + versionYear: 2019 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2019_Community_workload.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +function allVsVersions (t, finder) { + finder.findVisualStudio2017OrNewer = (cb) => { + const data0 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_Unusable.txt'))) + const data1 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_BuildTools_minimal.txt'))) + const data2 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_Community_workload.txt'))) + const data3 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_Express.txt'))) + const data4 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2019_Preview.txt'))) + const data5 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2019_BuildTools_minimal.txt'))) + const data6 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2019_Community_workload.txt'))) + const data = JSON.stringify(data0.concat(data1, data2, data3, data4, + data5, data6)) + finder.parseData(null, data, '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': + continue + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': + return cb(null, 'C:\\VS2013\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath': + return cb(null, 'C:\\MSBuild12\\') + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + return cb(null, 'C:\\VS2015\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath': + return cb(null, 'C:\\MSBuild14\\') + default: + t.fail(`unexpected search for registry value ${fullName}`) + } + } + return cb(new Error()) + } +} + +test('fail when looking for invalid path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, 'AABB', (err, info) => { + t.ok(/find .* 
Visual Studio/i.test(err), 'expect error') + t.false(info, 'no data') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2013 by version number', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, '2013', (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2013) + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2013 by installation path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2013', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, 'C:\\VS2013') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2015 by version number', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, '2015', (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2015) + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2015 by installation path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, 'C:\\VS2015') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2017 by version number', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, '2017', (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2017) + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2017 by installation path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2019 by version number', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, '2019', (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2019) + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2019 by installation path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('msvs_version match should be case insensitive', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, + 'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('latest version should be found by default', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2019) + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('run on a usable VS Command Prompt', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = 'C:\\VS2015\\VC' + // VSINSTALLDIR is not defined on 
Visual C++ Build Tools 2015 + delete process.env.VSINSTALLDIR + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, 'C:\\VS2015') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('VCINSTALLDIR match should be case insensitive', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = + 'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS\\VC' + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('run on a unusable VS Command Prompt', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildToolsUnusable\\VC' + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.ok(/find .* Visual Studio/i.test(err), 'expect error') + t.false(info, 'no data') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('run on a VS Command Prompt with matching msvs_version', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = 'C:\\VS2015\\VC' + + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, 'C:\\VS2015') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('run on a VS Command Prompt with mismatched msvs_version', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools\\VC' + + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', + (err, info) => { + t.ok(/find .* Visual Studio/i.test(err), 'expect error') + t.false(info, 'no data') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) diff --git a/mybulma/node_modules/node-gyp/test/test-install.js b/mybulma/node_modules/node-gyp/test/test-install.js new file mode 100644 index 0000000..5039dc9 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/test-install.js @@ -0,0 +1,46 @@ +'use strict' + +const { test } = require('tap') +const { test: { install } } = require('../lib/install') +const log = require('npmlog') + +log.level = 'error' // we expect a warning + +test('EACCES retry once', async (t) => { + t.plan(3) + + const fs = { + promises: { + stat (_) { + const err = new Error() + err.code = 'EACCES' + t.ok(true) + throw err + } + } + } + + const Gyp = { + devDir: __dirname, + opts: { + ensure: true + }, + commands: { + install (argv, cb) { + install(fs, Gyp, argv).then(cb, cb) + }, + remove (_, cb) { + cb() + } + } + } + + try { + await install(fs, Gyp, []) + } catch (err) { + t.ok(true) + if (/"pre" versions of node cannot be installed/.test(err.message)) { + t.ok(true) + } + } +}) diff --git a/mybulma/node_modules/node-gyp/test/test-options.js b/mybulma/node_modules/node-gyp/test/test-options.js new file mode 100644 index 0000000..b2ac62c --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/test-options.js @@ -0,0 +1,31 @@ +'use strict' + +const test = require('tap').test +const gyp = require('../lib/node-gyp') + +test('options in environment', (t) => { + t.plan(1) + + // `npm test` dumps a ton of npm_config_* variables in the environment. 
+ Object.keys(process.env) + .filter((key) => /^npm_config_/.test(key)) + .forEach((key) => { delete process.env[key] }) + + // in some platforms, certain keys are stubborn and cannot be removed + const keys = Object.keys(process.env) + .filter((key) => /^npm_config_/.test(key)) + .map((key) => key.substring('npm_config_'.length)) + .concat('argv', 'x') + + // Zero-length keys should get filtered out. + process.env.npm_config_ = '42' + // Other keys should get added. + process.env.npm_config_x = '42' + // Except loglevel. + process.env.npm_config_loglevel = 'debug' + + const g = gyp() + g.parseArgv(['rebuild']) // Also sets opts.argv. + + t.deepEqual(Object.keys(g.opts).sort(), keys.sort()) +}) diff --git a/mybulma/node_modules/node-gyp/test/test-process-release.js b/mybulma/node_modules/node-gyp/test/test-process-release.js new file mode 100644 index 0000000..c3ee070 --- /dev/null +++ b/mybulma/node_modules/node-gyp/test/test-process-release.js @@ -0,0 +1,434 @@ +'use strict' + +const test = require('tap').test +const processRelease = require('../lib/process-release') + +test('test process release - process.version = 0.8.20', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v0.8.20', null) + + t.equal(release.semver.version, '0.8.20') + delete release.semver + + t.deepEqual(release, { + version: '0.8.20', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.8.20/', + tarballUrl: 'https://nodejs.org/dist/v0.8.20/node-v0.8.20.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.8.20/SHASUMS256.txt', + versionDir: '0.8.20', + ia32: { libUrl: 'https://nodejs.org/dist/v0.8.20/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.8.20/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.8.20/arm64/node.lib', libPath: 'arm64/node.lib' } + }) +}) + +test('test process release - process.version = 0.10.21', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v0.10.21', null) + + t.equal(release.semver.version, '0.10.21') + delete release.semver + + t.deepEqual(release, { + version: '0.10.21', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.10.21/', + tarballUrl: 'https://nodejs.org/dist/v0.10.21/node-v0.10.21.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.10.21/SHASUMS256.txt', + versionDir: '0.10.21', + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.21/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.21/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.21/arm64/node.lib', libPath: 'arm64/node.lib' } + }) +}) + +// prior to -headers.tar.gz +test('test process release - process.version = 0.12.9', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v0.12.9', null) + + t.equal(release.semver.version, '0.12.9') + delete release.semver + + t.deepEqual(release, { + version: '0.12.9', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.12.9/', + tarballUrl: 'https://nodejs.org/dist/v0.12.9/node-v0.12.9.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.12.9/SHASUMS256.txt', + versionDir: '0.12.9', + ia32: { libUrl: 'https://nodejs.org/dist/v0.12.9/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.12.9/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.12.9/arm64/node.lib', libPath: 'arm64/node.lib' } + }) +}) + +// prior to -headers.tar.gz +test('test process release - process.version = 0.10.41', function (t) 
{ + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v0.10.41', null) + + t.equal(release.semver.version, '0.10.41') + delete release.semver + + t.deepEqual(release, { + version: '0.10.41', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.10.41/', + tarballUrl: 'https://nodejs.org/dist/v0.10.41/node-v0.10.41.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.10.41/SHASUMS256.txt', + versionDir: '0.10.41', + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.41/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.41/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.41/arm64/node.lib', libPath: 'arm64/node.lib' } + }) +}) + +// has -headers.tar.gz +test('test process release - process.release ~ node@0.10.42', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v0.10.42', null) + + t.equal(release.semver.version, '0.10.42') + delete release.semver + + t.deepEqual(release, { + version: '0.10.42', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.10.42/', + tarballUrl: 'https://nodejs.org/dist/v0.10.42/node-v0.10.42-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.10.42/SHASUMS256.txt', + versionDir: '0.10.42', + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.42/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.42/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.42/arm64/node.lib', libPath: 'arm64/node.lib' } + }) +}) + +// has -headers.tar.gz +test('test process release - process.release ~ node@0.12.10', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v0.12.10', null) + + t.equal(release.semver.version, '0.12.10') + delete release.semver + + t.deepEqual(release, { + version: '0.12.10', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.12.10/', + tarballUrl: 'https://nodejs.org/dist/v0.12.10/node-v0.12.10-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.12.10/SHASUMS256.txt', + versionDir: '0.12.10', + ia32: { libUrl: 'https://nodejs.org/dist/v0.12.10/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.12.10/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.12.10/arm64/node.lib', libPath: 'arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@4.1.23', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' + }) + + t.equal(release.semver.version, '4.1.23') + delete release.semver + + t.deepEqual(release, { + version: '4.1.23', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v4.1.23/', + tarballUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v4.1.23/SHASUMS256.txt', + versionDir: '4.1.23', + ia32: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@4.1.23 / corp build', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz' + }) + + 
t.equal(release.semver.version, '4.1.23') + delete release.semver + + t.deepEqual(release, { + version: '4.1.23', + name: 'node', + baseUrl: 'https://some.custom.location/', + tarballUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz', + shasumsUrl: 'https://some.custom.location/SHASUMS256.txt', + versionDir: '4.1.23', + ia32: { libUrl: 'https://some.custom.location/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://some.custom.location/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://some.custom.location/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@12.8.0 Windows', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v12.8.0', { + name: 'node', + sourceUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz', + headersUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib' + }) + + t.equal(release.semver.version, '12.8.0') + delete release.semver + + t.deepEqual(release, { + version: '12.8.0', + name: 'node', + baseUrl: 'https://nodejs.org/download/release/v12.8.0/', + tarballUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/download/release/v12.8.0/SHASUMS256.txt', + versionDir: '12.8.0', + ia32: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@12.8.0 Windows ARM64', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v12.8.0', { + name: 'node', + sourceUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz', + headersUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib' + }) + + t.equal(release.semver.version, '12.8.0') + delete release.semver + + t.deepEqual(release, { + version: '12.8.0', + name: 'node', + baseUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/', + tarballUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + shasumsUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/SHASUMS256.txt', + versionDir: '12.8.0', + ia32: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@4.1.23 --target=0.10.40', function (t) { + t.plan(2) + + var release = processRelease([], { opts: { target: '0.10.40' } }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' + }) + + t.equal(release.semver.version, '0.10.40') + delete release.semver + + t.deepEqual(release, { + version: '0.10.40', + name: 'node', + 
baseUrl: 'https://nodejs.org/dist/v0.10.40/', + tarballUrl: 'https://nodejs.org/dist/v0.10.40/node-v0.10.40.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.10.40/SHASUMS256.txt', + versionDir: '0.10.40', + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.40/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.40/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.40/arm64/node.lib', libPath: 'arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@4.1.23 --dist-url=https://foo.bar/baz', function (t) { + t.plan(2) + + var release = processRelease([], { opts: { 'dist-url': 'https://foo.bar/baz' } }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' + }) + + t.equal(release.semver.version, '4.1.23') + delete release.semver + + t.deepEqual(release, { + version: '4.1.23', + name: 'node', + baseUrl: 'https://foo.bar/baz/v4.1.23/', + tarballUrl: 'https://foo.bar/baz/v4.1.23/node-v4.1.23-headers.tar.gz', + shasumsUrl: 'https://foo.bar/baz/v4.1.23/SHASUMS256.txt', + versionDir: '4.1.23', + ia32: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ frankenstein@4.1.23', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v4.1.23', { + name: 'frankenstein', + headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz' + }) + + t.equal(release.semver.version, '4.1.23') + delete release.semver + + t.deepEqual(release, { + version: '4.1.23', + name: 'frankenstein', + baseUrl: 'https://frankensteinjs.org/dist/v4.1.23/', + tarballUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz', + shasumsUrl: 'https://frankensteinjs.org/dist/v4.1.23/SHASUMS256.txt', + versionDir: 'frankenstein-4.1.23', + ia32: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' }, + x64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' }, + arm64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' } + }) +}) + +test('test process release - process.release ~ frankenstein@4.1.23 --dist-url=http://foo.bar/baz/', function (t) { + t.plan(2) + + var release = processRelease([], { opts: { 'dist-url': 'http://foo.bar/baz/' } }, 'v4.1.23', { + name: 'frankenstein', + headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23.tar.gz' + }) + + t.equal(release.semver.version, '4.1.23') + delete release.semver + + t.deepEqual(release, { + version: '4.1.23', + name: 'frankenstein', + baseUrl: 'http://foo.bar/baz/v4.1.23/', + tarballUrl: 'http://foo.bar/baz/v4.1.23/frankenstein-v4.1.23-headers.tar.gz', + shasumsUrl: 'http://foo.bar/baz/v4.1.23/SHASUMS256.txt', + versionDir: 'frankenstein-4.1.23', + ia32: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' }, + x64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' }, + arm64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-arm64/frankenstein.lib', libPath: 
'win-arm64/frankenstein.lib' } + }) +}) + +test('test process release - process.release ~ node@4.0.0-rc.4', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v4.0.0-rc.4', { + name: 'node', + headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' + }) + + t.equal(release.semver.version, '4.0.0-rc.4') + delete release.semver + + t.deepEqual(release, { + version: '4.0.0-rc.4', + name: 'node', + baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', + tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', + versionDir: '4.0.0-rc.4', + ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@4.0.0-rc.4 passed as argv[0]', function (t) { + t.plan(2) + + // note the missing 'v' on the arg, it should normalise when checking + // whether we're on the default or not + var release = processRelease(['4.0.0-rc.4'], { opts: {} }, 'v4.0.0-rc.4', { + name: 'node', + headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' + }) + + t.equal(release.semver.version, '4.0.0-rc.4') + delete release.semver + + t.deepEqual(release, { + version: '4.0.0-rc.4', + name: 'node', + baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', + tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', + versionDir: '4.0.0-rc.4', + ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@4.0.0-rc.4 - bogus string passed as argv[0]', function (t) { + t.plan(2) + + // additional arguments can be passed in on the commandline that should be ignored if they + // are not specifying a valid version @ position 0 + var release = processRelease(['this is no version!'], { opts: {} }, 'v4.0.0-rc.4', { + name: 'node', + headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' + }) + + t.equal(release.semver.version, '4.0.0-rc.4') + delete release.semver + + t.deepEqual(release, { + version: '4.0.0-rc.4', + name: 'node', + baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', + tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', + versionDir: '4.0.0-rc.4', + ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - NODEJS_ORG_MIRROR', function (t) { + t.plan(2) + + process.env.NODEJS_ORG_MIRROR = 
'http://foo.bar' + + var release = processRelease([], { opts: {} }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' + }) + + t.equal(release.semver.version, '4.1.23') + delete release.semver + + t.deepEqual(release, { + version: '4.1.23', + name: 'node', + baseUrl: 'http://foo.bar/v4.1.23/', + tarballUrl: 'http://foo.bar/v4.1.23/node-v4.1.23-headers.tar.gz', + shasumsUrl: 'http://foo.bar/v4.1.23/SHASUMS256.txt', + versionDir: '4.1.23', + ia32: { libUrl: 'http://foo.bar/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'http://foo.bar/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'http://foo.bar/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) + + delete process.env.NODEJS_ORG_MIRROR +}) diff --git a/mybulma/node_modules/node-sass/LICENSE b/mybulma/node_modules/node-sass/LICENSE new file mode 100644 index 0000000..6713846 --- /dev/null +++ b/mybulma/node_modules/node-sass/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2013-2016 Andrew Nesbitt + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/mybulma/node_modules/node-sass/README.md b/mybulma/node_modules/node-sass/README.md new file mode 100644 index 0000000..f08caf7 --- /dev/null +++ b/mybulma/node_modules/node-sass/README.md @@ -0,0 +1,652 @@ +# node-sass + +**Warning:** [LibSass and Node Sass are deprecated](https://sass-lang.com/blog/libsass-is-deprecated). +While they will continue to receive maintenance releases indefinitely, there are no +plans to add additional features or compatibility with any new CSS or Sass features. +Projects that still use it should move onto +[Dart Sass](https://sass-lang.com/dart-sass). + +## Node version support policy + +1. Supported Node.js versions vary by release, please consult the [releases page](https://github.com/sass/node-sass/releases). +1. Node versions that hit end of life , will be dropped from support at each node-sass release (major, minor). +1. We will stop building binaries for unsupported releases, testing for breakages in dependency compatibility, but we will not block installations for those that want to support themselves. +1. New node release require minor internal changes along with support from CI providers (AppVeyor, GitHub Actions). We will open a single issue for interested parties to subscribe to, and close additional issues. 
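The "Node Module" column in the guide below is the ABI version that a Node runtime reports as `process.versions.modules` (the same value node-sass uses when naming its prebuilt binaries, see `getBinaryName` in `lib/extensions.js`). If you are unsure which row applies to your installation, a minimal check, assuming a local `node` executable is on your PATH, is:

```shell
# Print the running Node version together with its module (ABI) version,
# then match both against the support table below.
node -p "process.version + ' / ABI ' + process.versions.modules"
```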
+ +Below is a quick guide for minimum and maximum supported versions of node-sass: + +NodeJS | Supported node-sass version | Node Module +--------|-----------------------------|------------ +Node 19 | 8.0+ | 111 +Node 18 | 8.0+ | 108 +Node 17 | 7.0+, <8.0 | 102 +Node 16 | 6.0+ | 93 +Node 15 | 5.0+, <7.0 | 88 +Node 14 | 4.14+ | 83 +Node 13 | 4.13+, <5.0 | 79 +Node 12 | 4.12+, <8.0 | 72 +Node 11 | 4.10+, <5.0 | 67 +Node 10 | 4.9+, <6.0 | 64 +Node 8 | 4.5.3+, <5.0 | 57 +Node <8 | <5.0 | <57 + + + + + + +
+ Sass logo + + + + +
+ +![Alpine](https://github.com/sass/node-sass/workflows/Build%20bindings%20for%20Alpine%20releases/badge.svg) +![Linux](https://github.com/sass/node-sass/workflows/Build%20bindings%20for%20Linux%20releases/badge.svg) +![macOS](https://github.com/sass/node-sass/workflows/Build%20bindings%20for%20macOS%20releases/badge.svg) +![Windows x64](https://github.com/sass/node-sass/workflows/Build%20bindings%20for%20Windows%20releases/badge.svg) +![Linting](https://github.com/sass/node-sass/workflows/Lint%20JS/badge.svg) +[![Windows x86](https://ci.appveyor.com/api/projects/status/22mjbk59kvd55m9y/branch/master?svg=true)](https://ci.appveyor.com/project/sass/node-sass/branch/master) +[![Coverage Status](https://coveralls.io/repos/sass/node-sass/badge.svg?branch=master)](https://coveralls.io/r/sass/node-sass?branch=master) + +Node-sass is a library that provides binding for Node.js to [LibSass], the C version of the popular stylesheet preprocessor, Sass. + +It allows you to natively compile .scss files to css at incredible speed and automatically via a connect middleware. + +Find it on npm: + +Follow @nodesass on twitter for release updates: + +## Install + +```shell +npm install node-sass +``` + +Some users have reported issues installing on Ubuntu due to `node` being registered to another package. [Follow the official NodeJS docs](https://github.com/nodesource/distributions/blob/master/README.md#debinstall) to install NodeJS so that `#!/usr/bin/env node` correctly resolves. + +Compiling on Windows machines requires the [node-gyp prerequisites](https://github.com/nodejs/node-gyp#on-windows). + +Are you seeing the following error? Check out our [Troubleshooting guide](https://github.com/sass/node-sass/blob/master/TROUBLESHOOTING.md#installing-node-sass-4x-with-node--4).** + +``` +SyntaxError: Use of const in strict mode. +``` + +**Having installation troubles? Check out our [Troubleshooting guide](https://github.com/sass/node-sass/blob/master/TROUBLESHOOTING.md).** + +### Install from mirror in China + +```shell +npm install -g mirror-config-china --registry=https://registry.npmmirror.com +npm install node-sass +``` + +## Usage + +```javascript +var sass = require('node-sass'); +sass.render({ + file: scss_filename, + [, options..] +}, function(err, result) { /*...*/ }); +// OR +var result = sass.renderSync({ + data: scss_content + [, options..] +}); +``` + +## Options + +### file + +* Type: `String` +* Default: `null` + +**Special**: `file` or `data` must be specified + +Path to a file for [LibSass] to compile. + +### data + +* Type: `String` +* Default: `null` + +**Special**: `file` or `data` must be specified + +A string to pass to [LibSass] to compile. It is recommended that you use `includePaths` in conjunction with this so that [LibSass] can find files when using the `@import` directive. + +### importer (>= v2.0.0) - _experimental_ + +**This is an experimental LibSass feature. Use with caution.** + +* Type: `Function | Function[]` signature `function(url, prev, done)` +* Default: `undefined` + +Function Parameters and Information: + +* `url (String)` - the path in import **as-is**, which [LibSass] encountered +* `prev (String)` - the previously resolved path +* `done (Function)` - a callback function to invoke on async completion, takes an object literal containing + * `file (String)` - an alternate path for [LibSass] to use **OR** + * `contents (String)` - the imported contents (for example, read from memory or the file system) + +Handles when [LibSass] encounters the `@import` directive. 
A custom importer allows extension of the [LibSass] engine in both a synchronous and asynchronous manner. In both cases, the goal is to either `return` or call `done()` with an object literal. Depending on the value of the object literal, one of two things will happen. + +When returning or calling `done()` with `{ file: "String" }`, the new file path will be assumed for the `@import`. It's recommended to be mindful of the value of `prev` in instances where relative path resolution may be required. + +When returning or calling `done()` with `{ contents: "String" }`, the string value will be used as if the file was read in through an external source. + +Starting from v3.0.0: + +* `this` refers to a contextual scope for the immediate run of `sass.render` or `sass.renderSync` + +* importers can return error and LibSass will emit that error in response. For instance: + + ```javascript + done(new Error('doesn\'t exist!')); + // or return synchronously + return new Error('nothing to do here'); + ``` + +* importer can be an array of functions, which will be called by LibSass in the order of their occurrence in array. This helps user specify special importer for particular kind of path (filesystem, http). If an importer does not want to handle a particular path, it should return `null`. See [functions section](#functions--v300---experimental) for more details on Sass types. + +### functions (>= v3.0.0) - _experimental_ + +**This is an experimental LibSass feature. Use with caution.** + +`functions` is an `Object` that holds a collection of custom functions that may be invoked by the sass files being compiled. They may take zero or more input parameters and must return a value either synchronously (`return ...;`) or asynchronously (`done();`). Those parameters will be instances of one of the constructors contained in the `require('node-sass').types` hash. The return value must be of one of these types as well. See the list of available types below: + +#### types.Number(value [, unit = ""]) + +* `getValue()`/ `setValue(value)` : gets / sets the numerical portion of the number +* `getUnit()` / `setUnit(unit)` : gets / sets the unit portion of the number + +#### types.String(value) + +* `getValue()` / `setValue(value)` : gets / sets the enclosed string + +#### types.Color(r, g, b [, a = 1.0]) or types.Color(argb) + +* `getR()` / `setR(value)` : red component (integer from `0` to `255`) +* `getG()` / `setG(value)` : green component (integer from `0` to `255`) +* `getB()` / `setB(value)` : blue component (integer from `0` to `255`) +* `getA()` / `setA(value)` : alpha component (number from `0` to `1.0`) + +Example: + +```javascript +var Color = require('node-sass').types.Color, + c1 = new Color(255, 0, 0), + c2 = new Color(0xff0088cc); +``` + +#### types.Boolean(value) + +* `getValue()` : gets the enclosed boolean +* `types.Boolean.TRUE` : Singleton instance of `types.Boolean` that holds "true" +* `types.Boolean.FALSE` : Singleton instance of `types.Boolean` that holds "false" + +#### types.List(length [, commaSeparator = true]) + +* `getValue(index)` / `setValue(index, value)` : `value` must itself be an instance of one of the constructors in `sass.types`. +* `getSeparator()` / `setSeparator(isComma)` : whether to use commas as a separator +* `getLength()` + +#### types.Map(length) + +* `getKey(index)` / `setKey(index, value)` +* `getValue(index)` / `setValue(index, value)` +* `getLength()` + +#### types.Null() + +* `types.Null.NULL` : Singleton instance of `types.Null`. 
+ +#### Example + +```javascript +sass.renderSync({ + data: '#{headings(2,5)} { color: #08c; }', + functions: { + 'headings($from: 0, $to: 6)': function(from, to) { + var i, f = from.getValue(), t = to.getValue(), + list = new sass.types.List(t - f + 1); + + for (i = f; i <= t; i++) { + list.setValue(i - f, new sass.types.String('h' + i)); + } + + return list; + } + } +}); +``` + +### includePaths + +* Type: `Array` +* Default: `[]` + +An array of paths that [LibSass] can look in to attempt to resolve your `@import` declarations. When using `data`, it is recommended that you use this. + +### indentedSyntax + +* Type: `Boolean` +* Default: `false` + +`true` values enable [Sass Indented Syntax](https://sass-lang.com/documentation/file.INDENTED_SYNTAX.html) for parsing the data string or file. + +__Note:__ node-sass/libsass will compile a mixed library of scss and indented syntax (.sass) files with the Default setting (false) as long as .sass and .scss extensions are used in filenames. + +### indentType (>= v3.0.0) + +* Type: `String` +* Default: `space` + +Used to determine whether to use space or tab character for indentation. + +### indentWidth (>= v3.0.0) + +* Type: `Number` +* Default: `2` +* Maximum: `10` + +Used to determine the number of spaces or tabs to be used for indentation. + +### linefeed (>= v3.0.0) + +* Type: `String` +* Default: `lf` + +Used to determine whether to use `cr`, `crlf`, `lf` or `lfcr` sequence for line break. + +### omitSourceMapUrl + +* Type: `Boolean` +* Default: `false` + +**Special:** When using this, you should also specify `outFile` to avoid unexpected behavior. + +`true` values disable the inclusion of source map information in the output file. + +### outFile + +* Type: `String | null` +* Default: `null` + +**Special:** Required when `sourceMap` is a truthy value + +Specify the intended location of the output file. Strongly recommended when outputting source maps so that they can properly refer back to their intended files. + +**Attention** enabling this option will **not** write the file on disk for you, it's for internal reference purpose only (to generate the map for example). + +Example on how to write it on the disk + +```javascript +sass.render({ + ... + outFile: yourPathTotheFile, + }, function(error, result) { // node-style callback from v3.0.0 onwards + if(!error){ + // No errors during the compilation, write this result on the disk + fs.writeFile(yourPathTotheFile, result.css, function(err){ + if(!err){ + //file written on disk + } + }); + } + }); +}); +``` + +### outputStyle + +* Type: `String` +* Default: `nested` +* Values: `nested`, `expanded`, `compact`, `compressed` + +Determines the output format of the final CSS style. + +### precision + +* Type: `Integer` +* Default: `5` + +Used to determine how many digits after the decimal will be allowed. For instance, if you had a decimal number of `1.23456789` and a precision of `5`, the result will be `1.23457` in the final CSS. + +### sourceComments + +* Type: `Boolean` +* Default: `false` + +`true` Enables the line number and file where a selector is defined to be emitted into the compiled CSS as a comment. Useful for debugging, especially when using imports and mixins. + +### sourceMap + +* Type: `Boolean | String | undefined` +* Default: `undefined` + +Enables source map generation during `render` and `renderSync`. + +When `sourceMap === true`, the value of `outFile` is used as the target output location for the source map with the suffix `.map` appended. 
If no `outFile` is set, `sourceMap` parameter is ignored. + +When `typeof sourceMap === "string"`, the value of `sourceMap` will be used as the writing location for the file. + +### sourceMapContents + +* Type: `Boolean` +* Default: `false` + +`true` includes the `contents` in the source map information + +### sourceMapEmbed + +* Type: `Boolean` +* Default: `false` + +`true` embeds the source map as a data URI + +### sourceMapRoot + +* Type: `String` +* Default: `undefined` + +the value will be emitted as `sourceRoot` in the source map information + +## `render` Callback (>= v3.0.0) + +node-sass supports standard node style asynchronous callbacks with the signature of `function(err, result)`. In error conditions, the `error` argument is populated with the error object. In success conditions, the `result` object is populated with an object describing the result of the render call. + +### Error Object + +* `message` (String) - The error message. +* `line` (Number) - The line number of error. +* `column` (Number) - The column number of error. +* `status` (Number) - The status code. +* `file` (String) - The filename of error. In case `file` option was not set (in favour of `data`), this will reflect the value `stdin`. + +### Result Object + +* `css` (Buffer) - The compiled CSS. Write this to a file, or serve it out as needed. +* `map` (Buffer) - The source map +* `stats` (Object) - An object containing information about the compile. It contains the following keys: + * `entry` (String) - The path to the scss file, or `data` if the source was not a file + * `start` (Number) - Date.now() before the compilation + * `end` (Number) - Date.now() after the compilation + * `duration` (Number) - *end* - *start* + * `includedFiles` (Array) - Absolute paths to all related scss files in no particular order. + +### Examples + +```javascript +var sass = require('node-sass'); +sass.render({ + file: '/path/to/myFile.scss', + data: 'body{background:blue; a{color:black;}}', + importer: function(url, prev, done) { + // url is the path in import as is, which LibSass encountered. + // prev is the previously resolved path. + // done is an optional callback, either consume it or return value synchronously. + // this.options contains this options hash, this.callback contains the node-style callback + someAsyncFunction(url, prev, function(result){ + done({ + file: result.path, // only one of them is required, see section Special Behaviours. + contents: result.data + }); + }); + // OR + var result = someSyncFunction(url, prev); + return {file: result.path, contents: result.data}; + }, + includePaths: [ 'lib/', 'mod/' ], + outputStyle: 'compressed' +}, function(error, result) { // node-style callback from v3.0.0 onwards + if (error) { + console.log(error.status); // used to be "code" in v2x and below + console.log(error.column); + console.log(error.message); + console.log(error.line); + } + else { + console.log(result.css.toString()); + + console.log(result.stats); + + console.log(result.map.toString()); + // or better + console.log(JSON.stringify(result.map)); // note, JSON.stringify accepts Buffer too + } +}); +// OR +var result = sass.renderSync({ + file: '/path/to/file.scss', + data: 'body{background:blue; a{color:black;}}', + outputStyle: 'compressed', + outFile: '/to/my/output.css', + sourceMap: true, // or an absolute or relative (to outFile) path + importer: function(url, prev, done) { + // url is the path in import as is, which LibSass encountered. + // prev is the previously resolved path. 
+ // done is an optional callback, either consume it or return value synchronously. + // this.options contains this options hash + someAsyncFunction(url, prev, function(result){ + done({ + file: result.path, // only one of them is required, see section Special Behaviours. + contents: result.data + }); + }); + // OR + var result = someSyncFunction(url, prev); + return {file: result.path, contents: result.data}; + } +}); + +console.log(result.css); +console.log(result.map); +console.log(result.stats); +``` + +### Special behaviours + +* In the case that both `file` and `data` options are set, node-sass will give precedence to `data` and use `file` to calculate paths in sourcemaps. + +### Version information (>= v2.0.0) + +Both `node-sass` and `libsass` version info is now exposed via the `info` method: + +```javascript +var sass = require('node-sass'); + +console.log(sass.info); + +/* + it will output something like: + + node-sass 2.0.1 (Wrapper) [JavaScript] + libsass 3.1.0 (Sass Compiler) [C/C++] +*/ +``` + +Since node-sass >=v3.0.0 LibSass version is determined at run time. + +## Integrations + +Listing of community uses of node-sass in build tools and frameworks. + +### Brackets extension + +[@jasonsanjose](https://github.com/jasonsanjose) has created a [Brackets](http://brackets.io) extension based on node-sass: . When editing Sass files, the extension compiles changes on save. The extension also integrates with Live Preview to show Sass changes in the browser without saving or compiling. + +### Brunch plugin + +[Brunch](http://brunch.io)'s official sass plugin uses node-sass by default, and automatically falls back to ruby if use of Compass is detected: + +### Connect/Express middleware + +Recompile `.scss` files automatically for connect and express based http servers. + +This functionality has been moved to [`node-sass-middleware`](https://github.com/sass/node-sass-middleware) in node-sass v1.0.0 + +### DocPad Plugin + +[@10xLaCroixDrinker](https://github.com/10xLaCroixDrinker) wrote a [DocPad](http://docpad.org/) plugin that compiles `.scss` files using node-sass: + +### Duo.js extension + +[@stephenway](https://github.com/stephenway) has created an extension that transpiles Sass to CSS using node-sass with [duo.js](http://duojs.org/) + + +### Grunt extension + +[@sindresorhus](https://github.com/sindresorhus/) has created a set of grunt tasks based on node-sass: + +### Gulp extension + +[@dlmanning](https://github.com/dlmanning/) has created a gulp sass plugin based on node-sass: + +### Harp + +[@sintaxi](https://github.com/sintaxi)’s Harp web server implicitly compiles `.scss` files using node-sass: + +### Metalsmith plugin + +[@stevenschobert](https://github.com/stevenschobert/) has created a metalsmith plugin based on node-sass: + +### Meteor plugin + +[@fourseven](https://github.com/fourseven) has created a meteor plugin based on node-sass: + +### Mimosa module + +[@dbashford](https://github.com/dbashford) has created a Mimosa module for sass which includes node-sass: + +## Example App + +There is also an example connect app here: + +## Rebuilding binaries + +Node-sass includes pre-compiled binaries for popular platforms, to add a binary for your platform follow these steps: + +Check out the project: + +```bash +git clone --recursive https://github.com/sass/node-sass.git +cd node-sass +npm install +node scripts/build -f # use -d switch for debug release +# if succeeded, it will generate and move +# the binary in vendor directory. 
+``` + +## Command Line Interface + +The interface for command-line usage is fairly simplistic at this stage, as seen in the following usage section. + +Output will be sent to stdout if the `--output` flag is omitted. + +### Usage + + `node-sass [options] [output]` + Or: + `cat | node-sass > output` + +Example: + +`node-sass src/style.scss dest/style.css` + + **Options:** + +```bash + -w, --watch Watch a directory or file + -r, --recursive Recursively watch directories or files + -o, --output Output directory + -x, --omit-source-map-url Omit source map URL comment from output + -i, --indented-syntax Treat data from stdin as sass code (versus scss) + -q, --quiet Suppress log output except on error + -v, --version Prints version info + --output-style CSS output style (nested | expanded | compact | compressed) + --indent-type Indent type for output CSS (space | tab) + --indent-width Indent width; number of spaces or tabs (maximum value: 10) + --linefeed Linefeed style (cr | crlf | lf | lfcr) + --source-comments Include debug info in output + --source-map Emit source map + --source-map-contents Embed include contents in map + --source-map-embed Embed sourceMappingUrl as data URI + --source-map-root Base path, will be emitted in source-map as is + --include-path Path to look for imported files + --follow Follow symlinked directories + --precision The amount of precision allowed in decimal numbers + --error-bell Output a bell character on errors + --importer Path to .js file containing custom importer + --functions Path to .js file containing custom functions + --help Print usage info +``` + +The `input` can be either a single `.scss` or `.sass`, or a directory. If the input is a directory the `--output` flag must also be supplied. + +Also, note `--importer` takes the (absolute or relative to pwd) path to a js file, which needs to have a default `module.exports` set to the importer function. See our test [fixtures](https://github.com/sass/node-sass/tree/974f93e76ddd08ea850e3e663cfe64bb6a059dd3/test/fixtures/extras) for example. + +The `--source-map` option accepts a boolean value, in which case it replaces destination extension with `.css.map`. It also accepts path to `.map` file and even path to the desired directory. +When compiling a directory `--source-map` can either be a boolean value or a directory. + +## Binary configuration parameters + +node-sass supports different configuration parameters to change settings related to the sass binary such as binary name, binary path or alternative download path. Following parameters are supported by node-sass: + +Variable name | .npmrc parameter | Process argument | Value +-------------------------|--------------------------|----------------------------|------ +SASS_BINARY_NAME | sass_binary_name | --sass-binary-name | path +SASS_BINARY_SITE | sass_binary_site | --sass-binary-site | URL +SASS_BINARY_PATH | sass_binary_path | --sass-binary-path | path +SASS_BINARY_DIR | sass_binary_dir | --sass-binary-dir | path +SASS_REJECT_UNAUTHORIZED | sass_reject_unauthorized | --sass-reject-unauthorized | value + +These parameters can be used as environment variable: + +* E.g. `export SASS_BINARY_SITE=http://example.com/` + +As local or global [.npmrc](https://docs.npmjs.com/misc/config) configuration file: + +* E.g. `sass_binary_site=http://example.com/` + +As a process argument: + +* E.g. 
`npm install node-sass --sass-binary-site=http://example.com/` + +If you are using self-signed certificates for your binary then `SASS_REJECT_UNAUTHORIZED` will override (rejectUnauthorized)[https://nodejs.org/docs/latest/api/tls.html#tls_tls_createserver_options_secureconnectionlistener]. + +## Post-install Build + +Install runs only two Mocha tests to see if your machine can use the pre-built [LibSass] which will save some time during install. If any tests fail it will build from source. + +## Maintainers + +This module is brought to you and maintained by the following people: + +* Michael Mifsud - Project Lead ([Github](https://github.com/xzyfer) / [Twitter](https://twitter.com/xzyfer)) +* Andrew Nesbitt ([Github](https://github.com/andrew) / [Twitter](https://twitter.com/teabass)) +* Dean Mao ([Github](https://github.com/deanmao) / [Twitter](https://twitter.com/deanmao)) +* Brett Wilkins ([Github](https://github.com/bwilkins) / [Twitter](https://twitter.com/bjmaz)) +* Keith Cirkel ([Github](https://github.com/keithamus) / [Twitter](https://twitter.com/Keithamus)) +* Laurent Goderre ([Github](https://github.com/laurentgoderre) / [Twitter](https://twitter.com/laurentgoderre)) +* Nick Schonning ([Github](https://github.com/nschonni) / [Twitter](https://twitter.com/nschonni)) +* Adeel Mujahid ([Github](https://github.com/am11) / [Twitter](https://twitter.com/adeelbm)) + +## Contributors + +We <3 our contributors! A special thanks to all those who have clocked in some dev time on this project, we really appreciate your hard work. You can find [a full list of those people here.](https://github.com/sass/node-sass/graphs/contributors) + +### Note on Patches/Pull Requests + +Check out our [Contributing guide](/.github/CONTRIBUTING.md) + +## Copyright + +Copyright (c) 2015 Andrew Nesbitt. See [LICENSE](https://github.com/sass/node-sass/blob/master/LICENSE) for details. 
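As a concrete illustration of the "Binary configuration parameters" section above, the sketch below shows the three equivalent ways of pointing node-sass at an alternative binary download site; the mirror URL is a placeholder reused from the examples in that section:

```shell
# 1. Environment variable, picked up by the install scripts
export SASS_BINARY_SITE=http://example.com/

# 2. Persisted as an .npmrc parameter via npm config
npm config set sass_binary_site http://example.com/

# 3. One-off process argument passed to the install
npm install node-sass --sass-binary-site=http://example.com/
```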
+ +[LibSass]: https://github.com/sass/libsass diff --git a/mybulma/node_modules/node-sass/bin/emcc b/mybulma/node_modules/node-sass/bin/emcc new file mode 100644 index 0000000..e2b8222 --- /dev/null +++ b/mybulma/node_modules/node-sass/bin/emcc @@ -0,0 +1,12 @@ +#!/bin/sh + +skip=0 + +for arg; do + shift + [ "$skip" = "1" ] && skip=0 && continue + [ "$arg" = "-arch" ] && skip=1 && continue + set -- "$@" "$arg" +done + +emcc $@ diff --git a/mybulma/node_modules/node-sass/bin/node-sass b/mybulma/node_modules/node-sass/bin/node-sass new file mode 100644 index 0000000..7645ecb --- /dev/null +++ b/mybulma/node_modules/node-sass/bin/node-sass @@ -0,0 +1,444 @@ +#!/usr/bin/env node + +var Emitter = require('events').EventEmitter, + forEach = require('async-foreach').forEach, + Gaze = require('gaze'), + meow = require('meow'), + util = require('util'), + path = require('path'), + glob = require('glob'), + sass = require('../lib'), + render = require('../lib/render'), + watcher = require('../lib/watcher'), + stdout = require('stdout-stream'), + stdin = require('get-stdin'), + fs = require('fs'); + +/** + * Initialize CLI + */ + +var cli = meow(` + Usage: + node-sass [options] + cat | node-sass [options] > output.css + + Example: Compile foobar.scss to foobar.css + node-sass --output-style compressed foobar.scss > foobar.css + cat foobar.scss | node-sass --output-style compressed > foobar.css + + Example: Watch the sass directory for changes, compile with sourcemaps to the css directory + node-sass --watch --recursive --output css + --source-map true --source-map-contents sass + + Options + -w, --watch Watch a directory or file + -r, --recursive Recursively watch directories or files + -o, --output Output directory + -x, --omit-source-map-url Omit source map URL comment from output + -i, --indented-syntax Treat data from stdin as sass code (versus scss) + -q, --quiet Suppress log output except on error + -v, --version Prints version info + --output-style CSS output style (nested | expanded | compact | compressed) + --indent-type Indent type for output CSS (space | tab) + --indent-width Indent width; number of spaces or tabs (maximum value: 10) + --linefeed Linefeed style (cr | crlf | lf | lfcr) + --source-comments Include debug info in output + --source-map Emit source map (boolean, or path to output .map file) + --source-map-contents Embed include contents in map + --source-map-embed Embed sourceMappingUrl as data URI + --source-map-root Base path, will be emitted in source-map as is + --include-path Path to look for imported files + --follow Follow symlinked directories + --precision The amount of precision allowed in decimal numbers + --error-bell Output a bell character on errors + --importer Path to .js file containing custom importer + --functions Path to .js file containing custom functions + --help Print usage info +`, { + version: sass.info, + flags: { + errorBell: { + type: 'boolean', + }, + functions: { + type: 'string', + }, + follow: { + type: 'boolean', + }, + importer: { + type: 'string', + }, + includePath: { + type: 'string', + default: [process.cwd()], + isMultiple: true, + }, + indentType: { + type: 'string', + default: 'space', + }, + indentWidth: { + type: 'number', + default: 2, + }, + indentedSyntax: { + type: 'boolean', + alias: 'i', + }, + linefeed: { + type: 'string', + default: 'lf', + }, + omitSourceMapUrl: { + type: 'boolean', + alias: 'x', + }, + output: { + type: 'string', + alias: 'o', + }, + outputStyle: { + type: 'string', + default: 'nested', + }, + precision: { + 
type: 'number', + default: 5, + }, + quiet: { + type: 'boolean', + default: false, + alias: 'q', + }, + recursive: { + type: 'boolean', + default: true, + alias: 'r', + }, + sourceMapContents: { + type: 'boolean', + }, + sourceMapEmbed: { + type: 'boolean', + }, + sourceMapRoot: { + type: 'string', + }, + sourceComments: { + type: 'boolean', + alias: 'c', + }, + version: { + type: 'boolean', + alias: 'v', + }, + watch: { + type: 'boolean', + alias: 'w', + }, + }, +}); + +/** + * Is a Directory + * + * @param {String} filePath + * @returns {Boolean} + * @api private + */ + +function isDirectory(filePath) { + var isDir = false; + try { + var absolutePath = path.resolve(filePath); + isDir = fs.statSync(absolutePath).isDirectory(); + } catch (e) { + isDir = e.code === 'ENOENT'; + } + return isDir; +} + +/** + * Get correct glob pattern + * + * @param {Object} options + * @returns {String} + * @api private + */ + +function globPattern(options) { + return options.recursive ? '**/*.{sass,scss}' : '*.{sass,scss}'; +} + +/** + * Create emitter + * + * @api private + */ + +function getEmitter() { + var emitter = new Emitter(); + + emitter.on('error', function(err) { + if (options.errorBell) { + err += '\x07'; + } + console.error(err); + if (!options.watch) { + process.exit(1); + } + }); + + emitter.on('warn', function(data) { + if (!options.quiet) { + console.warn(data); + } + }); + + emitter.on('info', function(data) { + if (!options.quiet) { + console.info(data); + } + }); + + emitter.on('log', stdout.write.bind(stdout)); + + return emitter; +} + +/** + * Construct options + * + * @param {Array} arguments + * @param {Object} options + * @api private + */ + +function getOptions(args, options) { + var cssDir, sassDir, file, mapDir; + options.src = args[0]; + + if (args[1]) { + options.dest = path.resolve(args[1]); + } else if (options.output) { + options.dest = path.join( + path.resolve(options.output), + [path.basename(options.src, path.extname(options.src)), '.css'].join('')); // replace ext. 
+ } + + if (options.directory) { + sassDir = path.resolve(options.directory); + file = path.relative(sassDir, args[0]); + cssDir = path.resolve(options.output); + options.dest = path.join(cssDir, file).replace(path.extname(file), '.css'); + } + + if (options.sourceMap) { + if(!options.sourceMapOriginal) { + options.sourceMapOriginal = options.sourceMap; + } + + if (options.sourceMapOriginal === 'true') { + options.sourceMap = options.dest + '.map'; + } else { + // check if sourceMap path ends with .map to avoid isDirectory false-positive + var sourceMapIsDirectory = options.sourceMapOriginal.indexOf('.map', options.sourceMapOriginal.length - 4) === -1 && isDirectory(options.sourceMapOriginal); + + if (!sourceMapIsDirectory) { + options.sourceMap = path.resolve(options.sourceMapOriginal); + } else if (!options.directory) { + options.sourceMap = path.resolve(options.sourceMapOriginal, path.basename(options.dest) + '.map'); + } else { + sassDir = path.resolve(options.directory); + file = path.relative(sassDir, args[0]); + mapDir = path.resolve(options.sourceMapOriginal); + options.sourceMap = path.join(mapDir, file).replace(path.extname(file), '.css.map'); + } + } + } + + return options; +} + +/** + * Watch + * + * @param {Object} options + * @param {Object} emitter + * @api private + */ + +function watch(options, emitter) { + var handler = function(files) { + files.added.forEach(function(file) { + var watch = gaze.watched(); + Object.keys(watch).forEach(function (dir) { + if (watch[dir].indexOf(file) !== -1) { + gaze.add(file); + } + }); + }); + + files.changed.forEach(function(file) { + if (path.basename(file)[0] !== '_') { + renderFile(file, options, emitter); + } + }); + + files.removed.forEach(function(file) { + gaze.remove(file); + }); + }; + + var gaze = new Gaze(); + gaze.add(watcher.reset(options)); + gaze.on('error', emitter.emit.bind(emitter, 'error')); + + gaze.on('changed', function(file) { + handler(watcher.changed(file)); + }); + + gaze.on('added', function(file) { + handler(watcher.added(file)); + }); + + gaze.on('deleted', function(file) { + handler(watcher.removed(file)); + }); +} + +/** + * Run + * + * @param {Object} options + * @param {Object} emitter + * @api private + */ + +function run(options, emitter) { + if (options.directory) { + if (!options.output) { + emitter.emit('error', 'An output directory must be specified when compiling a directory'); + } + if (!isDirectory(options.output)) { + emitter.emit('error', 'An output directory must be specified when compiling a directory'); + } + } + + if (options.sourceMapOriginal && options.directory && !isDirectory(options.sourceMapOriginal) && options.sourceMapOriginal !== 'true') { + emitter.emit('error', 'The --source-map option must be either a boolean or directory when compiling a directory'); + } + + if (options.importer) { + if ((path.resolve(options.importer) === path.normalize(options.importer).replace(/(.+)([/|\\])$/, '$1'))) { + options.importer = require(options.importer); + } else { + options.importer = require(path.resolve(options.importer)); + } + } + + if (options.functions) { + if ((path.resolve(options.functions) === path.normalize(options.functions).replace(/(.+)([/|\\])$/, '$1'))) { + options.functions = require(options.functions); + } else { + options.functions = require(path.resolve(options.functions)); + } + } + + if (options.watch) { + watch(options, emitter); + } else if (options.directory) { + renderDir(options, emitter); + } else { + render(options, emitter); + } +} + +/** + * Render a file + * + * 
@param {String} file + * @param {Object} options + * @param {Object} emitter + * @api private + */ +function renderFile(file, options, emitter) { + options = getOptions([path.resolve(file)], options); + if (options.watch && !options.quiet) { + emitter.emit('info', util.format('=> changed: %s', file)); + } + render(options, emitter); +} + +/** + * Render all sass files in a directory + * + * @param {Object} options + * @param {Object} emitter + * @api private + */ +function renderDir(options, emitter) { + var globPath = path.resolve(options.directory, globPattern(options)); + glob(globPath, { ignore: '**/_*', follow: options.follow }, function(err, files) { + if (err) { + return emitter.emit('error', util.format('You do not have permission to access this path: %s.', err.path)); + } else if (!files.length) { + return emitter.emit('error', 'No input file was found.'); + } + + forEach(files, function(subject) { + emitter.once('done', this.async()); + renderFile(subject, options, emitter); + }, function(successful, arr) { + var outputDir = path.join(process.cwd(), options.output); + if (!options.quiet) { + emitter.emit('info', util.format('Wrote %s CSS files to %s', arr.length, outputDir)); + } + process.exit(); + }); + }); +} + +/** + * Arguments and options + */ + +var options = getOptions(cli.input, cli.flags); +var emitter = getEmitter(); + +/** + * Show usage if no arguments are supplied + */ + +if (!options.src && process.stdin.isTTY) { + emitter.emit('error', [ + 'Provide a Sass file to render', + '', + 'Example: Compile foobar.scss to foobar.css', + ' node-sass --output-style compressed foobar.scss > foobar.css', + ' cat foobar.scss | node-sass --output-style compressed > foobar.css', + '', + 'Example: Watch the sass directory for changes, compile with sourcemaps to the css directory', + ' node-sass --watch --recursive --output css', + ' --source-map true --source-map-contents sass', + ].join('\n')); +} + +/** + * Apply arguments + */ + +if (options.src) { + if (isDirectory(options.src)) { + options.directory = options.src; + } + run(options, emitter); +} else if (!process.stdin.isTTY) { + stdin(function(data) { + options.data = data; + options.stdin = true; + run(options, emitter); + }); +} diff --git a/mybulma/node_modules/node-sass/binding.gyp b/mybulma/node_modules/node-sass/binding.gyp new file mode 100644 index 0000000..bb87e6c --- /dev/null +++ b/mybulma/node_modules/node-sass/binding.gyp @@ -0,0 +1,74 @@ +{ + 'variables': { + 'libsass_ext%': '', + }, + 'targets': [ + { + 'target_name': 'binding', + 'win_delay_load_hook': 'true', + 'sources': [ + 'src/binding.cpp', + 'src/create_string.cpp', + 'src/custom_function_bridge.cpp', + 'src/custom_importer_bridge.cpp', + 'src/sass_context_wrapper.cpp', + 'src/sass_types/boolean.cpp', + 'src/sass_types/color.cpp', + 'src/sass_types/error.cpp', + 'src/sass_types/factory.cpp', + 'src/sass_types/list.cpp', + 'src/sass_types/map.cpp', + 'src/sass_types/null.cpp', + 'src/sass_types/number.cpp', + 'src/sass_types/string.cpp' + ], + 'msvs_settings': { + 'VCLinkerTool': { + 'SetChecksum': 'true' + } + }, + 'xcode_settings': { + 'CLANG_CXX_LIBRARY': 'libc++', + 'OTHER_LDFLAGS': [], + 'GCC_ENABLE_CPP_EXCEPTIONS': 'NO', + 'MACOSX_DEPLOYMENT_TARGET': '10.11' + }, + 'include_dirs': [ + '= flags.length) { + return null; + } + + return flags[index + 1]; +} + +/** + * Get binary name. 
+ * If environment variable SASS_BINARY_NAME, + * .npmrc variable sass_binary_name or + * process argument --binary-name is provided, + * return it as is, otherwise make default binary + * name: {platform}-{arch}-{v8 version}.node + * + * @api public + */ + +function getBinaryName() { + var binaryName, + variant, + platform = process.platform; + + if (getArgument('--sass-binary-name')) { + binaryName = getArgument('--sass-binary-name'); + } else if (process.env.SASS_BINARY_NAME) { + binaryName = process.env.SASS_BINARY_NAME; + } else if (process.env.npm_config_sass_binary_name) { + binaryName = process.env.npm_config_sass_binary_name; + } else if (pkg.nodeSassConfig && pkg.nodeSassConfig.binaryName) { + binaryName = pkg.nodeSassConfig.binaryName; + } else { + variant = getPlatformVariant(); + if (variant) { + platform += '_' + variant; + } + + binaryName = [ + platform, '-', + process.arch, '-', + process.versions.modules + ].join(''); + } + + return [binaryName, 'binding.node'].join('_'); +} + +/** + * Determine the URL to fetch binary file from. + * By default fetch from the node-sass distribution + * site on GitHub. + * + * The default URL can be overridden using + * the environment variable SASS_BINARY_SITE, + * .npmrc variable sass_binary_site or + * or a command line option --sass-binary-site: + * + * node scripts/install.js --sass-binary-site http://example.com/ + * + * The URL should to the mirror of the repository + * laid out as follows: + * + * SASS_BINARY_SITE/ + * + * v3.0.0 + * v3.0.0/freebsd-x64-14_binding.node + * .... + * v3.0.0 + * v3.0.0/freebsd-ia32-11_binding.node + * v3.0.0/freebsd-x64-42_binding.node + * ... etc. for all supported versions and platforms + * + * @api public + */ + +function getBinaryUrl() { + var site = getArgument('--sass-binary-site') || + process.env.SASS_BINARY_SITE || + process.env.npm_config_sass_binary_site || + (pkg.nodeSassConfig && pkg.nodeSassConfig.binarySite) || + 'https://github.com/sass/node-sass/releases/download'; + + return [site, 'v' + pkg.version, getBinaryName()].join('/'); +} + +/** + * Get binary dir. + * If environment variable SASS_BINARY_DIR, + * .npmrc variable sass_binary_dir or + * process argument --sass-binary-dir is provided, + * select it by appending binary name, otherwise + * use default binary dir. + * Once the primary selection is made, check if + * callers wants to throw if file not exists before + * returning. + * + * @api public + */ + +function getBinaryDir() { + var binaryDir; + + if (getArgument('--sass-binary-dir')) { + binaryDir = getArgument('--sass-binary-dir'); + } else if (process.env.SASS_BINARY_DIR) { + binaryDir = process.env.SASS_BINARY_DIR; + } else if (process.env.npm_config_sass_binary_dir) { + binaryDir = process.env.npm_config_sass_binary_dir; + } else if (pkg.nodeSassConfig && pkg.nodeSassConfig.binaryDir) { + binaryDir = pkg.nodeSassConfig.binaryDir; + } else { + binaryDir = defaultBinaryDir; + } + + return binaryDir; +} + +/** + * Get binary path. + * If environment variable SASS_BINARY_PATH, + * .npmrc variable sass_binary_path or + * process argument --sass-binary-path is provided, + * select it by appending binary name, otherwise + * make default binary path using binary name. + * Once the primary selection is made, check if + * callers wants to throw if file not exists before + * returning. 
+ * + * @api public + */ + +function getBinaryPath() { + var binaryPath; + + if (getArgument('--sass-binary-path')) { + binaryPath = getArgument('--sass-binary-path'); + } else if (process.env.SASS_BINARY_PATH) { + binaryPath = process.env.SASS_BINARY_PATH; + } else if (process.env.npm_config_sass_binary_path) { + binaryPath = process.env.npm_config_sass_binary_path; + } else if (pkg.nodeSassConfig && pkg.nodeSassConfig.binaryPath) { + binaryPath = pkg.nodeSassConfig.binaryPath; + } else { + binaryPath = path.join(getBinaryDir(), getBinaryName().replace(/_(?=binding\.node)/, '/')); + } + + try { + return trueCasePathSync(binaryPath) || binaryPath; + } catch (e) { + return binaryPath; + } +} + +/** + * An array of paths suitable for use as a local disk cache of the binding. + * + * @return {[]String} an array of paths + * @api public + */ +function getCachePathCandidates() { + return [ + process.env.npm_config_sass_binary_cache, + process.env.npm_config_cache, + ].filter(function(_) { return _; }); +} + +/** + * The most suitable location for caching the binding on disk. + * + * Given the candidates directories provided by `getCachePathCandidates()` this + * returns the first writable directory. By treating the candidate directories + * as a prioritised list this method is deterministic, assuming no change to the + * local environment. + * + * @return {String} directory to cache binding + * @api public + */ +function getBinaryCachePath() { + var i, + cachePath, + cachePathCandidates = getCachePathCandidates(); + + for (i = 0; i < cachePathCandidates.length; i++) { + cachePath = path.join(cachePathCandidates[i], pkg.name, pkg.version); + + try { + fs.mkdirSync(cachePath, {recursive: true}); + return cachePath; + } catch (e) { + // Directory is not writable, try another + } + } + + return ''; +} + +/** + * The cached binding + * + * Check the candidates directories provided by `getCachePathCandidates()` for + * the binding file, if it exists. By treating the candidate directories + * as a prioritised list this method is deterministic, assuming no change to the + * local environment. + * + * @return {String} path to cached binary + * @api public + */ +function getCachedBinary() { + var i, + cachePath, + cacheBinary, + cachePathCandidates = getCachePathCandidates(), + binaryName = getBinaryName(); + + for (i = 0; i < cachePathCandidates.length; i++) { + cachePath = path.join(cachePathCandidates[i], pkg.name, pkg.version); + cacheBinary = path.join(cachePath, binaryName); + + if (fs.existsSync(cacheBinary)) { + return cacheBinary; + } + } + + return ''; +} + +/** + * Does the supplied binary path exist + * + * @param {String} binaryPath + * @api public + */ + +function hasBinary(binaryPath) { + return fs.existsSync(binaryPath); +} + +/** + * Get Sass version information + * + * @api public + */ + +function getVersionInfo(binding) { + return [ + ['node-sass', pkg.version, '(Wrapper)', '[JavaScript]'].join('\t'), + ['libsass ', binding.libsassVersion(), '(Sass Compiler)', '[C/C++]'].join('\t'), + ].join(eol); +} + +/** + * Gets the platform variant, currently either an empty string or 'musl' for Linux/musl platforms. 
+ * + * @api public + */ + +function getPlatformVariant() { + var contents = ''; + + if (process.platform !== 'linux') { + return ''; + } + + try { + contents = fs.readFileSync(process.execPath); + + if (contents.indexOf('libc.musl-x86_64.so.1') !== -1) { + return 'musl'; + } + } catch (err) { } // eslint-disable-line no-empty + + return ''; +} + +module.exports.hasBinary = hasBinary; +module.exports.getBinaryUrl = getBinaryUrl; +module.exports.getBinaryName = getBinaryName; +module.exports.getBinaryDir = getBinaryDir; +module.exports.getBinaryPath = getBinaryPath; +module.exports.getBinaryCachePath = getBinaryCachePath; +module.exports.getCachedBinary = getCachedBinary; +module.exports.getCachePathCandidates = getCachePathCandidates; +module.exports.getVersionInfo = getVersionInfo; +module.exports.getHumanEnvironment = getHumanEnvironment; +module.exports.getInstalledBinaries = getInstalledBinaries; +module.exports.isSupportedEnvironment = isSupportedEnvironment; diff --git a/mybulma/node_modules/node-sass/lib/index.js b/mybulma/node_modules/node-sass/lib/index.js new file mode 100644 index 0000000..1006280 --- /dev/null +++ b/mybulma/node_modules/node-sass/lib/index.js @@ -0,0 +1,458 @@ +/*! + * node-sass: lib/index.js + */ + +var path = require('path'), + clonedeep = require('lodash/cloneDeep'), + sass = require('./extensions'); + +/** + * Require binding + */ + +var binding = require('./binding')(sass); + +/** + * Get input file + * + * @param {Object} options + * @api private + */ + +function getInputFile(options) { + return options.file ? path.resolve(options.file) : null; +} + +/** + * Get output file + * + * @param {Object} options + * @api private + */ + +function getOutputFile(options) { + var outFile = options.outFile; + + if (!outFile || typeof outFile !== 'string' || (!options.data && !options.file)) { + return null; + } + + return path.resolve(outFile); +} + +/** + * Get source map + * + * @param {Object} options + * @api private + */ + +function getSourceMap(options) { + var sourceMap = options.sourceMap; + + if (sourceMap && typeof sourceMap !== 'string' && options.outFile) { + sourceMap = options.outFile + '.map'; + } + + return sourceMap && typeof sourceMap === 'string' ? path.resolve(sourceMap) : null; +} + +/** + * Get stats + * + * @param {Object} options + * @api private + */ + +function getStats(options) { + var stats = {}; + + stats.entry = options.file || 'data'; + stats.start = Date.now(); + + return stats; +} + +/** + * End stats + * + * @param {Object} stats + * @param {Object} sourceMap + * @api private + */ + +function endStats(stats) { + stats.end = Date.now(); + stats.duration = stats.end - stats.start; + + return stats; +} + +/** + * Get style + * + * @param {Object} options + * @api private + */ + +function getStyle(options) { + var styles = { + nested: 0, + expanded: 1, + compact: 2, + compressed: 3 + }; + + return styles[options.outputStyle] || 0; +} + +/** + * Get indent width + * + * @param {Object} options + * @api private + */ + +function getIndentWidth(options) { + var width = parseInt(options.indentWidth) || 2; + + return width > 10 ? 
2 : width; +} + +/** + * Get indent type + * + * @param {Object} options + * @api private + */ + +function getIndentType(options) { + var types = { + space: 0, + tab: 1 + }; + + return types[options.indentType] || 0; +} + +/** + * Get linefeed + * + * @param {Object} options + * @api private + */ + +function getLinefeed(options) { + var feeds = { + cr: '\r', + crlf: '\r\n', + lf: '\n', + lfcr: '\n\r' + }; + + return feeds[options.linefeed] || '\n'; +} + +/** + * Build an includePaths string + * from the options.includePaths array and the SASS_PATH environment variable + * + * @param {Object} options + * @api private + */ + +function buildIncludePaths(options) { + options.includePaths = options.includePaths || []; + + if (Object.prototype.hasOwnProperty.call(process.env, 'SASS_PATH')) { + options.includePaths = options.includePaths.concat( + process.env.SASS_PATH.split(path.delimiter) + ); + } + + // Preserve the behaviour people have come to expect. + // This behaviour was removed from Sass in 3.4 and + // LibSass in 3.5. + options.includePaths.unshift(process.cwd()); + + return options.includePaths.join(path.delimiter); +} + +/** + * Get options + * + * @param {Object} options + * @api private + */ + +function getOptions(opts, cb) { + if (typeof opts !== 'object') { + throw new Error('Invalid: options is not an object.'); + } + var options = clonedeep(opts || {}); + + options.sourceComments = options.sourceComments || false; + if (Object.prototype.hasOwnProperty.call(options, 'file')) { + options.file = getInputFile(options); + } + options.outFile = getOutputFile(options); + options.includePaths = buildIncludePaths(options); + options.precision = parseInt(options.precision) || 5; + options.sourceMap = getSourceMap(options); + options.style = getStyle(options); + options.indentWidth = getIndentWidth(options); + options.indentType = getIndentType(options); + options.linefeed = getLinefeed(options); + + // context object represents node-sass environment + options.context = { options: options, callback: cb }; + + options.result = { + stats: getStats(options) + }; + + return options; +} + +/** + * Executes a callback and transforms any exception raised into a sass error + * + * @param {Function} callback + * @param {Array} arguments + * @api private + */ + +function tryCallback(callback, args) { + try { + return callback.apply(this, args); + } catch (e) { + if (typeof e === 'string') { + return new binding.types.Error(e); + } else if (e instanceof Error) { + return new binding.types.Error(e.message); + } else { + return new binding.types.Error('An unexpected error occurred'); + } + } +} + +/** + * Normalizes the signature of custom functions to make it possible to just supply the + * function name and have the signature default to `fn(...)`. The callback is adjusted + * to transform the input sass list into discrete arguments. 
+ * + * @param {String} signature + * @param {Function} callback + * @return {Object} + * @api private + */ + +function normalizeFunctionSignature(signature, callback) { + if (!/^\*|@warn|@error|@debug|\w+\(.*\)$/.test(signature)) { + if (!/\w+/.test(signature)) { + throw new Error('Invalid function signature format "' + signature + '"'); + } + + return { + signature: signature + '(...)', + callback: function() { + var args = Array.prototype.slice.call(arguments), + list = args.shift(), + i; + + for (i = list.getLength() - 1; i >= 0; i--) { + args.unshift(list.getValue(i)); + } + + return callback.apply(this, args); + } + }; + } + + return { + signature: signature, + callback: callback + }; +} + +/** + * Render + * + * @param {Object} options + * @api public + */ + +module.exports.render = function(opts, cb) { + var options = getOptions(opts, cb); + + // options.error and options.success are for libsass binding + options.error = function(err) { + var payload = Object.assign(new Error(), JSON.parse(err)); + + if (cb) { + options.context.callback.call(options.context, payload, null); + } + }; + + options.success = function() { + var result = options.result; + var stats = endStats(result.stats); + var payload = { + css: result.css, + stats: stats + }; + if (result.map) { + payload.map = result.map; + } + + if (cb) { + options.context.callback.call(options.context, null, payload); + } + }; + + var importer = options.importer; + + if (importer) { + if (Array.isArray(importer)) { + options.importer = []; + importer.forEach(function(subject, index) { + options.importer[index] = function(file, prev, bridge) { + function done(result) { + bridge.success(result === module.exports.NULL ? null : result); + } + + var result = subject.call(options.context, file, prev, done); + + if (result !== undefined) { + done(result); + } + }; + }); + } else { + options.importer = function(file, prev, bridge) { + function done(result) { + bridge.success(result === module.exports.NULL ? null : result); + } + + var result = importer.call(options.context, file, prev, done); + + if (result !== undefined) { + done(result); + } + }; + } + } + + var functions = clonedeep(options.functions); + + if (functions) { + options.functions = {}; + + Object.keys(functions).forEach(function(subject) { + var cb = normalizeFunctionSignature(subject, functions[subject]); + + options.functions[cb.signature] = function() { + var args = Array.prototype.slice.call(arguments), + bridge = args.pop(); + + function done(data) { + bridge.success(data); + } + + var result = tryCallback(cb.callback.bind(options.context), args.concat(done)); + + if (result) { + done(result); + } + }; + }); + } + + if (options.data) { + binding.render(options); + } else if (options.file) { + binding.renderFile(options); + } else { + cb({status: 3, message: 'No input specified: provide a file name or a source string to process' }); + } +}; + +/** + * Render sync + * + * @param {Object} options + * @api public + */ + +module.exports.renderSync = function(opts) { + var options = getOptions(opts); + var importer = options.importer; + + if (importer) { + if (Array.isArray(importer)) { + options.importer = []; + importer.forEach(function(subject, index) { + options.importer[index] = function(file, prev) { + var result = subject.call(options.context, file, prev); + + return result === module.exports.NULL ? 
null : result; + }; + }); + } else { + options.importer = function(file, prev) { + var result = importer.call(options.context, file, prev); + + return result === module.exports.NULL ? null : result; + }; + } + } + + var functions = clonedeep(options.functions); + + if (options.functions) { + options.functions = {}; + + Object.keys(functions).forEach(function(signature) { + var cb = normalizeFunctionSignature(signature, functions[signature]); + + options.functions[cb.signature] = function() { + return tryCallback(cb.callback.bind(options.context), arguments); + }; + }); + } + + var status; + if (options.data) { + status = binding.renderSync(options); + } else if (options.file) { + status = binding.renderFileSync(options); + } else { + throw new Error('No input specified: provide a file name or a source string to process'); + } + + var result = options.result; + + if (status) { + result.stats = endStats(result.stats); + return result; + } + + throw Object.assign(new Error(), JSON.parse(result.error)); +}; + +/** + * API Info + * + * @api public + */ + +module.exports.info = sass.getVersionInfo(binding); + +/** + * Expose sass types + */ + +module.exports.types = binding.types; +module.exports.TRUE = binding.types.Boolean.TRUE; +module.exports.FALSE = binding.types.Boolean.FALSE; +module.exports.NULL = binding.types.Null.NULL; diff --git a/mybulma/node_modules/node-sass/lib/render.js b/mybulma/node_modules/node-sass/lib/render.js new file mode 100644 index 0000000..3539a9a --- /dev/null +++ b/mybulma/node_modules/node-sass/lib/render.js @@ -0,0 +1,120 @@ +/*! + * node-sass: lib/render.js + */ + +var chalk = require('chalk'), + fs = require('fs'), + path = require('path'), + sass = require('./'); + +/** + * Render + * + * @param {Object} options + * @param {Object} emitter + * @api public + */ + +module.exports = function(options, emitter) { + var renderOptions = { + includePaths: options.includePath, + omitSourceMapUrl: options.omitSourceMapUrl, + indentedSyntax: options.indentedSyntax, + outFile: options.dest, + outputStyle: options.outputStyle, + precision: options.precision, + sourceComments: options.sourceComments, + sourceMapEmbed: options.sourceMapEmbed, + sourceMapContents: options.sourceMapContents, + sourceMap: options.sourceMap, + sourceMapRoot: options.sourceMapRoot, + importer: options.importer, + functions: options.functions, + indentWidth: options.indentWidth, + indentType: options.indentType, + linefeed: options.linefeed + }; + + if (options.data) { + renderOptions.data = options.data; + } else if (options.src) { + renderOptions.file = options.src; + } + + var sourceMap = options.sourceMap; + var destination = options.dest; + var stdin = options.stdin; + + var success = function(result) { + var todo = 1; + var done = function() { + if (--todo <= 0) { + emitter.emit('done'); + } + }; + + if (!destination || stdin) { + emitter.emit('log', result.css.toString()); + + if (sourceMap && !options.sourceMapEmbed) { + emitter.emit('log', result.map.toString()); + } + + return done(); + } + + emitter.emit('info', chalk.green('Rendering Complete, saving .css file...')); + + fs.mkdir(path.dirname(destination), {recursive: true}, function(err) { + if (err) { + return emitter.emit('error', chalk.red(err)); + } + + fs.writeFile(destination, result.css.toString(), function(err) { + if (err) { + return emitter.emit('error', chalk.red(err)); + } + + emitter.emit('info', chalk.green('Wrote CSS to ' + destination)); + emitter.emit('write', err, destination, result.css.toString()); + done(); + }); 
+ }); + + if (sourceMap) { + todo++; + + fs.mkdir(path.dirname(sourceMap), {recursive: true}, function(err) { + if (err) { + return emitter.emit('error', chalk.red(err)); + } + fs.writeFile(sourceMap, result.map, function(err) { + if (err) { + return emitter.emit('error', chalk.red('Error' + err)); + } + + emitter.emit('info', chalk.green('Wrote Source Map to ' + sourceMap)); + emitter.emit('write-source-map', err, sourceMap, result.map); + done(); + }); + }); + } + + emitter.emit('render', result.css.toString()); + }; + + var error = function(error) { + emitter.emit('error', chalk.red(JSON.stringify(error, null, 2))); + }; + + var renderCallback = function(err, result) { + if (err) { + error(err); + } + else { + success(result); + } + }; + + sass.render(renderOptions, renderCallback); +}; diff --git a/mybulma/node_modules/node-sass/lib/watcher.js b/mybulma/node_modules/node-sass/lib/watcher.js new file mode 100644 index 0000000..89443b4 --- /dev/null +++ b/mybulma/node_modules/node-sass/lib/watcher.js @@ -0,0 +1,93 @@ +var grapher = require('sass-graph'), + clonedeep = require('lodash/cloneDeep'), + path = require('path'), + config = {}, + watcher = {}, + graph = null; + +watcher.reset = function(opts) { + config = clonedeep(opts || config || {}); + var options = { + loadPaths: config.includePath, + extensions: ['scss', 'sass', 'css'], + follow: config.follow, + }; + + if (config.directory) { + graph = grapher.parseDir(config.directory, options); + } else { + graph = grapher.parseFile(config.src, options); + } + + return Object.keys(graph.index); +}; + +watcher.changed = function(absolutePath) { + var files = { + added: [], + changed: [], + removed: [], + }; + + this.reset(); + + if (absolutePath && path.basename(absolutePath)[0] !== '_') { + files.changed.push(absolutePath); + } + + graph.visitAncestors(absolutePath, function(parent) { + if (path.basename(parent)[0] !== '_') { + files.changed.push(parent); + } + }); + + graph.visitDescendents(absolutePath, function(child) { + files.added.push(child); + }); + + return files; +}; + +watcher.added = function(absolutePath) { + var files = { + added: [], + changed: [], + removed: [], + }; + + this.reset(); + + if (Object.keys(graph.index).indexOf(absolutePath) === -1) { + files.added.push(absolutePath); + } + + graph.visitDescendents(absolutePath, function(child) { + files.added.push(child); + }); + + return files; +}; + +watcher.removed = function(absolutePath) { + var files = { + added: [], + changed: [], + removed: [], + }; + + graph.visitAncestors(absolutePath, function(parent) { + if (path.basename(parent)[0] !== '_') { + files.changed.push(parent); + } + }); + + if (Object.keys(graph.index).indexOf(absolutePath) !== -1) { + files.removed.push(absolutePath); + } + + this.reset(); + + return files; +}; + +module.exports = watcher; diff --git a/mybulma/node_modules/node-sass/package.json b/mybulma/node_modules/node-sass/package.json new file mode 100644 index 0000000..7139aa8 --- /dev/null +++ b/mybulma/node_modules/node-sass/package.json @@ -0,0 +1,79 @@ +{ + "name": "node-sass", + "version": "8.0.0", + "libsass": "3.5.5", + "description": "Wrapper around libsass", + "license": "MIT", + "bugs": "https://github.com/sass/node-sass/issues", + "homepage": "https://github.com/sass/node-sass", + "repository": { + "type": "git", + "url": "https://github.com/sass/node-sass" + }, + "author": { + "name": "Andrew Nesbitt", + "email": "andrewnez@gmail.com", + "url": "http://andrew.github.com" + }, + "engines": { + "node": ">=14" + }, + "main": 
"lib/index.js", + "nodeSassConfig": { + "binarySite": "https://github.com/sass/node-sass/releases/download" + }, + "bin": { + "node-sass": "bin/node-sass" + }, + "gypfile": true, + "scripts": { + "coverage": "nyc npm run test", + "install": "node scripts/install.js", + "postinstall": "node scripts/build.js", + "lint": "eslint bin/node-sass lib scripts test", + "test": "mocha test/{*,**/**}.js", + "build": "node scripts/build.js --force", + "prepublishOnly ": "scripts/prepublish.js" + }, + "files": [ + "bin", + "binding.gyp", + "lib", + "scripts", + "src", + "test", + "vendor" + ], + "keywords": [ + "css", + "libsass", + "preprocessor", + "sass", + "scss", + "style" + ], + "dependencies": { + "async-foreach": "^0.1.3", + "chalk": "^4.1.2", + "cross-spawn": "^7.0.3", + "gaze": "^1.0.0", + "get-stdin": "^4.0.1", + "glob": "^7.0.3", + "lodash": "^4.17.15", + "make-fetch-happen": "^10.0.4", + "meow": "^9.0.0", + "nan": "^2.17.0", + "node-gyp": "^8.4.1", + "sass-graph": "^4.0.1", + "stdout-stream": "^1.4.0", + "true-case-path": "^2.2.1" + }, + "devDependencies": { + "eslint": "^8.0.0", + "fs-extra": "^10.0.0", + "mocha": "^9.0.1", + "nyc": "^15.1.0", + "rimraf": "^3.0.2", + "unique-temp-dir": "^1.0.0" + } +} diff --git a/mybulma/node_modules/node-sass/scripts/build.js b/mybulma/node_modules/node-sass/scripts/build.js new file mode 100644 index 0000000..5c8a42b --- /dev/null +++ b/mybulma/node_modules/node-sass/scripts/build.js @@ -0,0 +1,154 @@ +/*! + * node-sass: scripts/build.js + */ + +var fs = require('fs'), + path = require('path'), + spawn = require('cross-spawn'), + sass = require('../lib/extensions'); + +/** + * After build + * + * @param {Object} options + * @api private + */ + +function afterBuild(options) { + var install = sass.getBinaryPath(); + var target = path.join(__dirname, '..', 'build', + options.debug ? 'Debug' : + process.config.target_defaults + ? 
process.config.target_defaults.default_configuration + : 'Release', + 'binding.node'); + + fs.mkdir(path.dirname(install), {recursive: true}, function(err) { + if (err && err.code !== 'EEXIST') { + console.error(err.message); + return; + } + + fs.stat(target, function(err) { + if (err) { + console.error('Build succeeded but target not found'); + return; + } + + fs.rename(target, install, function(err) { + if (err) { + console.error(err.message); + return; + } + + console.log('Installed to', install); + }); + }); + }); +} + +/** + * Build + * + * @param {Object} options + * @api private + */ + +function build(options) { + var args = [require.resolve(path.join('node-gyp', 'bin', 'node-gyp.js')), 'rebuild', '--verbose'].concat( + ['libsass_ext', 'libsass_cflags', 'libsass_ldflags', 'libsass_library'].map(function(subject) { + return ['--', subject, '=', process.env[subject.toUpperCase()] || ''].join(''); + })).concat(options.args); + + console.log('Building:', [process.execPath].concat(args).join(' ')); + + var proc = spawn(process.execPath, args, { + stdio: [0, 1, 2] + }); + + proc.on('exit', function(errorCode) { + if (!errorCode) { + afterBuild(options); + return; + } + + if (errorCode === 127 ) { + console.error('node-gyp not found!'); + } else { + console.error('Build failed with error code:', errorCode); + } + + process.exit(1); + }); +} + +/** + * Parse arguments + * + * @param {Array} args + * @api private + */ + +function parseArgs(args) { + var options = { + arch: process.arch, + platform: process.platform, + force: process.env.npm_config_force === 'true', + }; + + options.args = args.filter(function(arg) { + if (arg === '-f' || arg === '--force') { + options.force = true; + return false; + } else if (arg.substring(0, 13) === '--target_arch') { + options.arch = arg.substring(14); + } else if (arg === '-d' || arg === '--debug') { + options.debug = true; + } else if (arg.substring(0, 13) === '--libsass_ext' && arg.substring(14) !== 'no') { + options.libsassExt = true; + } + + return true; + }); + + return options; +} + +/** + * Test for pre-built library + * + * @param {Object} options + * @api private + */ + +function testBinary(options) { + if (options.force || process.env.SASS_FORCE_BUILD) { + return build(options); + } + + if (!sass.hasBinary(sass.getBinaryPath())) { + return build(options); + } + + console.log('Binary found at', sass.getBinaryPath()); + console.log('Testing binary'); + + try { + require('../').renderSync({ + data: 's { a: ss }' + }); + + console.log('Binary is fine'); + } catch (e) { + console.log('Binary has a problem:', e); + console.log('Building the binary locally'); + + return build(options); + } +} + +/** + * Apply arguments and run + */ + +testBinary(parseArgs(process.argv.slice(2))); diff --git a/mybulma/node_modules/node-sass/scripts/install.js b/mybulma/node_modules/node-sass/scripts/install.js new file mode 100644 index 0000000..42511b3 --- /dev/null +++ b/mybulma/node_modules/node-sass/scripts/install.js @@ -0,0 +1,121 @@ +/*! 
+ * node-sass: scripts/install.js + */ + +var fs = require('fs'), + eol = require('os').EOL, + path = require('path'), + fetch = require('make-fetch-happen'), + sass = require('../lib/extensions'), + downloadOptions = require('./util/downloadoptions'); + +/** + * Download file, if succeeds save, if not delete + * + * @param {String} url + * @param {String} dest + * @param {Function} cb + * @api private + */ + +function download(url, dest, cb) { + var reportError = function(err) { + cb(['Cannot download "', url, '": ', eol, eol, + typeof err.message === 'string' ? err.message : err, eol, eol, + 'Hint: If github.com is not accessible in your location', eol, + ' try setting a proxy via HTTP_PROXY, e.g. ', eol, eol, + ' export HTTP_PROXY=http://example.com:1234',eol, eol, + 'or configure npm proxy via', eol, eol, + ' npm config set proxy http://example.com:8080'].join('')); + }; + + var successful = function(response) { + return response.status >= 200 && response.status < 300; + }; + + console.log('Downloading binary from', url); + + try { + fetch(url, downloadOptions()).then(function (response) { + return response.buffer(); + }).then(function (buffer) { + fs.createWriteStream(dest).on('error', cb).end(buffer, cb); + console.log('Download complete'); + }).catch(function(err) { + if(!successful(err)) { + reportError(['HTTP error', err.code, err.message].join(' ')); + } else { + reportError(err); + } + }); + } catch (err) { + cb(err); + } +} + +/** + * Check and download binary + * + * @api private + */ + +function checkAndDownloadBinary() { + if (process.env.SKIP_SASS_BINARY_DOWNLOAD_FOR_CI) { + console.log('Skipping downloading binaries on CI builds'); + return; + } + + var cachedBinary = sass.getCachedBinary(), + cachePath = sass.getBinaryCachePath(), + binaryPath = sass.getBinaryPath(); + + if (sass.hasBinary(binaryPath)) { + console.log('node-sass build', 'Binary found at', binaryPath); + return; + } + + try { + fs.mkdirSync(path.dirname(binaryPath), {recursive: true}); + } catch (err) { + console.error('Unable to save binary', path.dirname(binaryPath), ':', err); + return; + } + + if (cachedBinary) { + console.log('Cached binary found at', cachedBinary); + fs.createReadStream(cachedBinary).pipe(fs.createWriteStream(binaryPath)); + return; + } + + download(sass.getBinaryUrl(), binaryPath, function(err) { + if (err) { + console.error(err); + return; + } + + console.log('Binary saved to', binaryPath); + + cachedBinary = path.join(cachePath, sass.getBinaryName()); + + if (cachePath) { + console.log('Caching binary to', cachedBinary); + + try { + fs.mkdirSync(path.dirname(cachedBinary), {recursive: true}); + fs.createReadStream(binaryPath) + .pipe(fs.createWriteStream(cachedBinary)) + .on('error', function (err) { + console.log('Failed to cache binary:', err); + }); + } catch (err) { + console.log('Failed to cache binary:', err); + } + } + }); +} + +/** + * If binary does not exist, download it + */ + +checkAndDownloadBinary(); diff --git a/mybulma/node_modules/node-sass/scripts/prepublish.js b/mybulma/node_modules/node-sass/scripts/prepublish.js new file mode 100644 index 0000000..b1befd4 --- /dev/null +++ b/mybulma/node_modules/node-sass/scripts/prepublish.js @@ -0,0 +1,17 @@ +/*! 
+ * node-sass: scripts/install.js + */ + +var path = require('path'), + rimraf = require('rimraf'); + +function prepublish() { + var vendorPath = path.resolve(__dirname, '..', 'vendor'); + rimraf.sync(vendorPath); +} + +/** + * Run + */ + +prepublish(); diff --git a/mybulma/node_modules/node-sass/scripts/util/downloadoptions.js b/mybulma/node_modules/node-sass/scripts/util/downloadoptions.js new file mode 100644 index 0000000..6add52a --- /dev/null +++ b/mybulma/node_modules/node-sass/scripts/util/downloadoptions.js @@ -0,0 +1,26 @@ +var proxy = require('./proxy'), + userAgent = require('./useragent'), + rejectUnauthorized = require('./rejectUnauthorized'); + +/** + * The options passed to make-fetch-happen when downloading the binary + * + * @return {Object} an options object for make-fetch-happen + * @api private + */ +module.exports = function() { + var options = { + strictSSL: rejectUnauthorized(), + timeout: 60000, + headers: { + 'User-Agent': userAgent(), + }, + }; + + var proxyConfig = proxy(); + if (proxyConfig) { + options.proxy = proxyConfig; + } + + return options; +}; diff --git a/mybulma/node_modules/node-sass/scripts/util/proxy.js b/mybulma/node_modules/node-sass/scripts/util/proxy.js new file mode 100644 index 0000000..e65eac5 --- /dev/null +++ b/mybulma/node_modules/node-sass/scripts/util/proxy.js @@ -0,0 +1,22 @@ + +/** + * Determine the proxy settings configured by npm + * + * It's possible to configure npm to use a proxy different + * from the system defined proxy. This can be done via the + * `npm config` CLI or the `.npmrc` config file. + * + * If a proxy has been configured in this way we must + * tell request explicitly to use it. + * + * Otherwise we can trust request to the right thing. + * + * @return {String} the proxy configured by npm or an empty string + * @api private + */ +module.exports = function() { + return process.env.npm_config_https_proxy || + process.env.npm_config_proxy || + process.env.npm_config_http_proxy || + ''; +}; diff --git a/mybulma/node_modules/node-sass/scripts/util/rejectUnauthorized.js b/mybulma/node_modules/node-sass/scripts/util/rejectUnauthorized.js new file mode 100644 index 0000000..43d8373 --- /dev/null +++ b/mybulma/node_modules/node-sass/scripts/util/rejectUnauthorized.js @@ -0,0 +1,46 @@ +var pkg = require('../../package.json'); + +/** + * Get the value of a CLI argument + * + * @param {String} name + * @param {Array} args + * @api private + */ +function getArgument(name, args) { + var flags = args || process.argv.slice(2), + index = flags.lastIndexOf(name); + + if (index === -1 || index + 1 >= flags.length) { + return null; + } + + return flags[index + 1]; +} + +/** + * Get the value of reject-unauthorized + * If environment variable SASS_REJECT_UNAUTHORIZED is non-zero, + * .npmrc variable sass_reject_unauthorized or + * process argument --sass-reject_unauthorized is provided, + * set rejectUnauthorized to true + * Else set to false by default + * + * @return {Boolean} The value of rejectUnauthorized + * @api private + */ +module.exports = function() { + var rejectUnauthorized = false; + + if (getArgument('--sass-reject-unauthorized')) { + rejectUnauthorized = getArgument('--sass-reject-unauthorized'); + } else if (process.env.SASS_REJECT_UNAUTHORIZED !== '0') { + rejectUnauthorized = true; + } else if (process.env.npm_config_sass_reject_unauthorized) { + rejectUnauthorized = process.env.npm_config_sass_reject_unauthorized; + } else if (pkg.nodeSassConfig && pkg.nodeSassConfig.rejectUnauthorized) { + rejectUnauthorized = 
pkg.nodeSassConfig.rejectUnauthorized; + } + + return rejectUnauthorized; +}; diff --git a/mybulma/node_modules/node-sass/scripts/util/useragent.js b/mybulma/node_modules/node-sass/scripts/util/useragent.js new file mode 100644 index 0000000..2496eec --- /dev/null +++ b/mybulma/node_modules/node-sass/scripts/util/useragent.js @@ -0,0 +1,13 @@ +var pkg = require('../../package.json'); + +/** + * A custom user agent use for binary downloads. + * + * @api private + */ +module.exports = function() { + return [ + 'node/', process.version, ' ', + 'node-sass-installer/', pkg.version + ].join(''); +}; diff --git a/mybulma/node_modules/node-sass/src/binding.cpp b/mybulma/node_modules/node-sass/src/binding.cpp new file mode 100644 index 0000000..c8376e9 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/binding.cpp @@ -0,0 +1,358 @@ +#include +#include +#include "sass_context_wrapper.h" +#include "custom_function_bridge.h" +#include "create_string.h" +#include "sass_types/factory.h" + +Sass_Import_List sass_importer(const char* cur_path, Sass_Importer_Entry cb, struct Sass_Compiler* comp) +{ + void* cookie = sass_importer_get_cookie(cb); + struct Sass_Import* previous = sass_compiler_get_last_import(comp); + const char* prev_path = sass_import_get_abs_path(previous); + CustomImporterBridge& bridge = *(static_cast(cookie)); + + std::vector argv; + argv.push_back((void*)cur_path); + argv.push_back((void*)prev_path); + + return bridge(argv); +} + +union Sass_Value* sass_custom_function(const union Sass_Value* s_args, Sass_Function_Entry cb, struct Sass_Compiler* comp) +{ + void* cookie = sass_function_get_cookie(cb); + CustomFunctionBridge& bridge = *(static_cast(cookie)); + + std::vector argv; + for (unsigned l = sass_list_get_length(s_args), i = 0; i < l; i++) { + argv.push_back((void*)sass_list_get_value(s_args, i)); + } + + return bridge(argv); +} + +int ExtractOptions(v8::Local options, void* cptr, sass_context_wrapper* ctx_w, bool is_file, bool is_sync) { + Nan::HandleScope scope; + + struct Sass_Context* ctx; + + v8::Local result_ = Nan::Get( + options, + Nan::New("result").ToLocalChecked() + ).ToLocalChecked(); + if (!result_->IsObject()) { + Nan::ThrowTypeError("\"result\" element is not an object"); + return -1; + } + + ctx_w->result.Reset(result_.As()); + + if (is_file) { + ctx_w->fctx = (struct Sass_File_Context*) cptr; + ctx = sass_file_context_get_context(ctx_w->fctx); + } + else { + ctx_w->dctx = (struct Sass_Data_Context*) cptr; + ctx = sass_data_context_get_context(ctx_w->dctx); + } + + struct Sass_Options* sass_options = sass_context_get_options(ctx); + + ctx_w->is_sync = is_sync; + + if (!is_sync) { + ctx_w->request.data = ctx_w; + + // async (callback) style + v8::Local success_callback = v8::Local::Cast(Nan::Get(options, Nan::New("success").ToLocalChecked()).ToLocalChecked()); + v8::Local error_callback = v8::Local::Cast(Nan::Get(options, Nan::New("error").ToLocalChecked()).ToLocalChecked()); + + ctx_w->success_callback = new Nan::Callback(success_callback); + ctx_w->error_callback = new Nan::Callback(error_callback); + } + + if (!is_file) { + ctx_w->file = create_string(Nan::Get(options, Nan::New("file").ToLocalChecked())); + sass_option_set_input_path(sass_options, ctx_w->file); + } + + int indent_len = Nan::To( + Nan::Get( + options, + Nan::New("indentWidth").ToLocalChecked() + ).ToLocalChecked()).FromJust(); + + ctx_w->indent = (char*)malloc(indent_len + 1); + + strcpy(ctx_w->indent, std::string( + indent_len, + Nan::To( + Nan::Get( + options, + 
Nan::New("indentType").ToLocalChecked() + ).ToLocalChecked()).FromJust() == 1 ? '\t' : ' ' + ).c_str()); + + ctx_w->linefeed = create_string(Nan::Get(options, Nan::New("linefeed").ToLocalChecked())); + ctx_w->include_path = create_string(Nan::Get(options, Nan::New("includePaths").ToLocalChecked())); + ctx_w->out_file = create_string(Nan::Get(options, Nan::New("outFile").ToLocalChecked())); + ctx_w->source_map = create_string(Nan::Get(options, Nan::New("sourceMap").ToLocalChecked())); + ctx_w->source_map_root = create_string(Nan::Get(options, Nan::New("sourceMapRoot").ToLocalChecked())); + + sass_option_set_output_path(sass_options, ctx_w->out_file); + sass_option_set_output_style(sass_options, (Sass_Output_Style)Nan::To(Nan::Get(options, Nan::New("style").ToLocalChecked()).ToLocalChecked()).FromJust()); + sass_option_set_is_indented_syntax_src(sass_options, Nan::To(Nan::Get(options, Nan::New("indentedSyntax").ToLocalChecked()).ToLocalChecked()).FromJust()); + sass_option_set_source_comments(sass_options, Nan::To(Nan::Get(options, Nan::New("sourceComments").ToLocalChecked()).ToLocalChecked()).FromJust()); + sass_option_set_omit_source_map_url(sass_options, Nan::To(Nan::Get(options, Nan::New("omitSourceMapUrl").ToLocalChecked()).ToLocalChecked()).FromJust()); + sass_option_set_source_map_embed(sass_options, Nan::To(Nan::Get(options, Nan::New("sourceMapEmbed").ToLocalChecked()).ToLocalChecked()).FromJust()); + sass_option_set_source_map_contents(sass_options, Nan::To(Nan::Get(options, Nan::New("sourceMapContents").ToLocalChecked()).ToLocalChecked()).FromJust()); + sass_option_set_source_map_file(sass_options, ctx_w->source_map); + sass_option_set_source_map_root(sass_options, ctx_w->source_map_root); + sass_option_set_include_path(sass_options, ctx_w->include_path); + sass_option_set_precision(sass_options, Nan::To(Nan::Get(options, Nan::New("precision").ToLocalChecked()).ToLocalChecked()).FromJust()); + sass_option_set_indent(sass_options, ctx_w->indent); + sass_option_set_linefeed(sass_options, ctx_w->linefeed); + + v8::Local importer_callback = Nan::Get(options, Nan::New("importer").ToLocalChecked()).ToLocalChecked(); + + if (importer_callback->IsFunction()) { + v8::Local importer = importer_callback.As(); + + CustomImporterBridge *bridge = new CustomImporterBridge(importer, ctx_w->is_sync); + ctx_w->importer_bridges.push_back(bridge); + + Sass_Importer_List c_importers = sass_make_importer_list(1); + c_importers[0] = sass_make_importer(sass_importer, 0, bridge); + + sass_option_set_c_importers(sass_options, c_importers); + } + else if (importer_callback->IsArray()) { + v8::Local importers = importer_callback.As(); + Sass_Importer_List c_importers = sass_make_importer_list(importers->Length()); + + for (size_t i = 0; i < importers->Length(); ++i) { + v8::Local callback = v8::Local::Cast(Nan::Get(importers, static_cast(i)).ToLocalChecked()); + + CustomImporterBridge *bridge = new CustomImporterBridge(callback, ctx_w->is_sync); + ctx_w->importer_bridges.push_back(bridge); + + c_importers[i] = sass_make_importer(sass_importer, importers->Length() - i - 1, bridge); + } + + sass_option_set_c_importers(sass_options, c_importers); + } + + v8::Local custom_functions = Nan::Get(options, Nan::New("functions").ToLocalChecked()).ToLocalChecked(); + + if (custom_functions->IsObject()) { + v8::Local functions = custom_functions.As(); + v8::Local signatures = Nan::GetOwnPropertyNames(functions).ToLocalChecked(); + unsigned num_signatures = signatures->Length(); + Sass_Function_List fn_list = 
sass_make_function_list(num_signatures); + + for (unsigned i = 0; i < num_signatures; i++) { + v8::Local signature = v8::Local::Cast(Nan::Get(signatures, Nan::New(i)).ToLocalChecked()); + v8::Local callback = v8::Local::Cast(Nan::Get(functions, signature).ToLocalChecked()); + + CustomFunctionBridge *bridge = new CustomFunctionBridge(callback, ctx_w->is_sync); + ctx_w->function_bridges.push_back(bridge); + + char* sig = create_string(signature); + Sass_Function_Entry fn = sass_make_function(sig, sass_custom_function, bridge); + free(sig); + sass_function_set_list_entry(fn_list, i, fn); + } + + sass_option_set_c_functions(sass_options, fn_list); + } + return 0; +} + +void GetStats(sass_context_wrapper* ctx_w, Sass_Context* ctx) { + Nan::HandleScope scope; + + char** included_files = sass_context_get_included_files(ctx); + v8::Local arr = Nan::New(); + + if (included_files) { + for (int i = 0; included_files[i] != nullptr; ++i) { + Nan::Set(arr, i, Nan::New(included_files[i]).ToLocalChecked()); + } + } + + v8::Local result = Nan::New(ctx_w->result); + assert(result->IsObject()); + + v8::Local stats = Nan::Get( + result, + Nan::New("stats").ToLocalChecked() + ).ToLocalChecked(); + if (stats->IsObject()) { + Nan::Set( + stats.As(), + Nan::New("includedFiles").ToLocalChecked(), + arr + ); + } else { + Nan::ThrowTypeError("\"result.stats\" element is not an object"); + } +} + +int GetResult(sass_context_wrapper* ctx_w, Sass_Context* ctx, bool is_sync = false) { + Nan::HandleScope scope; + v8::Local result; + + int status = sass_context_get_error_status(ctx); + + result = Nan::New(ctx_w->result); + assert(result->IsObject()); + + if (status == 0) { + const char* css = sass_context_get_output_string(ctx); + const char* map = sass_context_get_source_map_string(ctx); + + Nan::Set(result, Nan::New("css").ToLocalChecked(), Nan::CopyBuffer(css, static_cast(strlen(css))).ToLocalChecked()); + + GetStats(ctx_w, ctx); + + if (map) { + Nan::Set(result, Nan::New("map").ToLocalChecked(), Nan::CopyBuffer(map, static_cast(strlen(map))).ToLocalChecked()); + } + } + else if (is_sync) { + Nan::Set(result, Nan::New("error").ToLocalChecked(), Nan::New(sass_context_get_error_json(ctx)).ToLocalChecked()); + } + + return status; +} + +void PerformCall(sass_context_wrapper* ctx_w, Nan::Callback* callback, int argc, v8::Local argv[]) { + if (ctx_w->is_sync) { + Nan::Call(*callback, argc, argv); + } else { + callback->Call(argc, argv, ctx_w->async_resource); + } +} + +void MakeCallback(uv_work_t* req) { + Nan::HandleScope scope; + + Nan::TryCatch try_catch; + sass_context_wrapper* ctx_w = static_cast(req->data); + struct Sass_Context* ctx; + + if (ctx_w->dctx) { + ctx = sass_data_context_get_context(ctx_w->dctx); + } + else { + ctx = sass_file_context_get_context(ctx_w->fctx); + } + + int status = GetResult(ctx_w, ctx); + + if (status == 0 && ctx_w->success_callback) { + // if no error, do callback(null, result) + PerformCall(ctx_w, ctx_w->success_callback, 0, 0); + } + else if (ctx_w->error_callback) { + // if error, do callback(error) + const char* err = sass_context_get_error_json(ctx); + v8::Local argv[] = { + Nan::New(err).ToLocalChecked() + }; + PerformCall(ctx_w, ctx_w->error_callback, 1, argv); + } + if (try_catch.HasCaught()) { + Nan::FatalException(try_catch); + } + + sass_free_context_wrapper(ctx_w); +} + +NAN_METHOD(render) { + + v8::Local options = Nan::To(info[0]).ToLocalChecked(); + char* source_string = create_string(Nan::Get(options, Nan::New("data").ToLocalChecked())); + struct Sass_Data_Context* dctx = 
sass_make_data_context(source_string); + sass_context_wrapper* ctx_w = sass_make_context_wrapper(); + + ctx_w->async_resource = new Nan::AsyncResource("node-sass:sass_context_wrapper:render"); + + if (ExtractOptions(options, dctx, ctx_w, false, false) >= 0) { + + int status = uv_queue_work(uv_default_loop(), &ctx_w->request, compile_it, (uv_after_work_cb)MakeCallback); + + assert(status == 0); + } +} + +NAN_METHOD(render_sync) { + + v8::Local options = Nan::To(info[0]).ToLocalChecked(); + char* source_string = create_string(Nan::Get(options, Nan::New("data").ToLocalChecked())); + struct Sass_Data_Context* dctx = sass_make_data_context(source_string); + struct Sass_Context* ctx = sass_data_context_get_context(dctx); + sass_context_wrapper* ctx_w = sass_make_context_wrapper(); + int result = -1; + + if ((result = ExtractOptions(options, dctx, ctx_w, false, true)) >= 0) { + compile_data(dctx); + result = GetResult(ctx_w, ctx, true); + } + + sass_free_context_wrapper(ctx_w); + + info.GetReturnValue().Set(result == 0); +} + +NAN_METHOD(render_file) { + + v8::Local options = Nan::To(info[0]).ToLocalChecked(); + char* input_path = create_string(Nan::Get(options, Nan::New("file").ToLocalChecked())); + struct Sass_File_Context* fctx = sass_make_file_context(input_path); + sass_context_wrapper* ctx_w = sass_make_context_wrapper(); + + ctx_w->async_resource = new Nan::AsyncResource("node-sass:sass_context_wrapper:render_file"); + + if (ExtractOptions(options, fctx, ctx_w, true, false) >= 0) { + + int status = uv_queue_work(uv_default_loop(), &ctx_w->request, compile_it, (uv_after_work_cb)MakeCallback); + assert(status == 0); + } +} + +NAN_METHOD(render_file_sync) { + + v8::Local options = Nan::To(info[0]).ToLocalChecked(); + char* input_path = create_string(Nan::Get(options, Nan::New("file").ToLocalChecked())); + struct Sass_File_Context* fctx = sass_make_file_context(input_path); + struct Sass_Context* ctx = sass_file_context_get_context(fctx); + sass_context_wrapper* ctx_w = sass_make_context_wrapper(); + int result = -1; + + if ((result = ExtractOptions(options, fctx, ctx_w, true, true)) >= 0) { + compile_file(fctx); + result = GetResult(ctx_w, ctx, true); + }; + + free(input_path); + sass_free_context_wrapper(ctx_w); + + info.GetReturnValue().Set(result == 0); +} + +NAN_METHOD(libsass_version) { + info.GetReturnValue().Set(Nan::New(libsass_version()).ToLocalChecked()); +} + +NAN_MODULE_INIT(RegisterModule) { + Nan::SetMethod(target, "render", render); + Nan::SetMethod(target, "renderSync", render_sync); + Nan::SetMethod(target, "renderFile", render_file); + Nan::SetMethod(target, "renderFileSync", render_file_sync); + Nan::SetMethod(target, "libsassVersion", libsass_version); + SassTypes::Factory::initExports(target); +} + +NODE_MODULE(binding, RegisterModule); diff --git a/mybulma/node_modules/node-sass/src/callback_bridge.h b/mybulma/node_modules/node-sass/src/callback_bridge.h new file mode 100644 index 0000000..25f62e1 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/callback_bridge.h @@ -0,0 +1,228 @@ +#ifndef CALLBACK_BRIDGE_H +#define CALLBACK_BRIDGE_H + +#include +#include +#include +#include + +#define COMMA , + +template +class CallbackBridge { + public: + CallbackBridge(v8::Local, bool); + virtual ~CallbackBridge(); + + // Executes the callback + T operator()(std::vector); + + protected: + // We will expose a bridge object to the JS callback that wraps this instance so we don't loose context. + // This is the V8 constructor for such objects. 
+ static Nan::MaybeLocal get_wrapper_constructor(); + static void async_gone(uv_handle_t *handle); + static NAN_METHOD(New); + static NAN_METHOD(ReturnCallback); + static Nan::Persistent wrapper_constructor; + Nan::Persistent wrapper; + + // The callback that will get called in the main thread after the worker thread used for the sass + // compilation step makes a call to uv_async_send() + static void dispatched_async_uv_callback(uv_async_t*); + + // The V8 values sent to our ReturnCallback must be read on the main thread not the sass worker thread. + // This gives a chance to specialized subclasses to transform those values into whatever makes sense to + // sass before we resume the worker thread. + virtual T post_process_return_value(v8::Local) const =0; + + + virtual std::vector> pre_process_args(std::vector) const =0; + + Nan::Callback* callback; + Nan::AsyncResource* async_resource; + bool is_sync; + + uv_mutex_t cv_mutex; + uv_cond_t condition_variable; + uv_async_t *async; + std::vector argv; + bool has_returned; + T return_value; +}; + +template +Nan::Persistent CallbackBridge::wrapper_constructor; + +template +CallbackBridge::CallbackBridge(v8::Local callback, bool is_sync) : callback(new Nan::Callback(callback)), is_sync(is_sync) { + /* + * This is invoked from the main JavaScript thread. + * V8 context is available. + */ + Nan::HandleScope scope; + uv_mutex_init(&this->cv_mutex); + uv_cond_init(&this->condition_variable); + if (!is_sync) { + this->async = new uv_async_t; + this->async->data = (void*) this; + uv_async_init(uv_default_loop(), this->async, (uv_async_cb) dispatched_async_uv_callback); + this->async_resource = new Nan::AsyncResource("node-sass:CallbackBridge"); + } + + v8::Local func = CallbackBridge::get_wrapper_constructor().ToLocalChecked(); + wrapper.Reset(Nan::NewInstance(func).ToLocalChecked()); + Nan::SetInternalFieldPointer(Nan::New(wrapper), 0, this); +} + +template +CallbackBridge::~CallbackBridge() { + delete this->callback; + this->wrapper.Reset(); + uv_cond_destroy(&this->condition_variable); + uv_mutex_destroy(&this->cv_mutex); + + if (!is_sync) { + uv_close((uv_handle_t*)this->async, &async_gone); + delete this->async_resource; + } +} + +template +T CallbackBridge::operator()(std::vector argv) { + // argv.push_back(wrapper); + if (this->is_sync) { + /* + * This is invoked from the main JavaScript thread. + * V8 context is available. + * + * Establish Local<> scope for all functions + * from types invoked by pre_process_args() and + * post_process_args(). + */ + Nan::HandleScope scope; + Nan::TryCatch try_catch; + std::vector> argv_v8 = pre_process_args(argv); + if (try_catch.HasCaught()) { + Nan::FatalException(try_catch); + } + + argv_v8.push_back(Nan::New(wrapper)); + + return this->post_process_return_value( + Nan::Call(*this->callback, argv_v8.size(), &argv_v8[0]).ToLocalChecked() + ); + } else { + /* + * This is invoked from the worker thread. + * No V8 context and functions available. + * Just wait for response from asynchronously + * scheduled JavaScript code + * + * XXX Issue #1048: We block here even if the + * event loop stops and the callback + * would never be executed. + * XXX Issue #857: By waiting here we occupy + * one of the threads taken from the + * uv threadpool. Might deadlock if + * async I/O executed from JavaScript callbacks. 
+ */ + this->argv = argv; + + uv_mutex_lock(&this->cv_mutex); + this->has_returned = false; + uv_async_send(this->async); + while (!this->has_returned) { + uv_cond_wait(&this->condition_variable, &this->cv_mutex); + } + uv_mutex_unlock(&this->cv_mutex); + return this->return_value; + } +} + +template +void CallbackBridge::dispatched_async_uv_callback(uv_async_t *req) { + CallbackBridge* bridge = static_cast(req->data); + + /* + * Function scheduled via uv_async mechanism, therefore + * it is invoked from the main JavaScript thread. + * V8 context is available. + * + * Establish Local<> scope for all functions + * from types invoked by pre_process_args() and + * post_process_args(). + */ + Nan::HandleScope scope; + Nan::TryCatch try_catch; + + std::vector> argv_v8 = bridge->pre_process_args(bridge->argv); + if (try_catch.HasCaught()) { + Nan::FatalException(try_catch); + } + argv_v8.push_back(Nan::New(bridge->wrapper)); + + bridge->callback->Call(argv_v8.size(), &argv_v8[0], bridge->async_resource); + + if (try_catch.HasCaught()) { + Nan::FatalException(try_catch); + } +} + +template +NAN_METHOD(CallbackBridge::ReturnCallback) { + + /* + * Callback function invoked by the user code. + * It is invoked from the main JavaScript thread. + * V8 context is available. + * + * Implicit Local<> handle scope created by NAN_METHOD(.) + */ + CallbackBridge* bridge = static_cast*>(Nan::GetInternalFieldPointer(info.This(), 0)); + Nan::TryCatch try_catch; + + bridge->return_value = bridge->post_process_return_value(info[0]); + + { + uv_mutex_lock(&bridge->cv_mutex); + bridge->has_returned = true; + uv_mutex_unlock(&bridge->cv_mutex); + } + + uv_cond_broadcast(&bridge->condition_variable); + + if (try_catch.HasCaught()) { + Nan::FatalException(try_catch); + } +} + +template +Nan::MaybeLocal CallbackBridge::get_wrapper_constructor() { + /* Uses handle scope created in the CallbackBridge constructor */ + if (wrapper_constructor.IsEmpty()) { + v8::Local tpl = Nan::New(New); + tpl->SetClassName(Nan::New("CallbackBridge").ToLocalChecked()); + tpl->InstanceTemplate()->SetInternalFieldCount(1); + + Nan::SetPrototypeTemplate(tpl, "success", + Nan::New(ReturnCallback) + ); + + wrapper_constructor.Reset(Nan::GetFunction(tpl).ToLocalChecked()); + } + + return Nan::New(wrapper_constructor); +} + +template +NAN_METHOD(CallbackBridge::New) { + info.GetReturnValue().Set(info.This()); +} + +template +void CallbackBridge::async_gone(uv_handle_t *handle) { + delete (uv_async_t *)handle; +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/create_string.cpp b/mybulma/node_modules/node-sass/src/create_string.cpp new file mode 100644 index 0000000..27a496f --- /dev/null +++ b/mybulma/node_modules/node-sass/src/create_string.cpp @@ -0,0 +1,21 @@ +#include +#include +#include +#include "create_string.h" + +char* create_string(Nan::MaybeLocal maybevalue) { + v8::Local value; + + if (maybevalue.ToLocal(&value)) { + if (value->IsNull() || !value->IsString()) { + return 0; + } + } else { + return 0; + } + + Nan::Utf8String string(value); + char *str = (char *)malloc(string.length() + 1); + strcpy(str, *string); + return str; +} diff --git a/mybulma/node_modules/node-sass/src/create_string.h b/mybulma/node_modules/node-sass/src/create_string.h new file mode 100644 index 0000000..03c7c92 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/create_string.h @@ -0,0 +1,8 @@ +#ifndef CREATE_STRING_H +#define CREATE_STRING_H + +#include + +char* create_string(Nan::MaybeLocal); + +#endif diff --git 
a/mybulma/node_modules/node-sass/src/custom_function_bridge.cpp b/mybulma/node_modules/node-sass/src/custom_function_bridge.cpp new file mode 100644 index 0000000..f27c695 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/custom_function_bridge.cpp @@ -0,0 +1,27 @@ +#include +#include +#include "custom_function_bridge.h" +#include "sass_types/factory.h" +#include "sass_types/value.h" + +Sass_Value* CustomFunctionBridge::post_process_return_value(v8::Local _val) const { + SassTypes::Value *value = SassTypes::Factory::unwrap(_val); + if (value) { + return value->get_sass_value(); + } else { + return sass_make_error("A SassValue object was expected."); + } +} + +std::vector> CustomFunctionBridge::pre_process_args(std::vector in) const { + std::vector> argv = std::vector>(); + + for (void* value : in) { + Sass_Value* x = static_cast(value); + SassTypes::Value* y = SassTypes::Factory::create(x); + + argv.push_back(y->get_js_object()); + } + + return argv; +} diff --git a/mybulma/node_modules/node-sass/src/custom_function_bridge.h b/mybulma/node_modules/node-sass/src/custom_function_bridge.h new file mode 100644 index 0000000..99c83ea --- /dev/null +++ b/mybulma/node_modules/node-sass/src/custom_function_bridge.h @@ -0,0 +1,18 @@ +#ifndef CUSTOM_FUNCTION_BRIDGE_H +#define CUSTOM_FUNCTION_BRIDGE_H + +#include +#include +#include +#include "callback_bridge.h" + +class CustomFunctionBridge : public CallbackBridge { + public: + CustomFunctionBridge(v8::Local cb, bool is_sync) : CallbackBridge(cb, is_sync) {} + + private: + Sass_Value* post_process_return_value(v8::Local) const; + std::vector> pre_process_args(std::vector) const; +}; + +#endif diff --git a/mybulma/node_modules/node-sass/src/custom_importer_bridge.cpp b/mybulma/node_modules/node-sass/src/custom_importer_bridge.cpp new file mode 100644 index 0000000..1ae5ae4 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/custom_importer_bridge.cpp @@ -0,0 +1,104 @@ +#include +#include +#include "custom_importer_bridge.h" +#include "create_string.h" + +SassImportList CustomImporterBridge::post_process_return_value(v8::Local returned_value) const { + SassImportList imports = 0; + Nan::HandleScope scope; + + if (returned_value->IsArray()) { + v8::Local array = returned_value.As(); + + imports = sass_make_import_list(array->Length()); + + for (size_t i = 0; i < array->Length(); ++i) { + v8::Local value; + Nan::MaybeLocal unchecked = Nan::Get(array, static_cast(i)); + + if (!unchecked.ToLocal(&value) || !value->IsObject()) { + imports[i] = sass_make_import_entry(0, 0, 0); + sass_import_set_error(imports[i], "returned array must only contain object literals", -1, -1); + continue; + } + + v8::Local object = value.As(); + + if (value->IsNativeError()) { + char* message = create_string(Nan::Get(object, Nan::New("message").ToLocalChecked())); + + imports[i] = sass_make_import_entry(0, 0, 0); + + sass_import_set_error(imports[i], message, -1, -1); + free(message); + } + else { + imports[i] = get_importer_entry(object); + } + } + } + else if (returned_value->IsNativeError()) { + imports = sass_make_import_list(1); + v8::Local object = returned_value.As(); + char* message = create_string(Nan::Get(object, Nan::New("message").ToLocalChecked())); + + imports[0] = sass_make_import_entry(0, 0, 0); + + sass_import_set_error(imports[0], message, -1, -1); + free(message); + } + else if (returned_value->IsObject()) { + imports = sass_make_import_list(1); + imports[0] = get_importer_entry(returned_value.As()); + } + + return imports; +} + +Sass_Import* 
CustomImporterBridge::check_returned_string(Nan::MaybeLocal value, const char *msg) const +{ + v8::Local checked; + if (value.ToLocal(&checked)) { + if (!checked->IsUndefined() && !checked->IsString()) { + goto err; + } else { + return nullptr; + } + } +err: + auto entry = sass_make_import_entry(0, 0, 0); + sass_import_set_error(entry, msg, -1, -1); + return entry; +} + +Sass_Import* CustomImporterBridge::get_importer_entry(const v8::Local& object) const { + Nan::MaybeLocal returned_file = Nan::Get(object, Nan::New("file").ToLocalChecked()); + Nan::MaybeLocal returned_contents = Nan::Get(object, Nan::New("contents").ToLocalChecked()); + Nan::MaybeLocal returned_map = Nan::Get(object, Nan::New("map").ToLocalChecked()); + Sass_Import *err; + + if ((err = check_returned_string(returned_file, "returned value of `file` must be a string"))) + return err; + + if ((err = check_returned_string(returned_contents, "returned value of `contents` must be a string"))) + return err; + + if ((err = check_returned_string(returned_map, "returned value of `returned_map` must be a string"))) + return err; + + char* path = create_string(returned_file); + char* contents = create_string(returned_contents); + char* srcmap = create_string(returned_map); + + return sass_make_import_entry(path, contents, srcmap); +} + +std::vector> CustomImporterBridge::pre_process_args(std::vector in) const { + std::vector> out; + + for (void* ptr : in) { + out.push_back(Nan::New((char const*)ptr).ToLocalChecked()); + } + + return out; +} diff --git a/mybulma/node_modules/node-sass/src/custom_importer_bridge.h b/mybulma/node_modules/node-sass/src/custom_importer_bridge.h new file mode 100644 index 0000000..0cbd3e6 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/custom_importer_bridge.h @@ -0,0 +1,22 @@ +#ifndef CUSTOM_IMPORTER_BRIDGE_H +#define CUSTOM_IMPORTER_BRIDGE_H + +#include +#include +#include +#include "callback_bridge.h" + +typedef Sass_Import_List SassImportList; + +class CustomImporterBridge : public CallbackBridge { + public: + CustomImporterBridge(v8::Local cb, bool is_sync) : CallbackBridge(cb, is_sync) {} + + private: + SassImportList post_process_return_value(v8::Local) const; + Sass_Import* check_returned_string(Nan::MaybeLocal value, const char *msg) const; + Sass_Import* get_importer_entry(const v8::Local&) const; + std::vector> pre_process_args(std::vector) const; +}; + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass.gyp b/mybulma/node_modules/node-sass/src/libsass.gyp new file mode 100644 index 0000000..add96e8 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass.gyp @@ -0,0 +1,114 @@ +{ + 'targets': [ + { + 'target_name': 'libsass', + 'win_delay_load_hook': 'false', + 'type': 'static_library', + 'defines': [ + 'LIBSASS_VERSION="$nul)) { gem install minitest --no-ri --no-rdoc } + if ($env:Compiler -eq "mingw" -AND -Not (Test-Path "C:\mingw64")) { + # Install MinGW. 
+ $file = "x86_64-4.9.2-release-win32-seh-rt_v4-rev3.7z" + wget https://bintray.com/artifact/download/drewwells/generic/$file -OutFile $file + &7z x -oC:\ $file > $null + } + - set PATH=C:\mingw64\bin;%PATH% + - set CC=gcc + +build_script: + - ps: | + if ($env:Compiler -eq "mingw") { + mingw32-make -j4 sassc + } else { + msbuild /m:4 /p:"Configuration=$env:Config;Platform=$env:Platform" sassc\win\sassc.sln + } + + # print the branding art + mv script/branding script/branding.ps1 + script/branding.ps1 + + # print the version info + &$env:TargetPath -v + ruby -v + +test_script: + - ps: | + $PRNR = $env:APPVEYOR_PULL_REQUEST_NUMBER + if ($PRNR) { + echo "Fetching info for PR $PRNR" + wget https://api.github.com/repos/sass/libsass/pulls/$PRNR -OutFile pr.json + $json = cat pr.json -Raw + $SPEC_PR = [regex]::match($json,'sass\/sass-spec(#|\/pull\/)([0-9]+)').Groups[2].Value + if ($SPEC_PR) { + echo "Checkout sass spec PR $SPEC_PR" + git -C sass-spec fetch -q -u origin pull/$SPEC_PR/head:ci-spec-pr-$SPEC_PR + git -C sass-spec checkout -q --force ci-spec-pr-$SPEC_PR + } + } + $env:TargetPath = Join-Path $pwd.Path $env:TargetPath + If (Test-Path "$env:TargetPath") { + ruby sass-spec/sass-spec.rb -V 3.5 --probe-todo --impl libsass -c $env:TargetPath -s sass-spec/spec + if(-not($?)) { + echo "sass-spec tests failed" + exit 1 + } + } else { + echo "spec runner not found (compile error?)" + exit 1 + } + Write-Host "Explicitly testing the case when cwd has Cyrillic characters: " -nonewline + # See comments in gh-1774 for details. + cd sass-spec/spec/libsass/Sáss-UŢF8/ + &$env:TargetPath ./input.scss 2>&1>$null + if(-not($?)) { + echo "Failed!" + exit 1 + } else { + echo "Success!" + } diff --git a/mybulma/node_modules/node-sass/src/libsass/configure.ac b/mybulma/node_modules/node-sass/src/libsass/configure.ac new file mode 100644 index 0000000..b5a9432 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/configure.ac @@ -0,0 +1,134 @@ +# -*- Autoconf -*- +# Process this file with autoconf to produce a configure script. + +AC_PREREQ([2.61]) + +AC_INIT([libsass], m4_esyscmd_s([./version.sh]), [support@moovweb.com]) +AC_CONFIG_SRCDIR([src/ast.hpp]) +AC_CONFIG_MACRO_DIR([m4]) +AC_CONFIG_HEADERS([src/config.h]) +AC_CONFIG_FILES([include/sass/version.h]) +AC_CONFIG_AUX_DIR([script]) + +# These are flags passed to automake +# Though they look like gcc flags! +AM_INIT_AUTOMAKE([foreign parallel-tests -Wall]) +m4_ifdef([AM_SILENT_RULES], [AM_SILENT_RULES([no])]) + +# Checks for programs. +AC_PROG_CC +AC_PROG_CXX +AC_LANG_PUSH([C]) +AC_LANG_PUSH([C++]) +AC_GNU_SOURCE +# Check fails on Travis, but it works fine +# AX_CXX_COMPILE_STDCXX_11([ext],[optional]) +AC_CHECK_TOOL([AR], [ar], [false]) +AC_CHECK_TOOL([DLLTOOL], [dlltool], [false]) +AC_CHECK_TOOL([DLLWRAP], [dllwrap], [false]) +AC_CHECK_TOOL([WINDRES], [windres], [false]) +m4_ifdef([AM_PROG_AR], [AM_PROG_AR]) +LT_INIT([dlopen]) + +# Checks for header files. +AC_CHECK_HEADERS([unistd.h]) + +# Checks for typedefs, structures, and compiler characteristics. +AC_TYPE_SIZE_T + +# Checks for library functions. +AC_FUNC_MALLOC +AC_CHECK_FUNCS([floor getcwd strtol]) + +# Checks for testing. 
+AC_ARG_ENABLE(tests, AS_HELP_STRING([--enable-tests], [enable testing the build]), + [enable_tests="$enableval"], [enable_tests=no]) + +AS_CASE([$host], [*-*-mingw*], [is_mingw32=yes], [is_mingw32=no]) +AM_CONDITIONAL(COMPILER_IS_MINGW32, test "x$is_mingw32" = "xyes") + +dnl The dlopen() function is in the C library for *BSD and in +dnl libdl on GLIBC-based systems +if test "x$is_mingw32" != "xyes"; then + AC_SEARCH_LIBS([dlopen], [dl dld], [], [ + AC_MSG_ERROR([unable to find the dlopen() function]) + ]) +fi + +if test "x$enable_tests" = "xyes"; then + AC_PROG_CC + AC_PROG_AWK + # test need minitest gem + AC_PATH_PROG(RUBY, [ruby]) + AC_PATH_PROG(TAPOUT, [tapout]) + AC_REQUIRE_AUX_FILE([tap-driver]) + AC_REQUIRE_AUX_FILE([tap-runner]) + AC_ARG_WITH(sassc-dir, + AS_HELP_STRING([--with-sassc-dir=], [specify directory of sassc sources for testing (default: sassc)]), + [sassc_dir="$withval"], [sassc_dir="sassc"]) + AC_CHECK_FILE([$sassc_dir/sassc.c], [], [ + AC_MSG_ERROR([Unable to find sassc directory. +You must clone the sassc repository in this directory or specify +the --with-sassc-dir= argument. +]) + ]) + SASS_SASSC_PATH=$sassc_dir + AC_SUBST(SASS_SASSC_PATH) + + AC_ARG_WITH(sass-spec-dir, + AS_HELP_STRING([--with-sass-spec-dir=], [specify directory of sass-spec for testing (default: sass-spec)]), + [sass_spec_dir="$withval"], [sass_spec_dir="sass-spec"]) + AC_CHECK_FILE([$sass_spec_dir/sass-spec.rb], [], [ + AC_MSG_ERROR([Unable to find sass-spec directory. +You must clone the sass-spec repository in this directory or specify +the --with-sass-spec-dir= argument. +]) + ]) + # Automake doesn't like its tests in an absolute path, so we make it relative. + case $sass_spec_dir in + /*) + SASS_SPEC_PATH=`$RUBY -e "require 'pathname'; puts Pathname.new('$sass_spec_dir').relative_path_from(Pathname.new('$PWD')).to_s"` + ;; + *) + SASS_SPEC_PATH="$sass_spec_dir" + ;; + esac + AC_SUBST(SASS_SPEC_PATH) +else + # we do not really need these paths for non test build + # but automake may error if we do not define them here + SASS_SPEC_PATH=sass-spec + SASS_SASSC_PATH=sassc + AC_SUBST(SASS_SPEC_PATH) + AC_SUBST(SASS_SASSC_PATH) +fi + +AM_CONDITIONAL(ENABLE_TESTS, test "x$enable_tests" = "xyes") + +AC_ARG_ENABLE([coverage], + [AS_HELP_STRING([--enable-coverage], + [enable coverage report for test suite])], + [enable_cov=$enableval], + [enable_cov=no]) + +if test "x$enable_cov" = "xyes"; then + + AC_CHECK_PROG(GCOV, gcov, gcov) + + # Remove all optimization flags from C[XX]FLAGS + changequote({,}) + CFLAGS=`echo "$CFLAGS -O1 -fno-omit-frame-pointer" | $SED -e 's/-O[0-9]*//g'` + CXXFLAGS=`echo "$CXXFLAGS -O1 -fno-omit-frame-pointer" | $SED -e 's/-O[0-9]*//g'` + changequote([,]) + + AC_SUBST(GCOV) +fi + +AM_CONDITIONAL(ENABLE_COVERAGE, test "x$enable_cov" = "xyes") + +AC_SUBST(PACKAGE_VERSION) + +AC_MSG_NOTICE([Building libsass ($VERSION)]) + +AC_CONFIG_FILES([GNUmakefile src/GNUmakefile src/support/libsass.pc]) +AC_OUTPUT diff --git a/mybulma/node_modules/node-sass/src/libsass/contrib/libsass.spec b/mybulma/node_modules/node-sass/src/libsass/contrib/libsass.spec new file mode 100644 index 0000000..a83d5f0 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/contrib/libsass.spec @@ -0,0 +1,66 @@ +Name: libsass +Version: %{version} +Release: 1%{?dist} +Summary: A C/C++ implementation of a Sass compiler + +License: MIT +URL: http://libsass.org +Source0: %{name}-%{version}.tar.gz + +BuildRequires: gcc-c++ >= 4.7 +BuildRequires: autoconf +BuildRequires: automake +BuildRequires: libtool + + 
+%description +LibSass is a C/C++ port of the Sass engine. The point is to be simple, fast, and easy to integrate. + +%package devel +Summary: Development files for %{name} +Requires: %{name}%{?_isa} = %{version}-%{release} + + +%description devel +The %{name}-devel package contains libraries and header files for +developing applications that use %{name}. + + +%prep +%setup -q +autoreconf --force --install + + +%build +%configure --disable-static \ + --disable-tests \ + --enable-shared + +make %{?_smp_mflags} + + +%install +%make_install +find $RPM_BUILD_ROOT -name '*.la' -exec rm -f {} ';' + + +%post -p /sbin/ldconfig + +%postun -p /sbin/ldconfig + + +%files +%doc Readme.md LICENSE +%{_libdir}/*.so.* + +%files devel +%doc +%{_includedir}/* +%{_libdir}/*.so +%{_libdir}/pkgconfig/*.pc + + +%changelog +* Tue Feb 10 2015 Gawain Lynch - 3.1.0-1 +- Initial SPEC file + diff --git a/mybulma/node_modules/node-sass/src/libsass/contrib/plugin.cpp b/mybulma/node_modules/node-sass/src/libsass/contrib/plugin.cpp new file mode 100644 index 0000000..2f67bb3 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/contrib/plugin.cpp @@ -0,0 +1,60 @@ +#include +#include +#include +#include + +// gcc: g++ -shared plugin.cpp -o plugin.so -fPIC -Llib -lsass +// mingw: g++ -shared plugin.cpp -o plugin.dll -Llib -lsass + +extern "C" const char* ADDCALL libsass_get_version() { + return libsass_version(); +} + +union Sass_Value* custom_function(const union Sass_Value* s_args, Sass_Function_Entry cb, struct Sass_Compiler* comp) +{ + // get context/option struct associated with this compiler + struct Sass_Context* ctx = sass_compiler_get_context(comp); + struct Sass_Options* opts = sass_compiler_get_options(comp); + // get the cookie from function descriptor + void* cookie = sass_function_get_cookie(cb); + // we actually abuse the void* to store an "int" + return sass_make_number((intptr_t)cookie, "px"); +} + +extern "C" Sass_Function_List ADDCALL libsass_load_functions() +{ + // allocate a custom function caller + Sass_Function_Entry c_func = + sass_make_function("foo()", custom_function, (void*)42); + // create list of all custom functions + Sass_Function_List fn_list = sass_make_function_list(1); + // put the only function in this plugin to the list + sass_function_set_list_entry(fn_list, 0, c_func); + // return the list + return fn_list; +} + +Sass_Import_List custom_importer(const char* cur_path, Sass_Importer_Entry cb, struct Sass_Compiler* comp) +{ + // get the cookie from importer descriptor + void* cookie = sass_importer_get_cookie(cb); + // create a list to hold our import entries + Sass_Import_List incs = sass_make_import_list(1); + // create our only import entry (route path back) + incs[0] = sass_make_import_entry(cur_path, 0, 0); + // return imports + return incs; +} + +extern "C" Sass_Importer_List ADDCALL libsass_load_importers() +{ + // allocate a custom function caller + Sass_Importer_Entry c_imp = + sass_make_importer(custom_importer, - 99, (void*)42); + // create list of all custom functions + Sass_Importer_List imp_list = sass_make_importer_list(1); + // put the only function in this plugin to the list + sass_importer_set_list_entry(imp_list, 0, c_imp); + // return the list + return imp_list; +} diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/README.md b/mybulma/node_modules/node-sass/src/libsass/docs/README.md new file mode 100644 index 0000000..a233fae --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/README.md @@ -0,0 +1,20 @@ +Welcome to the LibSass 
documentation!
+
+## First Off
+LibSass is just a library. To run the code locally (i.e. to compile your stylesheets), you need an implementer. SassC (get it?) is an implementer written in C. There are a number of other implementations of LibSass - for example Node. We encourage you to write your own port - the whole point of LibSass is that we want to bring Sass to many other languages, not just Ruby!
+
+We're working hard on moving to full parity with Ruby Sass... learn more in [The LibSass Compatibility Plan](compatibility-plan.md)!
+
+### Implementing LibSass
+
+If you're interested in implementing LibSass in your own project, see the [API Documentation](api-doc.md), which now includes implementing
+your own [Sass functions](api-function.md). You may wish to [look at other implementations](implementations.md) for your language of choice.
+Or make your own!
+
+### Contributing to LibSass
+
+| Issue Tracker | Issue Triage | Community Guidelines |
+|-------------------|----------------------------------|-----------------------------|
+| We're always needing help, so check out our issue tracker, help some people out, and read our article on [Contributing](contributing.md)! It's got all the details on what to do! | To help understand the process of triaging bugs, have a look at our [Issue Triage](triage.md) document. | Oh, and don't forget we always follow the [Sass Community Guidelines](http://sass-lang.com/community-guidelines). Be nice and everyone else will be nice too! |
+
+Please refer to the steps on [Building LibSass](build.md) diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/api-context-example.md b/mybulma/node_modules/node-sass/src/libsass/docs/api-context-example.md new file mode 100644 index 0000000..4f2a2a0 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/api-context-example.md @@ -0,0 +1,45 @@ +## Example main.c
+
+```C
+#include <stdio.h>
+#include "sass/context.h"
+
+int main( int argc, const char* argv[] )
+{
+
+  // get the input file from first argument or use default
+  const char* input = argc > 1 ? argv[1] : "styles.scss";
+
+  // create the file context and get all related structs
+  struct Sass_File_Context* file_ctx = sass_make_file_context(input);
+  struct Sass_Context* ctx = sass_file_context_get_context(file_ctx);
+  struct Sass_Options* ctx_opt = sass_context_get_options(ctx);
+
+  // configure some options ...
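+  // further setters from the Sass_Options API could be used here as well;
+  // the two calls below are purely illustrative and not required:
+  // sass_option_set_output_style(ctx_opt, SASS_STYLE_COMPRESSED);
+  // sass_option_set_source_comments(ctx_opt, true);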
+ sass_option_set_precision(ctx_opt, 10); + + // context is set up, call the compile step now + int status = sass_compile_file_context(file_ctx); + + // print the result or the error to the stdout + if (status == 0) puts(sass_context_get_output_string(ctx)); + else puts(sass_context_get_error_message(ctx)); + + // release allocated memory + sass_delete_file_context(file_ctx); + + // exit status + return status; + +} +``` + +### Compile main.c + +```bash +gcc -c main.c -o main.o +gcc -o sample main.o -lsass +echo "foo { margin: 21px * 2; }" > foo.scss +./sample foo.scss => "foo { margin: 42px }" +``` + diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/api-context-internal.md b/mybulma/node_modules/node-sass/src/libsass/docs/api-context-internal.md new file mode 100644 index 0000000..1a2818b --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/api-context-internal.md @@ -0,0 +1,163 @@ +```C +// Input behaviours +enum Sass_Input_Style { + SASS_CONTEXT_NULL, + SASS_CONTEXT_FILE, + SASS_CONTEXT_DATA, + SASS_CONTEXT_FOLDER +}; + +// sass config options structure +struct Sass_Inspect_Options { + + // Output style for the generated css code + // A value from above SASS_STYLE_* constants + enum Sass_Output_Style output_style; + + // Precision for fractional numbers + int precision; + +}; + +// sass config options structure +struct Sass_Output_Options : Sass_Inspect_Options { + + // String to be used for indentation + const char* indent; + // String to be used to for line feeds + const char* linefeed; + + // Emit comments in the generated CSS indicating + // the corresponding source line. + bool source_comments; + +}; + +// sass config options structure +struct Sass_Options : Sass_Output_Options { + + // embed sourceMappingUrl as data uri + bool source_map_embed; + + // embed include contents in maps + bool source_map_contents; + + // create file urls for sources + bool source_map_file_urls; + + // Disable sourceMappingUrl in css output + bool omit_source_map_url; + + // Treat source_string as sass (as opposed to scss) + bool is_indented_syntax_src; + + // The input path is used for source map + // generation. It can be used to define + // something with string compilation or to + // overload the input file path. It is + // set to "stdin" for data contexts and + // to the input file on file contexts. + char* input_path; + + // The output path is used for source map + // generation. LibSass will not write to + // this file, it is just used to create + // information in source-maps etc. + char* output_path; + + // Colon-separated list of paths + // Semicolon-separated on Windows + // Maybe use array interface instead? 
+ char* include_path; + char* plugin_path; + + // Include paths (linked string list) + struct string_list* include_paths; + // Plugin paths (linked string list) + struct string_list* plugin_paths; + + // Path to source map file + // Enables source map generation + // Used to create sourceMappingUrl + char* source_map_file; + + // Directly inserted in source maps + char* source_map_root; + + // Custom functions that can be called from sccs code + Sass_Function_List c_functions; + + // Callback to overload imports + Sass_Importer_List c_importers; + + // List of custom headers + Sass_Importer_List c_headers; + +}; + +// base for all contexts +struct Sass_Context : Sass_Options +{ + + // store context type info + enum Sass_Input_Style type; + + // generated output data + char* output_string; + + // generated source map json + char* source_map_string; + + // error status + int error_status; + char* error_json; + char* error_text; + char* error_message; + // error position + char* error_file; + size_t error_line; + size_t error_column; + const char* error_src; + + // report imported files + char** included_files; + +}; + +// struct for file compilation +struct Sass_File_Context : Sass_Context { + + // no additional fields required + // input_path is already on options + +}; + +// struct for data compilation +struct Sass_Data_Context : Sass_Context { + + // provided source string + char* source_string; + char* srcmap_string; + +}; + +// Compiler states +enum Sass_Compiler_State { + SASS_COMPILER_CREATED, + SASS_COMPILER_PARSED, + SASS_COMPILER_EXECUTED +}; + +// link c and cpp context +struct Sass_Compiler { + // progress status + Sass_Compiler_State state; + // original c context + Sass_Context* c_ctx; + // Sass::Context + Sass::Context* cpp_ctx; + // Sass::Block + Sass::Block_Obj root; +}; +``` + diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/api-context.md b/mybulma/node_modules/node-sass/src/libsass/docs/api-context.md new file mode 100644 index 0000000..dfd10c1 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/api-context.md @@ -0,0 +1,295 @@ +Sass Contexts come in two flavors: + +- `Sass_File_Context` +- `Sass_Data_Context` + +### Basic Usage + +```C +#include "sass/context.h" +``` + +***Sass_Options*** + +```C +// Precision for fractional numbers +int precision; +``` +```C +// Output style for the generated css code +// A value from above SASS_STYLE_* constants +int output_style; +``` +```C +// Emit comments in the generated CSS indicating +// the corresponding source line. +bool source_comments; +``` +```C +// embed sourceMappingUrl as data uri +bool source_map_embed; +``` +```C +// embed include contents in maps +bool source_map_contents; +``` +```C +// create file urls for sources +bool source_map_file_urls; +``` +```C +// Disable sourceMappingUrl in css output +bool omit_source_map_url; +``` +```C +// Treat source_string as sass (as opposed to scss) +bool is_indented_syntax_src; +``` +```C +// The input path is used for source map +// generating. It can be used to define +// something with string compilation or to +// overload the input file path. It is +// set to "stdin" for data contexts and +// to the input file on file contexts. +char* input_path; +``` +```C +// The output path is used for source map +// generating. LibSass will not write to +// this file, it is just used to create +// information in source-maps etc. 
+char* output_path; +``` +```C +// String to be used for indentation +const char* indent; +``` +```C +// String to be used to for line feeds +const char* linefeed; +``` +```C +// Colon-separated list of paths +// Semicolon-separated on Windows +char* include_path; +char* plugin_path; +``` +```C +// Additional include paths +// Must be null delimited +char** include_paths; +char** plugin_paths; +``` +```C +// Path to source map file +// Enables the source map generating +// Used to create sourceMappingUrl +char* source_map_file; +``` +```C +// Directly inserted in source maps +char* source_map_root; +``` +```C +// Custom functions that can be called from Sass code +Sass_C_Function_List c_functions; +``` +```C +// Callback to overload imports +Sass_C_Import_Callback importer; +``` + +***Sass_Context*** + +```C +// store context type info +enum Sass_Input_Style type; +```` +```C +// generated output data +char* output_string; +``` +```C +// generated source map json +char* source_map_string; +``` +```C +// error status +int error_status; +char* error_json; +char* error_text; +char* error_message; +// error position +char* error_file; +size_t error_line; +size_t error_column; +``` +```C +// report imported files +char** included_files; +``` + +***Sass_File_Context*** + +```C +// no additional fields required +// input_path is already on options +``` + +***Sass_Data_Context*** + +```C +// provided source string +char* source_string; +``` + +### Sass Context API + +```C +// Forward declaration +struct Sass_Compiler; + +// Forward declaration +struct Sass_Options; +struct Sass_Context; // : Sass_Options +struct Sass_File_Context; // : Sass_Context +struct Sass_Data_Context; // : Sass_Context + +// Create and initialize an option struct +struct Sass_Options* sass_make_options (void); +// Create and initialize a specific context +struct Sass_File_Context* sass_make_file_context (const char* input_path); +struct Sass_Data_Context* sass_make_data_context (char* source_string); + +// Call the compilation step for the specific context +int sass_compile_file_context (struct Sass_File_Context* ctx); +int sass_compile_data_context (struct Sass_Data_Context* ctx); + +// Create a sass compiler instance for more control +struct Sass_Compiler* sass_make_file_compiler (struct Sass_File_Context* file_ctx); +struct Sass_Compiler* sass_make_data_compiler (struct Sass_Data_Context* data_ctx); + +// Execute the different compilation steps individually +// Usefull if you only want to query the included files +int sass_compiler_parse (struct Sass_Compiler* compiler); +int sass_compiler_execute (struct Sass_Compiler* compiler); + +// Release all memory allocated with the compiler +// This does _not_ include any contexts or options +void sass_delete_compiler (struct Sass_Compiler* compiler); +void sass_delete_options(struct Sass_Options* options); + +// Release all memory allocated and also ourself +void sass_delete_file_context (struct Sass_File_Context* ctx); +void sass_delete_data_context (struct Sass_Data_Context* ctx); + +// Getters for Context from specific implementation +struct Sass_Context* sass_file_context_get_context (struct Sass_File_Context* file_ctx); +struct Sass_Context* sass_data_context_get_context (struct Sass_Data_Context* data_ctx); + +// Getters for Context_Options from Sass_Context +struct Sass_Options* sass_context_get_options (struct Sass_Context* ctx); +struct Sass_Options* sass_file_context_get_options (struct Sass_File_Context* file_ctx); +struct Sass_Options* sass_data_context_get_options 
(struct Sass_Data_Context* data_ctx); +void sass_file_context_set_options (struct Sass_File_Context* file_ctx, struct Sass_Options* opt); +void sass_data_context_set_options (struct Sass_Data_Context* data_ctx, struct Sass_Options* opt); + +// Getters for Sass_Context values +const char* sass_context_get_output_string (struct Sass_Context* ctx); +int sass_context_get_error_status (struct Sass_Context* ctx); +const char* sass_context_get_error_json (struct Sass_Context* ctx); +const char* sass_context_get_error_text (struct Sass_Context* ctx); +const char* sass_context_get_error_message (struct Sass_Context* ctx); +const char* sass_context_get_error_file (struct Sass_Context* ctx); +size_t sass_context_get_error_line (struct Sass_Context* ctx); +size_t sass_context_get_error_column (struct Sass_Context* ctx); +const char* sass_context_get_source_map_string (struct Sass_Context* ctx); +char** sass_context_get_included_files (struct Sass_Context* ctx); + +// Getters for Sass_Compiler options (query import stack) +size_t sass_compiler_get_import_stack_size(struct Sass_Compiler* compiler); +Sass_Import_Entry sass_compiler_get_last_import(struct Sass_Compiler* compiler); +Sass_Import_Entry sass_compiler_get_import_entry(struct Sass_Compiler* compiler, size_t idx); +// Getters for Sass_Compiler options (query function stack) +size_t sass_compiler_get_callee_stack_size(struct Sass_Compiler* compiler); +Sass_Callee_Entry sass_compiler_get_last_callee(struct Sass_Compiler* compiler); +Sass_Callee_Entry sass_compiler_get_callee_entry(struct Sass_Compiler* compiler, size_t idx); + +// Take ownership of memory (value on context is set to 0) +char* sass_context_take_error_json (struct Sass_Context* ctx); +char* sass_context_take_error_text (struct Sass_Context* ctx); +char* sass_context_take_error_message (struct Sass_Context* ctx); +char* sass_context_take_error_file (struct Sass_Context* ctx); +char* sass_context_take_output_string (struct Sass_Context* ctx); +char* sass_context_take_source_map_string (struct Sass_Context* ctx); +``` + +### Sass Options API + +```C +// Getters for Context_Option values +int sass_option_get_precision (struct Sass_Options* options); +enum Sass_Output_Style sass_option_get_output_style (struct Sass_Options* options); +bool sass_option_get_source_comments (struct Sass_Options* options); +bool sass_option_get_source_map_embed (struct Sass_Options* options); +bool sass_option_get_source_map_contents (struct Sass_Options* options); +bool sass_option_get_source_map_file_urls (struct Sass_Options* options); +bool sass_option_get_omit_source_map_url (struct Sass_Options* options); +bool sass_option_get_is_indented_syntax_src (struct Sass_Options* options); +const char* sass_option_get_indent (struct Sass_Options* options); +const char* sass_option_get_linefeed (struct Sass_Options* options); +const char* sass_option_get_input_path (struct Sass_Options* options); +const char* sass_option_get_output_path (struct Sass_Options* options); +const char* sass_option_get_source_map_file (struct Sass_Options* options); +const char* sass_option_get_source_map_root (struct Sass_Options* options); +Sass_C_Function_List sass_option_get_c_functions (struct Sass_Options* options); +Sass_C_Import_Callback sass_option_get_importer (struct Sass_Options* options); + +// Getters for Context_Option include path array +size_t sass_option_get_include_path_size(struct Sass_Options* options); +const char* sass_option_get_include_path(struct Sass_Options* options, size_t i); +// Plugin paths to load 
dynamic libraries work the same +size_t sass_option_get_plugin_path_size(struct Sass_Options* options); +const char* sass_option_get_plugin_path(struct Sass_Options* options, size_t i); + +// Setters for Context_Option values +void sass_option_set_precision (struct Sass_Options* options, int precision); +void sass_option_set_output_style (struct Sass_Options* options, enum Sass_Output_Style output_style); +void sass_option_set_source_comments (struct Sass_Options* options, bool source_comments); +void sass_option_set_source_map_embed (struct Sass_Options* options, bool source_map_embed); +void sass_option_set_source_map_contents (struct Sass_Options* options, bool source_map_contents); +void sass_option_set_source_map_file_urls (struct Sass_Options* options, bool source_map_file_urls); +void sass_option_set_omit_source_map_url (struct Sass_Options* options, bool omit_source_map_url); +void sass_option_set_is_indented_syntax_src (struct Sass_Options* options, bool is_indented_syntax_src); +void sass_option_set_indent (struct Sass_Options* options, const char* indent); +void sass_option_set_linefeed (struct Sass_Options* options, const char* linefeed); +void sass_option_set_input_path (struct Sass_Options* options, const char* input_path); +void sass_option_set_output_path (struct Sass_Options* options, const char* output_path); +void sass_option_set_plugin_path (struct Sass_Options* options, const char* plugin_path); +void sass_option_set_include_path (struct Sass_Options* options, const char* include_path); +void sass_option_set_source_map_file (struct Sass_Options* options, const char* source_map_file); +void sass_option_set_source_map_root (struct Sass_Options* options, const char* source_map_root); +void sass_option_set_c_functions (struct Sass_Options* options, Sass_C_Function_List c_functions); +void sass_option_set_importer (struct Sass_Options* options, Sass_C_Import_Callback importer); + +// Push function for paths (no manipulation support for now) +void sass_option_push_plugin_path (struct Sass_Options* options, const char* path); +void sass_option_push_include_path (struct Sass_Options* options, const char* path); + +// Resolve a file via the given include paths in the sass option struct +// find_file looks for the exact file name while find_include does a regular sass include +char* sass_find_file (const char* path, struct Sass_Options* opt); +char* sass_find_include (const char* path, struct Sass_Options* opt); + +// Resolve a file relative to last import or include paths in the sass option struct +// find_file looks for the exact file name while find_include does a regular sass include +char* sass_compiler_find_file (const char* path, struct Sass_Compiler* compiler); +char* sass_compiler_find_include (const char* path, struct Sass_Compiler* compiler); +``` + +### More links + +- [Sass Context Example](api-context-example.md) +- [Sass Context Internal](api-context-internal.md) + diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/api-doc.md b/mybulma/node_modules/node-sass/src/libsass/docs/api-doc.md new file mode 100644 index 0000000..3765616 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/api-doc.md @@ -0,0 +1,215 @@ +## Introduction + +LibSass wouldn't be much good without a way to interface with it. These +interface documentations describe the various functions and data structures +available to implementers. They are split up over three major components, which +have all their own source files (plus some common functionality). 
+ +- [Sass Context](api-context.md) - Trigger and handle the main Sass compilation +- [Sass Value](api-value.md) - Exchange values and its format with LibSass +- [Sass Function](api-function.md) - Get invoked by LibSass for function statments +- [Sass Importer](api-importer.md) - Get invoked by LibSass for @import statments + +### Basic usage + +First you will need to include the header file! +This will automatically load all other headers too! + +```C +#include "sass/context.h" +``` + +## Basic C Example + +```C +#include +#include "sass/context.h" + +int main() { + puts(libsass_version()); + return 0; +} +``` + +```bash +gcc -Wall version.c -lsass -o version && ./version +``` + +## More C Examples + +- [Sample code for Sass Context](api-context-example.md) +- [Sample code for Sass Value](api-value-example.md) +- [Sample code for Sass Function](api-function-example.md) +- [Sample code for Sass Importer](api-importer-example.md) + +## Compiling your code + +The most important is your sass file (or string of sass code). With this, you +will want to start a LibSass compiler. Here is some pseudocode describing the +process. The compiler has two different modes: direct input as a string with +`Sass_Data_Context` or LibSass will do file reading for you by using +`Sass_File_Context`. See the code for a list of options available +[Sass_Options](https://github.com/sass/libsass/blob/36feef0/include/sass/interface.h#L18) + +**Building a file compiler** + + context = sass_make_file_context("file.scss") + options = sass_file_context_get_options(context) + sass_option_set_precision(options, 1) + sass_option_set_source_comments(options, true) + + sass_file_context_set_options(context, options) + + compiler = sass_make_file_compiler(sass_context) + sass_compiler_parse(compiler) + sass_compiler_execute(compiler) + + output = sass_context_get_output_string(context) + // Retrieve errors during compilation + error_status = sass_context_get_error_status(context) + json_error = sass_context_get_error_json(context) + // Release memory dedicated to the C compiler + sass_delete_compiler(compiler) + +**Building a data compiler** + + context = sass_make_data_context("div { a { color: blue; } }") + options = sass_data_context_get_options(context) + sass_option_set_precision(options, 1) + sass_option_set_source_comments(options, true) + + sass_data_context_set_options(context, options) + + compiler = sass_make_data_compiler(context) + sass_compiler_parse(compiler) + sass_compiler_execute(compiler) + + output = sass_context_get_output_string(context) + // div a { color: blue; } + // Retrieve errors during compilation + error_status = sass_context_get_error_status(context) + json_error = sass_context_get_error_json(context) + // Release memory dedicated to the C compiler + sass_delete_compiler(compiler) + +## Sass Context Internals + +Everything is stored in structs: + +```C +struct Sass_Options; +struct Sass_Context : Sass_Options; +struct Sass_File_context : Sass_Context; +struct Sass_Data_context : Sass_Context; +``` + +This mirrors very well how `libsass` uses these structures. + +- `Sass_Options` holds everything you feed in before the compilation. It also hosts +`input_path` and `output_path` options, because they are used to generate/calculate +relative links in source-maps. The `input_path` is shared with `Sass_File_Context`. +- `Sass_Context` holds all the data returned by the compilation step. 
+- `Sass_File_Context` is a specific implementation that requires no additional fields +- `Sass_Data_Context` is a specific implementation that adds the `input_source` field + +Structs can be down-casted to access `context` or `options`! + +## Memory handling and life-cycles + +We keep memory around for as long as the main [context](api-context.md) object +is not destroyed (`sass_delete_context`). LibSass will create copies of most +inputs/options beside the main sass code. You need to allocate and fill that +buffer before passing it to LibSass. You may also overtake memory management +from libsass for certain return values (i.e. `sass_context_take_output_string`). + +```C +// to allocate buffer to be filled +void* sass_alloc_memory(size_t size); +// to allocate a buffer from existing string +char* sass_copy_c_string(const char* str); +// to free overtaken memory when done +void sass_free_memory(void* ptr); +``` + +## Miscellaneous API functions + +```C +// Some convenient string helper function +char* sass_string_unquote (const char* str); +char* sass_string_quote (const char* str, const char quote_mark); + +// Get compiled libsass version +const char* libsass_version(void); + +// Implemented sass language version +// Hardcoded version 3.4 for time being +const char* libsass_language_version(void); +``` + +## Common Pitfalls + +**input_path** + +The `input_path` is part of `Sass_Options`, but it also is the main option for +`Sass_File_Context`. It is also used to generate relative file links in source- +maps. Therefore it is pretty usefull to pass this information if you have a +`Sass_Data_Context` and know the original path. + +**output_path** + +Be aware that `libsass` does not write the output file itself. This option +merely exists to give `libsass` the proper information to generate links in +source-maps. The file has to be written to the disk by the +binding/implementation. If the `output_path` is omitted, `libsass` tries to +extrapolate one from the `input_path` by replacing (or adding) the file ending +with `.css`. + +## Error Codes + +The `error_code` is integer value which indicates the type of error that +occurred inside the LibSass process. Following is the list of error codes along +with the short description: + +* 1: normal errors like parsing or `eval` errors +* 2: bad allocation error (memory error) +* 3: "untranslated" C++ exception (`throw std::exception`) +* 4: legacy string exceptions ( `throw const char*` or `std::string` ) +* 5: Some other unknown exception + +Although for the API consumer, error codes do not offer much value except +indicating whether *any* error occurred during the compilation, it helps +debugging the LibSass internal code paths. + +## Real-World Implementations + +The proof is in the pudding, so we have highlighted a few implementations that +should be on par with the latest LibSass interface version. Some of them may not +have all features implemented! + +1. [Perl Example](https://github.com/sass/perl-libsass/blob/master/lib/CSS/Sass.xs) +2. [Go Example](https://godoc.org/github.com/wellington/go-libsass#example-Compiler--Stdin) +3. [Node Example](https://github.com/sass/node-sass/blob/master/src/binding.cpp) + +## ABI forward compatibility + +We use a functional API to make dynamic linking more robust and future +compatible. The API is not yet 100% stable, so we do not yet guarantee +[ABI](https://gcc.gnu.org/onlinedocs/libstdc++/manual/abi.html) forward +compatibility. + +## Plugins (experimental) + +LibSass can load plugins from directories. 
Just define `plugin_path` on context
+options to load all plugins from those directories. To implement plugins, please
+consult the following example implementations.
+
+- https://github.com/mgreter/libsass-glob
+- https://github.com/mgreter/libsass-math
+- https://github.com/mgreter/libsass-digest
+
+## Internal Structs
+
+- [Sass Context Internals](api-context-internal.md)
+- [Sass Value Internals](api-value-internal.md)
+- [Sass Function Internals](api-function-internal.md)
+- [Sass Importer Internals](api-importer-internal.md) diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/api-function-example.md b/mybulma/node_modules/node-sass/src/libsass/docs/api-function-example.md new file mode 100644 index 0000000..38608e1 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/api-function-example.md @@ -0,0 +1,67 @@ +## Example main.c
+
+```C
+#include <stdio.h>
+#include <stdint.h>
+#include "sass/context.h"
+
+union Sass_Value* call_fn_foo(const union Sass_Value* s_args, Sass_Function_Entry cb, struct Sass_Compiler* comp)
+{
+  // get context/option struct associated with this compiler
+  struct Sass_Context* ctx = sass_compiler_get_context(comp);
+  struct Sass_Options* opts = sass_compiler_get_options(comp);
+  // get information about previous importer entry from the stack
+  Sass_Import_Entry import = sass_compiler_get_last_import(comp);
+  const char* prev_abs_path = sass_import_get_abs_path(import);
+  const char* prev_imp_path = sass_import_get_imp_path(import);
+  // get the cookie from function descriptor
+  void* cookie = sass_function_get_cookie(cb);
+  // we actually abuse the void* to store an "int"
+  return sass_make_number((intptr_t)cookie, "px");
+}
+
+int main( int argc, const char* argv[] )
+{
+
+  // get the input file from first argument or use default
+  const char* input = argc > 1 ?
argv[1] : "styles.scss"; + + // create the file context and get all related structs + struct Sass_File_Context* file_ctx = sass_make_file_context(input); + struct Sass_Context* ctx = sass_file_context_get_context(file_ctx); + struct Sass_Options* ctx_opt = sass_context_get_options(ctx); + + // allocate a custom function caller + Sass_Function_Entry fn_foo = + sass_make_function("foo()", call_fn_foo, (void*)42); + + // create list of all custom functions + Sass_Function_List fn_list = sass_make_function_list(1); + sass_function_set_list_entry(fn_list, 0, fn_foo); + sass_option_set_c_functions(ctx_opt, fn_list); + + // context is set up, call the compile step now + int status = sass_compile_file_context(file_ctx); + + // print the result or the error to the stdout + if (status == 0) puts(sass_context_get_output_string(ctx)); + else puts(sass_context_get_error_message(ctx)); + + // release allocated memory + sass_delete_file_context(file_ctx); + + // exit status + return status; + +} +``` + +### Compile main.c + +```bash +gcc -c main.c -o main.o +gcc -o sample main.o -lsass +echo "foo { margin: foo(); }" > foo.scss +./sample foo.scss => "foo { margin: 42px }" +``` + diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/api-function-internal.md b/mybulma/node_modules/node-sass/src/libsass/docs/api-function-internal.md new file mode 100644 index 0000000..69d81d0 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/api-function-internal.md @@ -0,0 +1,8 @@ +```C +// Struct to hold custom function callback +struct Sass_Function { + const char* signature; + Sass_Function_Fn function; + void* cookie; +}; +``` diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/api-function.md b/mybulma/node_modules/node-sass/src/libsass/docs/api-function.md new file mode 100644 index 0000000..8d9d97c --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/api-function.md @@ -0,0 +1,74 @@ +Sass functions are used to define new custom functions callable by Sass code. They are also used to overload debug or error statements. You can also define a fallback function, which is called for every unknown function found in the Sass code. Functions get passed zero or more `Sass_Values` (a `Sass_List` value) and they must also return a `Sass_Value`. Return a `Sass_Error` if you want to signal an error. + +## Special signatures + +- `*` - Fallback implementation +- `@warn` - Overload warn statements +- `@error` - Overload error statements +- `@debug` - Overload debug statements + +Note: The fallback implementation will be given the name of the called function as the first argument, before all the original function arguments. These features are pretty new and should be considered experimental. 
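+
+As a rough sketch (using only calls shown in this documentation and in [Sass Value](api-value.md); the error handling is illustrative, not prescribed), a fallback handler registered under the `*` signature could look like this:
+
+```C
+#include <stdio.h>
+#include "sass/functions.h"
+#include "sass/values.h"
+
+union Sass_Value* fallback_fn(const union Sass_Value* s_args, Sass_Function_Entry cb, struct Sass_Compiler* comp)
+{
+  // with the "*" signature, list entry 0 holds the name of the called function
+  const union Sass_Value* name = sass_list_get_value(s_args, 0);
+  fprintf(stderr, "unknown function called: %s\n", sass_string_get_value(name));
+  // returning a Sass_Error aborts the compilation with a message
+  return sass_make_error("call to an unknown function");
+}
+
+// registration works like any other custom function (ctx_opt as in the context examples):
+// Sass_Function_List fn_list = sass_make_function_list(1);
+// sass_function_set_list_entry(fn_list, 0, sass_make_function("*", fallback_fn, 0));
+// sass_option_set_c_functions(ctx_opt, fn_list);
+```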
+ +### Basic Usage + +```C +#include "sass/functions.h" +``` + +## Sass Function API + +```C +// Forward declaration +struct Sass_Compiler; +struct Sass_Function; + +// Typedef helpers for custom functions lists +typedef struct Sass_Function (*Sass_Function_Entry); +typedef struct Sass_Function* (*Sass_Function_List); +// Typedef defining function signature and return type +typedef union Sass_Value* (*Sass_Function_Fn) + (const union Sass_Value*, Sass_Function_Entry cb, struct Sass_Compiler* compiler); + +// Creators for sass function list and function descriptors +Sass_Function_List sass_make_function_list (size_t length); +Sass_Function_Entry sass_make_function (const char* signature, Sass_Function_Fn cb, void* cookie); +// In case you need to free them yourself +void sass_delete_function (Sass_Function_Entry entry); +void sass_delete_function_list (Sass_Function_List list); + +// Setters and getters for callbacks on function lists +Sass_Function_Entry sass_function_get_list_entry(Sass_Function_List list, size_t pos); +void sass_function_set_list_entry(Sass_Function_List list, size_t pos, Sass_Function_Entry cb); + +// Setters to insert an entry into the import list (you may also use [] access directly) +// Since we are dealing with pointers they should have a guaranteed and fixed size +void sass_import_set_list_entry (Sass_Import_List list, size_t idx, Sass_Import_Entry entry); +Sass_Import_Entry sass_import_get_list_entry (Sass_Import_List list, size_t idx); + +// Getters for custom function descriptors +const char* sass_function_get_signature (Sass_Function_Entry cb); +Sass_Function_Fn sass_function_get_function (Sass_Function_Entry cb); +void* sass_function_get_cookie (Sass_Function_Entry cb); + +// Getters for callee entry +const char* sass_callee_get_name (Sass_Callee_Entry); +const char* sass_callee_get_path (Sass_Callee_Entry); +size_t sass_callee_get_line (Sass_Callee_Entry); +size_t sass_callee_get_column (Sass_Callee_Entry); +enum Sass_Callee_Type sass_callee_get_type (Sass_Callee_Entry); +Sass_Env_Frame sass_callee_get_env (Sass_Callee_Entry); + +// Getters and Setters for environments (lexical, local and global) +union Sass_Value* sass_env_get_lexical (Sass_Env_Frame, const char*); +void sass_env_set_lexical (Sass_Env_Frame, const char*, union Sass_Value*); +union Sass_Value* sass_env_get_local (Sass_Env_Frame, const char*); +void sass_env_set_local (Sass_Env_Frame, const char*, union Sass_Value*); +union Sass_Value* sass_env_get_global (Sass_Env_Frame, const char*); +void sass_env_set_global (Sass_Env_Frame, const char*, union Sass_Value*); +``` + +### More links + +- [Sass Function Example](api-function-example.md) +- [Sass Function Internal](api-function-internal.md) + diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/api-importer-example.md b/mybulma/node_modules/node-sass/src/libsass/docs/api-importer-example.md new file mode 100644 index 0000000..d83bf26 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/api-importer-example.md @@ -0,0 +1,112 @@ +## Example importer.c + +```C +#include +#include +#include "sass/context.h" + +Sass_Import_List sass_importer(const char* path, Sass_Importer_Entry cb, struct Sass_Compiler* comp) +{ + // get the cookie from importer descriptor + void* cookie = sass_importer_get_cookie(cb); + Sass_Import_List list = sass_make_import_list(2); + char* local = sass_copy_c_string("local { color: green; }"); + char* remote = sass_copy_c_string("remote { color: red; }"); + list[0] = 
sass_make_import_entry("/tmp/styles.scss", local, 0); + list[1] = sass_make_import_entry("http://www.example.com", remote, 0); + return list; +} + +int main( int argc, const char* argv[] ) +{ + + // get the input file from first argument or use default + const char* input = argc > 1 ? argv[1] : "styles.scss"; + + // create the file context and get all related structs + struct Sass_File_Context* file_ctx = sass_make_file_context(input); + struct Sass_Context* ctx = sass_file_context_get_context(file_ctx); + struct Sass_Options* ctx_opt = sass_context_get_options(ctx); + + // allocate custom importer + Sass_Importer_Entry c_imp = + sass_make_importer(sass_importer, 0, 0); + // create list for all custom importers + Sass_Importer_List imp_list = sass_make_importer_list(1); + // put only the importer on to the list + sass_importer_set_list_entry(imp_list, 0, c_imp); + // register list on to the context options + sass_option_set_c_importers(ctx_opt, imp_list); + // context is set up, call the compile step now + int status = sass_compile_file_context(file_ctx); + + // print the result or the error to the stdout + if (status == 0) puts(sass_context_get_output_string(ctx)); + else puts(sass_context_get_error_message(ctx)); + + // release allocated memory + sass_delete_file_context(file_ctx); + + // exit status + return status; + +} +``` + +Compile importer.c + +```bash +gcc -c importer.c -o importer.o +gcc -o importer importer.o -lsass +echo "@import 'foobar';" > importer.scss +./importer importer.scss +``` + +## Importer Behavior Examples + +```C +Sass_Import_List importer(const char* path, Sass_Importer_Entry cb, struct Sass_Compiler* comp) { + // let LibSass handle the import request + return NULL; +} + +Sass_Import_List importer(const char* path, Sass_Importer_Entry cb, struct Sass_Compiler* comp) { + // let LibSass handle the request + // swallows »@import "http://…"« pass-through + // (arguably a bug) + Sass_Import_List list = sass_make_import_list(1); + list[0] = sass_make_import_entry(path, 0, 0); + return list; +} + +Sass_Import_List importer(const char* path, Sass_Importer_Entry cb, struct Sass_Compiler* comp) { + // return an error to halt execution + Sass_Import_List list = sass_make_import_list(1); + const char* message = "some error message"; + list[0] = sass_make_import_entry(path, 0, 0); + sass_import_set_error(list[0], sass_copy_c_string(message), 0, 0); + return list; +} + +Sass_Import_List importer(const char* path, Sass_Importer_Entry cb, struct Sass_Compiler* comp) { + // let LibSass load the file identifed by the importer + Sass_Import_List list = sass_make_import_list(1); + list[0] = sass_make_import_entry("/tmp/file.scss", 0, 0); + return list; +} + +Sass_Import_List importer(const char* path, Sass_Importer_Entry cb, struct Sass_Compiler* comp) { + // completely hide the import + // (arguably a bug) + Sass_Import_List list = sass_make_import_list(0); + return list; +} + +Sass_Import_List importer(const char* path, Sass_Importer_Entry cb, struct Sass_Compiler* comp) { + // completely hide the import + // (arguably a bug) + Sass_Import_List list = sass_make_import_list(1); + list[0] = sass_make_import_entry(0, 0, 0); + return list; +} +``` diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/api-importer-internal.md b/mybulma/node_modules/node-sass/src/libsass/docs/api-importer-internal.md new file mode 100644 index 0000000..63d70fe --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/api-importer-internal.md @@ -0,0 +1,20 @@ +```C +// External import 
entry
+struct Sass_Import {
+  char* imp_path; // path as found in the import statement
+  char* abs_path; // path after importer has resolved it
+  char* source;
+  char* srcmap;
+  // error handling
+  char* error;
+  size_t line;
+  size_t column;
+};
+
+// Struct to hold importer callback
+struct Sass_Importer {
+  Sass_Importer_Fn importer;
+  double priority;
+  void* cookie;
+};
+``` diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/api-importer.md b/mybulma/node_modules/node-sass/src/libsass/docs/api-importer.md new file mode 100644 index 0000000..b626500 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/api-importer.md @@ -0,0 +1,86 @@ +By using custom importers, Sass stylesheets can be implemented in any possible way, such as being loaded via a remote server. Please note: this feature is experimental and is implemented differently from importers in Ruby Sass. Imports must be relative to the parent import context, and therefore we need to pass this information to the importer callback. This is currently done by passing the complete import string/path of the previous import context.
+
+## Return Imports
+
+You actually have to return a list of imports, since some importers may want to import multiple files from one import statement (e.g. a glob/star importer). The memory you pass with source and srcmap is taken over by LibSass and freed automatically when the import is done. You are also allowed to return `0` instead of a list, which tells LibSass to handle the import by itself (as if no custom importer was in use).
+
+```C
+Sass_Import_Entry* rv = sass_make_import_list(1);
+rv[0] = sass_make_import(rel, abs, source, srcmap);
+```
+
+Every import will then be included in LibSass. You are also allowed to return only a file path without any loaded source. This way you can, for example, implement rewrite rules for import paths and leave the loading part to LibSass.
+
+Please note that LibSass doesn't use the srcmap parameter yet. It has been added now so that the C API will not have to change once support is implemented. It will then be used to re-map the actual sourcemap with the provided ones.
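+
+As an illustrative sketch of the rewrite-only case described above (the `_partials/` prefix is a hypothetical rewrite rule, and the function and list types follow the importer API listed below), an importer can return just a new path and let LibSass do the loading:
+
+```C
+#include <stdio.h>
+#include "sass/functions.h"
+
+Sass_Import_Entry* rewrite_importer(const char* url, const char* prev, void* cookie)
+{
+  // one entry is enough here; a glob importer would allocate a longer list
+  Sass_Import_Entry* list = sass_make_import_list(1);
+  char rewritten[1024];
+  snprintf(rewritten, sizeof(rewritten), "_partials/%s", url);
+  // passing 0 for source and srcmap tells LibSass to resolve and load the path itself
+  list[0] = sass_make_import_entry(rewritten, 0, 0);
+  return list;
+}
+```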
+ +### Basic Usage + +```C +#include "sass/functions.h" +``` + +## Sass Importer API + +```C +// Forward declaration +struct Sass_Import; + +// Forward declaration +struct Sass_C_Import_Descriptor; + +// Typedef defining the custom importer callback +typedef struct Sass_C_Import_Descriptor (*Sass_C_Import_Callback); +// Typedef defining the importer c function prototype +typedef Sass_Import_Entry* (*Sass_C_Import_Fn) (const char* url, const char* prev, void* cookie); + +// Creators for custom importer callback (with some additional pointer) +// The pointer is mostly used to store the callback into the actual function +Sass_C_Import_Callback sass_make_importer (Sass_C_Import_Fn, void* cookie); + +// Getters for import function descriptors +Sass_C_Import_Fn sass_import_get_function (Sass_C_Import_Callback fn); +void* sass_import_get_cookie (Sass_C_Import_Callback fn); + +// Deallocator for associated memory +void sass_delete_importer (Sass_C_Import_Callback fn); + +// Creator for sass custom importer return argument list +Sass_Import_Entry* sass_make_import_list (size_t length); +// Creator for a single import entry returned by the custom importer inside the list +Sass_Import_Entry sass_make_import_entry (const char* path, char* source, char* srcmap); +Sass_Import_Entry sass_make_import (const char* rel, const char* abs, char* source, char* srcmap); + +// set error message to abort import and to print out a message (path from existing object is used in output) +Sass_Import_Entry sass_import_set_error(Sass_Import_Entry import, const char* message, size_t line, size_t col); + +// Setters to insert an entry into the import list (you may also use [] access directly) +// Since we are dealing with pointers they should have a guaranteed and fixed size +void sass_import_set_list_entry (Sass_Import_Entry* list, size_t idx, Sass_Import_Entry entry); +Sass_Import_Entry sass_import_get_list_entry (Sass_Import_Entry* list, size_t idx); + +// Getters for import entry +const char* sass_import_get_imp_path (Sass_Import_Entry); +const char* sass_import_get_abs_path (Sass_Import_Entry); +const char* sass_import_get_source (Sass_Import_Entry); +const char* sass_import_get_srcmap (Sass_Import_Entry); +// Explicit functions to take ownership of these items +// The property on our struct will be reset to NULL +char* sass_import_take_source (Sass_Import_Entry); +char* sass_import_take_srcmap (Sass_Import_Entry); + +// Getters for import error entries +size_t sass_import_get_error_line (Sass_Import_Entry); +size_t sass_import_get_error_column (Sass_Import_Entry); +const char* sass_import_get_error_message (Sass_Import_Entry); + +// Deallocator for associated memory (incl. 
entries) +void sass_delete_import_list (Sass_Import_Entry*); +// Just in case we have some stray import structs +void sass_delete_import (Sass_Import_Entry); +``` + +### More links + +- [Sass Importer Example](api-importer-example.md) +- [Sass Importer Internal](api-importer-internal.md) + diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/api-value-example.md b/mybulma/node_modules/node-sass/src/libsass/docs/api-value-example.md new file mode 100644 index 0000000..690654e --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/api-value-example.md @@ -0,0 +1,55 @@ +## Example operation.c + +```C +#include +#include +#include "sass/values.h" + +int main( int argc, const char* argv[] ) +{ + + // create two new sass values to be added + union Sass_Value* string = sass_make_string("String"); + union Sass_Value* number = sass_make_number(42, "nits"); + + // invoke the add operation which returns a new sass value + union Sass_Value* total = sass_value_op(ADD, string, number); + + // no further use for the two operands + sass_delete_value(string); + sass_delete_value(number); + + // this works since libsass will always return a + // string for add operations with a string as the + // left hand side. But you should never rely on it! + puts(sass_string_get_value(total)); + + // invoke stringification (uncompressed with precision of 5) + union Sass_Value* result = sass_value_stringify(total, false, 5); + + // no further use for the sum + sass_delete_value(total); + + // print the result - you may want to make + // sure result is indeed a string, altough + // stringify guarantees to return a string + // if (sass_value_is_string(result)) {} + // really depends on your level of paranoia + puts(sass_string_get_value(result)); + + // finally free result + sass_delete_value(result); + + // exit status + return 0; + +} +``` + +## Compile operation.c + +```bash +gcc -c operation.c -o operation.o +gcc -o operation operation.o -lsass +./operation # => String42nits +``` diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/api-value-internal.md b/mybulma/node_modules/node-sass/src/libsass/docs/api-value-internal.md new file mode 100644 index 0000000..fed4022 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/api-value-internal.md @@ -0,0 +1,76 @@ +```C +struct Sass_Unknown { + enum Sass_Tag tag; +}; + +struct Sass_Boolean { + enum Sass_Tag tag; + bool value; +}; + +struct Sass_Number { + enum Sass_Tag tag; + double value; + char* unit; +}; + +struct Sass_Color { + enum Sass_Tag tag; + double r; + double g; + double b; + double a; +}; + +struct Sass_String { + enum Sass_Tag tag; + char* value; +}; + +struct Sass_List { + enum Sass_Tag tag; + enum Sass_Separator separator; + size_t length; + // null terminated "array" + union Sass_Value** values; +}; + +struct Sass_Map { + enum Sass_Tag tag; + size_t length; + struct Sass_MapPair* pairs; +}; + +struct Sass_Null { + enum Sass_Tag tag; +}; + +struct Sass_Error { + enum Sass_Tag tag; + char* message; +}; + +struct Sass_Warning { + enum Sass_Tag tag; + char* message; +}; + +union Sass_Value { + struct Sass_Unknown unknown; + struct Sass_Boolean boolean; + struct Sass_Number number; + struct Sass_Color color; + struct Sass_String string; + struct Sass_List list; + struct Sass_Map map; + struct Sass_Null null; + struct Sass_Error error; + struct Sass_Warning warning; +}; + +struct Sass_MapPair { + union Sass_Value* key; + union Sass_Value* value; +}; +``` + diff --git 
a/mybulma/node_modules/node-sass/src/libsass/docs/api-value.md b/mybulma/node_modules/node-sass/src/libsass/docs/api-value.md new file mode 100644 index 0000000..d786258 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/api-value.md @@ -0,0 +1,154 @@ +`Sass_Values` are used to pass values and their types between the implementer +and LibSass. Sass knows various different value types (including nested arrays +and hash-maps). If you implement a binding to another programming language, you +have to find a way to [marshal][1] (convert) `Sass_Values` between the target +language and C. `Sass_Values` are currently only used by custom functions, but +it should also be possible to use them without a compiler context. + +[1]: https://en.wikipedia.org/wiki/Marshalling_%28computer_science%29 + +### Basic Usage + +```C +#include "sass/values.h" +``` + +```C +// Type for Sass values +enum Sass_Tag { + SASS_BOOLEAN, + SASS_NUMBER, + SASS_COLOR, + SASS_STRING, + SASS_LIST, + SASS_MAP, + SASS_NULL, + SASS_ERROR, + SASS_WARNING +}; + +// Tags for denoting Sass list separators +enum Sass_Separator { + SASS_COMMA, + SASS_SPACE, + // only used internally to represent a hash map before evaluation + // otherwise we would be too early to check for duplicate keys + SASS_HASH +}; + +// Value Operators +enum Sass_OP { + AND, OR, // logical connectives + EQ, NEQ, GT, GTE, LT, LTE, // arithmetic relations + ADD, SUB, MUL, DIV, MOD, // arithmetic functions + NUM_OPS // so we know how big to make the op table +}; +``` + +### Sass Value API + +```C +// Forward declaration +union Sass_Value; + +// Creator functions for all value types +union Sass_Value* sass_make_null (void); +union Sass_Value* sass_make_boolean (bool val); +union Sass_Value* sass_make_string (const char* val); +union Sass_Value* sass_make_qstring (const char* val); +union Sass_Value* sass_make_number (double val, const char* unit); +union Sass_Value* sass_make_color (double r, double g, double b, double a); +union Sass_Value* sass_make_list (size_t len, enum Sass_Separator sep, bool is_bracketed); +union Sass_Value* sass_make_map (size_t len); +union Sass_Value* sass_make_error (const char* msg); +union Sass_Value* sass_make_warning (const char* msg); + +// Generic destructor function for all types +// Will release memory of all associated Sass_Values +// Means we will delete recursively for lists and maps +void sass_delete_value (union Sass_Value* val); + +// Make a deep cloned copy of the given sass value +union Sass_Value* sass_clone_value (const union Sass_Value* val); + +// Stringify a Sass_Values and also return the result as a Sass_Value (of type STRING) +union Sass_Value* sass_value_stringify (const union Sass_Value* a, bool compressed, int precision); + +// Execute an operation for two Sass_Values and return the result as a Sass_Value too +union Sass_Value* sass_value_op (enum Sass_OP op, const union Sass_Value* a, const union Sass_Value* b); + +// Return the sass tag for a generic sass value +// Check is needed before accessing specific values! +enum Sass_Tag sass_value_get_tag (const union Sass_Value* v); + +// Check value to be of a specific type +// Can also be used before accessing properties! 
+bool sass_value_is_null (const union Sass_Value* v); +bool sass_value_is_number (const union Sass_Value* v); +bool sass_value_is_string (const union Sass_Value* v); +bool sass_value_is_boolean (const union Sass_Value* v); +bool sass_value_is_color (const union Sass_Value* v); +bool sass_value_is_list (const union Sass_Value* v); +bool sass_value_is_map (const union Sass_Value* v); +bool sass_value_is_error (const union Sass_Value* v); +bool sass_value_is_warning (const union Sass_Value* v); + +// Getters and setters for Sass_Number +double sass_number_get_value (const union Sass_Value* v); +void sass_number_set_value (union Sass_Value* v, double value); +const char* sass_number_get_unit (const union Sass_Value* v); +void sass_number_set_unit (union Sass_Value* v, char* unit); + +// Getters and setters for Sass_String +const char* sass_string_get_value (const union Sass_Value* v); +void sass_string_set_value (union Sass_Value* v, char* value); +bool sass_string_is_quoted(const union Sass_Value* v); +void sass_string_set_quoted(union Sass_Value* v, bool quoted); + +// Getters and setters for Sass_Boolean +bool sass_boolean_get_value (const union Sass_Value* v); +void sass_boolean_set_value (union Sass_Value* v, bool value); + +// Getters and setters for Sass_Color +double sass_color_get_r (const union Sass_Value* v); +void sass_color_set_r (union Sass_Value* v, double r); +double sass_color_get_g (const union Sass_Value* v); +void sass_color_set_g (union Sass_Value* v, double g); +double sass_color_get_b (const union Sass_Value* v); +void sass_color_set_b (union Sass_Value* v, double b); +double sass_color_get_a (const union Sass_Value* v); +void sass_color_set_a (union Sass_Value* v, double a); + +// Getter for the number of items in list +size_t sass_list_get_length (const union Sass_Value* v); +// Getters and setters for Sass_List +enum Sass_Separator sass_list_get_separator (const union Sass_Value* v); +void sass_list_set_separator (union Sass_Value* v, enum Sass_Separator value); +bool sass_list_get_is_bracketed (const union Sass_Value* v); +void sass_list_set_is_bracketed (union Sass_Value* v, bool value); +// Getters and setters for Sass_List values +union Sass_Value* sass_list_get_value (const union Sass_Value* v, size_t i); +void sass_list_set_value (union Sass_Value* v, size_t i, union Sass_Value* value); + +// Getter for the number of items in map +size_t sass_map_get_length (const union Sass_Value* v); +// Getters and setters for Sass_Map keys and values +union Sass_Value* sass_map_get_key (const union Sass_Value* v, size_t i); +void sass_map_set_key (union Sass_Value* v, size_t i, union Sass_Value*); +union Sass_Value* sass_map_get_value (const union Sass_Value* v, size_t i); +void sass_map_set_value (union Sass_Value* v, size_t i, union Sass_Value*); + +// Getters and setters for Sass_Error +char* sass_error_get_message (const union Sass_Value* v); +void sass_error_set_message (union Sass_Value* v, char* msg); + +// Getters and setters for Sass_Warning +char* sass_warning_get_message (const union Sass_Value* v); +void sass_warning_set_message (union Sass_Value* v, char* msg); +``` + +### More links + +- [Sass Value Example](api-value-example.md) +- [Sass Value Internal](api-value-internal.md) + diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/build-on-darwin.md b/mybulma/node_modules/node-sass/src/libsass/docs/build-on-darwin.md new file mode 100644 index 0000000..119a535 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/build-on-darwin.md @@ -0,0 
+1,27 @@ +To install LibSass, make sure the OS X build tools are installed: + + xcode-select --install + +## Homebrew + +To install homebrew, see [http://brew.sh](http://brew.sh) + + ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" + +You can install the latest version of LibSass quite easily with brew. + + brew install --HEAD libsass + +To update this, do: + + brew reinstall --HEAD libsass + +Brew will build static and shared libraries, and a `libsass.pc` file in `/usr/local/lib/pkgconfig`. + +To use `libsass.pc`, make sure this path is in your `PKG_CONFIG_PATH` + + export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig + +## Manually + +See the linux instructions [Building-with-autotools](build-with-autotools.md) or [Building-with-makefiles](build-with-makefiles.md) diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/build-on-gentoo.md b/mybulma/node_modules/node-sass/src/libsass/docs/build-on-gentoo.md new file mode 100644 index 0000000..601b1fe --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/build-on-gentoo.md @@ -0,0 +1,55 @@ +Here are two ebuilds to compile LibSass and sassc on gentoo linux. If you do not know how to use these ebuilds, you should probably read the gentoo wiki page about [portage overlays](http://wiki.gentoo.org/wiki/Overlay). + +## www-misc/libsass/libsass-9999.ebuild +```ebuild +EAPI=4 + +inherit eutils git-2 autotools + +DESCRIPTION="A C/C++ implementation of a Sass compiler." +HOMEPAGE="http://libsass.org/" +EGIT_PROJECT='libsass' +EGIT_REPO_URI="https://github.com/sass/libsass.git" +LICENSE="MIT" +SLOT="0" +KEYWORDS="" +IUSE="" +DEPEND="" +RDEPEND="${DEPEND}" +DEPEND="${DEPEND}" + +pkg_pretend() { + # older gcc is not supported + local major=$(gcc-major-version) + local minor=$(gcc-minor-version) + [[ "${MERGE_TYPE}" != "binary" && ( $major > 4 || ( $major == 4 && $minor < 5 ) ) ]] && \ + die "Sorry, but gcc earlier than 4.5 will not work for LibSass." +} + +src_prepare() { + eautoreconf +} +``` + +## www-misc/sassc/sassc-9999.ebuild +```ebuild +EAPI=4 + +inherit eutils git-2 autotools + +DESCRIPTION="Command Line Tool for LibSass." +HOMEPAGE="http://libsass.org/" +EGIT_PROJECT='sassc' +EGIT_REPO_URI="https://github.com/sass/sassc.git" +LICENSE="MIT" +SLOT="0" +KEYWORDS="" +IUSE="" +DEPEND="www-misc/libsass" +RDEPEND="${DEPEND}" +DEPEND="${DEPEND}" + +src_prepare() { + eautoreconf +} +``` diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/build-on-windows.md b/mybulma/node_modules/node-sass/src/libsass/docs/build-on-windows.md new file mode 100644 index 0000000..0afaa2e --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/build-on-windows.md @@ -0,0 +1,139 @@ +We support builds via MingGW and via Visual Studio Community 2013. +Both should be considered experimental (MinGW was better tested)! + +## Building via MingGW (makefiles) + +First grab the latest [MinGW for windows][1] installer. Once it is installed, you can click on continue or open the Installation Manager via `bin\mingw-get.exe`. + +You need to have the following components installed: +![Visualization of components installed in the interface](https://cloud.githubusercontent.com/assets/282293/5525466/947bf396-89e6-11e4-841d-4aa916f14de1.png) + +Next we need to install [git for windows][2]. You probably want to check the option to add it to the global path, but you do not need to install the unix tools. + +If you want to run the spec test-suite you also need [ruby][3] and a few gems available. 
Grab the [latest installer][3] and make sure to add it to the global path. Then install the missing gems:
+
+```bash
+gem install minitest
+```
+
+### Mount the MinGW root directory
+
+As mentioned in the [MinGW Getting Started](http://www.mingw.org/wiki/Getting_Started#toc5) guide, you should edit `C:\MinGW\msys\1.0\etc\fstab` to contain the following line:
+
+```
+C:\MinGW /mingw
+```
+
+### Starting a "MinGW" console
+
+Create a batch file with this content:
+```bat
+@echo off
+set PATH=C:\MinGW\bin;%PATH%
+REM only needed if not already available
+set PATH=%PROGRAMFILES%\git\bin;%PATH%
+REM C:\MinGW\msys\1.0\msys.bat
+cmd
+```
+
+Execute it and make sure these commands can be called: `git`, `mingw32-make`, `rm` and `gcc`! Once this is all set, you should be ready to compile `libsass`!
+
+### Get the sources
+
+```bash
+# using git is preferred
+git clone https://github.com/sass/libsass.git
+# only needed for sassc and/or testsuite
+git clone https://github.com/sass/sassc.git libsass/sassc
+git clone https://github.com/sass/sass-spec.git libsass/sass-spec
+```
+
+### Decide for static or shared library
+
+`libsass` can be built and linked as a `static` or as a `shared` library. The default is `static`. To change it you can set the `BUILD` environment variable:
+
+```bat
+set BUILD="shared"
+```
+
+### Compile the library
+```bash
+mingw32-make -C libsass
+```
+
+### Results can be found in
+```bash
+$ ls libsass/lib
+libsass.a libsass.dll libsass.so
+```
+
+### Run the spec test-suite
+```bash
+mingw32-make -C libsass test_build
+```
+
+## Building via MinGW 64-bit (makefiles)
+Building libsass as a DLL on 64-bit Windows.
+
++ Download [MinGW-w64 for Windows 7 64-bit](http://sourceforge.net/projects/mingw-w64/files/Toolchains%20targetting%20Win64/Personal%20Builds/mingw-builds/4.9.2/threads-win32/seh/x86_64-4.9.2-release-win32-seh-rt_v3-rev0.7z/download) and unzip it to "C:\mingw64".
+
++ Create a batch file with this content:
+
+```bat
+@echo off
+set PATH=C:\mingw64\bin;%PATH%
+set CC=gcc
+REM only needed if not already available
+set PATH=%PROGRAMFILES%\Git\bin;%PATH%
+REM C:\MinGW\msys\1.0\msys.bat
+cmd
+```
+
++ By default, the mingw64 DLL will depend on "mingwm10.dll, libgcc_s_dw2-1.dll"; we can modify the Makefile to fix this (add "-static"):
+
+```bash
+lib/libsass.dll: $(COBJECTS) $(OBJECTS) $(RCOBJECTS)
+	$(MKDIR) lib
+	$(CXX) -shared $(LDFLAGS) -o $@ $(COBJECTS) $(OBJECTS) $(RCOBJECTS) $(LDLIBS) -s -static -Wl,--subsystem,windows,--out-implib,lib/libsass.a
+```
+
++ Compile the library
+
+```bash
+mingw32-make -C libsass
+```
+
+By the way, if you are using Java JNA, [JNAerator](http://jnaerator.googlecode.com/) is a good tool.
+
+## Building via Visual Studio Community 2013
+
+Open a Visual Studio 2013 command prompt:
+- `VS2013 x86 Native Tools Command Prompt`
+
+Note: When I installed the community edition, I only got the 2012 command prompts. I copied them from the Start menu to the Desktop and adjusted the paths from `Visual Studio 11.0` to `Visual Studio 12.0`. Since `libsass` uses some `C++11` features, you need at least an MSVC 2013 compiler (v120).
+
+### Get the source
+```bash
+# using git is preferred
+git clone https://github.com/sass/libsass.git
+git clone https://github.com/sass/sassc.git libsass/sassc
+# only needed if you want to run the testsuite
+git clone https://github.com/sass/sass-spec.git libsass/sass-spec
+```
+
+### Compile sassc
+
+Sometimes `msbuild` does not seem to be available from the command prompt.
Just search for it and add it to the global path. It seems to be included in the .net folders too. + +```bat +cd libsass +REM set PATH=%PATH%;%PROGRAMFILES%\MSBuild\12.0\Bin +msbuild /m:4 /p:Configuration=Release win\libsass.sln +REM running the spec test-suite manually (needs ruby and minitest gem) +ruby sass-spec\sass-spec.rb -V 3.5 -c win\bin\sassc.exe -s --impl libsass sass-spec/spec +cd .. +``` + +[1]: http://sourceforge.net/projects/mingw/files/latest/download?source=files +[2]: https://msysgit.github.io/ +[3]: http://rubyinstaller.org/ diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/build-shared-library.md b/mybulma/node_modules/node-sass/src/libsass/docs/build-shared-library.md new file mode 100644 index 0000000..3c143b4 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/build-shared-library.md @@ -0,0 +1,35 @@ +This page is mostly intended for people that want to build a system library that gets distributed via RPMs or other means. This is currently in a experimental phase, as we currently do not really guarantee any ABI forward compatibility. The C API was rewritten to make this possible in the future, but we want to wait some more time till we can call this final and stable. + +Building via autotools +-- + +You want to build a system library only via autotools, since it will create the proper `libtool` files to make it loadable on multiple systems. We hope this works correctly, but nobody of the `libsass` core team has much knowledge in this area. Therefore we are open for comments or improvements by people that have more experience in that matter (like package maintainers from various linux distributions). + +```bash +apt-get install autoconf libtool +git clone https://github.com/sass/libsass.git +cd libsass +autoreconf --force --install +./configure \ + --disable-tests \ + --disable-static \ + --enable-shared \ + --prefix=/usr +make -j5 install +cd .. +``` + +This should install these files +```bash +# $ ls -la /usr/lib/libsass.* +/usr/lib/libsass.la +/usr/lib/libsass.so -> libsass.so.0.0.9 +/usr/lib/libsass.so.0 -> libsass.so.0.0.9 +/usr/lib/libsass.so.0.0.9 +# $ ls -la /usr/include/sass* +/usr/include/sass.h +/usr/include/sass2scss.h +/usr/include/sass/context.h +/usr/include/sass/functions.h +/usr/include/sass/values.h +``` diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/build-with-autotools.md b/mybulma/node_modules/node-sass/src/libsass/docs/build-with-autotools.md new file mode 100644 index 0000000..a48ed18 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/build-with-autotools.md @@ -0,0 +1,78 @@ +### Get the sources +```bash +# using git is preferred +git clone https://github.com/sass/libsass.git +# only needed for sassc and/or testsuite +git clone https://github.com/sass/sassc.git libsass/sassc +git clone https://github.com/sass/sass-spec.git libsass/sass-spec +``` + +### Prerequisites + +In order to run autotools you need a few tools installed on your system. +```bash +yum install automake libtool # RedHat Linux +emerge -a automake libtool # Gentoo Linux +pkgin install automake libtool # SmartOS +``` + + +### Create configure script +```bash +cd libsass +autoreconf --force --install +cd .. +``` + +### Create custom makefiles +```bash +cd libsass +./configure \ + --disable-tests \ + --disable-shared \ + --prefix=/usr +cd .. +``` + +### Build the library +```bash +make -C libsass -j5 +``` + +### Install the library +The library will be installed to the location given as `prefix` to `configure`. 
This is standard behavior for autotools and not `libsass` specific.
+```bash
+make -C libsass -j5 install
+```
+
+### Configure options
+The `configure` script is created by autotools. To get an overview of available options you can call `./configure --help`. When you execute this script, it will create specific makefiles, which you then use via the regular make command.
+
+There are some `libsass` specific options:
+
+```
+Optional Features:
+  --enable-tests          enable testing the build
+  --enable-coverage       enable coverage report for test suite
+  --enable-shared         build shared libraries [default=yes]
+  --enable-static         build static libraries [default=yes]
+
+Optional Packages:
+  --with-sassc-dir=<dir>      specify directory of sassc sources for
+                              testing (default: sassc)
+  --with-sass-spec-dir=<dir>  specify directory of sass-spec for testing
+                              (default: sass-spec)
+```
+
+### Build sassc and run spec test-suite
+
+```bash
+cd libsass
+autoreconf --force --install
+./configure \
+  --enable-tests \
+  --enable-shared \
+  --prefix=/usr
+make -j5 test_build
+cd ..
+```
diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/build-with-makefiles.md b/mybulma/node_modules/node-sass/src/libsass/docs/build-with-makefiles.md new file mode 100644 index 0000000..7ae2e33 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/build-with-makefiles.md @@ -0,0 +1,68 @@
+### Get the sources
+```bash
+# using git is preferred
+git clone https://github.com/sass/libsass.git
+# only needed for sassc and/or testsuite
+git clone https://github.com/sass/sassc.git libsass/sassc
+git clone https://github.com/sass/sass-spec.git libsass/sass-spec
+```
+
+### Decide for static or shared library
+
+`libsass` can be built and linked as a `static` or as a `shared` library. The default is `static`. To change it you can set the `BUILD` environment variable:
+
+```bash
+export BUILD="shared"
+```
+
+Alternatively you can also define it directly when calling make:
+
+```bash
+BUILD="shared" make ...
+```
+
+### Compile the library
+```bash
+make -C libsass -j5
+```
+
+### Results can be found in
+```bash
+$ ls libsass/lib
+libsass.a libsass.so
+```
+
+### Install onto the system
+
+We recommend using [autotools to install](build-with-autotools.md) libsass onto the
+system, since that brings all the benefits of using libtool as the main install method.
+If you still want to install libsass via the makefile, you need to make sure that the GNU
+`install` utility (or a compatible one) is installed on your system.
+```bash
+yum install coreutils # RedHat Linux
+emerge -a coreutils # Gentoo Linux
+pkgin install coreutils # SmartOS
+```
+
+You can set the install location by setting `PREFIX`:
+```bash
+PREFIX="/opt/local" make install
+```
+
+
+### Compiling sassc
+
+```bash
+# Let the build know the library location
+export SASS_LIBSASS_PATH="`pwd`/libsass"
+# Invokes the sassc makefile
+make -C libsass -j5 sassc
+```
+
+### Run the spec test-suite
+
+```bash
+# needs ruby available
+# also gem install minitest
+make -C libsass -j5 test_build
+```
diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/build-with-mingw.md b/mybulma/node_modules/node-sass/src/libsass/docs/build-with-mingw.md new file mode 100644 index 0000000..416507f --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/build-with-mingw.md @@ -0,0 +1,107 @@
+## Building LibSass with MinGW (makefiles)
+
+First grab the latest [MinGW for Windows][1] installer. Once it is installed, you can click on continue or open the Installation Manager via `bin\mingw-get.exe`.
+
+You need to have the following components installed:
+![Visualization of components installed in the interface](https://cloud.githubusercontent.com/assets/282293/5525466/947bf396-89e6-11e4-841d-4aa916f14de1.png)
+
+Next we need to install [git for Windows][2]. You probably want to check the option to add it to the global path, but you do not need to install the unix tools.
+
+If you want to run the spec test-suite you also need [ruby][3] and a few gems available. Grab the [latest installer][3] and make sure to add it to the global path. Then install the missing gems:
+
+```bash
+gem install minitest
+```
+
+### Mount the MinGW root directory
+
+As mentioned in the [MinGW Getting Started](http://www.mingw.org/wiki/Getting_Started#toc5) guide, you should edit `C:\MinGW\msys\1.0\etc\fstab` to contain the following line:
+
+```
+C:\MinGW /mingw
+```
+
+### Starting a "MinGW" console
+
+Create a batch file with this content:
+```bat
+@echo off
+set PATH=C:\MinGW\bin;%PATH%
+REM only needed if not already available
+set PATH=%PROGRAMFILES%\git\bin;%PATH%
+REM C:\MinGW\msys\1.0\msys.bat
+cmd
+```
+
+Execute it and make sure these commands can be called: `git`, `mingw32-make`, `rm` and `gcc`! Once this is all set, you should be ready to compile `libsass`!
+
+### Get the sources
+
+```bash
+# using git is preferred
+git clone https://github.com/sass/libsass.git
+# only needed for sassc and/or testsuite
+git clone https://github.com/sass/sassc.git libsass/sassc
+git clone https://github.com/sass/sass-spec.git libsass/sass-spec
+```
+
+### Decide for static or shared library
+
+`libsass` can be built and linked as a `static` or as a `shared` library. The default is `static`. To change it you can set the `BUILD` environment variable:
+
+```bat
+set BUILD="shared"
+```
+
+### Compile the library
+```bash
+mingw32-make -C libsass
+```
+
+### Results can be found in
+```bash
+$ ls libsass/lib
+libsass.a libsass.dll libsass.so
+```
+
+### Run the spec test-suite
+```bash
+mingw32-make -C libsass test_build
+```
+
+## Building via MinGW 64-bit (makefiles)
+Building libsass as a DLL on 64-bit Windows.
+
+Download [MinGW-w64 for Windows 7 64-bit](http://sourceforge.net/projects/mingw-w64/files/Toolchains%20targetting%20Win64/Personal%20Builds/mingw-builds/4.9.2/threads-win32/seh/x86_64-4.9.2-release-win32-seh-rt_v3-rev0.7z/download) and unzip it to "C:\mingw64".
+
+Create a batch file with this content:
+
+```bat
+@echo off
+set PATH=C:\mingw64\bin;%PATH%
+set CC=gcc
+REM only needed if not already available
+set PATH=%PROGRAMFILES%\Git\bin;%PATH%
+REM C:\MinGW\msys\1.0\msys.bat
+cmd
+```
+
+By default, the mingw64 DLL will depend on "mingwm10.dll, libgcc_s_dw2-1.dll"; we can modify the Makefile to fix this (add "-static"):
+
+```bash
+lib/libsass.dll: $(COBJECTS) $(OBJECTS) $(RCOBJECTS)
+	$(MKDIR) lib
+	$(CXX) -shared $(LDFLAGS) -o $@ $(COBJECTS) $(OBJECTS) $(RCOBJECTS) $(LDLIBS) -s -static -Wl,--subsystem,windows,--out-implib,lib/libsass.a
+```
+
+Compile the library
+
+```bash
+mingw32-make -C libsass
+```
+
+By the way, if you are using Java JNA, [JNAerator](http://jnaerator.googlecode.com/) is a good tool.
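+
+If you want to double-check that the static link really removed the MinGW runtime DLL dependencies, one quick way (assuming the binutils `objdump` that ships with the toolchain is on your `PATH`) is to inspect the import table of the freshly built DLL:
+
+```bash
+# list the DLLs imported by the freshly built libsass.dll;
+# after a fully static link only system DLLs (kernel32, msvcrt, ...) should remain
+objdump -p libsass/lib/libsass.dll | grep "DLL Name"
+```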
+
+[1]: http://sourceforge.net/projects/mingw/files/latest/download?source=files
+[2]: https://msysgit.github.io/
+[3]: http://rubyinstaller.org/
diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/build-with-visual-studio.md b/mybulma/node_modules/node-sass/src/libsass/docs/build-with-visual-studio.md new file mode 100644 index 0000000..275b917 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/build-with-visual-studio.md @@ -0,0 +1,90 @@
+## Building LibSass with Visual Studio
+
+### Requirements:
+
+The minimum requirement to build LibSass with Visual Studio is "Visual Studio 2013 Express for Desktop".
+
+Additionally, it is recommended to have `git` installed and available in `PATH`, so that the `libsass` version information can be deduced. For instance, if GitHub for Windows (https://windows.github.com/) is installed, the `PATH` will have an entry resembling: `X:\Users\\AppData\Local\GitHub\PortableGit_\cmd\` (where `X` is the drive letter of the system drive). If `git` is not available, querying the LibSass version will result in `[NA]`.
+
+### Build Steps:
+
+#### From Visual Studio:
+
+Open the `win\libsass.sln` solution and build (Ctrl+Shift+B) to produce `libsass.dll`.
+
+To build LibSass as a static library, it is recommended to set an environment variable `LIBSASS_STATIC_LIB` before launching the project:
+
+```cmd
+cd path\to\libsass
+SET LIBSASS_STATIC_LIB=1
+::
+:: or in PowerShell:
+:: $env:LIBSASS_STATIC_LIB=1
+::
+win\libsass.sln
+```
+
+Visual Studio will form the filtered source tree as shown below:
+
+![image](https://cloud.githubusercontent.com/assets/3840695/9298985/aae9e072-44bf-11e5-89eb-e7995c098085.png)
+
+`Header Files` contains the .h and .hpp files, while `Source Files` covers `.c` and `.cpp`. The other used headers/sources will appear under `External Dependencies`.
+
+If there is a LibSass code file appearing under External Dependencies, it can be changed by altering the `win\libsass.vcxproj.filters` file or by dragging it in Solution Explorer.
+
+#### From Command Prompt:
+
+Notice that in the following commands:
+
+* If the platform is 32-bit Windows, replace `ProgramFiles(x86)` with `ProgramFiles`.
+* To build with Visual Studio 2015, replace `12.0` with `14.0` in the commands below.
+
+Open a command prompt:
+
+To build dynamic/shared library (`libsass.dll`):
+
+```cmd
+:: debug build:
+"%ProgramFiles(x86)%\MSBuild\12.0\Bin\MSBuild" win\libsass.sln
+
+:: release build:
+"%ProgramFiles(x86)%\MSBuild\12.0\Bin\MSBuild" win\libsass.sln ^
+/p:Configuration=Release
+```
+
+To build static library (`libsass.lib`):
+
+```cmd
+:: debug build:
+"%ProgramFiles(x86)%\MSBuild\12.0\Bin\MSBuild" win\libsass.sln ^
+/p:LIBSASS_STATIC_LIB=1
+
+:: release build:
+"%ProgramFiles(x86)%\MSBuild\12.0\Bin\MSBuild" win\libsass.sln ^
+/p:LIBSASS_STATIC_LIB=1 /p:Configuration=Release
+```
+
+#### From PowerShell:
+
+To build dynamic/shared library (`libsass.dll`):
+
+```powershell
+# debug build:
+&"${env:ProgramFiles(x86)}\MSBuild\12.0\Bin\MSBuild" win\libsass.sln
+
+# release build:
+&"${env:ProgramFiles(x86)}\MSBuild\12.0\Bin\MSBuild" win\libsass.sln `
+/p:Configuration=Release
+```
+
+To build static library (`libsass.lib`):
+
+```powershell
+# debug build:
+&"${env:ProgramFiles(x86)}\MSBuild\12.0\Bin\MSBuild" win\libsass.sln `
+/p:LIBSASS_STATIC_LIB=1
+
+# release build:
+&"${env:ProgramFiles(x86)}\MSBuild\12.0\Bin\MSBuild" win\libsass.sln `
+/p:LIBSASS_STATIC_LIB=1 /p:Configuration=Release
+```
diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/build.md b/mybulma/node_modules/node-sass/src/libsass/docs/build.md new file mode 100644 index 0000000..c656d88 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/build.md @@ -0,0 +1,97 @@
+`libsass` is only a library and does not do much on its own. You need an implementation that you can use from the [command line][6], or some [bindings][9] to use it within your favorite programming language. You should be able to get [`sassc`][6] running by following the instructions in this guide.
+
+Before starting, see [setup dev environment](setup-environment.md).
+
+Building on different Operating Systems
+--
+
+We try to keep the code as OS-independent and standards-compliant as possible. Reading files from the file-system involves some OS-dependent code, but will ultimately fall back to a POSIX-compatible implementation. We do use some `C++11` features, but are so far only committed to using `unordered_map`. This means you will need a pretty recent compiler on most systems (gcc 4.5 seems to be the minimum).
+
+### Building on Linux (and other *nix flavors)
+
+Linux is the main target for `libsass` and we support two ways to build `libsass` here. The old plain makefiles should still work on most systems (including MinGW), while the autotools build is preferred if you want to create a [system library][4] (experimental).
+
+- [Building with makefiles][1]
+- [Building with autotools][2]
+
+### Building on Windows (experimental)
+
+Windows build support was added very recently and should be considered experimental. Credits go to @darrenkopp and @am11 for their work on getting `libsass` and `sassc` to compile with Visual Studio!
+
+- [Building with MinGW][3]
+- [Building with Visual Studio][11]
+
+### Building on Mac OS X (untested)
+
+Works the same as on Linux, but you can also install LibSass via `homebrew`.
+
+- [Building on Mac OS X][10]
+
+### Building a system library (experimental)
+
+Since `libsass` is a library, it makes sense to install it as a shared library on your system. On Linux this means creating a `.so` library via autotools. This should work pretty well already, but we are not yet committed to keeping the ABI 100% stable. This should be the case once we increase the version number for the library to 1.0.0 or higher.
On Windows you should be able get a `dll` by creating a shared build with MinGW. There is currently no target in the MSVC project files to do this. + +- [Building shared system library][4] + +Compiling with clang instead of gcc +-- + +To use clang you just need to set the appropriate environment variables: + +```bash +export CC=/usr/bin/clang +export CXX=/usr/bin/clang++ +``` + +Running the spec test-suite +-- + +We constantly and automatically test `libsass` against the official [spec test-suite][5]. To do this we need to have a test-runner (which is written in ruby) and a command-line tool ([`sassc`][6]) to run the tests. Therefore we need to additionally compile `sassc`. To do this, the build files of all three projects need to work together. This may not have the same quality for all build flavors. You definitely need to have ruby (2.1?) installed (version 1.9 seems to cause problems at least on windows). You also need some gems installed: + +```bash +ruby -v +gem install minitest +# should be optional +gem install minitap +``` + +Including the LibSass version +-- + +There is a function in `libsass` to query the current version. This has to be defined at compile time. We use a C macro for this, which can be defined by calling `g++ -DLIBSASS_VERSION="\"x.y.z.\""`. The two quotes are necessary, since it needs to end up as a valid C string. Normally you do not need to do anything if you use the makefiles or autotools. They will try to fetch the version via git directly. If you only have the sources without the git repo, you can pass the version as an environment variable to `make` or `configure`: + +``` +export LIBSASS_VERSION="x.y.z." +``` + +Continuous Integration +-- + +We use two CI services to automatically test all commits against the latest [spec test-suite][5]. + +- [LibSass on Travis-CI (linux)][7] +[![Build Status](https://travis-ci.org/sass/libsass.png?branch=master)](https://travis-ci.org/sass/libsass) +- [LibSass on AppVeyor (windows)][8] +[![Build status](https://ci.appveyor.com/api/projects/status/github/sass/libsass?svg=true)](https://ci.appveyor.com/project/mgreter/libsass-513/branch/master) + +Why not using CMake? +-- + +There were some efforts to get `libsass` to compile with CMake, which should make it easier to create build files for linux and windows. Unfortunately this was not completed. But we are certainly open for PRs! + +Miscellaneous +-- + +- [Ebuilds for Gentoo Linux](build-on-gentoo.md) + +[1]: build-with-makefiles.md +[2]: build-with-autotools.md +[3]: build-with-mingw.md +[4]: build-shared-library.md +[5]: https://github.com/sass/sass-spec +[6]: https://github.com/sass/sassc +[7]: https://github.com/sass/libsass/blob/master/.travis.yml +[8]: https://github.com/sass/libsass/blob/master/appveyor.yml +[9]: implementations.md +[10]: build-on-darwin.md +[11]: build-with-visual-studio.md diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/compatibility-plan.md b/mybulma/node_modules/node-sass/src/libsass/docs/compatibility-plan.md new file mode 100644 index 0000000..d8e538f --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/compatibility-plan.md @@ -0,0 +1,48 @@ +This document is to serve as a living, changing plan for getting LibSass caught up with Ruby Sass. + +_Note: an "s" preceeding a version number is specifying a Ruby Sass version. Without an s, it's a version of LibSass._ + +# Goal +**Our goal is to reach full s3.4 compatibility as soon as possible. 
LibSass version 3.4 will behave just like Ruby Sass 3.4**
+
+I highlight the goal, because there are some things that are *not* currently priorities. To be clear, they WILL be priorities, but they are not at the moment:
+
+* Performance Improvements
+* Extensibility
+
+The overriding goal is correctness.
+
+## Verifying Correctness
+LibSass uses the spec for its testing. The spec was originally based off s3.2 tests. Many things have changed in Ruby Sass since then and some of the tests need to be updated and changed in order to get them to match both LibSass and Ruby Sass.
+
+Until this project is complete, the spec will be primarily a place to test LibSass. By the time LibSass reaches 3.4, it is our goal that sass-spec will be fully usable as an official testing source for ALL implementations of Sass.
+
+## Version Naming
+Until LibSass reaches parity with Ruby Sass, we will be aggressively bumping versions, and LibSass 3.4 will be the peer to Ruby Sass 3.4 in every way.
+
+# Release Plan
+
+## 3.0
+The goal of 3.0 is to introduce some of the most demanded features for LibSass. That is, we are focusing on issues and features that have kept adoption down. This is a mongrel release with respect to which version of Sass it's targeting. It's often a mixture of 3.2 / 3.3 / 3.4 behaviours. This is not ideal, but it's favourable to not existing. Targeting 3.4 strictly during this release would mean we never actually release.
+
+## 3.1
+The goal of 3.1 is to update all the passing specs to agree with 3.4. This will not be a complete representation of s3.4 (aka, there will be missing features), but the goal is to change existing features and implemented features to match 3.4 behaviour.
+
+By the end of this, the sass-spec must pass against 3.4.
+
+Major issues:
+* Variable Scoping
+* Color Handling
+* Precision
+
+## 3.2
+This version will focus on edge case fixes. There are a LOT of edge cases in the _todo_ tests and this is the release where we hunt those down like dogs (not that we want to hurt dogs, it's just a figure of speech in English).
+
+## 3.3
+Dress rehearsal. When we are 99% sure that we've fixed the main issues keeping us from saying we are compliant with s3.4 behaviour.
+
+## 3.4
+Compass Compatibility. We need to be able to work with Compass and all the other libraries out there. At this point, we are calling LibSass "mature".
+
+## Beyond 3.4
+Obviously, there is matching Sass 3.5 behaviour. But, beyond that, we'll want to focus on performance, stability, and error handling. These can always be improved upon and are the life's work of an open source project. We'll have to work closely with Sass in the future.
diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/contributing.md b/mybulma/node_modules/node-sass/src/libsass/docs/contributing.md new file mode 100644 index 0000000..4a2d470 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/contributing.md @@ -0,0 +1,17 @@
+First of all, welcome! Thanks for even reading this page. If you're here, you're probably wondering what you can do to help make the LibSass project even more awesome. And, even having that feeling means you are awesome!
+
+## I'm a programmer
+
+Awesome! We need your help. The best thing to do is go find issues that are tagged with both "bug" and "test written". We do spec driven development here and these issues have a test that's written already in the sass-spec project. Go find the test by going to sass-spec/spec/LibSass-todo-issues/issue_XXX/ where XXX is the issue number.
Write the code, and compile, and then issue a pull request referencing the issue. We'll quickly verify it and get it merged in! + +To get your dev environment setup, check out our article on [Setup-Dev-Environment](setup-environment.md). + +## I'm not a backend programmer + +COOL! We also need your help. Doing [Issue-Triage](triage.md) is a big deal and something we need constant help with. That means helping to verify issues, write tests for them, and make sure they are getting fixed. It's being part of the smiling face of the project. + +Also, we need help with the Sass-Spec project itself. Just people to organize, refactor, and understand the tests in there. + +## I don't know what a computer is? + +Hmm.... well, it's the thing you are looking at right now. Ummm... check out training courses! Then, come back and join us! diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/custom-functions-internal.md b/mybulma/node_modules/node-sass/src/libsass/docs/custom-functions-internal.md new file mode 100644 index 0000000..57fec82 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/custom-functions-internal.md @@ -0,0 +1,122 @@ +# Developer Documentation + +Custom functions are internally represented by `struct Sass_C_Function_Descriptor`. + +## Sass_C_Function_Descriptor + +```C +struct Sass_C_Function_Descriptor { + const char* signature; + Sass_C_Function function; + void* cookie; +}; +``` + +- `signature`: The function declaration, like `foo($bar, $baz:1)` +- `function`: Reference to the C function callback +- `cookie`: any pointer you want to attach + +### signature + +The signature defines how the function can be invoked. It also declares which arguments are required and which are optional. Required arguments will be enforced by LibSass and a Sass error is thrown in the event a call as missing an argument. Optional arguments only need to be present when you want to overwrite the default value. + + foo($bar, $baz: 2) + +In this example, `$bar` is required and will error if not passed. `$baz` is optional and the default value of it is 2. A call like `foo(10)` is therefore equal to `foo(10, 2)`, while `foo()` will produce an error. + +### function + +The callback function needs to be of the following form: + +```C +union Sass_Value* call_sass_function( + const union Sass_Value* s_args, + void* cookie +) { + return sass_clone_value(s_args); +} +``` + +### cookie + +The cookie can hold any pointer you want. In the `perl-libsass` implementation it holds the structure with the reference of the actual registered callback into the perl interpreter. Before that call `perl-libsass` will convert all `Sass_Values` to corresponding perl data types (so they can be used natively inside the perl interpretor). The callback can also return a `Sass_Value`. In `perl-libsass` the actual function returns a perl value, which has to be converted before `libsass` can work with it again! 
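+
+As a rough sketch of how a callback can read its arguments: LibSass passes the evaluated arguments to the callback as a single Sass list, one entry per declared parameter, so a function declared as `add_px($bar, $baz: 2)` could inspect them with the getters from the Sass value API (the function name and error messages below are made up for illustration):
+
+```C
+// hypothetical callback for a signature like "add_px($bar, $baz: 2)"
+union Sass_Value* call_fn_add_px(const union Sass_Value* s_args, void* cookie)
+{
+  // arguments arrive as a Sass list, one entry per declared parameter
+  union Sass_Value* bar = sass_list_get_value(s_args, 0); // required $bar
+  union Sass_Value* baz = sass_list_get_value(s_args, 1); // $baz (default filled in by LibSass)
+  if (!sass_value_is_number(bar) || !sass_value_is_number(baz))
+    return sass_make_error("add_px: both arguments must be numbers");
+  double sum = sass_number_get_value(bar) + sass_number_get_value(baz);
+  return sass_make_number(sum, "px");
+}
+```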
+ +## Sass_Values + +```C +// allocate memory (copies passed strings) +union Sass_Value* sass_make_null (void); +union Sass_Value* sass_make_boolean (bool val); +union Sass_Value* sass_make_string (const char* val); +union Sass_Value* sass_make_qstring (const char* val); +union Sass_Value* sass_make_number (double val, const char* unit); +union Sass_Value* sass_make_color (double r, double g, double b, double a); +union Sass_Value* sass_make_list (size_t len, enum Sass_Separator sep, bool is_bracketed); +union Sass_Value* sass_make_map (size_t len); +union Sass_Value* sass_make_error (const char* msg); +union Sass_Value* sass_make_warning (const char* msg); + +// Make a deep cloned copy of the given sass value +union Sass_Value* sass_clone_value (const union Sass_Value* val); + +// deallocate memory (incl. all copied memory) +void sass_delete_value (const union Sass_Value* val); +``` + +## Example main.c + +```C +#include +#include +#include "sass/context.h" + +union Sass_Value* call_fn_foo(const union Sass_Value* s_args, void* cookie) +{ + // we actually abuse the void* to store an "int" + return sass_make_number((size_t)cookie, "px"); +} + +int main( int argc, const char* argv[] ) +{ + + // get the input file from first argument or use default + const char* input = argc > 1 ? argv[1] : "styles.scss"; + + // create the file context and get all related structs + struct Sass_File_Context* file_ctx = sass_make_file_context(input); + struct Sass_Context* ctx = sass_file_context_get_context(file_ctx); + struct Sass_Options* ctx_opt = sass_context_get_options(ctx); + + // allocate a custom function caller + Sass_C_Function_Callback fn_foo = + sass_make_function("foo()", call_fn_foo, (void*)42); + + // create list of all custom functions + Sass_C_Function_List fn_list = sass_make_function_list(1); + sass_function_set_list_entry(fn_list, 0, fn_foo); + sass_option_set_c_functions(ctx_opt, fn_list); + + // context is set up, call the compile step now + int status = sass_compile_file_context(file_ctx); + + // print the result or the error to the stdout + if (status == 0) puts(sass_context_get_output_string(ctx)); + else puts(sass_context_get_error_message(ctx)); + + // release allocated memory + sass_delete_file_context(file_ctx); + + // exit status + return status; + +} +``` + +## Compile main.c + +```bash +gcc -c main.c -o main.o +gcc -o sample main.o -lsass +echo "foo { margin: foo(); }" > foo.scss +./sample foo.scss => "foo { margin: 42px }" +``` diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/dev-ast-memory.md b/mybulma/node_modules/node-sass/src/libsass/docs/dev-ast-memory.md new file mode 100644 index 0000000..31004bc --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/dev-ast-memory.md @@ -0,0 +1,223 @@ +# LibSass smart pointer implementation + +LibSass uses smart pointers very similar to `shared_ptr` known +by Boost or C++11. Implementation is a bit less modular since +it was not needed. Various compile time debug options are +available if you need to debug memory life-cycles. + + +## Memory Classes + +### SharedObj + +Base class for the actual node implementations. This ensures +that every object has a reference counter and other values. + +```c++ +class AST_Node : public SharedObj { ... }; +``` + +### SharedPtr (base class for SharedImpl) + +Base class that holds on to the pointer. The reference counter +is stored inside the pointer object directly (`SharedObj`). 
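+
+As a purely illustrative sketch of the mechanics described above (this is not the actual LibSass code, which additionally supports detaching, debug hooks and more): the object carries the counter, and the pointer wrapper increments it on copy and deletes the object when the last wrapper goes out of scope.
+
+```c++
+#include <cstddef>
+
+// simplified stand-in for the intrusive reference counting described above
+class SharedObj {
+  size_t refcount = 0;
+  friend class SharedPtr;
+public:
+  virtual ~SharedObj() {}
+};
+
+class SharedPtr {
+  SharedObj* node;
+  void incRef() { if (node) ++node->refcount; }
+  void decRef() { if (node && --node->refcount == 0) delete node; }
+public:
+  SharedPtr(SharedObj* ptr = nullptr) : node(ptr) { incRef(); }
+  SharedPtr(const SharedPtr& rhs) : node(rhs.node) { incRef(); }
+  SharedPtr& operator=(const SharedPtr& rhs) {
+    if (node != rhs.node) { decRef(); node = rhs.node; incRef(); }
+    return *this;
+  }
+  ~SharedPtr() { decRef(); }
+};
+```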
+ +### SharedImpl (inherits from SharedPtr) + +This is the main base class for objects you use in your code. It +will make sure that the memory it points at will be deleted once +all copies to the same object/memory go out of scope. + +```c++ +Class* pointer = new Class(...); +SharedImpl obj(pointer); +``` + +To spare the developer of typing the templated class every time, +we created typedefs for each available AST Node specialization. + +```c++ +typedef SharedImpl Number_Obj; +Number_Obj number = SASS_MEMORY_NEW(...); +``` + + +## Memory life-cycles + +### Pointer pickups + +I often use the terminology of "pickup". This means the moment when +a raw pointer not under any control is assigned to a reference counted +object (`XYZ_Obj = XYZ_Ptr`). From that point on memory will be +automatically released once the object goes out of scope (but only +if the reference counter reaches zero). Main point beeing, you don't +have to worry about memory management yourself. + +### Object detach + +Sometimes we can't return reference counted objects directly (see +invalid covariant return types problems below). But we often still +need to use reference objects inside a function to avoid leaks when +something throws. For this you can use `detach`, which basically +detaches the pointer memory from the reference counted object. So +when the reference counted object goes out of scope, it will not +free the attached memory. You are now again in charge of freeing +the memory (just assign it to a reference counted object again). + + +## Circular references + +Reference counted memory implementations are prone to circular references. +This can be addressed by using a multi generation garbage collector. But +for our use-case that seems overkill. There is no way so far for users +(sass code) to create circular references. Therefore we can code around +this possible issue. But developers should be aware of this limitation. + +There are AFAIR two places where circular references could happen. One is +the `sources` member on every `Selector`. The other one can happen in the +extend code (Node handling). The easy way to avoid this is to only assign +complete object clones to these members. If you know the objects lifetime +is longer than the reference you create, you can also just store the raw +pointer. Once needed this could be solved with weak pointers. + + +## Addressing the invalid covariant return types problems + +If you are not familiar with the mentioned problem, you may want +to read up on covariant return types and virtual functions, i.e. + +- http://stackoverflow.com/questions/6924754/return-type-covariance-with-smart-pointers +- http://stackoverflow.com/questions/196733/how-can-i-use-covariant-return-types-with-smart-pointers +- http://stackoverflow.com/questions/2687790/how-to-accomplish-covariant-return-types-when-returning-a-shared-ptr + +We hit this issue at least with the CRTP visitor pattern (eval, expand, +listize and so forth). This means we cannot return reference counted +objects directly. We are forced to return raw pointers or we would need +to have a lot of explicit and expensive upcasts by callers/consumers. + +### Simple functions that allocate new AST Nodes + +In the parser step we often create new objects and can just return a +unique pointer (meaning ownership clearly shifts back to the caller). +The caller/consumer is responsible that the memory is freed. + +```c++ +typedef Number* Number_Ptr; +int parse_integer() { + ... 
// do the parsing + return 42; +} +Number_Ptr parse_number() { + Number_Ptr p_nr = SASS_MEMORY_NEW(...); + p_nr->value(parse_integer()); + return p_nr; +} +Number_Obj nr = parse_number(); +``` + +The above would be the encouraged pattern for such simple cases. + +### Allocate new AST Nodes in functions that can throw + +There is a major caveat with the previous example, considering this +more real-life implementation that throws an error. The throw may +happen deep down in another function. Holding raw pointers that +we need to free would leak in this case. + +```c++ +int parse_integer() { + ... // do the parsing + if (error) throw(error); + return 42; +} +``` + +With this `parse_integer` function the previous example would leak memory. +I guess it is pretty obvious, as the allocated memory will not be freed, +as it was never assigned to a SharedObj value. Therefore the above code +would better be written as: + +```c++ +typedef Number* Number_Ptr; +int parse_integer() { + ... // do the parsing + if (error) throw(error); + return 42; +} +// this leaks due to pointer return +// should return Number_Obj instead +// though not possible for virtuals! +Number_Ptr parse_number() { + Number_Obj nr = SASS_MEMORY_NEW(...); + nr->value(parse_integer()); // throws + return &nr; // Ptr from Obj +} +Number_Obj nr = parse_number(); +// will now be freed automatically +``` + +The example above unfortunately will not work as is, since we return a +`Number_Ptr` from that function. Therefore the object allocated inside +the function is already gone when it is picked up again by the caller. +The easy fix for the given simplified use case would be to change the +return type of `parse_number` to `Number_Obj`. Indeed we do it exactly +this way in the parser. But as stated above, this will not work for +virtual functions due to invalid covariant return types! + +### Return managed objects from virtual functions + +The easy fix would be to just create a new copy on the heap and return +that. But this seems like a very inelegant solution to this problem. I +mean why can't we just tell the object to treat it like a newly allocated +object? And indeed we can. I've added a `detach` method that will tell +the object to survive deallocation until the next pickup. This means +that it will leak if it is not picked up by consumer. + +```c++ +typedef Number* Number_Ptr; +int parse_integer() { + ... // do the parsing + if (error) throw(error); + return 42; +} +Number_Ptr parse_number() { + Number_Obj nr = SASS_MEMORY_NEW(...); + nr->value(parse_integer()); // throws + return nr.detach(); +} +Number_Obj nr = parse_number(); +// will now be freed automatically +``` + + +## Compile time debug options + +To enable memory debugging you need to define `DEBUG_SHARED_PTR`. +This can i.e. be done in `include/sass/base.h` + +```c++ +define DEBUG_SHARED_PTR +``` + +This will print lost memory on exit to stderr. You can also use +`setDbg(true)` on sepecific variables to emit reference counter +increase, decrease and other events. + + +## Why reinvent the wheel when there is `shared_ptr` from C++11 + +First, implementing a smart pointer class is not really that hard. It +was indeed also a learning experience for myself. 
But there are more +profound advantages: + +- Better GCC 4.4 compatibility (which most code still has OOTB) +- Not thread safe (give us some free performance on some compiler) +- Beeing able to track memory allocations for debugging purposes +- Adding additional features if needed (as seen in `detach`) +- Optional: optimized weak pointer implementation possible + +### Thread Safety + +As said above, this is not thread safe currently. But we don't need +this ATM anyway. And I guess we probably never will share AST Nodes +across different threads. \ No newline at end of file diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/implementations.md b/mybulma/node_modules/node-sass/src/libsass/docs/implementations.md new file mode 100644 index 0000000..5239adc --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/implementations.md @@ -0,0 +1,65 @@ +There are several implementations of `libsass` for a variety of languages. Here are just a few of them. Note, some implementations may or may not be up to date. We have not verified whether they work. + +### C +* [sassc](https://github.com/hcatlin/sassc) + +### Crystal +* [sass.cr](https://github.com/straight-shoota/sass.cr) + +### Elixir +* [sass.ex](https://github.com/scottdavis/sass.ex) + +### Go +* [go-libsass](https://github.com/wellington/go-libsass) +* [go_sass](https://github.com/suapapa/go_sass) +* [go-sass](https://github.com/SamWhited/go-sass) + +### Haskell +* [hLibsass](https://github.com/jakubfijalkowski/hlibsass) +* [hSass](https://github.com/jakubfijalkowski/hsass) + +### Java +* [libsass-maven-plugin](https://github.com/warmuuh/libsass-maven-plugin) +* [jsass](https://github.com/bit3/jsass) + +### JavaScript +* [sass.js](https://github.com/medialize/sass.js) + +### Lua +* [lua-sass](https://github.com/craigbarnes/lua-sass) + +### .NET +* [libsass-net](https://github.com/darrenkopp/libsass-net) +* [NSass](https://github.com/TBAPI-0KA/NSass) +* [Sass.Net](https://github.com/andyalm/Sass.Net) +* [SharpScss](https://github.com/xoofx/SharpScss) +* [LibSassHost](https://github.com/Taritsyn/LibSassHost) + +### Nim +* [nim-sass](https://github.com/zacharycarter/nim-sass) + +### node.js +* [node-sass](https://github.com/sass/node-sass) + +### Perl +* [CSS::Sass](https://github.com/caldwell/CSS-Sass) +* [Text::Sass::XS](https://github.com/ysasaki/Text-Sass-XS) + +### PHP +* [sassphp](https://github.com/sensational/sassphp) +* [php-sass](https://github.com/lesstif/php-sass) + +### Python +* [libsass-python](https://github.com/dahlia/libsass-python) +* [SassPython](https://github.com/marianoguerra/SassPython) +* [pylibsass](https://github.com/rsenk330/pylibsass) +* [python-scss](https://github.com/pistolero/python-scss) + +### Ruby +* [sassruby](https://github.com/hcatlin/sassruby) + +### Scala +* [Sass-Scala](https://github.com/kkung/Sass-Scala) + +### Tcl +* [tclsass](https://github.com/flightaware/tclsass) diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/plugins.md b/mybulma/node_modules/node-sass/src/libsass/docs/plugins.md new file mode 100644 index 0000000..a9711e3 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/plugins.md @@ -0,0 +1,47 @@ +Plugins are shared object files (.so on *nix and .dll on win) that can be loaded by LibSass on runtime. Currently we only provide a way to load internal/custom functions from plugins. 
In the future we probably will also add a way to provide custom importers via plugins (needs more refactoring to [support multiple importers with some kind of priority system](https://github.com/sass/libsass/issues/962)). + +## plugin.cpp + +```C++ +#include +#include +#include +#include "sass_values.h" + +union Sass_Value* ADDCALL call_fn_foo(const union Sass_Value* s_args, void* cookie) +{ + // we actually abuse the void* to store an "int" + return sass_make_number((intptr_t)cookie, "px"); +} + +extern "C" const char* ADDCALL libsass_get_version() { + return libsass_version(); +} + +extern "C" Sass_C_Function_List ADDCALL libsass_load_functions() +{ + // allocate a custom function caller + Sass_C_Function_Callback fn_foo = + sass_make_function("foo()", call_fn_foo, (void*)42); + // create list of all custom functions + Sass_C_Function_List fn_list = sass_make_function_list(1); + // put the only function in this plugin to the list + sass_function_set_list_entry(fn_list, 0, fn_foo); + // return the list + return fn_list; +} +``` + +To compile the plugin you need to have LibSass already built as a shared library (to link against it). The commands below expect the shared library in the `lib` sub-directory (`-Llib`). The plugin and the main LibSass process should "consume" the same shared LibSass library on runtime. It will propably also work if they use different LibSass versions. In this case we check if the major versions are compatible (i.e. 3.1.3 and 3.1.1 would be considered compatible). + +## Compile with gcc on linux + +```bash +g++ -O2 -shared plugin.cpp -o plugin.so -fPIC -Llib -lsass +``` + +## Compile with mingw on windows + +```bash +g++ -O2 -shared plugin.cpp -o plugin.dll -Llib -lsass +``` diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/setup-environment.md b/mybulma/node_modules/node-sass/src/libsass/docs/setup-environment.md new file mode 100644 index 0000000..8056136 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/setup-environment.md @@ -0,0 +1,68 @@ +## Requirements +In order to install and setup your local development environment, there are some prerequisites: + +* git +* gcc/clang/llvm (Linux: build tools, Mac OS X: XCode w/ Command Line Tools) +* ruby w/ bundler + +OS X: +First you'll need to install XCode which you can now get from the AppStore installed on your mac. After you download that and run it, then run this on the command line: + +```` +xcode-select --install +```` + +## Cloning the Projects + +First, clone the project and then add a line to your `~/.bash_profile` that will let other programs know where the LibSass dev files are. + +```` +git clone git@github.com:sass/libsass.git +cd libsass +echo "export SASS_LIBSASS_PATH=$(pwd)" >> ~/.bash_profile + +```` + +Then, if you run the "bootstrap" script, it should clone all the other required projects. + +```` +./script/bootstrap +```` + +You should now have a `sass-spec` and `sassc` folder within the libsass folder. Both of these are clones of their respective git projects. If you want to do a pull request, remember to work in those folders. For instance, if you want to add a test (see other documentation for how to do that), make sure to commit it to your *fork* of the sass-spec github project. Also, whenever you are running tests, make sure to `pull` from the origin! We want to make sure we are testing against the newest libsass, sassc, and sass-spec! + +Now, try and see if you can build the project. We do that with the `make` command. 
+ +```` +make +```` + +At this point, if you get an error, something is most likely wrong with your compiler installation. Yikes. It's hard to cover how to fix this in an article. Feel free to open an issue and we'll try and help! But, remember, before you do that, googling the error message is your friend! Many problems are solved quickly that way. + +## Running The Spec Against LibSass + +Then, to run the spec against LibSass, just run: + +```` +./script/spec +```` + +If you get an error about `SASS_LIBSASS_PATH`, you may still need to set a variable pointing to the libsass folder, like this: + +```` +export SASS_LIBSASS_PATH=/Users/you/path/libsass +```` + +...where the latter part is to the `libsass` directory you've cloned. You can get this path by typing `pwd` in the Terminal + +## Running the Spec Against Ruby Sass + +Go into the sass-spec folder that should have been cloned earlier with the "bootstrap" command. Run the following. + +```` +bundle install +./sass-spec.rb +```` + +Voila! Now you are testing against Sass too! + diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/source-map-internals.md b/mybulma/node_modules/node-sass/src/libsass/docs/source-map-internals.md new file mode 100644 index 0000000..50f83b5 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/source-map-internals.md @@ -0,0 +1,51 @@ +This document is mainly intended for developers! + +# Documenting some of the source map internals + +Since source maps are somewhat a black box to all LibSass maintainers, [I](@mgreter) will try to document my findings with source maps in LibSass, as I come across them. This document will also brievely explain how LibSass parses the source and how it outputs the result. + +The main storage for SourceMap mappings is the `mappings` vector: + +``` +# in source_map.hpp +vector mappings +# in mappings.hpp +struct Mapping ... + Position original_position; + Position generated_position; +``` + +## Every parsed token has its source associated + +LibSass uses a lexical parser. Whenever LibSass finds a token of interest, it creates a specific `AST_Node`, which will hold a reference to the input source with line/column information. `AST_Node` is the base class for all parsed items. They are declared in `ast.hpp` and are used in `parser.hpp`. Here a simple example: + +``` +if (lex< custom_property_name >()) { + Sass::String* prop = new (ctx.mem) String_Constant(path, source_position, lexed); + return new (ctx.mem) Declaration(path, prop->position(), prop, ...); +} +``` + +## How is the `source_position` calculated + +This is automatically done with `lex` in `parser.hpp`. Whenever something is lexed, the `source_position` is updated. But be aware that `source_position` points to the begining of the parsed text. If you need a mapping for the position where the parsing ended, you need to add another call to `lex` (to match nothing)! + +``` +lex< exactly < empty_str > >(); +end = new (ctx.mem) String_Constant(path, source_position, lexed); +``` + +## How are mappings for the output created + +So far we have collected all needed data for all tokens in the input stream. We can now use this information to create mappings when we put things into the output stream. 
Mappings are created via the `add_mappings` method: + +``` +# in source_map.hpp +void add_mapping(AST_Node* node); +``` + +This method is called in two places: +- `Inspect::append_to_buffer` +- `Output_[Nested|Compressed]::append_to_buffer` + +Mappings can only be created for things that have been parsed into a `AST_Node`. Otherwise we do not have the information to create the mappings, which is the reason why LibSass currently only maps the most important tokens in source maps. diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/trace.md b/mybulma/node_modules/node-sass/src/libsass/docs/trace.md new file mode 100644 index 0000000..4a57c90 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/trace.md @@ -0,0 +1,26 @@ +## This is proposed interface in https://github.com/sass/libsass/pull/1288 + +Additional debugging macros with low overhead are available, `TRACE()` and `TRACEINST()`. + +Both macros simulate a string stream, so they can be used like this: + + TRACE() << "Reached."; + +produces: + + [LibSass] parse_value parser.cpp:1384 Reached. + +`TRACE()` + logs function name, source filename, source file name to the standard error and the attached + stream to the standard error. + +`TRACEINST(obj)` + logs object instance address, function name, source filename, source file name to the standard error and the attached stream to the standard error, for example: + + TRACEINST(this) << "String_Constant created " << this; + +produces: + + [LibSass] 0x8031ba980:String_Constant ./ast.hpp:1371 String_Constant created (0,"auto") + +The macros generate output only of `LibSass_TRACE` is set in the environment. diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/triage.md b/mybulma/node_modules/node-sass/src/libsass/docs/triage.md new file mode 100644 index 0000000..0fc1178 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/triage.md @@ -0,0 +1,17 @@ +This is an article about how to help with LibSass issues. Issue triage is a fancy word for explaining how we deal with incoming issues and make sure that the right problems get worked on. The lifecycle of an issue goes like this: + +1. Issue is reported by a user. +2. If the issue seems like a bug, then the "bug" tag is added. +3. If the reporting user didn't also create a spec test over at sass/sass-spec, the "needs test" tag is added. +4. Verify that Ruby Sass *does not* have the same bug. LibSass strives to be an exact replica of how Ruby Sass works. If it's an issue that neither project has solved, please close the ticket with the "not in sass" label. +5. The smallest possible breaking test is created in sass-spec. Cut away any extra information or non-breaking code until the core issue is made clear. +6. Again, verify that the expected output matches the latest Ruby Sass release. Do this by using your own tool OR by running ./sass-spec.rb in the spec folder and making sure that your test passes! +7. Create the test cases in sass-spec with the name spec/LibSass-todo-issues/issue_XXX/input.scss and expected_output.css where the XXX is the issue number here. +8. Commit that test to sass-spec, making sure to reference the issue in the comment message like "Test to demonstrate sass/LibSass#XXX". +9. Once the spec test exists, remove the "needs test" tag and replace it with "test written". +10. A C++ developer will then work on the issue and issue a pull request to fix the issue. +11. A core member verifies that the fix does actually fix the spec tests. +12. The fix is merged into the project. +13. 
The spec is moved from the LibSass-todo-issues folder into LibSass-closed-issues +14. The issue is closed +15. Have a soda pop or enjoyable beverage of your choice diff --git a/mybulma/node_modules/node-sass/src/libsass/docs/unicode.md b/mybulma/node_modules/node-sass/src/libsass/docs/unicode.md new file mode 100644 index 0000000..a1eb5b1 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/docs/unicode.md @@ -0,0 +1,45 @@ +LibSass currently expects all input to be utf8 encoded (and outputs only utf8), if you actually have any unicode characters at all. We do not support conversion between encodings, even if you declare it with a `@charset` rule. The text below was originally posted as an [issue](https://github.com/sass/libsass/issues/381) on the LibSass tracker. Since then the status is outdated as LibSass now expects your +input to be utf8/ascii compatible, as it has been proven that reading ANSI (e.g. single byte encodings) as utf8 can lead to unexpected +behavior, which can in the worst case lead to buffer overruns/segfaults. Therefore LibSass now checks your input to be valid utf8 encoded! + +### [Declaring character encodings in CSS](http://www.w3.org/International/questions/qa-css-charset.en) + +This [explains](http://www.w3.org/International/questions/qa-css-charset.en) how the character encoding of a css file is determined. Since we are only dealing with local files, we never have a HTTP header. So the precedence should be 'charset' rule, byte-order mark (BOM) or auto-detection (finally falling back to system default/UTF-8). This may not sound too hard to implement, but what about import rules? The CSS specs do not forbid the mixing of different encodings! I [solved that](https://github.com/mgreter/webmerge/) by converting all files to UTF-8 internally. On writing there is an option to tell the tool what encoding it should be (UTF-8 by default). One can also define if it should write a BOM or not and if it should add the charset declaration. + +Since my [tool]((https://github.com/mgreter/webmerge/)) is written in perl, I have a lot of utilities at hand to deal with different unicode charsets. I'm pretty sure that most OSS uses [ICU](http://site.icu-project.org/) or [libiconv](https://www.gnu.org/software/libiconv/) to convert between different encodings. But I have now idea how easy/hard this would be to integrate platform independent (it seems doable). ANSII (single byte encoding) to utf8 is basically just a conversion table (for every supported code-page). + +### Current status on LibSass unicode support + +LibSass should/is fully UTF (and therefore plain ASCII) compatible. + +~~Currently LibSass seems to handle the common UTF-8 case pretty well. I believe it should correctly support all ASCII compatible encodings (like UTF-8 or Latin-1). If all includes use the same encoding, the output should be correct (in the same encoding). It should also handle unicode chars in [selectors, variable names and other identifiers](https://github.com/hcatlin/libsass/issues/244#issuecomment-34681227). This is true for all ASCII compatible encodings. So the main incompatible encodings (I'm aware of) are UTF-16/UTF-32 (which could be converted to UTF-8 with libiconv).~~ + +LibSass 3.5 will enforce that your input is either plain ASCII (chars below 127) or utf8. It does not handle anything else, but therefore ensures that the output is in a valid form. Before version 3.5 you were able to mix different code-pages, which yielded unexpected behavior. 
+ +### Current encoding auto detection + +LibSass currently reads all kind of BOMs and will error out if it finds something it doesn't know how to handle! It seems that it throws away the optional UTF-8 BOM (if any is found). IMO it would be nice if users could configure that (also if a charset rule should be added to the output). But it does not really take any `@charset` into account, it always assumes your input is utf8 and ignores any given `@charset`! + +### What is currently not supported + +- Using non ASCII compatible encodings (like UTF-16, Latin-1 etc.) +- Using non ASCII characters in different encodings in different includes + +### What is missing to support the above cases + +- A way to convert between encodings (like libiconv/ICU) +- Sniffing the charset inside the file (source is available) +- Handling the conversion on import (and export) +- Optional: Make output encoding configurable +- Optional: Add optional/mandatory BOM (configurable) + +### Low priority feature + +I guess the current implementation should handle more than 99% of all real world use cases. +A) Unicode characters are still seldomly seen (as they can be written escaped) +~~B) It will still work if it's UTF-8 or in any of the most common known western ISO codepages. +Although I'm not sure how this applies to asian and other "exotic" codepages!~~ + +I guess the biggest Problem is to have libiconv/ICU (or some other) library as a dependency. Since it contains a lot of rules for the conversions, I see it as the only way to handle this correctly. Once that is sorted out it should be pretty much straight forward to implement the missing pieces (in parser.cpp - Parser::parse should return encoding and add Parser::sniff_charset, then convert the source byte stream to UTF-8). + +I hope the statements above all hold true. Unicode is really not the easiest topic to wrap your head around. But since I did all the above recently in Perl, I wanted to document it here. Feel free to extend or criticize. diff --git a/mybulma/node_modules/node-sass/src/libsass/extconf.rb b/mybulma/node_modules/node-sass/src/libsass/extconf.rb new file mode 100644 index 0000000..3e6d00b --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/extconf.rb @@ -0,0 +1,6 @@ +require 'mkmf' +# .. 
more stuff +#$LIBPATH.push(Config::CONFIG['libdir']) +$CFLAGS << " #{ENV["CFLAGS"]}" +$LIBS << " #{ENV["LIBS"]}" +create_makefile("libsass") diff --git a/mybulma/node_modules/node-sass/src/libsass/include/sass.h b/mybulma/node_modules/node-sass/src/libsass/include/sass.h new file mode 100644 index 0000000..1dd8b06 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/include/sass.h @@ -0,0 +1,15 @@ +#ifndef SASS_H +#define SASS_H + +// #define DEBUG 1 + +// include API headers +#include +#include +#include +#include +#include +#include + +#endif + diff --git a/mybulma/node_modules/node-sass/src/libsass/include/sass/base.h b/mybulma/node_modules/node-sass/src/libsass/include/sass/base.h new file mode 100644 index 0000000..88dd8d3 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/include/sass/base.h @@ -0,0 +1,89 @@ +#ifndef SASS_BASE_H +#define SASS_BASE_H + +// #define DEBUG_SHARED_PTR + +#ifdef _MSC_VER + #pragma warning(disable : 4503) + #ifndef _SCL_SECURE_NO_WARNINGS + #define _SCL_SECURE_NO_WARNINGS + #endif + #ifndef _CRT_SECURE_NO_WARNINGS + #define _CRT_SECURE_NO_WARNINGS + #endif + #ifndef _CRT_NONSTDC_NO_DEPRECATE + #define _CRT_NONSTDC_NO_DEPRECATE + #endif +#endif + +#include +#include + +#ifdef __GNUC__ + #define DEPRECATED(func) func __attribute__ ((deprecated)) +#elif defined(_MSC_VER) + #define DEPRECATED(func) __declspec(deprecated) func +#else + #pragma message("WARNING: You need to implement DEPRECATED for this compiler") + #define DEPRECATED(func) func +#endif + +#ifdef _WIN32 + + /* You should define ADD_EXPORTS *only* when building the DLL. */ + #ifdef ADD_EXPORTS + #define ADDAPI __declspec(dllexport) + #define ADDCALL __cdecl + #else + #define ADDAPI + #define ADDCALL + #endif + +#else /* _WIN32 not defined. */ + + /* Define with no value on non-Windows OSes. */ + #define ADDAPI + #define ADDCALL + +#endif + +/* Make sure functions are exported with C linkage under C++ compilers. */ +#ifdef __cplusplus +extern "C" { +#endif + + +// Different render styles +enum Sass_Output_Style { + SASS_STYLE_NESTED, + SASS_STYLE_EXPANDED, + SASS_STYLE_COMPACT, + SASS_STYLE_COMPRESSED, + // only used internaly + SASS_STYLE_INSPECT, + SASS_STYLE_TO_SASS +}; + +// to allocate buffer to be filled +ADDAPI void* ADDCALL sass_alloc_memory(size_t size); +// to allocate a buffer from existing string +ADDAPI char* ADDCALL sass_copy_c_string(const char* str); +// to free overtaken memory when done +ADDAPI void ADDCALL sass_free_memory(void* ptr); + +// Some convenient string helper function +ADDAPI char* ADDCALL sass_string_quote (const char* str, const char quote_mark); +ADDAPI char* ADDCALL sass_string_unquote (const char* str); + +// Implemented sass language version +// Hardcoded version 3.4 for time being +ADDAPI const char* ADDCALL libsass_version(void); + +// Get compiled libsass language +ADDAPI const char* ADDCALL libsass_language_version(void); + +#ifdef __cplusplus +} // __cplusplus defined. 
+#endif + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/include/sass/context.h b/mybulma/node_modules/node-sass/src/libsass/include/sass/context.h new file mode 100644 index 0000000..2f88d68 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/include/sass/context.h @@ -0,0 +1,170 @@ +#ifndef SASS_C_CONTEXT_H +#define SASS_C_CONTEXT_H + +#include +#include +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + + +// Forward declaration +struct Sass_Compiler; + +// Forward declaration +struct Sass_Options; // base struct +struct Sass_Context; // : Sass_Options +struct Sass_File_Context; // : Sass_Context +struct Sass_Data_Context; // : Sass_Context + +// Compiler states +enum Sass_Compiler_State { + SASS_COMPILER_CREATED, + SASS_COMPILER_PARSED, + SASS_COMPILER_EXECUTED +}; + +// Create and initialize an option struct +ADDAPI struct Sass_Options* ADDCALL sass_make_options (void); +// Create and initialize a specific context +ADDAPI struct Sass_File_Context* ADDCALL sass_make_file_context (const char* input_path); +ADDAPI struct Sass_Data_Context* ADDCALL sass_make_data_context (char* source_string); + +// Call the compilation step for the specific context +ADDAPI int ADDCALL sass_compile_file_context (struct Sass_File_Context* ctx); +ADDAPI int ADDCALL sass_compile_data_context (struct Sass_Data_Context* ctx); + +// Create a sass compiler instance for more control +ADDAPI struct Sass_Compiler* ADDCALL sass_make_file_compiler (struct Sass_File_Context* file_ctx); +ADDAPI struct Sass_Compiler* ADDCALL sass_make_data_compiler (struct Sass_Data_Context* data_ctx); + +// Execute the different compilation steps individually +// Usefull if you only want to query the included files +ADDAPI int ADDCALL sass_compiler_parse(struct Sass_Compiler* compiler); +ADDAPI int ADDCALL sass_compiler_execute(struct Sass_Compiler* compiler); + +// Release all memory allocated with the compiler +// This does _not_ include any contexts or options +ADDAPI void ADDCALL sass_delete_compiler(struct Sass_Compiler* compiler); +ADDAPI void ADDCALL sass_delete_options(struct Sass_Options* options); + +// Release all memory allocated and also ourself +ADDAPI void ADDCALL sass_delete_file_context (struct Sass_File_Context* ctx); +ADDAPI void ADDCALL sass_delete_data_context (struct Sass_Data_Context* ctx); + +// Getters for context from specific implementation +ADDAPI struct Sass_Context* ADDCALL sass_file_context_get_context (struct Sass_File_Context* file_ctx); +ADDAPI struct Sass_Context* ADDCALL sass_data_context_get_context (struct Sass_Data_Context* data_ctx); + +// Getters for Context_Options from Sass_Context +ADDAPI struct Sass_Options* ADDCALL sass_context_get_options (struct Sass_Context* ctx); +ADDAPI struct Sass_Options* ADDCALL sass_file_context_get_options (struct Sass_File_Context* file_ctx); +ADDAPI struct Sass_Options* ADDCALL sass_data_context_get_options (struct Sass_Data_Context* data_ctx); +ADDAPI void ADDCALL sass_file_context_set_options (struct Sass_File_Context* file_ctx, struct Sass_Options* opt); +ADDAPI void ADDCALL sass_data_context_set_options (struct Sass_Data_Context* data_ctx, struct Sass_Options* opt); + + +// Getters for Context_Option values +ADDAPI int ADDCALL sass_option_get_precision (struct Sass_Options* options); +ADDAPI enum Sass_Output_Style ADDCALL sass_option_get_output_style (struct Sass_Options* options); +ADDAPI bool ADDCALL sass_option_get_source_comments (struct Sass_Options* options); +ADDAPI bool ADDCALL 
sass_option_get_source_map_embed (struct Sass_Options* options); +ADDAPI bool ADDCALL sass_option_get_source_map_contents (struct Sass_Options* options); +ADDAPI bool ADDCALL sass_option_get_source_map_file_urls (struct Sass_Options* options); +ADDAPI bool ADDCALL sass_option_get_omit_source_map_url (struct Sass_Options* options); +ADDAPI bool ADDCALL sass_option_get_is_indented_syntax_src (struct Sass_Options* options); +ADDAPI const char* ADDCALL sass_option_get_indent (struct Sass_Options* options); +ADDAPI const char* ADDCALL sass_option_get_linefeed (struct Sass_Options* options); +ADDAPI const char* ADDCALL sass_option_get_input_path (struct Sass_Options* options); +ADDAPI const char* ADDCALL sass_option_get_output_path (struct Sass_Options* options); +ADDAPI const char* ADDCALL sass_option_get_source_map_file (struct Sass_Options* options); +ADDAPI const char* ADDCALL sass_option_get_source_map_root (struct Sass_Options* options); +ADDAPI Sass_Importer_List ADDCALL sass_option_get_c_headers (struct Sass_Options* options); +ADDAPI Sass_Importer_List ADDCALL sass_option_get_c_importers (struct Sass_Options* options); +ADDAPI Sass_Function_List ADDCALL sass_option_get_c_functions (struct Sass_Options* options); + +// Setters for Context_Option values +ADDAPI void ADDCALL sass_option_set_precision (struct Sass_Options* options, int precision); +ADDAPI void ADDCALL sass_option_set_output_style (struct Sass_Options* options, enum Sass_Output_Style output_style); +ADDAPI void ADDCALL sass_option_set_source_comments (struct Sass_Options* options, bool source_comments); +ADDAPI void ADDCALL sass_option_set_source_map_embed (struct Sass_Options* options, bool source_map_embed); +ADDAPI void ADDCALL sass_option_set_source_map_contents (struct Sass_Options* options, bool source_map_contents); +ADDAPI void ADDCALL sass_option_set_source_map_file_urls (struct Sass_Options* options, bool source_map_file_urls); +ADDAPI void ADDCALL sass_option_set_omit_source_map_url (struct Sass_Options* options, bool omit_source_map_url); +ADDAPI void ADDCALL sass_option_set_is_indented_syntax_src (struct Sass_Options* options, bool is_indented_syntax_src); +ADDAPI void ADDCALL sass_option_set_indent (struct Sass_Options* options, const char* indent); +ADDAPI void ADDCALL sass_option_set_linefeed (struct Sass_Options* options, const char* linefeed); +ADDAPI void ADDCALL sass_option_set_input_path (struct Sass_Options* options, const char* input_path); +ADDAPI void ADDCALL sass_option_set_output_path (struct Sass_Options* options, const char* output_path); +ADDAPI void ADDCALL sass_option_set_plugin_path (struct Sass_Options* options, const char* plugin_path); +ADDAPI void ADDCALL sass_option_set_include_path (struct Sass_Options* options, const char* include_path); +ADDAPI void ADDCALL sass_option_set_source_map_file (struct Sass_Options* options, const char* source_map_file); +ADDAPI void ADDCALL sass_option_set_source_map_root (struct Sass_Options* options, const char* source_map_root); +ADDAPI void ADDCALL sass_option_set_c_headers (struct Sass_Options* options, Sass_Importer_List c_headers); +ADDAPI void ADDCALL sass_option_set_c_importers (struct Sass_Options* options, Sass_Importer_List c_importers); +ADDAPI void ADDCALL sass_option_set_c_functions (struct Sass_Options* options, Sass_Function_List c_functions); + + +// Getters for Sass_Context values +ADDAPI const char* ADDCALL sass_context_get_output_string (struct Sass_Context* ctx); +ADDAPI int ADDCALL sass_context_get_error_status (struct Sass_Context* 
ctx); +ADDAPI const char* ADDCALL sass_context_get_error_json (struct Sass_Context* ctx); +ADDAPI const char* ADDCALL sass_context_get_error_text (struct Sass_Context* ctx); +ADDAPI const char* ADDCALL sass_context_get_error_message (struct Sass_Context* ctx); +ADDAPI const char* ADDCALL sass_context_get_error_file (struct Sass_Context* ctx); +ADDAPI const char* ADDCALL sass_context_get_error_src (struct Sass_Context* ctx); +ADDAPI size_t ADDCALL sass_context_get_error_line (struct Sass_Context* ctx); +ADDAPI size_t ADDCALL sass_context_get_error_column (struct Sass_Context* ctx); +ADDAPI const char* ADDCALL sass_context_get_source_map_string (struct Sass_Context* ctx); +ADDAPI char** ADDCALL sass_context_get_included_files (struct Sass_Context* ctx); + +// Getters for options include path array +ADDAPI size_t ADDCALL sass_option_get_include_path_size(struct Sass_Options* options); +ADDAPI const char* ADDCALL sass_option_get_include_path(struct Sass_Options* options, size_t i); + +// Calculate the size of the stored null terminated array +ADDAPI size_t ADDCALL sass_context_get_included_files_size (struct Sass_Context* ctx); + +// Take ownership of memory (value on context is set to 0) +ADDAPI char* ADDCALL sass_context_take_error_json (struct Sass_Context* ctx); +ADDAPI char* ADDCALL sass_context_take_error_text (struct Sass_Context* ctx); +ADDAPI char* ADDCALL sass_context_take_error_message (struct Sass_Context* ctx); +ADDAPI char* ADDCALL sass_context_take_error_file (struct Sass_Context* ctx); +ADDAPI char* ADDCALL sass_context_take_output_string (struct Sass_Context* ctx); +ADDAPI char* ADDCALL sass_context_take_source_map_string (struct Sass_Context* ctx); +ADDAPI char** ADDCALL sass_context_take_included_files (struct Sass_Context* ctx); + +// Getters for Sass_Compiler options +ADDAPI enum Sass_Compiler_State ADDCALL sass_compiler_get_state(struct Sass_Compiler* compiler); +ADDAPI struct Sass_Context* ADDCALL sass_compiler_get_context(struct Sass_Compiler* compiler); +ADDAPI struct Sass_Options* ADDCALL sass_compiler_get_options(struct Sass_Compiler* compiler); +ADDAPI size_t ADDCALL sass_compiler_get_import_stack_size(struct Sass_Compiler* compiler); +ADDAPI Sass_Import_Entry ADDCALL sass_compiler_get_last_import(struct Sass_Compiler* compiler); +ADDAPI Sass_Import_Entry ADDCALL sass_compiler_get_import_entry(struct Sass_Compiler* compiler, size_t idx); +ADDAPI size_t ADDCALL sass_compiler_get_callee_stack_size(struct Sass_Compiler* compiler); +ADDAPI Sass_Callee_Entry ADDCALL sass_compiler_get_last_callee(struct Sass_Compiler* compiler); +ADDAPI Sass_Callee_Entry ADDCALL sass_compiler_get_callee_entry(struct Sass_Compiler* compiler, size_t idx); + +// Push function for paths (no manipulation support for now) +ADDAPI void ADDCALL sass_option_push_plugin_path (struct Sass_Options* options, const char* path); +ADDAPI void ADDCALL sass_option_push_include_path (struct Sass_Options* options, const char* path); + +// Resolve a file via the given include paths in the sass option struct +// find_file looks for the exact file name while find_include does a regular sass include +ADDAPI char* ADDCALL sass_find_file (const char* path, struct Sass_Options* opt); +ADDAPI char* ADDCALL sass_find_include (const char* path, struct Sass_Options* opt); + +// Resolve a file relative to last import or include paths in the sass option struct +// find_file looks for the exact file name while find_include does a regular sass include +ADDAPI char* ADDCALL sass_compiler_find_file (const char* path, struct 
Sass_Compiler* compiler); +ADDAPI char* ADDCALL sass_compiler_find_include (const char* path, struct Sass_Compiler* compiler); + +#ifdef __cplusplus +} // __cplusplus defined. +#endif + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/include/sass/functions.h b/mybulma/node_modules/node-sass/src/libsass/include/sass/functions.h new file mode 100644 index 0000000..ac47e8e --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/include/sass/functions.h @@ -0,0 +1,139 @@ +#ifndef SASS_C_FUNCTIONS_H +#define SASS_C_FUNCTIONS_H + +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + + +// Forward declaration +struct Sass_Env; +struct Sass_Callee; +struct Sass_Import; +struct Sass_Options; +struct Sass_Compiler; +struct Sass_Importer; +struct Sass_Function; + +// Typedef helpers for callee lists +typedef struct Sass_Env (*Sass_Env_Frame); +// Typedef helpers for callee lists +typedef struct Sass_Callee (*Sass_Callee_Entry); +// Typedef helpers for import lists +typedef struct Sass_Import (*Sass_Import_Entry); +typedef struct Sass_Import* (*Sass_Import_List); +// Typedef helpers for custom importer lists +typedef struct Sass_Importer (*Sass_Importer_Entry); +typedef struct Sass_Importer* (*Sass_Importer_List); +// Typedef defining importer signature and return type +typedef Sass_Import_List (*Sass_Importer_Fn) + (const char* url, Sass_Importer_Entry cb, struct Sass_Compiler* compiler); + +// Typedef helpers for custom functions lists +typedef struct Sass_Function (*Sass_Function_Entry); +typedef struct Sass_Function* (*Sass_Function_List); +// Typedef defining function signature and return type +typedef union Sass_Value* (*Sass_Function_Fn) + (const union Sass_Value*, Sass_Function_Entry cb, struct Sass_Compiler* compiler); + +// Type of function calls +enum Sass_Callee_Type { + SASS_CALLEE_MIXIN, + SASS_CALLEE_FUNCTION, + SASS_CALLEE_C_FUNCTION, +}; + +// Creator for sass custom importer return argument list +ADDAPI Sass_Importer_List ADDCALL sass_make_importer_list (size_t length); +ADDAPI Sass_Importer_Entry ADDCALL sass_importer_get_list_entry (Sass_Importer_List list, size_t idx); +ADDAPI void ADDCALL sass_importer_set_list_entry (Sass_Importer_List list, size_t idx, Sass_Importer_Entry entry); +ADDAPI void ADDCALL sass_delete_importer_list (Sass_Importer_List list); + + +// Creators for custom importer callback (with some additional pointer) +// The pointer is mostly used to store the callback into the actual binding +ADDAPI Sass_Importer_Entry ADDCALL sass_make_importer (Sass_Importer_Fn importer, double priority, void* cookie); + +// Getters for import function descriptors +ADDAPI Sass_Importer_Fn ADDCALL sass_importer_get_function (Sass_Importer_Entry cb); +ADDAPI double ADDCALL sass_importer_get_priority (Sass_Importer_Entry cb); +ADDAPI void* ADDCALL sass_importer_get_cookie (Sass_Importer_Entry cb); + +// Deallocator for associated memory +ADDAPI void ADDCALL sass_delete_importer (Sass_Importer_Entry cb); + +// Creator for sass custom importer return argument list +ADDAPI Sass_Import_List ADDCALL sass_make_import_list (size_t length); +// Creator for a single import entry returned by the custom importer inside the list +ADDAPI Sass_Import_Entry ADDCALL sass_make_import_entry (const char* path, char* source, char* srcmap); +ADDAPI Sass_Import_Entry ADDCALL sass_make_import (const char* imp_path, const char* abs_base, char* source, char* srcmap); +// set error message to abort import and to print out a message (path from existing object is 
used in output) +ADDAPI Sass_Import_Entry ADDCALL sass_import_set_error(Sass_Import_Entry import, const char* message, size_t line, size_t col); + +// Setters to insert an entry into the import list (you may also use [] access directly) +// Since we are dealing with pointers they should have a guaranteed and fixed size +ADDAPI void ADDCALL sass_import_set_list_entry (Sass_Import_List list, size_t idx, Sass_Import_Entry entry); +ADDAPI Sass_Import_Entry ADDCALL sass_import_get_list_entry (Sass_Import_List list, size_t idx); + +// Getters for callee entry +ADDAPI const char* ADDCALL sass_callee_get_name (Sass_Callee_Entry); +ADDAPI const char* ADDCALL sass_callee_get_path (Sass_Callee_Entry); +ADDAPI size_t ADDCALL sass_callee_get_line (Sass_Callee_Entry); +ADDAPI size_t ADDCALL sass_callee_get_column (Sass_Callee_Entry); +ADDAPI enum Sass_Callee_Type ADDCALL sass_callee_get_type (Sass_Callee_Entry); +ADDAPI Sass_Env_Frame ADDCALL sass_callee_get_env (Sass_Callee_Entry); + +// Getters and Setters for environments (lexical, local and global) +ADDAPI union Sass_Value* ADDCALL sass_env_get_lexical (Sass_Env_Frame, const char*); +ADDAPI void ADDCALL sass_env_set_lexical (Sass_Env_Frame, const char*, union Sass_Value*); +ADDAPI union Sass_Value* ADDCALL sass_env_get_local (Sass_Env_Frame, const char*); +ADDAPI void ADDCALL sass_env_set_local (Sass_Env_Frame, const char*, union Sass_Value*); +ADDAPI union Sass_Value* ADDCALL sass_env_get_global (Sass_Env_Frame, const char*); +ADDAPI void ADDCALL sass_env_set_global (Sass_Env_Frame, const char*, union Sass_Value*); + +// Getters for import entry +ADDAPI const char* ADDCALL sass_import_get_imp_path (Sass_Import_Entry); +ADDAPI const char* ADDCALL sass_import_get_abs_path (Sass_Import_Entry); +ADDAPI const char* ADDCALL sass_import_get_source (Sass_Import_Entry); +ADDAPI const char* ADDCALL sass_import_get_srcmap (Sass_Import_Entry); +// Explicit functions to take ownership of these items +// The property on our struct will be reset to NULL +ADDAPI char* ADDCALL sass_import_take_source (Sass_Import_Entry); +ADDAPI char* ADDCALL sass_import_take_srcmap (Sass_Import_Entry); +// Getters from import error entry +ADDAPI size_t ADDCALL sass_import_get_error_line (Sass_Import_Entry); +ADDAPI size_t ADDCALL sass_import_get_error_column (Sass_Import_Entry); +ADDAPI const char* ADDCALL sass_import_get_error_message (Sass_Import_Entry); + +// Deallocator for associated memory (incl. 
entries) +ADDAPI void ADDCALL sass_delete_import_list (Sass_Import_List); +// Just in case we have some stray import structs +ADDAPI void ADDCALL sass_delete_import (Sass_Import_Entry); + + + +// Creators for sass function list and function descriptors +ADDAPI Sass_Function_List ADDCALL sass_make_function_list (size_t length); +ADDAPI Sass_Function_Entry ADDCALL sass_make_function (const char* signature, Sass_Function_Fn cb, void* cookie); +ADDAPI void ADDCALL sass_delete_function (Sass_Function_Entry entry); +ADDAPI void ADDCALL sass_delete_function_list (Sass_Function_List list); + +// Setters and getters for callbacks on function lists +ADDAPI Sass_Function_Entry ADDCALL sass_function_get_list_entry(Sass_Function_List list, size_t pos); +ADDAPI void ADDCALL sass_function_set_list_entry(Sass_Function_List list, size_t pos, Sass_Function_Entry cb); + +// Getters for custom function descriptors +ADDAPI const char* ADDCALL sass_function_get_signature (Sass_Function_Entry cb); +ADDAPI Sass_Function_Fn ADDCALL sass_function_get_function (Sass_Function_Entry cb); +ADDAPI void* ADDCALL sass_function_get_cookie (Sass_Function_Entry cb); + + +#ifdef __cplusplus +} // __cplusplus defined. +#endif + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/include/sass/values.h b/mybulma/node_modules/node-sass/src/libsass/include/sass/values.h new file mode 100644 index 0000000..9832038 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/include/sass/values.h @@ -0,0 +1,145 @@ +#ifndef SASS_C_VALUES_H +#define SASS_C_VALUES_H + +#include +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif + + +// Forward declaration +union Sass_Value; + +// Type for Sass values +enum Sass_Tag { + SASS_BOOLEAN, + SASS_NUMBER, + SASS_COLOR, + SASS_STRING, + SASS_LIST, + SASS_MAP, + SASS_NULL, + SASS_ERROR, + SASS_WARNING +}; + +// Tags for denoting Sass list separators +enum Sass_Separator { + SASS_COMMA, + SASS_SPACE, + // only used internally to represent a hash map before evaluation + // otherwise we would be too early to check for duplicate keys + SASS_HASH +}; + +// Value Operators +enum Sass_OP { + AND, OR, // logical connectives + EQ, NEQ, GT, GTE, LT, LTE, // arithmetic relations + ADD, SUB, MUL, DIV, MOD, // arithmetic functions + NUM_OPS // so we know how big to make the op table +}; + +// Creator functions for all value types +ADDAPI union Sass_Value* ADDCALL sass_make_null (void); +ADDAPI union Sass_Value* ADDCALL sass_make_boolean (bool val); +ADDAPI union Sass_Value* ADDCALL sass_make_string (const char* val); +ADDAPI union Sass_Value* ADDCALL sass_make_qstring (const char* val); +ADDAPI union Sass_Value* ADDCALL sass_make_number (double val, const char* unit); +ADDAPI union Sass_Value* ADDCALL sass_make_color (double r, double g, double b, double a); +ADDAPI union Sass_Value* ADDCALL sass_make_list (size_t len, enum Sass_Separator sep, bool is_bracketed); +ADDAPI union Sass_Value* ADDCALL sass_make_map (size_t len); +ADDAPI union Sass_Value* ADDCALL sass_make_error (const char* msg); +ADDAPI union Sass_Value* ADDCALL sass_make_warning (const char* msg); + +// Generic destructor function for all types +// Will release memory of all associated Sass_Values +// Means we will delete recursively for lists and maps +ADDAPI void ADDCALL sass_delete_value (union Sass_Value* val); + +// Make a deep cloned copy of the given sass value +ADDAPI union Sass_Value* ADDCALL sass_clone_value (const union Sass_Value* val); + +// Execute an operation for two Sass_Values and return the result 
as a Sass_Value too +ADDAPI union Sass_Value* ADDCALL sass_value_op (enum Sass_OP op, const union Sass_Value* a, const union Sass_Value* b); + +// Stringify a Sass_Values and also return the result as a Sass_Value (of type STRING) +ADDAPI union Sass_Value* ADDCALL sass_value_stringify (const union Sass_Value* a, bool compressed, int precision); + +// Return the sass tag for a generic sass value +// Check is needed before accessing specific values! +ADDAPI enum Sass_Tag ADDCALL sass_value_get_tag (const union Sass_Value* v); + +// Check value to be of a specific type +// Can also be used before accessing properties! +ADDAPI bool ADDCALL sass_value_is_null (const union Sass_Value* v); +ADDAPI bool ADDCALL sass_value_is_number (const union Sass_Value* v); +ADDAPI bool ADDCALL sass_value_is_string (const union Sass_Value* v); +ADDAPI bool ADDCALL sass_value_is_boolean (const union Sass_Value* v); +ADDAPI bool ADDCALL sass_value_is_color (const union Sass_Value* v); +ADDAPI bool ADDCALL sass_value_is_list (const union Sass_Value* v); +ADDAPI bool ADDCALL sass_value_is_map (const union Sass_Value* v); +ADDAPI bool ADDCALL sass_value_is_error (const union Sass_Value* v); +ADDAPI bool ADDCALL sass_value_is_warning (const union Sass_Value* v); + +// Getters and setters for Sass_Number +ADDAPI double ADDCALL sass_number_get_value (const union Sass_Value* v); +ADDAPI void ADDCALL sass_number_set_value (union Sass_Value* v, double value); +ADDAPI const char* ADDCALL sass_number_get_unit (const union Sass_Value* v); +ADDAPI void ADDCALL sass_number_set_unit (union Sass_Value* v, char* unit); + +// Getters and setters for Sass_String +ADDAPI const char* ADDCALL sass_string_get_value (const union Sass_Value* v); +ADDAPI void ADDCALL sass_string_set_value (union Sass_Value* v, char* value); +ADDAPI bool ADDCALL sass_string_is_quoted(const union Sass_Value* v); +ADDAPI void ADDCALL sass_string_set_quoted(union Sass_Value* v, bool quoted); + +// Getters and setters for Sass_Boolean +ADDAPI bool ADDCALL sass_boolean_get_value (const union Sass_Value* v); +ADDAPI void ADDCALL sass_boolean_set_value (union Sass_Value* v, bool value); + +// Getters and setters for Sass_Color +ADDAPI double ADDCALL sass_color_get_r (const union Sass_Value* v); +ADDAPI void ADDCALL sass_color_set_r (union Sass_Value* v, double r); +ADDAPI double ADDCALL sass_color_get_g (const union Sass_Value* v); +ADDAPI void ADDCALL sass_color_set_g (union Sass_Value* v, double g); +ADDAPI double ADDCALL sass_color_get_b (const union Sass_Value* v); +ADDAPI void ADDCALL sass_color_set_b (union Sass_Value* v, double b); +ADDAPI double ADDCALL sass_color_get_a (const union Sass_Value* v); +ADDAPI void ADDCALL sass_color_set_a (union Sass_Value* v, double a); + +// Getter for the number of items in list +ADDAPI size_t ADDCALL sass_list_get_length (const union Sass_Value* v); +// Getters and setters for Sass_List +ADDAPI enum Sass_Separator ADDCALL sass_list_get_separator (const union Sass_Value* v); +ADDAPI void ADDCALL sass_list_set_separator (union Sass_Value* v, enum Sass_Separator value); +ADDAPI bool ADDCALL sass_list_get_is_bracketed (const union Sass_Value* v); +ADDAPI void ADDCALL sass_list_set_is_bracketed (union Sass_Value* v, bool value); +// Getters and setters for Sass_List values +ADDAPI union Sass_Value* ADDCALL sass_list_get_value (const union Sass_Value* v, size_t i); +ADDAPI void ADDCALL sass_list_set_value (union Sass_Value* v, size_t i, union Sass_Value* value); + +// Getter for the number of items in map +ADDAPI size_t 
ADDCALL sass_map_get_length (const union Sass_Value* v); +// Getters and setters for Sass_Map keys and values +ADDAPI union Sass_Value* ADDCALL sass_map_get_key (const union Sass_Value* v, size_t i); +ADDAPI void ADDCALL sass_map_set_key (union Sass_Value* v, size_t i, union Sass_Value*); +ADDAPI union Sass_Value* ADDCALL sass_map_get_value (const union Sass_Value* v, size_t i); +ADDAPI void ADDCALL sass_map_set_value (union Sass_Value* v, size_t i, union Sass_Value*); + +// Getters and setters for Sass_Error +ADDAPI char* ADDCALL sass_error_get_message (const union Sass_Value* v); +ADDAPI void ADDCALL sass_error_set_message (union Sass_Value* v, char* msg); + +// Getters and setters for Sass_Warning +ADDAPI char* ADDCALL sass_warning_get_message (const union Sass_Value* v); +ADDAPI void ADDCALL sass_warning_set_message (union Sass_Value* v, char* msg); + +#ifdef __cplusplus +} // __cplusplus defined. +#endif + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/include/sass/version.h b/mybulma/node_modules/node-sass/src/libsass/include/sass/version.h new file mode 100644 index 0000000..56ea016 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/include/sass/version.h @@ -0,0 +1,12 @@ +#ifndef SASS_VERSION_H +#define SASS_VERSION_H + +#ifndef LIBSASS_VERSION +#define LIBSASS_VERSION "[NA]" +#endif + +#ifndef LIBSASS_LANGUAGE_VERSION +#define LIBSASS_LANGUAGE_VERSION "3.5" +#endif + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/include/sass/version.h.in b/mybulma/node_modules/node-sass/src/libsass/include/sass/version.h.in new file mode 100644 index 0000000..b8d4072 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/include/sass/version.h.in @@ -0,0 +1,12 @@ +#ifndef SASS_VERSION_H +#define SASS_VERSION_H + +#ifndef LIBSASS_VERSION +#define LIBSASS_VERSION "@PACKAGE_VERSION@" +#endif + +#ifndef LIBSASS_LANGUAGE_VERSION +#define LIBSASS_LANGUAGE_VERSION "3.5" +#endif + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/include/sass2scss.h b/mybulma/node_modules/node-sass/src/libsass/include/sass2scss.h new file mode 100644 index 0000000..8736b2c --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/include/sass2scss.h @@ -0,0 +1,120 @@ +/** + * sass2scss + * Licensed under the MIT License + * Copyright (c) Marcel Greter + */ + +#ifndef SASS2SCSS_H +#define SASS2SCSS_H + +#ifdef _WIN32 + + /* You should define ADD_EXPORTS *only* when building the DLL. */ + #ifdef ADD_EXPORTS + #define ADDAPI __declspec(dllexport) + #define ADDCALL __cdecl + #else + #define ADDAPI + #define ADDCALL + #endif + +#else /* _WIN32 not defined. */ + + /* Define with no value on non-Windows OSes. 
*/ + #define ADDAPI + #define ADDCALL + +#endif + +#ifdef __cplusplus + +#include +#include +#include +#include +#include + +#ifndef SASS2SCSS_VERSION +// Hardcode once the file is copied from +// https://github.com/mgreter/sass2scss +#define SASS2SCSS_VERSION "1.1.1" +#endif + +// add namespace for c++ +namespace Sass +{ + + // pretty print options + const int SASS2SCSS_PRETTIFY_0 = 0; + const int SASS2SCSS_PRETTIFY_1 = 1; + const int SASS2SCSS_PRETTIFY_2 = 2; + const int SASS2SCSS_PRETTIFY_3 = 3; + + // remove one-line comment + const int SASS2SCSS_KEEP_COMMENT = 32; + // remove multi-line comments + const int SASS2SCSS_STRIP_COMMENT = 64; + // convert one-line to multi-line + const int SASS2SCSS_CONVERT_COMMENT = 128; + + // String for finding something interesting + const std::string SASS2SCSS_FIND_WHITESPACE = " \t\n\v\f\r"; + + // converter struct + // holding all states + struct converter + { + // bit options + int options; + // is selector + bool selector; + // concat lists + bool comma; + // has property + bool property; + // has semicolon + bool semicolon; + // comment context + std::string comment; + // flag end of file + bool end_of_file; + // whitespace buffer + std::string whitespace; + // context/block stack + std::stack indents; + }; + + // function only available in c++ code + char* sass2scss (const std::string& sass, const int options); + +} +// EO namespace + +// declare for c +extern "C" { +#endif + + // prettyfy print options + #define SASS2SCSS_PRETTIFY_0 0 + #define SASS2SCSS_PRETTIFY_1 1 + #define SASS2SCSS_PRETTIFY_2 2 + #define SASS2SCSS_PRETTIFY_3 3 + + // keep one-line comments + #define SASS2SCSS_KEEP_COMMENT 32 + // remove multi-line comments + #define SASS2SCSS_STRIP_COMMENT 64 + // convert one-line to multi-line + #define SASS2SCSS_CONVERT_COMMENT 128 + + // available to c and c++ code + ADDAPI char* ADDCALL sass2scss (const char* sass, const int options); + + // Get compiled sass2scss version + ADDAPI const char* ADDCALL sass2scss_version(void); + +#ifdef __cplusplus +} // __cplusplus defined. +#endif + +#endif \ No newline at end of file diff --git a/mybulma/node_modules/node-sass/src/libsass/m4/.gitkeep b/mybulma/node_modules/node-sass/src/libsass/m4/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/mybulma/node_modules/node-sass/src/libsass/m4/m4-ax_cxx_compile_stdcxx_11.m4 b/mybulma/node_modules/node-sass/src/libsass/m4/m4-ax_cxx_compile_stdcxx_11.m4 new file mode 100644 index 0000000..395b13d --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/m4/m4-ax_cxx_compile_stdcxx_11.m4 @@ -0,0 +1,167 @@ +# ============================================================================ +# http://www.gnu.org/software/autoconf-archive/ax_cxx_compile_stdcxx_11.html +# ============================================================================ +# +# SYNOPSIS +# +# AX_CXX_COMPILE_STDCXX_11([ext|noext],[mandatory|optional]) +# +# DESCRIPTION +# +# Check for baseline language coverage in the compiler for the C++11 +# standard; if necessary, add switches to CXXFLAGS to enable support. +# +# The first argument, if specified, indicates whether you insist on an +# extended mode (e.g. -std=gnu++11) or a strict conformance mode (e.g. +# -std=c++11). If neither is specified, you get whatever works, with +# preference for an extended mode. +# +# The second argument, if specified 'mandatory' or if left unspecified, +# indicates that baseline C++11 support is required and that the macro +# should error out if no mode with that support is found. 
If specified +# 'optional', then configuration proceeds regardless, after defining +# HAVE_CXX11 if and only if a supporting mode is found. +# +# LICENSE +# +# Copyright (c) 2008 Benjamin Kosnik +# Copyright (c) 2012 Zack Weinberg +# Copyright (c) 2013 Roy Stogner +# Copyright (c) 2014, 2015 Google Inc.; contributed by Alexey Sokolov +# +# Copying and distribution of this file, with or without modification, are +# permitted in any medium without royalty provided the copyright notice +# and this notice are preserved. This file is offered as-is, without any +# warranty. + +#serial 11 + +m4_define([_AX_CXX_COMPILE_STDCXX_11_testbody], [[ + template + struct check + { + static_assert(sizeof(int) <= sizeof(T), "not big enough"); + }; + + struct Base { + virtual void f() {} + }; + struct Child : public Base { + virtual void f() override {} + }; + + typedef check> right_angle_brackets; + + int a; + decltype(a) b; + + typedef check check_type; + check_type c; + check_type&& cr = static_cast(c); + + auto d = a; + auto l = [](){}; + // Prevent Clang error: unused variable 'l' [-Werror,-Wunused-variable] + struct use_l { use_l() { l(); } }; + + // http://stackoverflow.com/questions/13728184/template-aliases-and-sfinae + // Clang 3.1 fails with headers of libstd++ 4.8.3 when using std::function because of this + namespace test_template_alias_sfinae { + struct foo {}; + + template + using member = typename T::member_type; + + template + void func(...) {} + + template + void func(member*) {} + + void test(); + + void test() { + func(0); + } + } +]]) + +AC_DEFUN([AX_CXX_COMPILE_STDCXX_11], [dnl + m4_if([$1], [], [], + [$1], [ext], [], + [$1], [noext], [], + [m4_fatal([invalid argument `$1' to AX_CXX_COMPILE_STDCXX_11])])dnl + m4_if([$2], [], [ax_cxx_compile_cxx11_required=true], + [$2], [mandatory], [ax_cxx_compile_cxx11_required=true], + [$2], [optional], [ax_cxx_compile_cxx11_required=false], + [m4_fatal([invalid second argument `$2' to AX_CXX_COMPILE_STDCXX_11])]) + AC_LANG_PUSH([C++])dnl + ac_success=no + AC_CACHE_CHECK(whether $CXX supports C++11 features by default, + ax_cv_cxx_compile_cxx11, + [AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_11_testbody])], + [ax_cv_cxx_compile_cxx11=yes], + [ax_cv_cxx_compile_cxx11=no])]) + if test x$ax_cv_cxx_compile_cxx11 = xyes; then + ac_success=yes + fi + + m4_if([$1], [noext], [], [dnl + if test x$ac_success = xno; then + for switch in -std=gnu++11 -std=gnu++0x; do + cachevar=AS_TR_SH([ax_cv_cxx_compile_cxx11_$switch]) + AC_CACHE_CHECK(whether $CXX supports C++11 features with $switch, + $cachevar, + [ac_save_CXXFLAGS="$CXXFLAGS" + CXXFLAGS="$CXXFLAGS $switch" + AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_11_testbody])], + [eval $cachevar=yes], + [eval $cachevar=no]) + CXXFLAGS="$ac_save_CXXFLAGS"]) + if eval test x\$$cachevar = xyes; then + CXXFLAGS="$CXXFLAGS $switch" + ac_success=yes + break + fi + done + fi]) + + m4_if([$1], [ext], [], [dnl + if test x$ac_success = xno; then + dnl HP's aCC needs +std=c++11 according to: + dnl http://h21007.www2.hp.com/portal/download/files/unprot/aCxx/PDF_Release_Notes/769149-001.pdf + for switch in -std=c++11 -std=c++0x +std=c++11; do + cachevar=AS_TR_SH([ax_cv_cxx_compile_cxx11_$switch]) + AC_CACHE_CHECK(whether $CXX supports C++11 features with $switch, + $cachevar, + [ac_save_CXXFLAGS="$CXXFLAGS" + CXXFLAGS="$CXXFLAGS $switch" + AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_11_testbody])], + [eval $cachevar=yes], + [eval $cachevar=no]) + CXXFLAGS="$ac_save_CXXFLAGS"]) + if eval 
test x\$$cachevar = xyes; then + CXXFLAGS="$CXXFLAGS $switch" + ac_success=yes + break + fi + done + fi]) + AC_LANG_POP([C++]) + if test x$ax_cxx_compile_cxx11_required = xtrue; then + if test x$ac_success = xno; then + AC_MSG_ERROR([*** A compiler with support for C++11 language features is required.]) + fi + else + if test x$ac_success = xno; then + HAVE_CXX11=0 + AC_MSG_NOTICE([No compiler with C++11 support was found]) + else + HAVE_CXX11=1 + AC_DEFINE(HAVE_CXX11,1, + [define if the compiler supports basic C++11 syntax]) + fi + + AC_SUBST(HAVE_CXX11) + fi +]) diff --git a/mybulma/node_modules/node-sass/src/libsass/res/resource.rc b/mybulma/node_modules/node-sass/src/libsass/res/resource.rc new file mode 100644 index 0000000..fc49e6a --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/res/resource.rc @@ -0,0 +1,35 @@ +#include + +// DLL version information. +VS_VERSION_INFO VERSIONINFO +FILEVERSION 1,0,0,0 +PRODUCTVERSION 1,0,0,0 +FILEFLAGSMASK VS_FFI_FILEFLAGSMASK +#ifdef _DEBUG + FILEFLAGS VS_FF_DEBUG | VS_FF_PRERELEASE +#else + FILEFLAGS 0 +#endif +FILEOS VOS_NT_WINDOWS32 +FILETYPE VFT_DLL +FILESUBTYPE VFT2_UNKNOWN +BEGIN + BLOCK "StringFileInfo" + BEGIN + BLOCK "080904b0" + BEGIN + VALUE "CompanyName", "Sass Open Source Foundation" + VALUE "FileDescription", "A C/C++ implementation of a Sass compiler" + VALUE "FileVersion", "1.0.0.0" + VALUE "InternalName", "libsass" + VALUE "LegalCopyright", "\251 2017 libsass.org" + VALUE "OriginalFilename", "libsass.dll" + VALUE "ProductName", "LibSass Library" + VALUE "ProductVersion", "1.0.0.0" + END + END + BLOCK "VarFileInfo" + BEGIN + VALUE "Translation", 0x809, 1200 + END +END diff --git a/mybulma/node_modules/node-sass/src/libsass/script/bootstrap b/mybulma/node_modules/node-sass/src/libsass/script/bootstrap new file mode 100644 index 0000000..ab82fac --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/script/bootstrap @@ -0,0 +1,13 @@ +#!/bin/bash + +script/branding + +: ${SASS_SPEC_PATH:="sass-spec"} +: ${SASS_SASSC_PATH:="sassc" } + +if [ ! -d $SASS_SPEC_PATH ]; then + git clone https://github.com/sass/sass-spec.git $SASS_SPEC_PATH +fi +if [ ! -d $SASS_SASSC_PATH ]; then + git clone https://github.com/sass/sassc.git $SASS_SASSC_PATH +fi diff --git a/mybulma/node_modules/node-sass/src/libsass/script/branding b/mybulma/node_modules/node-sass/src/libsass/script/branding new file mode 100644 index 0000000..cd8cb2a --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/script/branding @@ -0,0 +1,10 @@ +#! 
/bin/bash + +echo " " +echo " _ ___ ____ ____ _ ____ ____ " +echo "| | |_ _| __ ) ___| / \ / ___/ ___| " +echo "| | | || _ \___ \ / _ \ \___ \___ \ " +echo "| |___ | || |_) |__) / ___ \ ___) |__) |" +echo "|_____|___|____/____/_/ \_\____/____/ " +echo " " + diff --git a/mybulma/node_modules/node-sass/src/libsass/script/ci-build-libsass b/mybulma/node_modules/node-sass/src/libsass/script/ci-build-libsass new file mode 100644 index 0000000..40ea22f --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/script/ci-build-libsass @@ -0,0 +1,134 @@ +#!/bin/bash + +set -e + +script/bootstrap + +# export this path right here (was in script/spec before) +export SASS_LIBSASS_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )"/../ && pwd )" + +# use some defaults if not running under travis ci +if [ "x$CONTINUOUS_INTEGRATION" == "x" ]; then export CONTINUOUS_INTEGRATION=true; fi +if [ "x$TRAVIS_BUILD_DIR" == "x" ]; then export TRAVIS_BUILD_DIR=$(pwd); fi +if [ "x$SASS_SASSC_PATH" == "x" ]; then export SASS_SASSC_PATH=$(pwd)/sassc; fi +if [ "x$SASS_SPEC_PATH" == "x" ]; then export SASS_SPEC_PATH=$(pwd)/sass-spec; fi + +# try to get the os name from uname (and filter via perl - probably not the most portable way?) +if [ "x$TRAVIS_OS_NAME" == "x" ]; then export TRAVIS_OS_NAME=`uname -s | perl -ne 'print lc \$1 if\(/^([a-zA-Z]+)/'\)`; fi + +if [ "x$COVERAGE" == "xyes" ]; then + COVERAGE_OPT="--enable-coverage" + export EXTRA_CFLAGS="-fprofile-arcs -ftest-coverage" + export EXTRA_CXXFLAGS="-fprofile-arcs -ftest-coverage" + if [ "$TRAVIS_OS_NAME" == "osx" ]; then + # osx doesn't seem to know gcov lib? + export EXTRA_LDFLAGS="--coverage" + else + export EXTRA_LDFLAGS="-lgcov --coverage" + fi +else + COVERAGE_OPT="--disable-coverage" +fi + +if [ "x$BUILD" == "xstatic" ]; then + SHARED_OPT="--disable-shared --enable-static" + MAKE_TARGET="static" +else + # Makefile of sassc wants to link to static + SHARED_OPT="--enable-shared --enable-static" + MAKE_TARGET="shared" +fi + +if [ "$(expr substr $(uname -s) 1 10)" == "MINGW32_NT" ]; then + MAKE_OPTS="$MAKE_OPTS -j1 V=1" +else + MAKE_OPTS="$MAKE_OPTS -j5 V=1" +fi + +if [ "x$PREFIX" == "x" ]; then + if [ "x$TRAVIS_BUILD_DIR" == "x" ]; then + PREFIX=$SASS_LIBSASS_PATH/build + else + PREFIX=$TRAVIS_BUILD_DIR/build + fi +fi + +# enable address sanitation +# https://en.wikipedia.org/wiki/AddressSanitizer +if [ "x$CC" == "xclang" ]; then + if [ "x$COVERAGE" != "xyes" ]; then + if [ "$TRAVIS_OS_NAME" == "linux" ]; then + export EXTRA_CFLAGS="$EXTRA_CFLAGS -fsanitize=address" + export EXTRA_CXXFLAGS="$EXTRA_CXXFLAGS -fsanitize=address" + export EXTRA_LDFLAGS="$EXTRA_LDFLAGS -fsanitize=address" + fi + fi +fi + +echo SASS_LIBSASS_PATH: $SASS_LIBSASS_PATH +echo TRAVIS_BUILD_DIR: $TRAVIS_BUILD_DIR +echo SASS_SASSC_PATH: $SASS_SASSC_PATH +echo SASS_SPEC_PATH: $SASS_SPEC_PATH +echo INSTALL_LOCATION: $PREFIX + +if [ "x$AUTOTOOLS" == "xyes" ]; then + + echo -en 'travis_fold:start:configure\r' + autoreconf --force --install + ./configure --enable-tests $COVERAGE_OPT \ + --disable-silent-rules \ + --with-sassc-dir=$SASS_SASSC_PATH \ + --with-sass-spec-dir=$SASS_SPEC_PATH \ + --prefix=$PREFIX \ + ${SHARED_OPT} + echo -en 'travis_fold:end:configure\r' + + make $MAKE_OPTS clean + + # install to prefix directory + PREFIX="$PREFIX" make $MAKE_OPTS install + +else + + make $MAKE_OPTS clean + +fi + +# install to prefix directory +PREFIX="$PREFIX" make $MAKE_OPTS install + +ls -la $PREFIX/* + +echo successfully compiled libsass +echo AUTOTOOLS=$AUTOTOOLS COVERAGE=$COVERAGE BUILD=$BUILD 
+ +if [ "$CONTINUOUS_INTEGRATION" == "true" ] && [ "$TRAVIS_PULL_REQUEST" != "false" ] && [ "x$TRAVIS_PULL_REQUEST" != "x" ] && + ([ "$TRAVIS_OS_NAME" == "linux" ] || [ "$TRAVIS_OS_NAME" == "osx" ] || [ "$TRAVIS_OS_NAME" == "cygwin" ]); +then + + echo "Fetching PR $TRAVIS_PULL_REQUEST" + + JSON=$(curl -L -sS https://api.github.com/repos/sass/libsass/pulls/$TRAVIS_PULL_REQUEST) + + if [[ $JSON =~ "API rate limit exceeded" ]]; + then + echo "Travis rate limit on github exceeded" + echo "Retrying via 'special purpose proxy'" + JSON=$(curl -L -sS https://github-api-reverse-proxy.herokuapp.com/repos/sass/libsass/pulls/$TRAVIS_PULL_REQUEST) + fi + + RE_SPEC_PR="sass\/sass-spec(#|\/pull\/)([0-9]+)" + + if [[ $JSON =~ $RE_SPEC_PR ]]; + then + SPEC_PR="${BASH_REMATCH[2]}" + echo "Fetching Sass Spec PR $SPEC_PR" + git -C sass-spec fetch -u origin pull/$SPEC_PR/head:ci-spec-pr-$SPEC_PR + git -C sass-spec checkout --force ci-spec-pr-$SPEC_PR + LD_LIBRARY_PATH="$PREFIX/lib/" make $MAKE_OPTS test_probe + else + LD_LIBRARY_PATH="$PREFIX/lib/" make $MAKE_OPTS test_probe + fi +else + LD_LIBRARY_PATH="$PREFIX/lib/" make $MAKE_OPTS test_probe +fi diff --git a/mybulma/node_modules/node-sass/src/libsass/script/ci-build-plugin b/mybulma/node_modules/node-sass/src/libsass/script/ci-build-plugin new file mode 100644 index 0000000..0dd67b9 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/script/ci-build-plugin @@ -0,0 +1,62 @@ +#!/bin/bash + +PLUGIN=$1 +RUBY_BIN=ruby +SASS_SPEC_PATH=sass-spec +SASSC_BIN=sassc/bin/sassc +SASS_SPEC_SPEC_DIR=plugins/libsass-${PLUGIN}/test + +if [ -e ./tester ] ; then + SASSC_BIN=./tester +fi + +if [ -d ./build/lib ] ; then + cp -a build/lib lib +fi + +if [ "x$1" == "x" ] ; then + echo "No plugin name given" + exit 1 +fi + +if [ "x$COVERAGE" == "0" ] ; then + unset COVERAGE +fi + +export EXTRA_CFLAGS="" +export EXTRA_CXXFLAGS="" +if [ "$TRAVIS_OS_NAME" == "osx" ]; then + # osx doesn't seem to know gcov lib? + export EXTRA_LDFLAGS="--coverage" +else + export EXTRA_LDFLAGS="-lgcov --coverage" +fi + +mkdir -p plugins +if [ ! -d plugins/libsass-${PLUGIN} ] ; then + git clone https://github.com/mgreter/libsass-${PLUGIN} plugins/libsass-${PLUGIN} +fi +if [ ! -d plugins/libsass-${PLUGIN}/build ] ; then + mkdir plugins/libsass-${PLUGIN}/build +fi +RETVAL=$?; if [ "$RETVAL" != "0" ]; then exit $RETVAL; fi + +cd plugins/libsass-${PLUGIN}/build +cmake -G "Unix Makefiles" -D LIBSASS_DIR="../../.." .. +RETVAL=$?; if [ "$RETVAL" != "0" ]; then exit $RETVAL; fi +make VERBOSE=1 -j2 +RETVAL=$?; if [ "$RETVAL" != "0" ]; then exit $RETVAL; fi +cd ../../.. 
+ +# glob only works on paths relative to imports +if [ "x$PLUGIN" == "xglob" ]; then + ${SASSC_BIN} --plugin-path plugins/libsass-${PLUGIN}/build ${SASS_SPEC_SPEC_DIR}/basic/input.scss > ${SASS_SPEC_SPEC_DIR}/basic/result.css + ${SASSC_BIN} --plugin-path plugins/libsass-${PLUGIN}/build ${SASS_SPEC_SPEC_DIR}/basic/input.scss --sourcemap > /dev/null +else + cat ${SASS_SPEC_SPEC_DIR}/basic/input.scss | ${SASSC_BIN} --precision 5 --plugin-path plugins/libsass-${PLUGIN}/build -I ${SASS_SPEC_SPEC_DIR}/basic > ${SASS_SPEC_SPEC_DIR}/basic/result.css + cat ${SASS_SPEC_SPEC_DIR}/basic/input.scss | ${SASSC_BIN} --precision 5 --plugin-path plugins/libsass-${PLUGIN}/build -I ${SASS_SPEC_SPEC_DIR}/basic --sourcemap > /dev/null +fi +RETVAL=$?; if [ "$RETVAL" != "0" ]; then exit $RETVAL; fi + +diff ${SASS_SPEC_SPEC_DIR}/basic/expected_output.css ${SASS_SPEC_SPEC_DIR}/basic/result.css +RETVAL=$?; if [ "$RETVAL" != "0" ]; then exit $RETVAL; fi diff --git a/mybulma/node_modules/node-sass/src/libsass/script/ci-install-compiler b/mybulma/node_modules/node-sass/src/libsass/script/ci-install-compiler new file mode 100644 index 0000000..3a68b3a --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/script/ci-install-compiler @@ -0,0 +1,6 @@ +#!/bin/bash + +gem install minitest +gem install minitap + +pip2 install --user 'requests[security]' diff --git a/mybulma/node_modules/node-sass/src/libsass/script/ci-install-deps b/mybulma/node_modules/node-sass/src/libsass/script/ci-install-deps new file mode 100644 index 0000000..27b485a --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/script/ci-install-deps @@ -0,0 +1,20 @@ +#!/bin/bash +if [ "x$COVERAGE" == "xyes" ]; then + pip2 install --user gcovr + pip2 install --user cpp-coveralls +else + echo "no dependencies to install" +fi + +if [ "x$AUTOTOOLS" == "xyes" ]; then + AUTOTOOLS=yes + + if [ "$TRAVIS_OS_NAME" == "linux" ]; then + sudo add-apt-repository -y ppa:rbose-debianizer/automake &> /dev/null + sudo apt-get -qq update + sudo apt-get -qq install automake + fi + +fi + +exit 0 diff --git a/mybulma/node_modules/node-sass/src/libsass/script/ci-report-coverage b/mybulma/node_modules/node-sass/src/libsass/script/ci-report-coverage new file mode 100644 index 0000000..495cb05 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/script/ci-report-coverage @@ -0,0 +1,42 @@ +#!/bin/bash + +if [ "x$COVERAGE" = "xyes" ]; then + + # find / -name "gcovr" + # find / -name "coveralls" + # this is only needed for mac os x builds! + PATH=$PATH:/Users/travis/Library/Python/2.7/bin/ + + + # exclude some directories from profiling (.libs is from autotools) + export EXCLUDE_COVERAGE="--exclude plugins + --exclude sassc/sassc.c + --exclude src/sass-spec + --exclude src/.libs + --exclude src/debug.hpp + --exclude src/json.cpp + --exclude src/json.hpp + --exclude src/cencode.c + --exclude src/b64 + --exclude src/utf8 + --exclude src/utf8_string.hpp + --exclude src/utf8.h + --exclude src/utf8_string.cpp + --exclude src/sass2scss.h + --exclude src/sass2scss.cpp + --exclude src/test + --exclude src/posix + --exclude src/debugger.hpp" + # debug used gcov version + # option not available on mac + if [ "$TRAVIS_OS_NAME" != "osx" ]; then + gcov -v + fi + # create summarized report + gcovr -r . 
+ # submit report to coveralls.io + coveralls $EXCLUDE_COVERAGE --gcov-options '\-lp' + +else + echo "skip coverage reporting" +fi diff --git a/mybulma/node_modules/node-sass/src/libsass/script/spec b/mybulma/node_modules/node-sass/src/libsass/script/spec new file mode 100644 index 0000000..d0b864a --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/script/spec @@ -0,0 +1,5 @@ +#!/bin/bash + +script/bootstrap + +make $MAKE_OPTS test_build diff --git a/mybulma/node_modules/node-sass/src/libsass/script/tap-driver b/mybulma/node_modules/node-sass/src/libsass/script/tap-driver new file mode 100644 index 0000000..ed8a9a9 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/script/tap-driver @@ -0,0 +1,652 @@ +#!/usr/bin/env sh +# Copyright (C) 2011-2013 Free Software Foundation, Inc. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2, or (at your option) +# any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +# As a special exception to the GNU General Public License, if you +# distribute this file as part of a program that contains a +# configuration script generated by Autoconf, you may include it under +# the same distribution terms that you use for the rest of that program. + +# This file is maintained in Automake, please report +# bugs to or send patches to +# . + +scriptversion=2011-12-27.17; # UTC + +# Make unconditional expansion of undefined variables an error. This +# helps a lot in preventing typo-related bugs. +set -u + +me=tap-driver.sh + +fatal () +{ + echo "$me: fatal: $*" >&2 + exit 1 +} + +usage_error () +{ + echo "$me: $*" >&2 + print_usage >&2 + exit 2 +} + +print_usage () +{ + cat < + # + trap : 1 3 2 13 15 + if test $merge -gt 0; then + exec 2>&1 + else + exec 2>&3 + fi + "$@" + echo $? + ) | LC_ALL=C ${AM_TAP_AWK-awk} \ + -v me="$me" \ + -v test_script_name="$test_name" \ + -v log_file="$log_file" \ + -v trs_file="$trs_file" \ + -v expect_failure="$expect_failure" \ + -v merge="$merge" \ + -v ignore_exit="$ignore_exit" \ + -v comments="$comments" \ + -v diag_string="$diag_string" \ +' +# FIXME: the usages of "cat >&3" below could be optimized when using +# FIXME: GNU awk, and/on on systems that supports /dev/fd/. + +# Implementation note: in what follows, `result_obj` will be an +# associative array that (partly) simulates a TAP result object +# from the `TAP::Parser` perl module. + +## ----------- ## +## FUNCTIONS ## +## ----------- ## + +function fatal(msg) +{ + print me ": " msg | "cat >&2" + exit 1 +} + +function abort(where) +{ + fatal("internal error " where) +} + +# Convert a boolean to a "yes"/"no" string. +function yn(bool) +{ + return bool ? "yes" : "no"; +} + +function add_test_result(result) +{ + if (!test_results_index) + test_results_index = 0 + test_results_list[test_results_index] = result + test_results_index += 1 + test_results_seen[result] = 1; +} + +# Whether the test script should be re-run by "make recheck". 
+function must_recheck() +{ + for (k in test_results_seen) + if (k != "XFAIL" && k != "PASS" && k != "SKIP") + return 1 + return 0 +} + +# Whether the content of the log file associated to this test should +# be copied into the "global" test-suite.log. +function copy_in_global_log() +{ + for (k in test_results_seen) + if (k != "PASS") + return 1 + return 0 +} + +# FIXME: this can certainly be improved ... +function get_global_test_result() +{ + if ("ERROR" in test_results_seen) + return "ERROR" + if ("FAIL" in test_results_seen || "XPASS" in test_results_seen) + return "FAIL" + all_skipped = 1 + for (k in test_results_seen) + if (k != "SKIP") + all_skipped = 0 + if (all_skipped) + return "SKIP" + return "PASS"; +} + +function stringify_result_obj(result_obj) +{ + if (result_obj["is_unplanned"] || result_obj["number"] != testno) + return "ERROR" + + if (plan_seen == LATE_PLAN) + return "ERROR" + + if (result_obj["directive"] == "TODO") + return result_obj["is_ok"] ? "XPASS" : "XFAIL" + + if (result_obj["directive"] == "SKIP") + return result_obj["is_ok"] ? "SKIP" : COOKED_FAIL; + + if (length(result_obj["directive"])) + abort("in function stringify_result_obj()") + + return result_obj["is_ok"] ? COOKED_PASS : COOKED_FAIL +} + +function decorate_result(result) +{ + color_name = color_for_result[result] + if (color_name) + return color_map[color_name] "" result "" color_map["std"] + # If we are not using colorized output, or if we do not know how + # to colorize the given result, we should return it unchanged. + return result +} + +function report(result, details) +{ + if (result ~ /^(X?(PASS|FAIL)|SKIP|ERROR)/) + { + msg = ": " test_script_name + add_test_result(result) + } + else if (result == "#") + { + msg = " " test_script_name ":" + } + else + { + abort("in function report()") + } + if (length(details)) + msg = msg " " details + # Output on console might be colorized. + print decorate_result(result) msg + # Log the result in the log file too, to help debugging (this is + # especially true when said result is a TAP error or "Bail out!"). + print result msg | "cat >&3"; +} + +function testsuite_error(error_message) +{ + report("ERROR", "- " error_message) +} + +function handle_tap_result() +{ + details = result_obj["number"]; + if (length(result_obj["description"])) + details = details " " result_obj["description"] + + if (plan_seen == LATE_PLAN) + { + details = details " # AFTER LATE PLAN"; + } + else if (result_obj["is_unplanned"]) + { + details = details " # UNPLANNED"; + } + else if (result_obj["number"] != testno) + { + details = sprintf("%s # OUT-OF-ORDER (expecting %d)", + details, testno); + } + else if (result_obj["directive"]) + { + details = details " # " result_obj["directive"]; + if (length(result_obj["explanation"])) + details = details " " result_obj["explanation"] + } + + report(stringify_result_obj(result_obj), details) +} + +# `skip_reason` should be empty whenever planned > 0. +function handle_tap_plan(planned, skip_reason) +{ + planned += 0 # Avoid getting confused if, say, `planned` is "00" + if (length(skip_reason) && planned > 0) + abort("in function handle_tap_plan()") + if (plan_seen) + { + # Error, only one plan per stream is acceptable. + testsuite_error("multiple test plans") + return; + } + planned_tests = planned + # The TAP plan can come before or after *all* the TAP results; we speak + # respectively of an "early" or a "late" plan. 
If we see the plan line + # after at least one TAP result has been seen, assume we have a late + # plan; in this case, any further test result seen after the plan will + # be flagged as an error. + plan_seen = (testno >= 1 ? LATE_PLAN : EARLY_PLAN) + # If testno > 0, we have an error ("too many tests run") that will be + # automatically dealt with later, so do not worry about it here. If + # $plan_seen is true, we have an error due to a repeated plan, and that + # has already been dealt with above. Otherwise, we have a valid "plan + # with SKIP" specification, and should report it as a particular kind + # of SKIP result. + if (planned == 0 && testno == 0) + { + if (length(skip_reason)) + skip_reason = "- " skip_reason; + report("SKIP", skip_reason); + } +} + +function extract_tap_comment(line) +{ + if (index(line, diag_string) == 1) + { + # Strip leading `diag_string` from `line`. + line = substr(line, length(diag_string) + 1) + # And strip any leading and trailing whitespace left. + sub("^[ \t]*", "", line) + sub("[ \t]*$", "", line) + # Return what is left (if any). + return line; + } + return ""; +} + +# When this function is called, we know that line is a TAP result line, +# so that it matches the (perl) RE "^(not )?ok\b". +function setup_result_obj(line) +{ + # Get the result, and remove it from the line. + result_obj["is_ok"] = (substr(line, 1, 2) == "ok" ? 1 : 0) + sub("^(not )?ok[ \t]*", "", line) + + # If the result has an explicit number, get it and strip it; otherwise, + # automatically assing the next progresive number to it. + if (line ~ /^[0-9]+$/ || line ~ /^[0-9]+[^a-zA-Z0-9_]/) + { + match(line, "^[0-9]+") + # The final `+ 0` is to normalize numbers with leading zeros. + result_obj["number"] = substr(line, 1, RLENGTH) + 0 + line = substr(line, RLENGTH + 1) + } + else + { + result_obj["number"] = testno + } + + if (plan_seen == LATE_PLAN) + # No further test results are acceptable after a "late" TAP plan + # has been seen. + result_obj["is_unplanned"] = 1 + else if (plan_seen && testno > planned_tests) + result_obj["is_unplanned"] = 1 + else + result_obj["is_unplanned"] = 0 + + # Strip trailing and leading whitespace. + sub("^[ \t]*", "", line) + sub("[ \t]*$", "", line) + + # This will have to be corrected if we have a "TODO"/"SKIP" directive. + result_obj["description"] = line + result_obj["directive"] = "" + result_obj["explanation"] = "" + + if (index(line, "#") == 0) + return # No possible directive, nothing more to do. + + # Directives are case-insensitive. + rx = "[ \t]*#[ \t]*([tT][oO][dD][oO]|[sS][kK][iI][pP])[ \t]*" + + # See whether we have the directive, and if yes, where. + pos = match(line, rx "$") + if (!pos) + pos = match(line, rx "[^a-zA-Z0-9_]") + + # If there was no TAP directive, we have nothing more to do. + if (!pos) + return + + # Let`s now see if the TAP directive has been escaped. For example: + # escaped: ok \# SKIP + # not escaped: ok \\# SKIP + # escaped: ok \\\\\# SKIP + # not escaped: ok \ # SKIP + if (substr(line, pos, 1) == "#") + { + bslash_count = 0 + for (i = pos; i > 1 && substr(line, i - 1, 1) == "\\"; i--) + bslash_count += 1 + if (bslash_count % 2) + return # Directive was escaped. + } + + # Strip the directive and its explanation (if any) from the test + # description. + result_obj["description"] = substr(line, 1, pos - 1) + # Now remove the test description from the line, that has been dealt + # with already. + line = substr(line, pos) + # Strip the directive, and save its value (normalized to upper case). 
+ sub("^[ \t]*#[ \t]*", "", line) + result_obj["directive"] = toupper(substr(line, 1, 4)) + line = substr(line, 5) + # Now get the explanation for the directive (if any), with leading + # and trailing whitespace removed. + sub("^[ \t]*", "", line) + sub("[ \t]*$", "", line) + result_obj["explanation"] = line +} + +function get_test_exit_message(status) +{ + if (status == 0) + return "" + if (status !~ /^[1-9][0-9]*$/) + abort("getting exit status") + if (status < 127) + exit_details = "" + else if (status == 127) + exit_details = " (command not found?)" + else if (status >= 128 && status <= 255) + exit_details = sprintf(" (terminated by signal %d?)", status - 128) + else if (status > 256 && status <= 384) + # We used to report an "abnormal termination" here, but some Korn + # shells, when a child process die due to signal number n, can leave + # in $? an exit status of 256+n instead of the more standard 128+n. + # Apparently, both behaviours are allowed by POSIX (2008), so be + # prepared to handle them both. See also Austing Group report ID + # 0000051 + exit_details = sprintf(" (terminated by signal %d?)", status - 256) + else + # Never seen in practice. + exit_details = " (abnormal termination)" + return sprintf("exited with status %d%s", status, exit_details) +} + +function write_test_results() +{ + print ":global-test-result: " get_global_test_result() > trs_file + print ":recheck: " yn(must_recheck()) > trs_file + print ":copy-in-global-log: " yn(copy_in_global_log()) > trs_file + for (i = 0; i < test_results_index; i += 1) + print ":test-result: " test_results_list[i] > trs_file + close(trs_file); +} + +BEGIN { + +## ------- ## +## SETUP ## +## ------- ## + +'"$init_colors"' + +# Properly initialized once the TAP plan is seen. +planned_tests = 0 + +COOKED_PASS = expect_failure ? "XPASS": "PASS"; +COOKED_FAIL = expect_failure ? "XFAIL": "FAIL"; + +# Enumeration-like constants to remember which kind of plan (if any) +# has been seen. It is important that NO_PLAN evaluates "false" as +# a boolean. +NO_PLAN = 0 +EARLY_PLAN = 1 +LATE_PLAN = 2 + +testno = 0 # Number of test results seen so far. +bailed_out = 0 # Whether a "Bail out!" directive has been seen. + +# Whether the TAP plan has been seen or not, and if yes, which kind +# it is ("early" is seen before any test result, "late" otherwise). +plan_seen = NO_PLAN + +## --------- ## +## PARSING ## +## --------- ## + +is_first_read = 1 + +while (1) + { + # Involutions required so that we are able to read the exit status + # from the last input line. + st = getline + if (st < 0) # I/O error. + fatal("I/O error while reading from input stream") + else if (st == 0) # End-of-input + { + if (is_first_read) + abort("in input loop: only one input line") + break + } + if (is_first_read) + { + is_first_read = 0 + nextline = $0 + continue + } + else + { + curline = nextline + nextline = $0 + $0 = curline + } + # Copy any input line verbatim into the log file. + print | "cat >&3" + # Parsing of TAP input should stop after a "Bail out!" directive. + if (bailed_out) + continue + + # TAP test result. + if ($0 ~ /^(not )?ok$/ || $0 ~ /^(not )?ok[^a-zA-Z0-9_]/) + { + testno += 1 + setup_result_obj($0) + handle_tap_result() + } + # TAP plan (normal or "SKIP" without explanation). + else if ($0 ~ /^1\.\.[0-9]+[ \t]*$/) + { + # The next two lines will put the number of planned tests in $0. + sub("^1\\.\\.", "") + sub("[^0-9]*$", "") + handle_tap_plan($0, "") + continue + } + # TAP "SKIP" plan, with an explanation. 
+ else if ($0 ~ /^1\.\.0+[ \t]*#/) + { + # The next lines will put the skip explanation in $0, stripping + # any leading and trailing whitespace. This is a little more + # tricky in truth, since we want to also strip a potential leading + # "SKIP" string from the message. + sub("^[^#]*#[ \t]*(SKIP[: \t][ \t]*)?", "") + sub("[ \t]*$", ""); + handle_tap_plan(0, $0) + } + # "Bail out!" magic. + # Older versions of prove and TAP::Harness (e.g., 3.17) did not + # recognize a "Bail out!" directive when preceded by leading + # whitespace, but more modern versions (e.g., 3.23) do. So we + # emulate the latter, "more modern" behaviour. + else if ($0 ~ /^[ \t]*Bail out!/) + { + bailed_out = 1 + # Get the bailout message (if any), with leading and trailing + # whitespace stripped. The message remains stored in `$0`. + sub("^[ \t]*Bail out![ \t]*", ""); + sub("[ \t]*$", ""); + # Format the error message for the + bailout_message = "Bail out!" + if (length($0)) + bailout_message = bailout_message " " $0 + testsuite_error(bailout_message) + } + # Maybe we have too look for dianogtic comments too. + else if (comments != 0) + { + comment = extract_tap_comment($0); + if (length(comment)) + report("#", comment); + } + } + +## -------- ## +## FINISH ## +## -------- ## + +# A "Bail out!" directive should cause us to ignore any following TAP +# error, as well as a non-zero exit status from the TAP producer. +if (!bailed_out) + { + if (!plan_seen) + { + testsuite_error("missing test plan") + } + else if (planned_tests != testno) + { + bad_amount = testno > planned_tests ? "many" : "few" + testsuite_error(sprintf("too %s tests run (expected %d, got %d)", + bad_amount, planned_tests, testno)) + } + if (!ignore_exit) + { + # Fetch exit status from the last line. + exit_message = get_test_exit_message(nextline) + if (exit_message) + testsuite_error(exit_message) + } + } + +write_test_results() + +exit 0 + +} # End of "BEGIN" block. +' + +# TODO: document that we consume the file descriptor 3 :-( +} 3>"$log_file" + +test $? -eq 0 || fatal "I/O or internal error" + +# Local Variables: +# mode: shell-script +# sh-indentation: 2 +# eval: (add-hook 'write-file-hooks 'time-stamp) +# time-stamp-start: "scriptversion=" +# time-stamp-format: "%:y-%02m-%02d.%02H" +# time-stamp-time-zone: "UTC" +# time-stamp-end: "; # UTC" +# End: diff --git a/mybulma/node_modules/node-sass/src/libsass/script/tap-runner b/mybulma/node_modules/node-sass/src/libsass/script/tap-runner new file mode 100644 index 0000000..56c13bf --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/script/tap-runner @@ -0,0 +1 @@ +$@ $TEST_FLAGS --tap --silent | tapout tap diff --git a/mybulma/node_modules/node-sass/src/libsass/script/test-leaks.pl b/mybulma/node_modules/node-sass/src/libsass/script/test-leaks.pl new file mode 100644 index 0000000..bfb8653 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/script/test-leaks.pl @@ -0,0 +1,103 @@ +#!/usr/bin/perl +############################################################ +# this perl script is meant for developers only! +# it will run all spec-tests (without verifying the +# results) via valgrind to detect possible leaks. +# expect that it takes 1h or more to finish! 
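[Editorial aside — the script's header comment continues directly below.] The Perl harness that follows runs each spec file under `valgrind --leak-check=yes` and counts a run as clean when valgrind's summary contains "in use at exit: 0 bytes in 0 blocks". A rough C++ sketch of that predicate using POSIX popen (illustration only; `runs_leak_free` and the example paths are assumptions, not part of the patch):

#include <cstdio>
#include <string>

// Run `cmd` under valgrind and report whether all heap memory was released
// at exit, using the same summary line the Perl harness greps for.
static bool runs_leak_free(const std::string& cmd)
{
  std::string full = "valgrind --leak-check=yes " + cmd + " 2>&1";
  FILE* pipe = popen(full.c_str(), "r");
  if (!pipe) return false;
  std::string output;
  char buf[4096];
  while (fgets(buf, sizeof(buf), pipe))
    output += buf;
  pclose(pipe);
  return output.find("in use at exit: 0 bytes in 0 blocks") != std::string::npos;
}

int main()
{
  // Roughly the per-file check the harness below performs in each worker.
  bool ok = runs_leak_free("../sassc/bin/sassc input.scss");
  std::printf("%s\n", ok ? "." : "F");
  return 0;
}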
+############################################################ +# Prerequisite install: `cpan Parallel::Runner` +# You may also need to install `cpan File::Find` +# You may also need to install `cpan IPC::Run3` +############################################################ +# usage: `perl test-leaks.pl [threads]` +# example: `time perl test-leaks.pl 4` +############################################################ +# leaks will be reported in "mem-leaks.log" +############################################################ + +use strict; +use warnings; + +############################################################ +# configurations (you may adjust) +############################################################ + +# number of threads to use +my $threads = $ARGV[0] || 8; + +# the github repositories to checkout +# if you need other branch, clone manually! +my $sassc = "https://www.github.com/sass/sassc"; +my $specs = "https://www.github.com/sass/sass-spec"; + +############################################################ +# load modules +############################################################ + +use IPC::Run3; +use IO::Handle; +use Fcntl qw(:flock); +use File::Find::Rule; +use Parallel::Runner; +use List::Util qw(shuffle); + +############################################################ +# check prerequisites +############################################################ + +unless (-d "../sassc") { + warn "sassc folder not found\n"; + warn "trying to checkout via git\n"; + system("git", "clone", $sassc, "../sassc"); + die "git command did not exit gracefully" if $?; +} + +unless (-d "../sass-spec") { + warn "sass-spec folder not found\n"; + warn "trying to checkout via git\n"; + system("git", "clone", $specs, "../sass-spec"); + die "git command did not exit gracefully" if $?; +} + +unless (-f "../sassc/bin/sassc") { + warn "sassc executable not found\n"; + warn "trying to compile via make\n"; + system("make", "-C", "../sassc", "-j", $threads); + die "make command did not exit gracefully" if $?; +} + +############################################################ +# main runner code +############################################################ + +my $root = "../sass-spec/spec"; +my @files = File::Find::Rule->file() + ->name('input.scss')->in($root); + +open(my $leaks, ">", "mem-leaks.log"); +die "Cannot open log" unless $leaks; +my $runner = Parallel::Runner->new($threads); +die "Cannot start runner" unless $runner; + +print "##########################\n"; +print "Testing $#files spec files\n"; +print "##########################\n"; + +foreach my $file (shuffle @files) { + $runner->run(sub { + $| = 1; select STDOUT; + my $cmd = sprintf('../sassc/bin/sassc %s', $file); + my $check = sprintf('valgrind --leak-check=yes %s', $cmd); + run3($check, undef, \ my $out, \ my $err); + if ($err =~ m/in use at exit: 0 bytes in 0 blocks/) { + print "."; # print success indicator + } else { + print "F"; # print error indicator + flock($leaks, LOCK_EX) or die "Cannot lock log"; + $leaks->printflush("#" x 80, "\n", $err, "\n"); + flock($leaks, LOCK_UN) or die "Cannot unlock log"; + } + }); +} + +$runner->finish; diff --git a/mybulma/node_modules/node-sass/src/libsass/src/GNUmakefile.am b/mybulma/node_modules/node-sass/src/libsass/src/GNUmakefile.am new file mode 100644 index 0000000..fee9312 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/GNUmakefile.am @@ -0,0 +1,54 @@ +ACLOCAL_AMFLAGS = ${ACLOCAL_FLAGS} -I m4 -I script + +AM_COPT = -Wall -O2 +AM_COVLDFLAGS = + +if ENABLE_COVERAGE + AM_COPT = -O0 
--coverage + AM_COVLDFLAGS += -lgcov +endif + +AM_CPPFLAGS = -I$(top_srcdir)/include +AM_CFLAGS = $(AM_COPT) +AM_CXXFLAGS = $(AM_COPT) +AM_LDFLAGS = $(AM_COPT) $(AM_COVLDFLAGS) + +if COMPILER_IS_MINGW32 + AM_CXXFLAGS += -std=gnu++0x +else + AM_CXXFLAGS += -std=c++0x +endif + +EXTRA_DIST = \ + COPYING \ + INSTALL \ + LICENSE \ + Readme.md + +pkgconfigdir = $(libdir)/pkgconfig +pkgconfig_DATA = support/libsass.pc + +lib_LTLIBRARIES = libsass.la + +include $(top_srcdir)/Makefile.conf + +libsass_la_SOURCES = ${CSOURCES} ${SOURCES} + +libsass_la_LDFLAGS = $(AM_LDFLAGS) -no-undefined -version-info 1:0:0 + +if ENABLE_TESTS +if ENABLE_COVERAGE +nodist_EXTRA_libsass_la_SOURCES = non-existent-file-to-force-CXX-linking.cxx +endif +endif + +include_HEADERS = $(top_srcdir)/include/sass.h \ + $(top_srcdir)/include/sass2scss.h + +sass_includedir = $(includedir)/sass + +sass_include_HEADERS = $(top_srcdir)/include/sass/base.h \ + $(top_srcdir)/include/sass/values.h \ + $(top_srcdir)/include/sass/version.h \ + $(top_srcdir)/include/sass/context.h \ + $(top_srcdir)/include/sass/functions.h diff --git a/mybulma/node_modules/node-sass/src/libsass/src/ast.cpp b/mybulma/node_modules/node-sass/src/libsass/src/ast.cpp new file mode 100644 index 0000000..c3b38ef --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/ast.cpp @@ -0,0 +1,2226 @@ +#include "sass.hpp" +#include "ast.hpp" +#include "context.hpp" +#include "node.hpp" +#include "eval.hpp" +#include "extend.hpp" +#include "emitter.hpp" +#include "color_maps.hpp" +#include "ast_fwd_decl.hpp" +#include +#include +#include +#include +#include +#include +#include + +namespace Sass { + + static Null sass_null(ParserState("null")); + + bool Wrapped_Selector::find ( bool (*f)(AST_Node_Obj) ) + { + // check children first + if (selector_) { + if (selector_->find(f)) return true; + } + // execute last + return f(this); + } + + bool Selector_List::find ( bool (*f)(AST_Node_Obj) ) + { + // check children first + for (Complex_Selector_Obj sel : elements()) { + if (sel->find(f)) return true; + } + // execute last + return f(this); + } + + bool Compound_Selector::find ( bool (*f)(AST_Node_Obj) ) + { + // check children first + for (Simple_Selector_Obj sel : elements()) { + if (sel->find(f)) return true; + } + // execute last + return f(this); + } + + bool Complex_Selector::find ( bool (*f)(AST_Node_Obj) ) + { + // check children first + if (head_ && head_->find(f)) return true; + if (tail_ && tail_->find(f)) return true; + // execute last + return f(this); + } + + bool Supports_Operator::needs_parens(Supports_Condition_Obj cond) const { + if (Supports_Operator_Obj op = Cast(cond)) { + return op->operand() != operand(); + } + return Cast(cond) != NULL; + } + + bool Supports_Negation::needs_parens(Supports_Condition_Obj cond) const { + return Cast(cond) || + Cast(cond); + } + + void str_rtrim(std::string& str, const std::string& delimiters = " \f\n\r\t\v") + { + str.erase( str.find_last_not_of( delimiters ) + 1 ); + } + + void String_Constant::rtrim() + { + str_rtrim(value_); + } + + void String_Schema::rtrim() + { + if (!empty()) { + if (String_Ptr str = Cast(last())) str->rtrim(); + } + } + + void Argument::set_delayed(bool delayed) + { + if (value_) value_->set_delayed(delayed); + is_delayed(delayed); + } + + void Arguments::set_delayed(bool delayed) + { + for (Argument_Obj arg : elements()) { + if (arg) arg->set_delayed(delayed); + } + is_delayed(delayed); + } + + + bool At_Root_Query::exclude(std::string str) + { + bool with = feature() && 
unquote(feature()->to_string()).compare("with") == 0; + List_Ptr l = static_cast(value().ptr()); + std::string v; + + if (with) + { + if (!l || l->length() == 0) return str.compare("rule") != 0; + for (size_t i = 0, L = l->length(); i < L; ++i) + { + v = unquote((*l)[i]->to_string()); + if (v.compare("all") == 0 || v == str) return false; + } + return true; + } + else + { + if (!l || !l->length()) return str.compare("rule") == 0; + for (size_t i = 0, L = l->length(); i < L; ++i) + { + v = unquote((*l)[i]->to_string()); + if (v.compare("all") == 0 || v == str) return true; + } + return false; + } + } + + void AST_Node::update_pstate(const ParserState& pstate) + { + pstate_.offset += pstate - pstate_ + pstate.offset; + } + + bool Simple_Selector::is_ns_eq(const Simple_Selector& r) const + { + // https://github.com/sass/sass/issues/2229 + if ((has_ns_ == r.has_ns_) || + (has_ns_ && ns_.empty()) || + (r.has_ns_ && r.ns_.empty()) + ) { + if (ns_.empty() && r.ns() == "*") return false; + else if (r.ns().empty() && ns() == "*") return false; + else return ns() == r.ns(); + } + return false; + } + + bool Compound_Selector::operator< (const Compound_Selector& rhs) const + { + size_t L = std::min(length(), rhs.length()); + for (size_t i = 0; i < L; ++i) + { + Simple_Selector_Obj l = (*this)[i]; + Simple_Selector_Obj r = rhs[i]; + if (!l && !r) return false; + else if (!r) return false; + else if (!l) return true; + else if (*l != *r) + { return *l < *r; } + } + // just compare the length now + return length() < rhs.length(); + } + + bool Compound_Selector::has_parent_ref() const + { + for (Simple_Selector_Obj s : *this) { + if (s && s->has_parent_ref()) return true; + } + return false; + } + + bool Compound_Selector::has_real_parent_ref() const + { + for (Simple_Selector_Obj s : *this) { + if (s && s->has_real_parent_ref()) return true; + } + return false; + } + + bool Complex_Selector::has_parent_ref() const + { + return (head() && head()->has_parent_ref()) || + (tail() && tail()->has_parent_ref()); + } + + bool Complex_Selector::has_real_parent_ref() const + { + return (head() && head()->has_real_parent_ref()) || + (tail() && tail()->has_real_parent_ref()); + } + + bool Complex_Selector::operator< (const Complex_Selector& rhs) const + { + // const iterators for tails + Complex_Selector_Ptr_Const l = this; + Complex_Selector_Ptr_Const r = &rhs; + Compound_Selector_Ptr l_h = NULL; + Compound_Selector_Ptr r_h = NULL; + if (l) l_h = l->head(); + if (r) r_h = r->head(); + // process all tails + while (true) + { + #ifdef DEBUG + // skip empty ancestor first + if (l && l->is_empty_ancestor()) + { + l_h = NULL; + l = l->tail(); + if(l) l_h = l->head(); + continue; + } + // skip empty ancestor first + if (r && r->is_empty_ancestor()) + { + r_h = NULL; + r = r->tail(); + if (r) r_h = r->head(); + continue; + } + #endif + // check for valid selectors + if (!l) return !!r; + if (!r) return false; + // both are null + else if (!l_h && !r_h) + { + // check combinator after heads + if (l->combinator() != r->combinator()) + { return l->combinator() < r->combinator(); } + // advance to next tails + l = l->tail(); + r = r->tail(); + // fetch the next headers + l_h = NULL; r_h = NULL; + if (l) l_h = l->head(); + if (r) r_h = r->head(); + } + // one side is null + else if (!r_h) return true; + else if (!l_h) return false; + // heads ok and equal + else if (*l_h == *r_h) + { + // check combinator after heads + if (l->combinator() != r->combinator()) + { return l->combinator() < r->combinator(); } + // advance to next 
tails + l = l->tail(); + r = r->tail(); + // fetch the next headers + l_h = NULL; r_h = NULL; + if (l) l_h = l->head(); + if (r) r_h = r->head(); + } + // heads are not equal + else return *l_h < *r_h; + } + } + + bool Complex_Selector::operator== (const Complex_Selector& rhs) const + { + // const iterators for tails + Complex_Selector_Ptr_Const l = this; + Complex_Selector_Ptr_Const r = &rhs; + Compound_Selector_Ptr l_h = NULL; + Compound_Selector_Ptr r_h = NULL; + if (l) l_h = l->head(); + if (r) r_h = r->head(); + // process all tails + while (true) + { + #ifdef DEBUG + // skip empty ancestor first + if (l && l->is_empty_ancestor()) + { + l_h = NULL; + l = l->tail(); + if (l) l_h = l->head(); + continue; + } + // skip empty ancestor first + if (r && r->is_empty_ancestor()) + { + r_h = NULL; + r = r->tail(); + if (r) r_h = r->head(); + continue; + } + #endif + // check the pointers + if (!r) return !l; + if (!l) return !r; + // both are null + if (!l_h && !r_h) + { + // check combinator after heads + if (l->combinator() != r->combinator()) + { return l->combinator() < r->combinator(); } + // advance to next tails + l = l->tail(); + r = r->tail(); + // fetch the next heads + l_h = NULL; r_h = NULL; + if (l) l_h = l->head(); + if (r) r_h = r->head(); + } + // equals if other head is empty + else if ((!l_h && !r_h) || + (!l_h && r_h->empty()) || + (!r_h && l_h->empty()) || + (l_h && r_h && *l_h == *r_h)) + { + // check combinator after heads + if (l->combinator() != r->combinator()) + { return l->combinator() == r->combinator(); } + // advance to next tails + l = l->tail(); + r = r->tail(); + // fetch the next heads + l_h = NULL; r_h = NULL; + if (l) l_h = l->head(); + if (r) r_h = r->head(); + } + // abort + else break; + } + // unreachable + return false; + } + + Compound_Selector_Ptr Compound_Selector::unify_with(Compound_Selector_Ptr rhs) + { + if (empty()) return rhs; + Compound_Selector_Obj unified = SASS_MEMORY_COPY(rhs); + for (size_t i = 0, L = length(); i < L; ++i) + { + if (unified.isNull()) break; + unified = at(i)->unify_with(unified); + } + return unified.detach(); + } + + bool Complex_Selector::operator== (const Selector& rhs) const + { + if (const Selector_List* sl = Cast(&rhs)) return *this == *sl; + if (const Simple_Selector* sp = Cast(&rhs)) return *this == *sp; + if (const Complex_Selector* cs = Cast(&rhs)) return *this == *cs; + if (const Compound_Selector* ch = Cast(&rhs)) return *this == *ch; + throw std::runtime_error("invalid selector base classes to compare"); + } + + + bool Complex_Selector::operator< (const Selector& rhs) const + { + if (const Selector_List* sl = Cast(&rhs)) return *this < *sl; + if (const Simple_Selector* sp = Cast(&rhs)) return *this < *sp; + if (const Complex_Selector* cs = Cast(&rhs)) return *this < *cs; + if (const Compound_Selector* ch = Cast(&rhs)) return *this < *ch; + throw std::runtime_error("invalid selector base classes to compare"); + } + + bool Compound_Selector::operator== (const Selector& rhs) const + { + if (const Selector_List* sl = Cast(&rhs)) return *this == *sl; + if (const Simple_Selector* sp = Cast(&rhs)) return *this == *sp; + if (const Complex_Selector* cs = Cast(&rhs)) return *this == *cs; + if (const Compound_Selector* ch = Cast(&rhs)) return *this == *ch; + throw std::runtime_error("invalid selector base classes to compare"); + } + + bool Compound_Selector::operator< (const Selector& rhs) const + { + if (const Selector_List* sl = Cast(&rhs)) return *this < *sl; + if (const Simple_Selector* sp = Cast(&rhs)) return *this 
< *sp; + if (const Complex_Selector* cs = Cast(&rhs)) return *this < *cs; + if (const Compound_Selector* ch = Cast(&rhs)) return *this < *ch; + throw std::runtime_error("invalid selector base classes to compare"); + } + + bool Selector_Schema::operator== (const Selector& rhs) const + { + if (const Selector_List* sl = Cast(&rhs)) return *this == *sl; + if (const Simple_Selector* sp = Cast(&rhs)) return *this == *sp; + if (const Complex_Selector* cs = Cast(&rhs)) return *this == *cs; + if (const Compound_Selector* ch = Cast(&rhs)) return *this == *ch; + throw std::runtime_error("invalid selector base classes to compare"); + } + + bool Selector_Schema::operator< (const Selector& rhs) const + { + if (const Selector_List* sl = Cast(&rhs)) return *this < *sl; + if (const Simple_Selector* sp = Cast(&rhs)) return *this < *sp; + if (const Complex_Selector* cs = Cast(&rhs)) return *this < *cs; + if (const Compound_Selector* ch = Cast(&rhs)) return *this < *ch; + throw std::runtime_error("invalid selector base classes to compare"); + } + + bool Simple_Selector::operator== (const Selector& rhs) const + { + if (Simple_Selector_Ptr_Const sp = Cast(&rhs)) return *this == *sp; + return false; + } + + bool Simple_Selector::operator< (const Selector& rhs) const + { + if (Simple_Selector_Ptr_Const sp = Cast(&rhs)) return *this < *sp; + return false; + } + + bool Simple_Selector::operator== (const Simple_Selector& rhs) const + { + // solve the double dispatch problem by using RTTI information via dynamic cast + if (const Pseudo_Selector* lhs = Cast(this)) {return *lhs == rhs; } + else if (const Wrapped_Selector* lhs = Cast(this)) {return *lhs == rhs; } + else if (const Element_Selector* lhs = Cast(this)) {return *lhs == rhs; } + else if (const Attribute_Selector* lhs = Cast(this)) {return *lhs == rhs; } + else if (name_ == rhs.name_) + { return is_ns_eq(rhs); } + else return false; + } + + bool Simple_Selector::operator< (const Simple_Selector& rhs) const + { + // solve the double dispatch problem by using RTTI information via dynamic cast + if (const Pseudo_Selector* lhs = Cast(this)) {return *lhs < rhs; } + else if (const Wrapped_Selector* lhs = Cast(this)) {return *lhs < rhs; } + else if (const Element_Selector* lhs = Cast(this)) {return *lhs < rhs; } + else if (const Attribute_Selector* lhs = Cast(this)) {return *lhs < rhs; } + if (is_ns_eq(rhs)) + { return name_ < rhs.name_; } + return ns_ < rhs.ns_; + } + + bool Selector_List::operator== (const Selector& rhs) const + { + // solve the double dispatch problem by using RTTI information via dynamic cast + if (Selector_List_Ptr_Const sl = Cast(&rhs)) { return *this == *sl; } + else if (Complex_Selector_Ptr_Const cpx = Cast(&rhs)) { return *this == *cpx; } + else if (Compound_Selector_Ptr_Const cpd = Cast(&rhs)) { return *this == *cpd; } + // no compare method + return this == &rhs; + } + + // Selector lists can be compared to comma lists + bool Selector_List::operator== (const Expression& rhs) const + { + // solve the double dispatch problem by using RTTI information via dynamic cast + if (List_Ptr_Const ls = Cast(&rhs)) { return *ls == *this; } + if (Selector_Ptr_Const ls = Cast(&rhs)) { return *this == *ls; } + // compare invalid (maybe we should error?) 
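[Editorial aside — the `return false;` that closes Selector_List::operator== (const Expression&) follows directly below.] The comparison operators in this hunk resolve the dynamic type of the right-hand side by probing a series of downcasts and forwarding to the matching overload, falling back to a default when nothing matches. A stripped-down C++ sketch of that pattern using dynamic_cast (the ClassSel/IdSel names are invented for illustration):

#include <iostream>
#include <string>

// Base type dispatches equality by probing the dynamic type of `rhs`.
struct Selector {
  virtual ~Selector() = default;
  virtual bool equals(const Selector& rhs) const = 0;
};

struct ClassSel : Selector {
  std::string name;
  explicit ClassSel(std::string n) : name(std::move(n)) {}
  bool equals(const Selector& rhs) const override {
    if (auto* c = dynamic_cast<const ClassSel*>(&rhs)) return c->name == name;
    return false; // unrelated selector kind: not comparable, not equal
  }
};

struct IdSel : Selector {
  std::string name;
  explicit IdSel(std::string n) : name(std::move(n)) {}
  bool equals(const Selector& rhs) const override {
    if (auto* i = dynamic_cast<const IdSel*>(&rhs)) return i->name == name;
    return false;
  }
};

int main() {
  ClassSel a("foo"), b("foo");
  IdSel c("foo");
  std::cout << a.equals(b) << " " << a.equals(c) << "\n"; // prints: 1 0
  return 0;
}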
+ return false; + } + + bool Selector_List::operator== (const Selector_List& rhs) const + { + // for array access + size_t i = 0, n = 0; + size_t iL = length(); + size_t nL = rhs.length(); + // create temporary vectors and sort them + std::vector l_lst = this->elements(); + std::vector r_lst = rhs.elements(); + std::sort(l_lst.begin(), l_lst.end(), OrderNodes()); + std::sort(r_lst.begin(), r_lst.end(), OrderNodes()); + // process loop + while (true) + { + // first check for valid index + if (i == iL) return iL == nL; + else if (n == nL) return iL == nL; + // the access the vector items + Complex_Selector_Obj l = l_lst[i]; + Complex_Selector_Obj r = r_lst[n]; + // skip nulls + if (!l) ++i; + else if (!r) ++n; + // do the check + else if (*l != *r) + { return false; } + // advance + ++i; ++n; + } + // there is no break?! + } + + bool Selector_List::operator< (const Selector& rhs) const + { + if (Selector_List_Ptr_Const sp = Cast(&rhs)) return *this < *sp; + return false; + } + + bool Selector_List::operator< (const Selector_List& rhs) const + { + size_t l = rhs.length(); + if (length() < l) l = length(); + for (size_t i = 0; i < l; i ++) { + if (*at(i) < *rhs.at(i)) return true; + } + return false; + } + + Compound_Selector_Ptr Simple_Selector::unify_with(Compound_Selector_Ptr rhs) + { + for (size_t i = 0, L = rhs->length(); i < L; ++i) + { if (to_string() == rhs->at(i)->to_string()) return rhs; } + + // check for pseudo elements because they are always last + size_t i, L; + bool found = false; + if (typeid(*this) == typeid(Pseudo_Selector) || typeid(*this) == typeid(Wrapped_Selector) || typeid(*this) == typeid(Attribute_Selector)) + { + for (i = 0, L = rhs->length(); i < L; ++i) + { + if ((Cast((*rhs)[i]) || Cast((*rhs)[i]) || Cast((*rhs)[i])) && (*rhs)[L-1]->is_pseudo_element()) + { found = true; break; } + } + } + else + { + for (i = 0, L = rhs->length(); i < L; ++i) + { + if (Cast((*rhs)[i]) || Cast((*rhs)[i]) || Cast((*rhs)[i])) + { found = true; break; } + } + } + if (!found) + { + rhs->append(this); + } else { + rhs->elements().insert(rhs->elements().begin() + i, this); + } + return rhs; + } + + Simple_Selector_Ptr Element_Selector::unify_with(Simple_Selector_Ptr rhs) + { + // check if ns can be extended + // true for no ns or universal + if (has_universal_ns()) + { + // but dont extend with universal + // true for valid ns and universal + if (!rhs->is_universal_ns()) + { + // overwrite the name if star is given as name + if (this->name() == "*") { this->name(rhs->name()); } + // now overwrite the namespace name and flag + this->ns(rhs->ns()); this->has_ns(rhs->has_ns()); + // return copy + return this; + } + } + // namespace may changed, check the name now + // overwrite star (but not with another star) + if (name() == "*" && rhs->name() != "*") + { + // simply set the new name + this->name(rhs->name()); + // return copy + return this; + } + // return original + return this; + } + + Compound_Selector_Ptr Element_Selector::unify_with(Compound_Selector_Ptr rhs) + { + // TODO: handle namespaces + + // if the rhs is empty, just return a copy of this + if (rhs->length() == 0) { + rhs->append(this); + return rhs; + } + + Simple_Selector_Ptr rhs_0 = rhs->at(0); + // otherwise, this is a tag name + if (name() == "*") + { + if (typeid(*rhs_0) == typeid(Element_Selector)) + { + // if rhs is universal, just return this tagname + rhs's qualifiers + Element_Selector_Ptr ts = Cast(rhs_0); + rhs->at(0) = this->unify_with(ts); + return rhs; + } + else if (Cast(rhs_0) || Cast(rhs_0)) { + // qualifier 
is `.class`, so we can prefix with `ns|*.class` + if (has_ns() && !rhs_0->has_ns()) { + if (ns() != "*") rhs->elements().insert(rhs->begin(), this); + } + return rhs; + } + + + return rhs; + } + + if (typeid(*rhs_0) == typeid(Element_Selector)) + { + // if rhs is universal, just return this tagname + rhs's qualifiers + if (rhs_0->name() != "*" && rhs_0->ns() != "*" && rhs_0->name() != name()) return 0; + // otherwise create new compound and unify first simple selector + rhs->at(0) = this->unify_with(rhs_0); + return rhs; + + } + // else it's a tag name and a bunch of qualifiers -- just append them + if (name() != "*") rhs->elements().insert(rhs->begin(), this); + return rhs; + } + + Compound_Selector_Ptr Class_Selector::unify_with(Compound_Selector_Ptr rhs) + { + rhs->has_line_break(has_line_break()); + return Simple_Selector::unify_with(rhs); + } + + Compound_Selector_Ptr Id_Selector::unify_with(Compound_Selector_Ptr rhs) + { + for (size_t i = 0, L = rhs->length(); i < L; ++i) + { + if (Id_Selector_Ptr sel = Cast(rhs->at(i))) { + if (sel->name() != name()) return 0; + } + } + rhs->has_line_break(has_line_break()); + return Simple_Selector::unify_with(rhs); + } + + Compound_Selector_Ptr Pseudo_Selector::unify_with(Compound_Selector_Ptr rhs) + { + if (is_pseudo_element()) + { + for (size_t i = 0, L = rhs->length(); i < L; ++i) + { + if (Pseudo_Selector_Ptr sel = Cast(rhs->at(i))) { + if (sel->is_pseudo_element() && sel->name() != name()) return 0; + } + } + } + return Simple_Selector::unify_with(rhs); + } + + bool Attribute_Selector::operator< (const Attribute_Selector& rhs) const + { + if (is_ns_eq(rhs)) { + if (name() == rhs.name()) { + if (matcher() == rhs.matcher()) { + bool no_lhs_val = value().isNull(); + bool no_rhs_val = rhs.value().isNull(); + if (no_lhs_val && no_rhs_val) return false; // equal + else if (no_lhs_val) return true; // lhs is null + else if (no_rhs_val) return false; // rhs is null + return *value() < *rhs.value(); // both are given + } else { return matcher() < rhs.matcher(); } + } else { return name() < rhs.name(); } + } else { return ns() < rhs.ns(); } + } + + bool Attribute_Selector::operator< (const Simple_Selector& rhs) const + { + if (Attribute_Selector_Ptr_Const w = Cast(&rhs)) + { + return *this < *w; + } + if (is_ns_eq(rhs)) + { return name() < rhs.name(); } + return ns() < rhs.ns(); + } + + bool Attribute_Selector::operator== (const Attribute_Selector& rhs) const + { + // get optional value state + bool no_lhs_val = value().isNull(); + bool no_rhs_val = rhs.value().isNull(); + // both are null, therefore equal + if (no_lhs_val && no_rhs_val) { + return (name() == rhs.name()) + && (matcher() == rhs.matcher()) + && (is_ns_eq(rhs)); + } + // both are defined, evaluate + if (no_lhs_val == no_rhs_val) { + return (name() == rhs.name()) + && (matcher() == rhs.matcher()) + && (is_ns_eq(rhs)) + && (*value() == *rhs.value()); + } + // not equal + return false; + + } + + bool Attribute_Selector::operator== (const Simple_Selector& rhs) const + { + if (Attribute_Selector_Ptr_Const w = Cast(&rhs)) + { + return is_ns_eq(rhs) && + name() == rhs.name() && + *this == *w; + } + return false; + } + + bool Element_Selector::operator< (const Element_Selector& rhs) const + { + if (is_ns_eq(rhs)) + { return name() < rhs.name(); } + return ns() < rhs.ns(); + } + + bool Element_Selector::operator< (const Simple_Selector& rhs) const + { + if (Element_Selector_Ptr_Const w = Cast(&rhs)) + { + return *this < *w; + } + if (is_ns_eq(rhs)) + { return name() < rhs.name(); } + return ns() < 
rhs.ns(); + } + + bool Element_Selector::operator== (const Element_Selector& rhs) const + { + return is_ns_eq(rhs) && + name() == rhs.name(); + } + + bool Element_Selector::operator== (const Simple_Selector& rhs) const + { + if (Element_Selector_Ptr_Const w = Cast(&rhs)) + { + return is_ns_eq(rhs) && + name() == rhs.name() && + *this == *w; + } + return false; + } + + bool Pseudo_Selector::operator== (const Pseudo_Selector& rhs) const + { + if (is_ns_eq(rhs) && name() == rhs.name()) + { + String_Obj lhs_ex = expression(); + String_Obj rhs_ex = rhs.expression(); + if (rhs_ex && lhs_ex) return *lhs_ex == *rhs_ex; + else return lhs_ex.ptr() == rhs_ex.ptr(); + } + else return false; + } + + bool Pseudo_Selector::operator== (const Simple_Selector& rhs) const + { + if (Pseudo_Selector_Ptr_Const w = Cast(&rhs)) + { + return *this == *w; + } + return is_ns_eq(rhs) && + name() == rhs.name(); + } + + bool Pseudo_Selector::operator< (const Pseudo_Selector& rhs) const + { + if (is_ns_eq(rhs) && name() == rhs.name()) + { + String_Obj lhs_ex = expression(); + String_Obj rhs_ex = rhs.expression(); + if (rhs_ex && lhs_ex) return *lhs_ex < *rhs_ex; + else return lhs_ex.ptr() < rhs_ex.ptr(); + } + if (is_ns_eq(rhs)) + { return name() < rhs.name(); } + return ns() < rhs.ns(); + } + + bool Pseudo_Selector::operator< (const Simple_Selector& rhs) const + { + if (Pseudo_Selector_Ptr_Const w = Cast(&rhs)) + { + return *this < *w; + } + if (is_ns_eq(rhs)) + { return name() < rhs.name(); } + return ns() < rhs.ns(); + } + + bool Wrapped_Selector::operator== (const Wrapped_Selector& rhs) const + { + if (is_ns_eq(rhs) && name() == rhs.name()) + { return *(selector()) == *(rhs.selector()); } + else return false; + } + + bool Wrapped_Selector::operator== (const Simple_Selector& rhs) const + { + if (Wrapped_Selector_Ptr_Const w = Cast(&rhs)) + { + return *this == *w; + } + return is_ns_eq(rhs) && + name() == rhs.name(); + } + + bool Wrapped_Selector::operator< (const Wrapped_Selector& rhs) const + { + if (is_ns_eq(rhs) && name() == rhs.name()) + { return *(selector()) < *(rhs.selector()); } + if (is_ns_eq(rhs)) + { return name() < rhs.name(); } + return ns() < rhs.ns(); + } + + bool Wrapped_Selector::operator< (const Simple_Selector& rhs) const + { + if (Wrapped_Selector_Ptr_Const w = Cast(&rhs)) + { + return *this < *w; + } + if (is_ns_eq(rhs)) + { return name() < rhs.name(); } + return ns() < rhs.ns(); + } + + bool Wrapped_Selector::is_superselector_of(Wrapped_Selector_Obj sub) + { + if (this->name() != sub->name()) return false; + if (this->name() == ":current") return false; + if (Selector_List_Obj rhs_list = Cast(sub->selector())) { + if (Selector_List_Obj lhs_list = Cast(selector())) { + return lhs_list->is_superselector_of(rhs_list); + } + } + coreError("is_superselector expected a Selector_List", sub->pstate()); + return false; + } + + bool Compound_Selector::is_superselector_of(Selector_List_Obj rhs, std::string wrapped) + { + for (Complex_Selector_Obj item : rhs->elements()) { + if (is_superselector_of(item, wrapped)) return true; + } + return false; + } + + bool Compound_Selector::is_superselector_of(Complex_Selector_Obj rhs, std::string wrapped) + { + if (rhs->head()) return is_superselector_of(rhs->head(), wrapped); + return false; + } + + bool Compound_Selector::is_superselector_of(Compound_Selector_Obj rhs, std::string wrapping) + { + Compound_Selector_Ptr lhs = this; + Simple_Selector_Ptr lbase = lhs->base(); + Simple_Selector_Ptr rbase = rhs->base(); + + // Check if pseudo-elements are the same between 
the selectors + + std::set lpsuedoset, rpsuedoset; + for (size_t i = 0, L = length(); i < L; ++i) + { + if ((*this)[i]->is_pseudo_element()) { + std::string pseudo((*this)[i]->to_string()); + pseudo = pseudo.substr(pseudo.find_first_not_of(":")); // strip off colons to ensure :after matches ::after since ruby sass is forgiving + lpsuedoset.insert(pseudo); + } + } + for (size_t i = 0, L = rhs->length(); i < L; ++i) + { + if ((*rhs)[i]->is_pseudo_element()) { + std::string pseudo((*rhs)[i]->to_string()); + pseudo = pseudo.substr(pseudo.find_first_not_of(":")); // strip off colons to ensure :after matches ::after since ruby sass is forgiving + rpsuedoset.insert(pseudo); + } + } + if (lpsuedoset != rpsuedoset) { + return false; + } + + // would like to replace this without stringification + // https://github.com/sass/sass/issues/2229 + // SimpleSelectorSet lset, rset; + std::set lset, rset; + + if (lbase && rbase) + { + if (lbase->to_string() == rbase->to_string()) { + for (size_t i = 1, L = length(); i < L; ++i) + { lset.insert((*this)[i]->to_string()); } + for (size_t i = 1, L = rhs->length(); i < L; ++i) + { rset.insert((*rhs)[i]->to_string()); } + return includes(rset.begin(), rset.end(), lset.begin(), lset.end()); + } + return false; + } + + for (size_t i = 0, iL = length(); i < iL; ++i) + { + Selector_Obj wlhs = (*this)[i]; + // very special case for wrapped matches selector + if (Wrapped_Selector_Obj wrapped = Cast(wlhs)) { + if (wrapped->name() == ":not") { + if (Selector_List_Obj not_list = Cast(wrapped->selector())) { + if (not_list->is_superselector_of(rhs, wrapped->name())) return false; + } else { + throw std::runtime_error("wrapped not selector is not a list"); + } + } + if (wrapped->name() == ":matches" || wrapped->name() == ":-moz-any") { + wlhs = wrapped->selector(); + if (Selector_List_Obj list = Cast(wrapped->selector())) { + if (Compound_Selector_Obj comp = Cast(rhs)) { + if (!wrapping.empty() && wrapping != wrapped->name()) return false; + if (wrapping.empty() || wrapping != wrapped->name()) {; + if (list->is_superselector_of(comp, wrapped->name())) return true; + } + } + } + } + Simple_Selector_Ptr rhs_sel = NULL; + if (rhs->elements().size() > i) rhs_sel = (*rhs)[i]; + if (Wrapped_Selector_Ptr wrapped_r = Cast(rhs_sel)) { + if (wrapped->name() == wrapped_r->name()) { + if (wrapped->is_superselector_of(wrapped_r)) { + continue; + }} + } + } + // match from here on as strings + lset.insert(wlhs->to_string()); + } + + for (size_t n = 0, nL = rhs->length(); n < nL; ++n) + { + Selector_Obj r = (*rhs)[n]; + if (Wrapped_Selector_Obj wrapped = Cast(r)) { + if (wrapped->name() == ":not") { + if (Selector_List_Obj ls = Cast(wrapped->selector())) { + ls->remove_parent_selectors(); + if (is_superselector_of(ls, wrapped->name())) return false; + } + } + if (wrapped->name() == ":matches" || wrapped->name() == ":-moz-any") { + if (!wrapping.empty()) { + if (wrapping != wrapped->name()) return false; + } + if (Selector_List_Obj ls = Cast(wrapped->selector())) { + ls->remove_parent_selectors(); + return (is_superselector_of(ls, wrapped->name())); + } + } + } + rset.insert(r->to_string()); + } + + //for (auto l : lset) { cerr << "l: " << l << endl; } + //for (auto r : rset) { cerr << "r: " << r << endl; } + + if (lset.empty()) return true; + // return true if rset contains all the elements of lset + return includes(rset.begin(), rset.end(), lset.begin(), lset.end()); + + } + + // create complex selector (ancestor of) from compound selector + Complex_Selector_Obj 
Compound_Selector::to_complex() + { + // create an intermediate complex selector + return SASS_MEMORY_NEW(Complex_Selector, + pstate(), + Complex_Selector::ANCESTOR_OF, + this, + 0); + } + + Selector_List_Ptr Complex_Selector::unify_with(Complex_Selector_Ptr other) + { + + // get last tails (on the right side) + Complex_Selector_Obj l_last = this->last(); + Complex_Selector_Obj r_last = other->last(); + + // check valid pointers (assertion) + SASS_ASSERT(l_last, "lhs is null"); + SASS_ASSERT(r_last, "rhs is null"); + + // Not sure about this check, but closest way I could check + // was to see if this is a ruby 'SimpleSequence' equivalent. + // It seems to do the job correctly as some specs react to this + if (l_last->combinator() != Combinator::ANCESTOR_OF) return 0; + if (r_last->combinator() != Combinator::ANCESTOR_OF ) return 0; + + // get the headers for the last tails + Compound_Selector_Obj l_last_head = l_last->head(); + Compound_Selector_Obj r_last_head = r_last->head(); + + // check valid head pointers (assertion) + SASS_ASSERT(l_last_head, "lhs head is null"); + SASS_ASSERT(r_last_head, "rhs head is null"); + + // get the unification of the last compound selectors + Compound_Selector_Obj unified = r_last_head->unify_with(l_last_head); + + // abort if we could not unify heads + if (unified == 0) return 0; + + // check for universal (star: `*`) selector + bool is_universal = l_last_head->is_universal() || + r_last_head->is_universal(); + + if (is_universal) + { + // move the head + l_last->head(0); + r_last->head(unified); + } + + // create nodes from both selectors + Node lhsNode = complexSelectorToNode(this); + Node rhsNode = complexSelectorToNode(other); + + // overwrite universal base + if (!is_universal) + { + // create some temporaries to convert to node + Complex_Selector_Obj fake = unified->to_complex(); + Node unified_node = complexSelectorToNode(fake); + // add to permutate the list? + rhsNode.plus(unified_node); + } + + // do some magic we inherit from node and extend + Node node = subweave(lhsNode, rhsNode); + Selector_List_Obj result = SASS_MEMORY_NEW(Selector_List, pstate()); + NodeDequePtr col = node.collection(); // move from collection to list + for (NodeDeque::iterator it = col->begin(), end = col->end(); it != end; it++) + { result->append(nodeToComplexSelector(Node::naiveTrim(*it))); } + + // only return if list has some entries + return result->length() ? result.detach() : 0; + + } + + bool Compound_Selector::operator== (const Compound_Selector& rhs) const + { + // for array access + size_t i = 0, n = 0; + size_t iL = length(); + size_t nL = rhs.length(); + // create temporary vectors and sort them + std::vector l_lst = this->elements(); + std::vector r_lst = rhs.elements(); + std::sort(l_lst.begin(), l_lst.end(), OrderNodes()); + std::sort(r_lst.begin(), r_lst.end(), OrderNodes()); + // process loop + while (true) + { + // first check for valid index + if (i == iL) return iL == nL; + else if (n == nL) return iL == nL; + // the access the vector items + Simple_Selector_Obj l = l_lst[i]; + Simple_Selector_Obj r = r_lst[n]; + // skip nulls + if (!l) ++i; + if (!r) ++n; + // do the check now + else if (*l != *r) + { return false; } + // advance now + ++i; ++n; + } + // there is no break?! 
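[Editorial aside — the closing brace of Compound_Selector::operator== follows directly below.] That operator treats a compound selector as an unordered collection: it copies both element lists, sorts them with the same ordering, and then compares them position by position. A simplified C++ sketch of the same idea, with plain strings standing in for Simple_Selector objects (the real code also skips null entries while walking the sorted vectors):

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

// Order-insensitive equality: sort copies of both sides, then compare.
static bool compound_equal(std::vector<std::string> lhs,
                           std::vector<std::string> rhs)
{
  std::sort(lhs.begin(), lhs.end());
  std::sort(rhs.begin(), rhs.end());
  return lhs == rhs;
}

int main()
{
  std::cout << compound_equal({".a", ":hover", "#b"}, {"#b", ".a", ":hover"}) // 1
            << compound_equal({".a"}, {".a", ".b"})                           // 0
            << "\n";
  return 0;
}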
+ } + + bool Complex_Selector::is_superselector_of(Compound_Selector_Obj rhs, std::string wrapping) + { + return last()->head() && last()->head()->is_superselector_of(rhs, wrapping); + } + + bool Complex_Selector::is_superselector_of(Complex_Selector_Obj rhs, std::string wrapping) + { + Complex_Selector_Ptr lhs = this; + // check for selectors with leading or trailing combinators + if (!lhs->head() || !rhs->head()) + { return false; } + Complex_Selector_Obj l_innermost = lhs->innermost(); + if (l_innermost->combinator() != Complex_Selector::ANCESTOR_OF) + { return false; } + Complex_Selector_Obj r_innermost = rhs->innermost(); + if (r_innermost->combinator() != Complex_Selector::ANCESTOR_OF) + { return false; } + // more complex (i.e., longer) selectors are always more specific + size_t l_len = lhs->length(), r_len = rhs->length(); + if (l_len > r_len) + { return false; } + + if (l_len == 1) + { return lhs->head()->is_superselector_of(rhs->last()->head(), wrapping); } + + // we have to look one tail deeper, since we cary the + // combinator around for it (which is important here) + if (rhs->tail() && lhs->tail() && combinator() != Complex_Selector::ANCESTOR_OF) { + Complex_Selector_Obj lhs_tail = lhs->tail(); + Complex_Selector_Obj rhs_tail = rhs->tail(); + if (lhs_tail->combinator() != rhs_tail->combinator()) return false; + if (lhs_tail->head() && !rhs_tail->head()) return false; + if (!lhs_tail->head() && rhs_tail->head()) return false; + if (lhs_tail->head() && rhs_tail->head()) { + if (!lhs_tail->head()->is_superselector_of(rhs_tail->head())) return false; + } + } + + bool found = false; + Complex_Selector_Obj marker = rhs; + for (size_t i = 0, L = rhs->length(); i < L; ++i) { + if (i == L-1) + { return false; } + if (lhs->head() && marker->head() && lhs->head()->is_superselector_of(marker->head(), wrapping)) + { found = true; break; } + marker = marker->tail(); + } + if (!found) + { return false; } + + /* + Hmm, I hope I have the logic right: + + if lhs has a combinator: + if !(marker has a combinator) return false + if !(lhs.combinator == '~' ? marker.combinator != '>' : lhs.combinator == marker.combinator) return false + return lhs.tail-without-innermost.is_superselector_of(marker.tail-without-innermost) + else if marker has a combinator: + if !(marker.combinator == ">") return false + return lhs.tail.is_superselector_of(marker.tail) + else + return lhs.tail.is_superselector_of(marker.tail) + */ + if (lhs->combinator() != Complex_Selector::ANCESTOR_OF) + { + if (marker->combinator() == Complex_Selector::ANCESTOR_OF) + { return false; } + if (!(lhs->combinator() == Complex_Selector::PRECEDES ? 
marker->combinator() != Complex_Selector::PARENT_OF : lhs->combinator() == marker->combinator())) + { return false; } + return lhs->tail()->is_superselector_of(marker->tail()); + } + else if (marker->combinator() != Complex_Selector::ANCESTOR_OF) + { + if (marker->combinator() != Complex_Selector::PARENT_OF) + { return false; } + return lhs->tail()->is_superselector_of(marker->tail()); + } + return lhs->tail()->is_superselector_of(marker->tail()); + } + + size_t Complex_Selector::length() const + { + // TODO: make this iterative + if (!tail()) return 1; + return 1 + tail()->length(); + } + + // append another complex selector at the end + // check if we need to append some headers + // then we need to check for the combinator + // only then we can safely set the new tail + void Complex_Selector::append(Complex_Selector_Obj ss, Backtraces& traces) + { + + Complex_Selector_Obj t = ss->tail(); + Combinator c = ss->combinator(); + String_Obj r = ss->reference(); + Compound_Selector_Obj h = ss->head(); + + if (ss->has_line_feed()) has_line_feed(true); + if (ss->has_line_break()) has_line_break(true); + + // append old headers + if (h && h->length()) { + if (last()->combinator() != ANCESTOR_OF && c != ANCESTOR_OF) { + traces.push_back(Backtrace(pstate())); + throw Exception::InvalidParent(this, traces, ss); + } else if (last()->head_ && last()->head_->length()) { + Compound_Selector_Obj rh = last()->head(); + size_t i; + size_t L = h->length(); + if (Cast(h->first())) { + if (Class_Selector_Ptr cs = Cast(rh->last())) { + Class_Selector_Ptr sqs = SASS_MEMORY_COPY(cs); + sqs->name(sqs->name() + (*h)[0]->name()); + sqs->pstate((*h)[0]->pstate()); + (*rh)[rh->length()-1] = sqs; + rh->pstate(h->pstate()); + for (i = 1; i < L; ++i) rh->append((*h)[i]); + } else if (Id_Selector_Ptr is = Cast(rh->last())) { + Id_Selector_Ptr sqs = SASS_MEMORY_COPY(is); + sqs->name(sqs->name() + (*h)[0]->name()); + sqs->pstate((*h)[0]->pstate()); + (*rh)[rh->length()-1] = sqs; + rh->pstate(h->pstate()); + for (i = 1; i < L; ++i) rh->append((*h)[i]); + } else if (Element_Selector_Ptr ts = Cast(rh->last())) { + Element_Selector_Ptr tss = SASS_MEMORY_COPY(ts); + tss->name(tss->name() + (*h)[0]->name()); + tss->pstate((*h)[0]->pstate()); + (*rh)[rh->length()-1] = tss; + rh->pstate(h->pstate()); + for (i = 1; i < L; ++i) rh->append((*h)[i]); + } else if (Placeholder_Selector_Ptr ps = Cast(rh->last())) { + Placeholder_Selector_Ptr pss = SASS_MEMORY_COPY(ps); + pss->name(pss->name() + (*h)[0]->name()); + pss->pstate((*h)[0]->pstate()); + (*rh)[rh->length()-1] = pss; + rh->pstate(h->pstate()); + for (i = 1; i < L; ++i) rh->append((*h)[i]); + } else { + last()->head_->concat(h); + } + } else { + last()->head_->concat(h); + } + } else if (last()->head_) { + last()->head_->concat(h); + } + } else { + // std::cerr << "has no or empty head\n"; + } + + if (last()) { + if (last()->combinator() != ANCESTOR_OF && c != ANCESTOR_OF) { + Complex_Selector_Ptr inter = SASS_MEMORY_NEW(Complex_Selector, pstate()); + inter->reference(r); + inter->combinator(c); + inter->tail(t); + last()->tail(inter); + } else { + if (last()->combinator() == ANCESTOR_OF) { + last()->combinator(c); + last()->reference(r); + } + last()->tail(t); + } + } + + } + + Selector_List_Obj Selector_List::eval(Eval& eval) + { + Selector_List_Obj list = schema() ? 
+ eval(schema()) : eval(this); + list->schema(schema()); + return list; + } + + Selector_List_Ptr Selector_List::resolve_parent_refs(std::vector& pstack, Backtraces& traces, bool implicit_parent) + { + if (!this->has_parent_ref()) return this; + Selector_List_Ptr ss = SASS_MEMORY_NEW(Selector_List, pstate()); + Selector_List_Ptr ps = pstack.back(); + for (size_t pi = 0, pL = ps->length(); pi < pL; ++pi) { + for (size_t si = 0, sL = this->length(); si < sL; ++si) { + Selector_List_Obj rv = at(si)->resolve_parent_refs(pstack, traces, implicit_parent); + ss->concat(rv); + } + } + return ss; + } + + Selector_List_Ptr Complex_Selector::resolve_parent_refs(std::vector& pstack, Backtraces& traces, bool implicit_parent) + { + Complex_Selector_Obj tail = this->tail(); + Compound_Selector_Obj head = this->head(); + Selector_List_Ptr parents = pstack.back(); + + if (!this->has_real_parent_ref() && !implicit_parent) { + Selector_List_Ptr retval = SASS_MEMORY_NEW(Selector_List, pstate()); + retval->append(this); + return retval; + } + + // first resolve_parent_refs the tail (which may return an expanded list) + Selector_List_Obj tails = tail ? tail->resolve_parent_refs(pstack, traces, implicit_parent) : 0; + + if (head && head->length() > 0) { + + Selector_List_Obj retval; + // we have a parent selector in a simple compound list + // mix parent complex selector into the compound list + if (Cast((*head)[0])) { + retval = SASS_MEMORY_NEW(Selector_List, pstate()); + + // it turns out that real parent references reach + // across @at-root rules, which comes unexpected + if (parents == NULL && head->has_real_parent_ref()) { + int i = pstack.size() - 1; + while (!parents && i > -1) { + parents = pstack.at(i--); + } + } + + if (parents && parents->length()) { + if (tails && tails->length() > 0) { + for (size_t n = 0, nL = tails->length(); n < nL; ++n) { + for (size_t i = 0, iL = parents->length(); i < iL; ++i) { + Complex_Selector_Obj t = (*tails)[n]; + Complex_Selector_Obj parent = (*parents)[i]; + Complex_Selector_Obj s = SASS_MEMORY_CLONE(parent); + Complex_Selector_Obj ss = SASS_MEMORY_CLONE(this); + ss->tail(t ? SASS_MEMORY_CLONE(t) : NULL); + Compound_Selector_Obj h = SASS_MEMORY_COPY(head_); + // remove parent selector from sequence + if (h->length()) { + h->erase(h->begin()); + ss->head(h); + } else { + ss->head(NULL); + } + // adjust for parent selector (1 char) + // if (h->length()) { + // ParserState state(h->at(0)->pstate()); + // state.offset.column += 1; + // state.column -= 1; + // (*h)[0]->pstate(state); + // } + // keep old parser state + s->pstate(pstate()); + // append new tail + s->append(ss, traces); + retval->append(s); + } + } + } + // have no tails but parents + // loop above is inside out + else { + for (size_t i = 0, iL = parents->length(); i < iL; ++i) { + Complex_Selector_Obj parent = (*parents)[i]; + Complex_Selector_Obj s = SASS_MEMORY_CLONE(parent); + Complex_Selector_Obj ss = SASS_MEMORY_CLONE(this); + // this is only if valid if the parent has no trailing op + // otherwise we cannot append more simple selectors to head + if (parent->last()->combinator() != ANCESTOR_OF) { + traces.push_back(Backtrace(pstate())); + throw Exception::InvalidParent(parent, traces, ss); + } + ss->tail(tail ? 
SASS_MEMORY_CLONE(tail) : NULL); + Compound_Selector_Obj h = SASS_MEMORY_COPY(head_); + // remove parent selector from sequence + if (h->length()) { + h->erase(h->begin()); + ss->head(h); + } else { + ss->head(NULL); + } + // \/ IMO ruby sass bug \/ + ss->has_line_feed(false); + // adjust for parent selector (1 char) + // if (h->length()) { + // ParserState state(h->at(0)->pstate()); + // state.offset.column += 1; + // state.column -= 1; + // (*h)[0]->pstate(state); + // } + // keep old parser state + s->pstate(pstate()); + // append new tail + s->append(ss, traces); + retval->append(s); + } + } + } + // have no parent but some tails + else { + if (tails && tails->length() > 0) { + for (size_t n = 0, nL = tails->length(); n < nL; ++n) { + Complex_Selector_Obj cpy = SASS_MEMORY_CLONE(this); + cpy->tail(SASS_MEMORY_CLONE(tails->at(n))); + cpy->head(SASS_MEMORY_NEW(Compound_Selector, head->pstate())); + for (size_t i = 1, L = this->head()->length(); i < L; ++i) + cpy->head()->append((*this->head())[i]); + if (!cpy->head()->length()) cpy->head(0); + retval->append(cpy->skip_empty_reference()); + } + } + // have no parent nor tails + else { + Complex_Selector_Obj cpy = SASS_MEMORY_CLONE(this); + cpy->head(SASS_MEMORY_NEW(Compound_Selector, head->pstate())); + for (size_t i = 1, L = this->head()->length(); i < L; ++i) + cpy->head()->append((*this->head())[i]); + if (!cpy->head()->length()) cpy->head(0); + retval->append(cpy->skip_empty_reference()); + } + } + } + // no parent selector in head + else { + retval = this->tails(tails); + } + + for (Simple_Selector_Obj ss : head->elements()) { + if (Wrapped_Selector_Ptr ws = Cast(ss)) { + if (Selector_List_Ptr sl = Cast(ws->selector())) { + if (parents) ws->selector(sl->resolve_parent_refs(pstack, traces, implicit_parent)); + } + } + } + + return retval.detach(); + + } + // has no head + return this->tails(tails); + } + + Selector_List_Ptr Complex_Selector::tails(Selector_List_Ptr tails) + { + Selector_List_Ptr rv = SASS_MEMORY_NEW(Selector_List, pstate_); + if (tails && tails->length()) { + for (size_t i = 0, iL = tails->length(); i < iL; ++i) { + Complex_Selector_Obj pr = SASS_MEMORY_CLONE(this); + pr->tail(tails->at(i)); + rv->append(pr); + } + } + else { + rv->append(this); + } + return rv; + } + + // return the last tail that is defined + Complex_Selector_Obj Complex_Selector::first() + { + // declare variables used in loop + Complex_Selector_Obj cur = this; + Compound_Selector_Obj head; + // processing loop + while (cur) + { + // get the head + head = cur->head_; + // abort (and return) if it is not a parent selector + if (!head || head->length() != 1 || !Cast((*head)[0])) { + break; + } + // advance to next + cur = cur->tail_; + } + // result + return cur; + } + + // return the last tail that is defined + Complex_Selector_Obj Complex_Selector::last() + { + Complex_Selector_Ptr cur = this; + Complex_Selector_Ptr nxt = cur; + // loop until last + while (nxt) { + cur = nxt; + nxt = cur->tail(); + } + return cur; + } + + Complex_Selector::Combinator Complex_Selector::clear_innermost() + { + Combinator c; + if (!tail() || tail()->tail() == 0) + { c = combinator(); combinator(ANCESTOR_OF); tail(0); } + else + { c = tail()->clear_innermost(); } + return c; + } + + void Complex_Selector::set_innermost(Complex_Selector_Obj val, Combinator c) + { + if (!tail()) + { tail(val); combinator(c); } + else + { tail()->set_innermost(val, c); } + } + + void Complex_Selector::cloneChildren() + { + if (head()) head(SASS_MEMORY_CLONE(head())); + if (tail()) 
tail(SASS_MEMORY_CLONE(tail())); + } + + void Compound_Selector::cloneChildren() + { + for (size_t i = 0, l = length(); i < l; i++) { + at(i) = SASS_MEMORY_CLONE(at(i)); + } + } + + void Selector_List::cloneChildren() + { + for (size_t i = 0, l = length(); i < l; i++) { + at(i) = SASS_MEMORY_CLONE(at(i)); + } + } + + void Wrapped_Selector::cloneChildren() + { + selector(SASS_MEMORY_CLONE(selector())); + } + + // remove parent selector references + // basically unwraps parsed selectors + void Selector_List::remove_parent_selectors() + { + // Check every rhs selector against left hand list + for(size_t i = 0, L = length(); i < L; ++i) { + if (!(*this)[i]->head()) continue; + if ((*this)[i]->head()->is_empty_reference()) { + // simply move to the next tail if we have "no" combinator + if ((*this)[i]->combinator() == Complex_Selector::ANCESTOR_OF) { + if ((*this)[i]->tail()) { + if ((*this)[i]->has_line_feed()) { + (*this)[i]->tail()->has_line_feed(true); + } + (*this)[i] = (*this)[i]->tail(); + } + } + // otherwise remove the first item from head + else { + (*this)[i]->head()->erase((*this)[i]->head()->begin()); + } + } + } + } + + size_t Wrapped_Selector::hash() + { + if (hash_ == 0) { + hash_combine(hash_, Simple_Selector::hash()); + if (selector_) hash_combine(hash_, selector_->hash()); + } + return hash_; + } + bool Wrapped_Selector::has_parent_ref() const { + // if (has_reference()) return true; + if (!selector()) return false; + return selector()->has_parent_ref(); + } + bool Wrapped_Selector::has_real_parent_ref() const { + // if (has_reference()) return true; + if (!selector()) return false; + return selector()->has_real_parent_ref(); + } + unsigned long Wrapped_Selector::specificity() const + { + return selector_ ? selector_->specificity() : 0; + } + + + bool Selector_List::has_parent_ref() const + { + for (Complex_Selector_Obj s : elements()) { + if (s && s->has_parent_ref()) return true; + } + return false; + } + + bool Selector_List::has_real_parent_ref() const + { + for (Complex_Selector_Obj s : elements()) { + if (s && s->has_real_parent_ref()) return true; + } + return false; + } + + bool Selector_Schema::has_parent_ref() const + { + if (String_Schema_Obj schema = Cast(contents())) { + return schema->length() > 0 && Cast(schema->at(0)) != NULL; + } + return false; + } + + bool Selector_Schema::has_real_parent_ref() const + { + if (String_Schema_Obj schema = Cast(contents())) { + Parent_Selector_Obj p = Cast(schema->at(0)); + return schema->length() > 0 && p && p->is_real_parent_ref(); + } + return false; + } + + void Selector_List::adjust_after_pushing(Complex_Selector_Obj c) + { + // if (c->has_reference()) has_reference(true); + } + + // it's a superselector if every selector of the right side + // list is a superselector of the given left side selector + bool Complex_Selector::is_superselector_of(Selector_List_Obj sub, std::string wrapping) + { + // Check every rhs selector against left hand list + for(size_t i = 0, L = sub->length(); i < L; ++i) { + if (!is_superselector_of((*sub)[i], wrapping)) return false; + } + return true; + } + + // it's a superselector if every selector of the right side + // list is a superselector of the given left side selector + bool Selector_List::is_superselector_of(Selector_List_Obj sub, std::string wrapping) + { + // Check every rhs selector against left hand list + for(size_t i = 0, L = sub->length(); i < L; ++i) { + if (!is_superselector_of((*sub)[i], wrapping)) return false; + } + return true; + } + + // it's a superselector if every 
selector on the right side + // is a superselector of any one of the left side selectors + bool Selector_List::is_superselector_of(Compound_Selector_Obj sub, std::string wrapping) + { + // Check every lhs selector against right hand + for(size_t i = 0, L = length(); i < L; ++i) { + if ((*this)[i]->is_superselector_of(sub, wrapping)) return true; + } + return false; + } + + // it's a superselector if every selector on the right side + // is a superselector of any one of the left side selectors + bool Selector_List::is_superselector_of(Complex_Selector_Obj sub, std::string wrapping) + { + // Check every lhs selector against right hand + for(size_t i = 0, L = length(); i < L; ++i) { + if ((*this)[i]->is_superselector_of(sub)) return true; + } + return false; + } + + Selector_List_Ptr Selector_List::unify_with(Selector_List_Ptr rhs) { + std::vector unified_complex_selectors; + // Unify all of children with RHS's children, storing the results in `unified_complex_selectors` + for (size_t lhs_i = 0, lhs_L = length(); lhs_i < lhs_L; ++lhs_i) { + Complex_Selector_Obj seq1 = (*this)[lhs_i]; + for(size_t rhs_i = 0, rhs_L = rhs->length(); rhs_i < rhs_L; ++rhs_i) { + Complex_Selector_Ptr seq2 = rhs->at(rhs_i); + + Selector_List_Obj result = seq1->unify_with(seq2); + if( result ) { + for(size_t i = 0, L = result->length(); i < L; ++i) { + unified_complex_selectors.push_back( (*result)[i] ); + } + } + } + } + + // Creates the final Selector_List by combining all the complex selectors + Selector_List_Ptr final_result = SASS_MEMORY_NEW(Selector_List, pstate()); + for (auto itr = unified_complex_selectors.begin(); itr != unified_complex_selectors.end(); ++itr) { + final_result->append(*itr); + } + return final_result; + } + + void Selector_List::populate_extends(Selector_List_Obj extendee, Subset_Map& extends) + { + + Selector_List_Ptr extender = this; + for (auto complex_sel : extendee->elements()) { + Complex_Selector_Obj c = complex_sel; + + + // Ignore any parent selectors, until we find the first non Selectorerence head + Compound_Selector_Obj compound_sel = c->head(); + Complex_Selector_Obj pIter = complex_sel; + while (pIter) { + Compound_Selector_Obj pHead = pIter->head(); + if (pHead && Cast(pHead->elements()[0]) == NULL) { + compound_sel = pHead; + break; + } + + pIter = pIter->tail(); + } + + if (!pIter->head() || pIter->tail()) { + coreError("nested selectors may not be extended", c->pstate()); + } + + compound_sel->is_optional(extendee->is_optional()); + + for (size_t i = 0, L = extender->length(); i < L; ++i) { + extends.put(compound_sel, std::make_pair((*extender)[i], compound_sel)); + } + } + }; + + void Compound_Selector::append(Simple_Selector_Ptr element) + { + Vectorized::append(element); + pstate_.offset += element->pstate().offset; + } + + Compound_Selector_Ptr Compound_Selector::minus(Compound_Selector_Ptr rhs) + { + Compound_Selector_Ptr result = SASS_MEMORY_NEW(Compound_Selector, pstate()); + // result->has_parent_reference(has_parent_reference()); + + // not very efficient because it needs to preserve order + for (size_t i = 0, L = length(); i < L; ++i) + { + bool found = false; + std::string thisSelector((*this)[i]->to_string()); + for (size_t j = 0, M = rhs->length(); j < M; ++j) + { + if (thisSelector == (*rhs)[j]->to_string()) + { + found = true; + break; + } + } + if (!found) result->append((*this)[i]); + } + + return result; + } + + void Compound_Selector::mergeSources(ComplexSelectorSet& sources) + { + for (ComplexSelectorSet::iterator iterator = sources.begin(), endIterator 
= sources.end(); iterator != endIterator; ++iterator) { + this->sources_.insert(SASS_MEMORY_CLONE(*iterator)); + } + } + + Argument_Obj Arguments::get_rest_argument() + { + if (this->has_rest_argument()) { + for (Argument_Obj arg : this->elements()) { + if (arg->is_rest_argument()) { + return arg; + } + } + } + return NULL; + } + + Argument_Obj Arguments::get_keyword_argument() + { + if (this->has_keyword_argument()) { + for (Argument_Obj arg : this->elements()) { + if (arg->is_keyword_argument()) { + return arg; + } + } + } + return NULL; + } + + void Arguments::adjust_after_pushing(Argument_Obj a) + { + if (!a->name().empty()) { + if (has_keyword_argument()) { + coreError("named arguments must precede variable-length argument", a->pstate()); + } + has_named_arguments(true); + } + else if (a->is_rest_argument()) { + if (has_rest_argument()) { + coreError("functions and mixins may only be called with one variable-length argument", a->pstate()); + } + if (has_keyword_argument_) { + coreError("only keyword arguments may follow variable arguments", a->pstate()); + } + has_rest_argument(true); + } + else if (a->is_keyword_argument()) { + if (has_keyword_argument()) { + coreError("functions and mixins may only be called with one keyword argument", a->pstate()); + } + has_keyword_argument(true); + } + else { + if (has_rest_argument()) { + coreError("ordinal arguments must precede variable-length arguments", a->pstate()); + } + if (has_named_arguments()) { + coreError("ordinal arguments must precede named arguments", a->pstate()); + } + } + } + + bool Ruleset::is_invisible() const { + if (Selector_List_Ptr sl = Cast(selector())) { + for (size_t i = 0, L = sl->length(); i < L; ++i) + if (!(*sl)[i]->has_placeholder()) return false; + } + return true; + } + + bool Media_Block::is_invisible() const { + for (size_t i = 0, L = block()->length(); i < L; ++i) { + Statement_Obj stm = block()->at(i); + if (!stm->is_invisible()) return false; + } + return true; + } + + Number::Number(ParserState pstate, double val, std::string u, bool zero) + : Value(pstate), + Units(), + value_(val), + zero_(zero), + hash_(0) + { + size_t l = 0; + size_t r; + if (!u.empty()) { + bool nominator = true; + while (true) { + r = u.find_first_of("*/", l); + std::string unit(u.substr(l, r == std::string::npos ? r : r - l)); + if (!unit.empty()) { + if (nominator) numerators.push_back(unit); + else denominators.push_back(unit); + } + if (r == std::string::npos) break; + // ToDo: should error for multiple slashes + // if (!nominator && u[r] == '/') error(...) + if (u[r] == '/') + nominator = false; + // strange math parsing? 
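+ // Illustrative worked example (not in the original source): for a unit
+ // string u == "px*px/em" the loop above splits on '*' and '/', yielding
+ // numerators {"px", "px"} and denominators {"em"}; once a '/' has been
+ // seen, every following unit token is treated as a denominator.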
+ // else if (u[r] == '*') + // nominator = true; + l = r + 1; + } + } + concrete_type(NUMBER); + } + + // cancel out unnecessary units + void Number::reduce() + { + // apply conversion factor + value_ *= this->Units::reduce(); + } + + void Number::normalize() + { + // apply conversion factor + value_ *= this->Units::normalize(); + } + + bool Custom_Warning::operator== (const Expression& rhs) const + { + if (Custom_Warning_Ptr_Const r = Cast(&rhs)) { + return message() == r->message(); + } + return false; + } + + bool Custom_Error::operator== (const Expression& rhs) const + { + if (Custom_Error_Ptr_Const r = Cast(&rhs)) { + return message() == r->message(); + } + return false; + } + + bool Number::operator== (const Expression& rhs) const + { + if (auto rhsnr = Cast(&rhs)) { + return *this == *rhsnr; + } + return false; + } + + bool Number::operator== (const Number& rhs) const + { + Number l(*this), r(rhs); l.reduce(); r.reduce(); + size_t lhs_units = l.numerators.size() + l.denominators.size(); + size_t rhs_units = r.numerators.size() + r.denominators.size(); + // unitless and only having one unit seems equivalent (will change in future) + if (!lhs_units || !rhs_units) { + return NEAR_EQUAL(l.value(), r.value()); + } + l.normalize(); r.normalize(); + Units &lhs_unit = l, &rhs_unit = r; + return lhs_unit == rhs_unit && + NEAR_EQUAL(l.value(), r.value()); + } + + bool Number::operator< (const Number& rhs) const + { + Number l(*this), r(rhs); l.reduce(); r.reduce(); + size_t lhs_units = l.numerators.size() + l.denominators.size(); + size_t rhs_units = r.numerators.size() + r.denominators.size(); + // unitless and only having one unit seems equivalent (will change in future) + if (!lhs_units || !rhs_units) { + return l.value() < r.value(); + } + l.normalize(); r.normalize(); + Units &lhs_unit = l, &rhs_unit = r; + if (!(lhs_unit == rhs_unit)) { + /* ToDo: do we always get usefull backtraces? 
*/ + throw Exception::IncompatibleUnits(rhs, *this); + } + return lhs_unit < rhs_unit || + l.value() < r.value(); + } + + bool String_Quoted::operator== (const Expression& rhs) const + { + if (String_Quoted_Ptr_Const qstr = Cast(&rhs)) { + return (value() == qstr->value()); + } else if (String_Constant_Ptr_Const cstr = Cast(&rhs)) { + return (value() == cstr->value()); + } + return false; + } + + bool String_Constant::is_invisible() const { + return value_.empty() && quote_mark_ == 0; + } + + bool String_Constant::operator== (const Expression& rhs) const + { + if (String_Quoted_Ptr_Const qstr = Cast(&rhs)) { + return (value() == qstr->value()); + } else if (String_Constant_Ptr_Const cstr = Cast(&rhs)) { + return (value() == cstr->value()); + } + return false; + } + + bool String_Schema::is_left_interpolant(void) const + { + return length() && first()->is_left_interpolant(); + } + bool String_Schema::is_right_interpolant(void) const + { + return length() && last()->is_right_interpolant(); + } + + bool String_Schema::operator== (const Expression& rhs) const + { + if (String_Schema_Ptr_Const r = Cast(&rhs)) { + if (length() != r->length()) return false; + for (size_t i = 0, L = length(); i < L; ++i) { + Expression_Obj rv = (*r)[i]; + Expression_Obj lv = (*this)[i]; + if (!lv || !rv) return false; + if (!(*lv == *rv)) return false; + } + return true; + } + return false; + } + + bool Boolean::operator== (const Expression& rhs) const + { + if (Boolean_Ptr_Const r = Cast(&rhs)) { + return (value() == r->value()); + } + return false; + } + + bool Color::operator== (const Expression& rhs) const + { + if (Color_Ptr_Const r = Cast(&rhs)) { + return r_ == r->r() && + g_ == r->g() && + b_ == r->b() && + a_ == r->a(); + } + return false; + } + + bool List::operator== (const Expression& rhs) const + { + if (List_Ptr_Const r = Cast(&rhs)) { + if (length() != r->length()) return false; + if (separator() != r->separator()) return false; + if (is_bracketed() != r->is_bracketed()) return false; + for (size_t i = 0, L = length(); i < L; ++i) { + Expression_Obj rv = r->at(i); + Expression_Obj lv = this->at(i); + if (!lv || !rv) return false; + if (!(*lv == *rv)) return false; + } + return true; + } + return false; + } + + bool Map::operator== (const Expression& rhs) const + { + if (Map_Ptr_Const r = Cast(&rhs)) { + if (length() != r->length()) return false; + for (auto key : keys()) { + Expression_Obj lv = at(key); + Expression_Obj rv = r->at(key); + if (!rv || !lv) return false; + if (!(*lv == *rv)) return false; + } + return true; + } + return false; + } + + bool Null::operator== (const Expression& rhs) const + { + return rhs.concrete_type() == NULL_VAL; + } + + bool Function::operator== (const Expression& rhs) const + { + if (Function_Ptr_Const r = Cast(&rhs)) { + Definition_Ptr_Const d1 = Cast(definition()); + Definition_Ptr_Const d2 = Cast(r->definition()); + return d1 && d2 && d1 == d2 && is_css() == r->is_css(); + } + return false; + } + + size_t List::size() const { + if (!is_arglist_) return length(); + // arglist expects a list of arguments + // so we need to break before keywords + for (size_t i = 0, L = length(); i < L; ++i) { + Expression_Obj obj = this->at(i); + if (Argument_Ptr arg = Cast(obj)) { + if (!arg->name().empty()) return i; + } + } + return length(); + } + + Expression_Obj Hashed::at(Expression_Obj k) const + { + if (elements_.count(k)) + { return elements_.at(k); } + else { return NULL; } + } + + bool Binary_Expression::is_left_interpolant(void) const + { + return is_interpolant() || 
(left() && left()->is_left_interpolant()); + } + bool Binary_Expression::is_right_interpolant(void) const + { + return is_interpolant() || (right() && right()->is_right_interpolant()); + } + + const std::string AST_Node::to_string(Sass_Inspect_Options opt) const + { + Sass_Output_Options out(opt); + Emitter emitter(out); + Inspect i(emitter); + i.in_declaration = true; + // ToDo: inspect should be const + const_cast(this)->perform(&i); + return i.get_buffer(); + } + + const std::string AST_Node::to_string() const + { + return to_string({ NESTED, 5 }); + } + + std::string String_Quoted::inspect() const + { + return quote(value_, '*'); + } + + std::string String_Constant::inspect() const + { + return quote(value_, '*'); + } + + bool Declaration::is_invisible() const + { + if (is_custom_property()) return false; + + return !(value_ && value_->concrete_type() != Expression::NULL_VAL); + } + + ////////////////////////////////////////////////////////////////////////////////////////// + // Additional method on Lists to retrieve values directly or from an encompassed Argument. + ////////////////////////////////////////////////////////////////////////////////////////// + Expression_Obj List::value_at_index(size_t i) { + Expression_Obj obj = this->at(i); + if (is_arglist_) { + if (Argument_Ptr arg = Cast(obj)) { + return arg->value(); + } else { + return obj; + } + } else { + return obj; + } + } + + ////////////////////////////////////////////////////////////////////////////////////////// + // Convert map to (key, value) list. + ////////////////////////////////////////////////////////////////////////////////////////// + List_Obj Map::to_list(ParserState& pstate) { + List_Obj ret = SASS_MEMORY_NEW(List, pstate, length(), SASS_COMMA); + + for (auto key : keys()) { + List_Obj l = SASS_MEMORY_NEW(List, pstate, 2); + l->append(key); + l->append(at(key)); + ret->append(l); + } + + return ret; + } + + ////////////////////////////////////////////////////////////////////////////////////////// + // Copy implementations + ////////////////////////////////////////////////////////////////////////////////////////// + + #ifdef DEBUG_SHARED_PTR + + #define IMPLEMENT_AST_OPERATORS(klass) \ + klass##_Ptr klass::copy(std::string file, size_t line) const { \ + klass##_Ptr cpy = new klass(this); \ + cpy->trace(file, line); \ + return cpy; \ + } \ + klass##_Ptr klass::clone(std::string file, size_t line) const { \ + klass##_Ptr cpy = copy(file, line); \ + cpy->cloneChildren(); \ + return cpy; \ + } \ + + #else + + #define IMPLEMENT_AST_OPERATORS(klass) \ + klass##_Ptr klass::copy() const { \ + return new klass(this); \ + } \ + klass##_Ptr klass::clone() const { \ + klass##_Ptr cpy = copy(); \ + cpy->cloneChildren(); \ + return cpy; \ + } \ + + #endif + + IMPLEMENT_AST_OPERATORS(Supports_Operator); + IMPLEMENT_AST_OPERATORS(Supports_Negation); + IMPLEMENT_AST_OPERATORS(Compound_Selector); + IMPLEMENT_AST_OPERATORS(Complex_Selector); + IMPLEMENT_AST_OPERATORS(Element_Selector); + IMPLEMENT_AST_OPERATORS(Class_Selector); + IMPLEMENT_AST_OPERATORS(Id_Selector); + IMPLEMENT_AST_OPERATORS(Pseudo_Selector); + IMPLEMENT_AST_OPERATORS(Wrapped_Selector); + IMPLEMENT_AST_OPERATORS(Selector_List); + IMPLEMENT_AST_OPERATORS(Ruleset); + IMPLEMENT_AST_OPERATORS(Media_Block); + IMPLEMENT_AST_OPERATORS(Custom_Warning); + IMPLEMENT_AST_OPERATORS(Custom_Error); + IMPLEMENT_AST_OPERATORS(List); + IMPLEMENT_AST_OPERATORS(Map); + IMPLEMENT_AST_OPERATORS(Function); + IMPLEMENT_AST_OPERATORS(Number); + IMPLEMENT_AST_OPERATORS(Binary_Expression); 
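// Illustrative sketch (simplified, not libsass code; the type below is invented
// for illustration): the copy()/clone() pair expanded by IMPLEMENT_AST_OPERATORS
// above amounts to a shallow copy versus a copy whose children are then
// recursively duplicated via cloneChildren().
#include <memory>
#include <vector>
struct Node {
  std::vector<std::shared_ptr<Node>> children;
  // copy(): duplicate this node only; children are still shared with the original
  std::shared_ptr<Node> copy() const { return std::make_shared<Node>(*this); }
  // clone(): copy, then replace each child with its own clone (deep copy)
  std::shared_ptr<Node> clone() const {
    auto cpy = copy();
    for (auto& child : cpy->children) child = child->clone();
    return cpy;
  }
};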
+ IMPLEMENT_AST_OPERATORS(String_Schema); + IMPLEMENT_AST_OPERATORS(String_Constant); + IMPLEMENT_AST_OPERATORS(String_Quoted); + IMPLEMENT_AST_OPERATORS(Boolean); + IMPLEMENT_AST_OPERATORS(Color); + IMPLEMENT_AST_OPERATORS(Null); + IMPLEMENT_AST_OPERATORS(Parent_Selector); + IMPLEMENT_AST_OPERATORS(Import); + IMPLEMENT_AST_OPERATORS(Import_Stub); + IMPLEMENT_AST_OPERATORS(Function_Call); + IMPLEMENT_AST_OPERATORS(Directive); + IMPLEMENT_AST_OPERATORS(At_Root_Block); + IMPLEMENT_AST_OPERATORS(Supports_Block); + IMPLEMENT_AST_OPERATORS(While); + IMPLEMENT_AST_OPERATORS(Each); + IMPLEMENT_AST_OPERATORS(For); + IMPLEMENT_AST_OPERATORS(If); + IMPLEMENT_AST_OPERATORS(Mixin_Call); + IMPLEMENT_AST_OPERATORS(Extension); + IMPLEMENT_AST_OPERATORS(Media_Query); + IMPLEMENT_AST_OPERATORS(Media_Query_Expression); + IMPLEMENT_AST_OPERATORS(Debug); + IMPLEMENT_AST_OPERATORS(Error); + IMPLEMENT_AST_OPERATORS(Warning); + IMPLEMENT_AST_OPERATORS(Assignment); + IMPLEMENT_AST_OPERATORS(Return); + IMPLEMENT_AST_OPERATORS(At_Root_Query); + IMPLEMENT_AST_OPERATORS(Variable); + IMPLEMENT_AST_OPERATORS(Comment); + IMPLEMENT_AST_OPERATORS(Attribute_Selector); + IMPLEMENT_AST_OPERATORS(Supports_Interpolation); + IMPLEMENT_AST_OPERATORS(Supports_Declaration); + IMPLEMENT_AST_OPERATORS(Supports_Condition); + IMPLEMENT_AST_OPERATORS(Parameters); + IMPLEMENT_AST_OPERATORS(Parameter); + IMPLEMENT_AST_OPERATORS(Arguments); + IMPLEMENT_AST_OPERATORS(Argument); + IMPLEMENT_AST_OPERATORS(Unary_Expression); + IMPLEMENT_AST_OPERATORS(Function_Call_Schema); + IMPLEMENT_AST_OPERATORS(Block); + IMPLEMENT_AST_OPERATORS(Content); + IMPLEMENT_AST_OPERATORS(Trace); + IMPLEMENT_AST_OPERATORS(Keyframe_Rule); + IMPLEMENT_AST_OPERATORS(Bubble); + IMPLEMENT_AST_OPERATORS(Selector_Schema); + IMPLEMENT_AST_OPERATORS(Placeholder_Selector); + IMPLEMENT_AST_OPERATORS(Definition); + IMPLEMENT_AST_OPERATORS(Declaration); +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/ast.hpp b/mybulma/node_modules/node-sass/src/libsass/src/ast.hpp new file mode 100644 index 0000000..a2be868 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/ast.hpp @@ -0,0 +1,3049 @@ +#ifndef SASS_AST_H +#define SASS_AST_H + +#include "sass.hpp" +#include +#include +#include +#include +#include +#include +#include +#include +#include "sass/base.h" +#include "ast_fwd_decl.hpp" + +#ifdef DEBUG_SHARED_PTR + +#define ATTACH_VIRTUAL_AST_OPERATIONS(klass) \ + virtual klass##_Ptr copy(std::string, size_t) const = 0; \ + virtual klass##_Ptr clone(std::string, size_t) const = 0; \ + +#define ATTACH_AST_OPERATIONS(klass) \ + virtual klass##_Ptr copy(std::string, size_t) const; \ + virtual klass##_Ptr clone(std::string, size_t) const; \ + +#else + +#define ATTACH_VIRTUAL_AST_OPERATIONS(klass) \ + virtual klass##_Ptr copy() const = 0; \ + virtual klass##_Ptr clone() const = 0; \ + +#define ATTACH_AST_OPERATIONS(klass) \ + virtual klass##_Ptr copy() const; \ + virtual klass##_Ptr clone() const; \ + +#endif + +#ifdef __clang__ + +/* + * There are some overloads used here that trigger the clang overload + * hiding warning. 
Specifically: + * + * Type type() which hides string type() from Expression + * + */ +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Woverloaded-virtual" + +#endif + +#include "util.hpp" +#include "units.hpp" +#include "context.hpp" +#include "position.hpp" +#include "constants.hpp" +#include "operation.hpp" +#include "position.hpp" +#include "inspect.hpp" +#include "source_map.hpp" +#include "environment.hpp" +#include "error_handling.hpp" +#include "ast_def_macros.hpp" +#include "ast_fwd_decl.hpp" +#include "source_map.hpp" + +#include "sass.h" + +namespace Sass { + + // easier to search with name + const bool DELAYED = true; + + // ToDo: should this really be hardcoded + // Note: most methods follow precision option + const double NUMBER_EPSILON = 0.00000000000001; + + // macro to test if numbers are equal within a small error margin + #define NEAR_EQUAL(lhs, rhs) std::fabs(lhs - rhs) < NUMBER_EPSILON + + // ToDo: where does this fit best? + // We don't share this with C-API? + class Operand { + public: + Operand(Sass_OP operand, bool ws_before = false, bool ws_after = false) + : operand(operand), ws_before(ws_before), ws_after(ws_after) + { } + public: + enum Sass_OP operand; + bool ws_before; + bool ws_after; + }; + + ////////////////////////////////////////////////////////// + // `hash_combine` comes from boost (functional/hash): + // http://www.boost.org/doc/libs/1_35_0/doc/html/hash/combine.html + // Boost Software License - Version 1.0 + // http://www.boost.org/users/license.html + template + void hash_combine (std::size_t& seed, const T& val) + { + seed ^= std::hash()(val) + 0x9e3779b9 + + (seed<<6) + (seed>>2); + } + ////////////////////////////////////////////////////////// + + ////////////////////////////////////////////////////////// + // Abstract base class for all abstract syntax tree nodes. + ////////////////////////////////////////////////////////// + class AST_Node : public SharedObj { + ADD_PROPERTY(ParserState, pstate) + public: + AST_Node(ParserState pstate) + : pstate_(pstate) + { } + AST_Node(const AST_Node* ptr) + : pstate_(ptr->pstate_) + { } + + // AST_Node(AST_Node& ptr) = delete; + + virtual ~AST_Node() = 0; + virtual size_t hash() { return 0; } + ATTACH_VIRTUAL_AST_OPERATIONS(AST_Node); + virtual std::string inspect() const { return to_string({ INSPECT, 5 }); } + virtual std::string to_sass() const { return to_string({ TO_SASS, 5 }); } + virtual const std::string to_string(Sass_Inspect_Options opt) const; + virtual const std::string to_string() const; + virtual void cloneChildren() {}; + // generic find function (not fully implemented yet) + // ToDo: add specific implementions to all children + virtual bool find ( bool (*f)(AST_Node_Obj) ) { return f(this); }; + public: + void update_pstate(const ParserState& pstate); + public: + Offset off() { return pstate(); } + Position pos() { return pstate(); } + ATTACH_OPERATIONS() + }; + inline AST_Node::~AST_Node() { } + + ////////////////////////////////////////////////////////////////////// + // define cast template now (need complete type) + ////////////////////////////////////////////////////////////////////// + + template + T* Cast(AST_Node* ptr) { + return ptr && typeid(T) == typeid(*ptr) ? + static_cast(ptr) : NULL; + }; + + template + const T* Cast(const AST_Node* ptr) { + return ptr && typeid(T) == typeid(*ptr) ? + static_cast(ptr) : NULL; + }; + + ////////////////////////////////////////////////////////////////////// + // Abstract base class for expressions. 
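// Illustrative sketch (not part of libsass; the helper is repeated here only so
// the snippet stands alone): typical use of the hash_combine helper defined
// above -- fold the hashes of several fields into one seed, as the AST nodes'
// hash() overrides do.
#include <cstddef>
#include <functional>
#include <string>
template <typename T>
void hash_combine(std::size_t& seed, const T& val) {
  seed ^= std::hash<T>()(val) + 0x9e3779b9 + (seed << 6) + (seed >> 2);
}
struct Dimension {
  double value;
  std::string unit;
  std::size_t hash() const {
    std::size_t seed = std::hash<double>()(value);  // start from one field
    hash_combine(seed, unit);                       // mix in the others
    return seed;
  }
};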
This side of the AST hierarchy + // represents elements in value contexts, which exist primarily to be + // evaluated and returned. + ////////////////////////////////////////////////////////////////////// + class Expression : public AST_Node { + public: + enum Concrete_Type { + NONE, + BOOLEAN, + NUMBER, + COLOR, + STRING, + LIST, + MAP, + SELECTOR, + NULL_VAL, + FUNCTION_VAL, + C_WARNING, + C_ERROR, + FUNCTION, + VARIABLE, + NUM_TYPES + }; + enum Simple_Type { + SIMPLE, + ATTR_SEL, + PSEUDO_SEL, + WRAPPED_SEL, + }; + private: + // expressions in some contexts shouldn't be evaluated + ADD_PROPERTY(bool, is_delayed) + ADD_PROPERTY(bool, is_expanded) + ADD_PROPERTY(bool, is_interpolant) + ADD_PROPERTY(Concrete_Type, concrete_type) + public: + Expression(ParserState pstate, + bool d = false, bool e = false, bool i = false, Concrete_Type ct = NONE) + : AST_Node(pstate), + is_delayed_(d), + is_expanded_(e), + is_interpolant_(i), + concrete_type_(ct) + { } + Expression(const Expression* ptr) + : AST_Node(ptr), + is_delayed_(ptr->is_delayed_), + is_expanded_(ptr->is_expanded_), + is_interpolant_(ptr->is_interpolant_), + concrete_type_(ptr->concrete_type_) + { } + virtual operator bool() { return true; } + virtual ~Expression() { } + virtual std::string type() const { return ""; /* TODO: raise an error? */ } + virtual bool is_invisible() const { return false; } + static std::string type_name() { return ""; } + virtual bool is_false() { return false; } + // virtual bool is_true() { return !is_false(); } + virtual bool operator== (const Expression& rhs) const { return false; } + virtual bool eq(const Expression& rhs) const { return *this == rhs; }; + virtual void set_delayed(bool delayed) { is_delayed(delayed); } + virtual bool has_interpolant() const { return is_interpolant(); } + virtual bool is_left_interpolant() const { return is_interpolant(); } + virtual bool is_right_interpolant() const { return is_interpolant(); } + virtual std::string inspect() const { return to_string({ INSPECT, 5 }); } + virtual std::string to_sass() const { return to_string({ TO_SASS, 5 }); } + ATTACH_VIRTUAL_AST_OPERATIONS(Expression); + virtual size_t hash() { return 0; } + }; + + ////////////////////////////////////////////////////////////////////// + // Still just an expression, but with a to_string method + ////////////////////////////////////////////////////////////////////// + class PreValue : public Expression { + public: + PreValue(ParserState pstate, + bool d = false, bool e = false, bool i = false, Concrete_Type ct = NONE) + : Expression(pstate, d, e, i, ct) + { } + PreValue(const PreValue* ptr) + : Expression(ptr) + { } + ATTACH_VIRTUAL_AST_OPERATIONS(PreValue); + virtual ~PreValue() { } + }; + + ////////////////////////////////////////////////////////////////////// + // base class for values that support operations + ////////////////////////////////////////////////////////////////////// + class Value : public Expression { + public: + Value(ParserState pstate, + bool d = false, bool e = false, bool i = false, Concrete_Type ct = NONE) + : Expression(pstate, d, e, i, ct) + { } + Value(const Value* ptr) + : Expression(ptr) + { } + ATTACH_VIRTUAL_AST_OPERATIONS(Value); + virtual bool operator== (const Expression& rhs) const = 0; + }; +} + +///////////////////////////////////////////////////////////////////////////////////// +// Hash method specializations for std::unordered_map to work with Sass::Expression +///////////////////////////////////////////////////////////////////////////////////// + +namespace std { + 
template<> + struct hash + { + size_t operator()(Sass::Expression_Obj s) const + { + return s->hash(); + } + }; + template<> + struct equal_to + { + bool operator()( Sass::Expression_Obj lhs, Sass::Expression_Obj rhs) const + { + return lhs->hash() == rhs->hash(); + } + }; +} + +namespace Sass { + + ///////////////////////////////////////////////////////////////////////////// + // Mixin class for AST nodes that should behave like vectors. Uses the + // "Template Method" design pattern to allow subclasses to adjust their flags + // when certain objects are pushed. + ///////////////////////////////////////////////////////////////////////////// + template + class Vectorized { + std::vector elements_; + protected: + size_t hash_; + void reset_hash() { hash_ = 0; } + virtual void adjust_after_pushing(T element) { } + public: + Vectorized(size_t s = 0) : elements_(std::vector()), hash_(0) + { elements_.reserve(s); } + virtual ~Vectorized() = 0; + size_t length() const { return elements_.size(); } + bool empty() const { return elements_.empty(); } + void clear() { return elements_.clear(); } + T last() const { return elements_.back(); } + T first() const { return elements_.front(); } + T& operator[](size_t i) { return elements_[i]; } + virtual const T& at(size_t i) const { return elements_.at(i); } + virtual T& at(size_t i) { return elements_.at(i); } + const T& operator[](size_t i) const { return elements_[i]; } + virtual void append(T element) + { + if (element) { + reset_hash(); + elements_.push_back(element); + adjust_after_pushing(element); + } + } + virtual void concat(Vectorized* v) + { + for (size_t i = 0, L = v->length(); i < L; ++i) this->append((*v)[i]); + } + Vectorized& unshift(T element) + { + elements_.insert(elements_.begin(), element); + return *this; + } + std::vector& elements() { return elements_; } + const std::vector& elements() const { return elements_; } + std::vector& elements(std::vector& e) { elements_ = e; return elements_; } + + virtual size_t hash() + { + if (hash_ == 0) { + for (T& el : elements_) { + hash_combine(hash_, el->hash()); + } + } + return hash_; + } + + typename std::vector::iterator end() { return elements_.end(); } + typename std::vector::iterator begin() { return elements_.begin(); } + typename std::vector::const_iterator end() const { return elements_.end(); } + typename std::vector::const_iterator begin() const { return elements_.begin(); } + typename std::vector::iterator erase(typename std::vector::iterator el) { return elements_.erase(el); } + typename std::vector::const_iterator erase(typename std::vector::const_iterator el) { return elements_.erase(el); } + + }; + template + inline Vectorized::~Vectorized() { } + + ///////////////////////////////////////////////////////////////////////////// + // Mixin class for AST nodes that should behave like a hash table. Uses an + // extra internally to maintain insertion order for interation. 
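// Illustrative sketch (standalone, not libsass code): the Vectorized mixin
// above uses the "Template Method" pattern -- append() does the bookkeeping
// and then calls the adjust_after_pushing() hook so subclasses can update
// their own flags without overriding append() itself.
#include <cstddef>
#include <vector>
template <typename T>
class VectorLike {
  std::vector<T> elements_;
protected:
  virtual void adjust_after_pushing(const T&) {}    // hook for subclasses
public:
  virtual ~VectorLike() = default;
  void append(const T& element) {
    elements_.push_back(element);
    adjust_after_pushing(element);                  // let the subclass react
  }
  std::size_t length() const { return elements_.size(); }
};
// A subclass that invalidates a cached flag whenever something is pushed:
class ExpandedList : public VectorLike<int> {
  bool is_expanded_ = true;
  void adjust_after_pushing(const int&) override { is_expanded_ = false; }
};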
+ ///////////////////////////////////////////////////////////////////////////// + class Hashed { + private: + ExpressionMap elements_; + std::vector list_; + protected: + size_t hash_; + Expression_Obj duplicate_key_; + void reset_hash() { hash_ = 0; } + void reset_duplicate_key() { duplicate_key_ = 0; } + virtual void adjust_after_pushing(std::pair p) { } + public: + Hashed(size_t s = 0) + : elements_(ExpressionMap(s)), + list_(std::vector()), + hash_(0), duplicate_key_(NULL) + { elements_.reserve(s); list_.reserve(s); } + virtual ~Hashed(); + size_t length() const { return list_.size(); } + bool empty() const { return list_.empty(); } + bool has(Expression_Obj k) const { return elements_.count(k) == 1; } + Expression_Obj at(Expression_Obj k) const; + bool has_duplicate_key() const { return duplicate_key_ != 0; } + Expression_Obj get_duplicate_key() const { return duplicate_key_; } + const ExpressionMap elements() { return elements_; } + Hashed& operator<<(std::pair p) + { + reset_hash(); + + if (!has(p.first)) list_.push_back(p.first); + else if (!duplicate_key_) duplicate_key_ = p.first; + + elements_[p.first] = p.second; + + adjust_after_pushing(p); + return *this; + } + Hashed& operator+=(Hashed* h) + { + if (length() == 0) { + this->elements_ = h->elements_; + this->list_ = h->list_; + return *this; + } + + for (auto key : h->keys()) { + *this << std::make_pair(key, h->at(key)); + } + + reset_duplicate_key(); + return *this; + } + const ExpressionMap& pairs() const { return elements_; } + const std::vector& keys() const { return list_; } + +// std::unordered_map::iterator end() { return elements_.end(); } +// std::unordered_map::iterator begin() { return elements_.begin(); } +// std::unordered_map::const_iterator end() const { return elements_.end(); } +// std::unordered_map::const_iterator begin() const { return elements_.begin(); } + + }; + inline Hashed::~Hashed() { } + + + ///////////////////////////////////////////////////////////////////////// + // Abstract base class for statements. This side of the AST hierarchy + // represents elements in expansion contexts, which exist primarily to be + // rewritten and macro-expanded. + ///////////////////////////////////////////////////////////////////////// + class Statement : public AST_Node { + public: + enum Statement_Type { + NONE, + RULESET, + MEDIA, + DIRECTIVE, + SUPPORTS, + ATROOT, + BUBBLE, + CONTENT, + KEYFRAMERULE, + DECLARATION, + ASSIGNMENT, + IMPORT_STUB, + IMPORT, + COMMENT, + WARNING, + RETURN, + EXTEND, + ERROR, + DEBUGSTMT, + WHILE, + EACH, + FOR, + IF + }; + private: + ADD_PROPERTY(Statement_Type, statement_type) + ADD_PROPERTY(size_t, tabs) + ADD_PROPERTY(bool, group_end) + public: + Statement(ParserState pstate, Statement_Type st = NONE, size_t t = 0) + : AST_Node(pstate), statement_type_(st), tabs_(t), group_end_(false) + { } + Statement(const Statement* ptr) + : AST_Node(ptr), + statement_type_(ptr->statement_type_), + tabs_(ptr->tabs_), + group_end_(ptr->group_end_) + { } + virtual ~Statement() = 0; + // needed for rearranging nested rulesets during CSS emission + virtual bool is_invisible() const { return false; } + virtual bool bubbles() { return false; } + virtual bool has_content() + { + return statement_type_ == CONTENT; + } + }; + inline Statement::~Statement() { } + + //////////////////////// + // Blocks of statements. 
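// Illustrative sketch (simplified from the Hashed mixin above; the class name
// is invented): pairing a hash map with a key vector keeps lookups fast while
// preserving insertion order for iteration, and the first re-inserted key is
// remembered so callers can report duplicate map keys.
#include <string>
#include <unordered_map>
#include <vector>
class OrderedMap {
  std::unordered_map<std::string, int> elements_;
  std::vector<std::string> keys_;          // insertion order
  std::string duplicate_key_;              // first key seen twice, if any
public:
  void insert(const std::string& key, int value) {
    if (!elements_.count(key)) keys_.push_back(key);
    else if (duplicate_key_.empty()) duplicate_key_ = key;
    elements_[key] = value;                // later values overwrite earlier ones
  }
  const std::vector<std::string>& keys() const { return keys_; }
  bool has_duplicate_key() const { return !duplicate_key_.empty(); }
};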
+ //////////////////////// + class Block : public Statement, public Vectorized { + ADD_PROPERTY(bool, is_root) + // needed for properly formatted CSS emission + protected: + void adjust_after_pushing(Statement_Obj s) + { + } + public: + Block(ParserState pstate, size_t s = 0, bool r = false) + : Statement(pstate), + Vectorized(s), + is_root_(r) + { } + Block(const Block* ptr) + : Statement(ptr), + Vectorized(*ptr), + is_root_(ptr->is_root_) + { } + virtual bool has_content() + { + for (size_t i = 0, L = elements().size(); i < L; ++i) { + if (elements()[i]->has_content()) return true; + } + return Statement::has_content(); + } + ATTACH_AST_OPERATIONS(Block) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////////////////////////////// + // Abstract base class for statements that contain blocks of statements. + //////////////////////////////////////////////////////////////////////// + class Has_Block : public Statement { + ADD_PROPERTY(Block_Obj, block) + public: + Has_Block(ParserState pstate, Block_Obj b) + : Statement(pstate), block_(b) + { } + Has_Block(const Has_Block* ptr) + : Statement(ptr), block_(ptr->block_) + { } + virtual bool has_content() + { + return (block_ && block_->has_content()) || Statement::has_content(); + } + virtual ~Has_Block() = 0; + }; + inline Has_Block::~Has_Block() { } + + ///////////////////////////////////////////////////////////////////////////// + // Rulesets (i.e., sets of styles headed by a selector and containing a block + // of style declarations. + ///////////////////////////////////////////////////////////////////////////// + class Ruleset : public Has_Block { + ADD_PROPERTY(Selector_List_Obj, selector) + ADD_PROPERTY(bool, is_root); + public: + Ruleset(ParserState pstate, Selector_List_Obj s = 0, Block_Obj b = 0) + : Has_Block(pstate, b), selector_(s), is_root_(false) + { statement_type(RULESET); } + Ruleset(const Ruleset* ptr) + : Has_Block(ptr), + selector_(ptr->selector_), + is_root_(ptr->is_root_) + { statement_type(RULESET); } + bool is_invisible() const; + ATTACH_AST_OPERATIONS(Ruleset) + ATTACH_OPERATIONS() + }; + + ///////////////// + // Bubble. + ///////////////// + class Bubble : public Statement { + ADD_PROPERTY(Statement_Obj, node) + ADD_PROPERTY(bool, group_end) + public: + Bubble(ParserState pstate, Statement_Obj n, Statement_Obj g = 0, size_t t = 0) + : Statement(pstate, Statement::BUBBLE, t), node_(n), group_end_(g == 0) + { } + Bubble(const Bubble* ptr) + : Statement(ptr), + node_(ptr->node_), + group_end_(ptr->group_end_) + { } + bool bubbles() { return true; } + ATTACH_AST_OPERATIONS(Bubble) + ATTACH_OPERATIONS() + }; + + ///////////////// + // Trace. + ///////////////// + class Trace : public Has_Block { + ADD_CONSTREF(char, type) + ADD_CONSTREF(std::string, name) + public: + Trace(ParserState pstate, std::string n, Block_Obj b = 0, char type = 'm') + : Has_Block(pstate, b), type_(type), name_(n) + { } + Trace(const Trace* ptr) + : Has_Block(ptr), + type_(ptr->type_), + name_(ptr->name_) + { } + ATTACH_AST_OPERATIONS(Trace) + ATTACH_OPERATIONS() + }; + + ///////////////// + // Media queries. 
+ ///////////////// + class Media_Block : public Has_Block { + ADD_PROPERTY(List_Obj, media_queries) + public: + Media_Block(ParserState pstate, List_Obj mqs, Block_Obj b) + : Has_Block(pstate, b), media_queries_(mqs) + { statement_type(MEDIA); } + Media_Block(const Media_Block* ptr) + : Has_Block(ptr), media_queries_(ptr->media_queries_) + { statement_type(MEDIA); } + bool bubbles() { return true; } + bool is_invisible() const; + ATTACH_AST_OPERATIONS(Media_Block) + ATTACH_OPERATIONS() + }; + + /////////////////////////////////////////////////////////////////////// + // At-rules -- arbitrary directives beginning with "@" that may have an + // optional statement block. + /////////////////////////////////////////////////////////////////////// + class Directive : public Has_Block { + ADD_CONSTREF(std::string, keyword) + ADD_PROPERTY(Selector_List_Obj, selector) + ADD_PROPERTY(Expression_Obj, value) + public: + Directive(ParserState pstate, std::string kwd, Selector_List_Obj sel = 0, Block_Obj b = 0, Expression_Obj val = 0) + : Has_Block(pstate, b), keyword_(kwd), selector_(sel), value_(val) // set value manually if needed + { statement_type(DIRECTIVE); } + Directive(const Directive* ptr) + : Has_Block(ptr), + keyword_(ptr->keyword_), + selector_(ptr->selector_), + value_(ptr->value_) // set value manually if needed + { statement_type(DIRECTIVE); } + bool bubbles() { return is_keyframes() || is_media(); } + bool is_media() { + return keyword_.compare("@-webkit-media") == 0 || + keyword_.compare("@-moz-media") == 0 || + keyword_.compare("@-o-media") == 0 || + keyword_.compare("@media") == 0; + } + bool is_keyframes() { + return keyword_.compare("@-webkit-keyframes") == 0 || + keyword_.compare("@-moz-keyframes") == 0 || + keyword_.compare("@-o-keyframes") == 0 || + keyword_.compare("@keyframes") == 0; + } + ATTACH_AST_OPERATIONS(Directive) + ATTACH_OPERATIONS() + }; + + /////////////////////////////////////////////////////////////////////// + // Keyframe-rules -- the child blocks of "@keyframes" nodes. + /////////////////////////////////////////////////////////////////////// + class Keyframe_Rule : public Has_Block { + // according to css spec, this should be + // = | + ADD_PROPERTY(Selector_List_Obj, name) + public: + Keyframe_Rule(ParserState pstate, Block_Obj b) + : Has_Block(pstate, b), name_() + { statement_type(KEYFRAMERULE); } + Keyframe_Rule(const Keyframe_Rule* ptr) + : Has_Block(ptr), name_(ptr->name_) + { statement_type(KEYFRAMERULE); } + ATTACH_AST_OPERATIONS(Keyframe_Rule) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////////////////////////////// + // Declarations -- style rules consisting of a property name and values. 
+ //////////////////////////////////////////////////////////////////////// + class Declaration : public Has_Block { + ADD_PROPERTY(String_Obj, property) + ADD_PROPERTY(Expression_Obj, value) + ADD_PROPERTY(bool, is_important) + ADD_PROPERTY(bool, is_custom_property) + ADD_PROPERTY(bool, is_indented) + public: + Declaration(ParserState pstate, + String_Obj prop, Expression_Obj val, bool i = false, bool c = false, Block_Obj b = 0) + : Has_Block(pstate, b), property_(prop), value_(val), is_important_(i), is_custom_property_(c), is_indented_(false) + { statement_type(DECLARATION); } + Declaration(const Declaration* ptr) + : Has_Block(ptr), + property_(ptr->property_), + value_(ptr->value_), + is_important_(ptr->is_important_), + is_custom_property_(ptr->is_custom_property_), + is_indented_(ptr->is_indented_) + { statement_type(DECLARATION); } + virtual bool is_invisible() const; + ATTACH_AST_OPERATIONS(Declaration) + ATTACH_OPERATIONS() + }; + + ///////////////////////////////////// + // Assignments -- variable and value. + ///////////////////////////////////// + class Assignment : public Statement { + ADD_CONSTREF(std::string, variable) + ADD_PROPERTY(Expression_Obj, value) + ADD_PROPERTY(bool, is_default) + ADD_PROPERTY(bool, is_global) + public: + Assignment(ParserState pstate, + std::string var, Expression_Obj val, + bool is_default = false, + bool is_global = false) + : Statement(pstate), variable_(var), value_(val), is_default_(is_default), is_global_(is_global) + { statement_type(ASSIGNMENT); } + Assignment(const Assignment* ptr) + : Statement(ptr), + variable_(ptr->variable_), + value_(ptr->value_), + is_default_(ptr->is_default_), + is_global_(ptr->is_global_) + { statement_type(ASSIGNMENT); } + ATTACH_AST_OPERATIONS(Assignment) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////////////////////////////////// + // Import directives. CSS and Sass import lists can be intermingled, so it's + // necessary to store a list of each in an Import node. + //////////////////////////////////////////////////////////////////////////// + class Import : public Statement { + std::vector urls_; + std::vector incs_; + ADD_PROPERTY(List_Obj, import_queries); + public: + Import(ParserState pstate) + : Statement(pstate), + urls_(std::vector()), + incs_(std::vector()), + import_queries_() + { statement_type(IMPORT); } + Import(const Import* ptr) + : Statement(ptr), + urls_(ptr->urls_), + incs_(ptr->incs_), + import_queries_(ptr->import_queries_) + { statement_type(IMPORT); } + std::vector& urls() { return urls_; } + std::vector& incs() { return incs_; } + ATTACH_AST_OPERATIONS(Import) + ATTACH_OPERATIONS() + }; + + // not yet resolved single import + // so far we only know requested name + class Import_Stub : public Statement { + Include resource_; + public: + std::string abs_path() { return resource_.abs_path; }; + std::string imp_path() { return resource_.imp_path; }; + Include resource() { return resource_; }; + + Import_Stub(ParserState pstate, Include res) + : Statement(pstate), resource_(res) + { statement_type(IMPORT_STUB); } + Import_Stub(const Import_Stub* ptr) + : Statement(ptr), resource_(ptr->resource_) + { statement_type(IMPORT_STUB); } + ATTACH_AST_OPERATIONS(Import_Stub) + ATTACH_OPERATIONS() + }; + + ////////////////////////////// + // The Sass `@warn` directive. 
+ ////////////////////////////// + class Warning : public Statement { + ADD_PROPERTY(Expression_Obj, message) + public: + Warning(ParserState pstate, Expression_Obj msg) + : Statement(pstate), message_(msg) + { statement_type(WARNING); } + Warning(const Warning* ptr) + : Statement(ptr), message_(ptr->message_) + { statement_type(WARNING); } + ATTACH_AST_OPERATIONS(Warning) + ATTACH_OPERATIONS() + }; + + /////////////////////////////// + // The Sass `@error` directive. + /////////////////////////////// + class Error : public Statement { + ADD_PROPERTY(Expression_Obj, message) + public: + Error(ParserState pstate, Expression_Obj msg) + : Statement(pstate), message_(msg) + { statement_type(ERROR); } + Error(const Error* ptr) + : Statement(ptr), message_(ptr->message_) + { statement_type(ERROR); } + ATTACH_AST_OPERATIONS(Error) + ATTACH_OPERATIONS() + }; + + /////////////////////////////// + // The Sass `@debug` directive. + /////////////////////////////// + class Debug : public Statement { + ADD_PROPERTY(Expression_Obj, value) + public: + Debug(ParserState pstate, Expression_Obj val) + : Statement(pstate), value_(val) + { statement_type(DEBUGSTMT); } + Debug(const Debug* ptr) + : Statement(ptr), value_(ptr->value_) + { statement_type(DEBUGSTMT); } + ATTACH_AST_OPERATIONS(Debug) + ATTACH_OPERATIONS() + }; + + /////////////////////////////////////////// + // CSS comments. These may be interpolated. + /////////////////////////////////////////// + class Comment : public Statement { + ADD_PROPERTY(String_Obj, text) + ADD_PROPERTY(bool, is_important) + public: + Comment(ParserState pstate, String_Obj txt, bool is_important) + : Statement(pstate), text_(txt), is_important_(is_important) + { statement_type(COMMENT); } + Comment(const Comment* ptr) + : Statement(ptr), + text_(ptr->text_), + is_important_(ptr->is_important_) + { statement_type(COMMENT); } + virtual bool is_invisible() const + { return /* is_important() == */ false; } + ATTACH_AST_OPERATIONS(Comment) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////// + // The Sass `@if` control directive. + //////////////////////////////////// + class If : public Has_Block { + ADD_PROPERTY(Expression_Obj, predicate) + ADD_PROPERTY(Block_Obj, alternative) + public: + If(ParserState pstate, Expression_Obj pred, Block_Obj con, Block_Obj alt = 0) + : Has_Block(pstate, con), predicate_(pred), alternative_(alt) + { statement_type(IF); } + If(const If* ptr) + : Has_Block(ptr), + predicate_(ptr->predicate_), + alternative_(ptr->alternative_) + { statement_type(IF); } + virtual bool has_content() + { + return Has_Block::has_content() || (alternative_ && alternative_->has_content()); + } + ATTACH_AST_OPERATIONS(If) + ATTACH_OPERATIONS() + }; + + ///////////////////////////////////// + // The Sass `@for` control directive. 
+ ///////////////////////////////////// + class For : public Has_Block { + ADD_CONSTREF(std::string, variable) + ADD_PROPERTY(Expression_Obj, lower_bound) + ADD_PROPERTY(Expression_Obj, upper_bound) + ADD_PROPERTY(bool, is_inclusive) + public: + For(ParserState pstate, + std::string var, Expression_Obj lo, Expression_Obj hi, Block_Obj b, bool inc) + : Has_Block(pstate, b), + variable_(var), lower_bound_(lo), upper_bound_(hi), is_inclusive_(inc) + { statement_type(FOR); } + For(const For* ptr) + : Has_Block(ptr), + variable_(ptr->variable_), + lower_bound_(ptr->lower_bound_), + upper_bound_(ptr->upper_bound_), + is_inclusive_(ptr->is_inclusive_) + { statement_type(FOR); } + ATTACH_AST_OPERATIONS(For) + ATTACH_OPERATIONS() + }; + + ////////////////////////////////////// + // The Sass `@each` control directive. + ////////////////////////////////////// + class Each : public Has_Block { + ADD_PROPERTY(std::vector, variables) + ADD_PROPERTY(Expression_Obj, list) + public: + Each(ParserState pstate, std::vector vars, Expression_Obj lst, Block_Obj b) + : Has_Block(pstate, b), variables_(vars), list_(lst) + { statement_type(EACH); } + Each(const Each* ptr) + : Has_Block(ptr), variables_(ptr->variables_), list_(ptr->list_) + { statement_type(EACH); } + ATTACH_AST_OPERATIONS(Each) + ATTACH_OPERATIONS() + }; + + /////////////////////////////////////// + // The Sass `@while` control directive. + /////////////////////////////////////// + class While : public Has_Block { + ADD_PROPERTY(Expression_Obj, predicate) + public: + While(ParserState pstate, Expression_Obj pred, Block_Obj b) + : Has_Block(pstate, b), predicate_(pred) + { statement_type(WHILE); } + While(const While* ptr) + : Has_Block(ptr), predicate_(ptr->predicate_) + { statement_type(WHILE); } + ATTACH_AST_OPERATIONS(While) + ATTACH_OPERATIONS() + }; + + ///////////////////////////////////////////////////////////// + // The @return directive for use inside SassScript functions. + ///////////////////////////////////////////////////////////// + class Return : public Statement { + ADD_PROPERTY(Expression_Obj, value) + public: + Return(ParserState pstate, Expression_Obj val) + : Statement(pstate), value_(val) + { statement_type(RETURN); } + Return(const Return* ptr) + : Statement(ptr), value_(ptr->value_) + { statement_type(RETURN); } + ATTACH_AST_OPERATIONS(Return) + ATTACH_OPERATIONS() + }; + + //////////////////////////////// + // The Sass `@extend` directive. + //////////////////////////////// + class Extension : public Statement { + ADD_PROPERTY(Selector_List_Obj, selector) + public: + Extension(ParserState pstate, Selector_List_Obj s) + : Statement(pstate), selector_(s) + { statement_type(EXTEND); } + Extension(const Extension* ptr) + : Statement(ptr), selector_(ptr->selector_) + { statement_type(EXTEND); } + ATTACH_AST_OPERATIONS(Extension) + ATTACH_OPERATIONS() + }; + + ///////////////////////////////////////////////////////////////////////////// + // Definitions for both mixins and functions. The two cases are distinguished + // by a type tag. 
+ ///////////////////////////////////////////////////////////////////////////// + struct Backtrace; + typedef const char* Signature; + typedef Expression_Ptr (*Native_Function)(Env&, Env&, Context&, Signature, ParserState, Backtraces, std::vector); + class Definition : public Has_Block { + public: + enum Type { MIXIN, FUNCTION }; + ADD_CONSTREF(std::string, name) + ADD_PROPERTY(Parameters_Obj, parameters) + ADD_PROPERTY(Env*, environment) + ADD_PROPERTY(Type, type) + ADD_PROPERTY(Native_Function, native_function) + ADD_PROPERTY(Sass_Function_Entry, c_function) + ADD_PROPERTY(void*, cookie) + ADD_PROPERTY(bool, is_overload_stub) + ADD_PROPERTY(Signature, signature) + public: + Definition(const Definition* ptr) + : Has_Block(ptr), + name_(ptr->name_), + parameters_(ptr->parameters_), + environment_(ptr->environment_), + type_(ptr->type_), + native_function_(ptr->native_function_), + c_function_(ptr->c_function_), + cookie_(ptr->cookie_), + is_overload_stub_(ptr->is_overload_stub_), + signature_(ptr->signature_) + { } + + Definition(ParserState pstate, + std::string n, + Parameters_Obj params, + Block_Obj b, + Type t) + : Has_Block(pstate, b), + name_(n), + parameters_(params), + environment_(0), + type_(t), + native_function_(0), + c_function_(0), + cookie_(0), + is_overload_stub_(false), + signature_(0) + { } + Definition(ParserState pstate, + Signature sig, + std::string n, + Parameters_Obj params, + Native_Function func_ptr, + bool overload_stub = false) + : Has_Block(pstate, 0), + name_(n), + parameters_(params), + environment_(0), + type_(FUNCTION), + native_function_(func_ptr), + c_function_(0), + cookie_(0), + is_overload_stub_(overload_stub), + signature_(sig) + { } + Definition(ParserState pstate, + Signature sig, + std::string n, + Parameters_Obj params, + Sass_Function_Entry c_func, + bool whatever, + bool whatever2) + : Has_Block(pstate, 0), + name_(n), + parameters_(params), + environment_(0), + type_(FUNCTION), + native_function_(0), + c_function_(c_func), + cookie_(sass_function_get_cookie(c_func)), + is_overload_stub_(false), + signature_(sig) + { } + ATTACH_AST_OPERATIONS(Definition) + ATTACH_OPERATIONS() + }; + + ////////////////////////////////////// + // Mixin calls (i.e., `@include ...`). + ////////////////////////////////////// + class Mixin_Call : public Has_Block { + ADD_CONSTREF(std::string, name) + ADD_PROPERTY(Arguments_Obj, arguments) + public: + Mixin_Call(ParserState pstate, std::string n, Arguments_Obj args, Block_Obj b = 0) + : Has_Block(pstate, b), name_(n), arguments_(args) + { } + Mixin_Call(const Mixin_Call* ptr) + : Has_Block(ptr), + name_(ptr->name_), + arguments_(ptr->arguments_) + { } + ATTACH_AST_OPERATIONS(Mixin_Call) + ATTACH_OPERATIONS() + }; + + /////////////////////////////////////////////////// + // The @content directive for mixin content blocks. + /////////////////////////////////////////////////// + class Content : public Statement { + ADD_PROPERTY(Media_Block_Ptr, media_block) + public: + Content(ParserState pstate) + : Statement(pstate), + media_block_(NULL) + { statement_type(CONTENT); } + Content(const Content* ptr) + : Statement(ptr), + media_block_(ptr->media_block_) + { statement_type(CONTENT); } + ATTACH_AST_OPERATIONS(Content) + ATTACH_OPERATIONS() + }; + + /////////////////////////////////////////////////////////////////////// + // Lists of values, both comma- and space-separated (distinguished by a + // type-tag.) Also used to represent variable-length argument lists. 
+ /////////////////////////////////////////////////////////////////////// + class List : public Value, public Vectorized { + void adjust_after_pushing(Expression_Obj e) { is_expanded(false); } + private: + ADD_PROPERTY(enum Sass_Separator, separator) + ADD_PROPERTY(bool, is_arglist) + ADD_PROPERTY(bool, is_bracketed) + ADD_PROPERTY(bool, from_selector) + public: + List(ParserState pstate, + size_t size = 0, enum Sass_Separator sep = SASS_SPACE, bool argl = false, bool bracket = false) + : Value(pstate), + Vectorized(size), + separator_(sep), + is_arglist_(argl), + is_bracketed_(bracket), + from_selector_(false) + { concrete_type(LIST); } + List(const List* ptr) + : Value(ptr), + Vectorized(*ptr), + separator_(ptr->separator_), + is_arglist_(ptr->is_arglist_), + is_bracketed_(ptr->is_bracketed_), + from_selector_(ptr->from_selector_) + { concrete_type(LIST); } + std::string type() const { return is_arglist_ ? "arglist" : "list"; } + static std::string type_name() { return "list"; } + const char* sep_string(bool compressed = false) const { + return separator() == SASS_SPACE ? + " " : (compressed ? "," : ", "); + } + bool is_invisible() const { return empty() && !is_bracketed(); } + Expression_Obj value_at_index(size_t i); + + virtual size_t size() const; + + virtual size_t hash() + { + if (hash_ == 0) { + hash_ = std::hash()(sep_string()); + hash_combine(hash_, std::hash()(is_bracketed())); + for (size_t i = 0, L = length(); i < L; ++i) + hash_combine(hash_, (elements()[i])->hash()); + } + return hash_; + } + + virtual void set_delayed(bool delayed) + { + is_delayed(delayed); + // don't set children + } + + virtual bool operator== (const Expression& rhs) const; + + ATTACH_AST_OPERATIONS(List) + ATTACH_OPERATIONS() + }; + + /////////////////////////////////////////////////////////////////////// + // Key value paris. + /////////////////////////////////////////////////////////////////////// + class Map : public Value, public Hashed { + void adjust_after_pushing(std::pair p) { is_expanded(false); } + public: + Map(ParserState pstate, + size_t size = 0) + : Value(pstate), + Hashed(size) + { concrete_type(MAP); } + Map(const Map* ptr) + : Value(ptr), + Hashed(*ptr) + { concrete_type(MAP); } + std::string type() const { return "map"; } + static std::string type_name() { return "map"; } + bool is_invisible() const { return empty(); } + List_Obj to_list(ParserState& pstate); + + virtual size_t hash() + { + if (hash_ == 0) { + for (auto key : keys()) { + hash_combine(hash_, key->hash()); + hash_combine(hash_, at(key)->hash()); + } + } + + return hash_; + } + + virtual bool operator== (const Expression& rhs) const; + + ATTACH_AST_OPERATIONS(Map) + ATTACH_OPERATIONS() + }; + + inline static const std::string sass_op_to_name(enum Sass_OP op) { + switch (op) { + case AND: return "and"; + case OR: return "or"; + case EQ: return "eq"; + case NEQ: return "neq"; + case GT: return "gt"; + case GTE: return "gte"; + case LT: return "lt"; + case LTE: return "lte"; + case ADD: return "plus"; + case SUB: return "sub"; + case MUL: return "times"; + case DIV: return "div"; + case MOD: return "mod"; + // this is only used internally! 
+ case NUM_OPS: return "[OPS]"; + default: return "invalid"; + } + } + + inline static const std::string sass_op_separator(enum Sass_OP op) { + switch (op) { + case AND: return "&&"; + case OR: return "||"; + case EQ: return "=="; + case NEQ: return "!="; + case GT: return ">"; + case GTE: return ">="; + case LT: return "<"; + case LTE: return "<="; + case ADD: return "+"; + case SUB: return "-"; + case MUL: return "*"; + case DIV: return "/"; + case MOD: return "%"; + // this is only used internally! + case NUM_OPS: return "[OPS]"; + default: return "invalid"; + } + } + + ////////////////////////////////////////////////////////////////////////// + // Binary expressions. Represents logical, relational, and arithmetic + // operations. Templatized to avoid large switch statements and repetitive + // subclassing. + ////////////////////////////////////////////////////////////////////////// + class Binary_Expression : public PreValue { + private: + HASH_PROPERTY(Operand, op) + HASH_PROPERTY(Expression_Obj, left) + HASH_PROPERTY(Expression_Obj, right) + size_t hash_; + public: + Binary_Expression(ParserState pstate, + Operand op, Expression_Obj lhs, Expression_Obj rhs) + : PreValue(pstate), op_(op), left_(lhs), right_(rhs), hash_(0) + { } + Binary_Expression(const Binary_Expression* ptr) + : PreValue(ptr), + op_(ptr->op_), + left_(ptr->left_), + right_(ptr->right_), + hash_(ptr->hash_) + { } + const std::string type_name() { + return sass_op_to_name(optype()); + } + const std::string separator() { + return sass_op_separator(optype()); + } + bool is_left_interpolant(void) const; + bool is_right_interpolant(void) const; + bool has_interpolant() const + { + return is_left_interpolant() || + is_right_interpolant(); + } + virtual void set_delayed(bool delayed) + { + right()->set_delayed(delayed); + left()->set_delayed(delayed); + is_delayed(delayed); + } + virtual bool operator==(const Expression& rhs) const + { + try + { + Binary_Expression_Ptr_Const m = Cast(&rhs); + if (m == 0) return false; + return type() == m->type() && + *left() == *m->left() && + *right() == *m->right(); + } + catch (std::bad_cast&) + { + return false; + } + catch (...) { throw; } + } + virtual size_t hash() + { + if (hash_ == 0) { + hash_ = std::hash()(optype()); + hash_combine(hash_, left()->hash()); + hash_combine(hash_, right()->hash()); + } + return hash_; + } + enum Sass_OP optype() const { return op_.operand; } + ATTACH_AST_OPERATIONS(Binary_Expression) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////////////////////////////////// + // Arithmetic negation (logical negation is just an ordinary function call). 
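// Illustrative sketch (not libsass code; types are invented): the operator==
// overloads above all follow one idiom -- attempt a typeid-based downcast of
// the right-hand side (the Cast<T> helper earlier in this header), return
// false if the dynamic types differ, and otherwise compare members.
#include <typeinfo>
struct ExprBase { virtual ~ExprBase() = default; };
template <typename T>
const T* cast_if_same(const ExprBase* p) {
  return p && typeid(T) == typeid(*p) ? static_cast<const T*>(p) : nullptr;
}
struct BoolExpr : ExprBase {
  bool value;
  explicit BoolExpr(bool v) : value(v) {}
  bool operator==(const ExprBase& rhs) const {
    if (const BoolExpr* r = cast_if_same<BoolExpr>(&rhs)) return value == r->value;
    return false;                          // different concrete type => not equal
  }
};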
+ //////////////////////////////////////////////////////////////////////////// + class Unary_Expression : public Expression { + public: + enum Type { PLUS, MINUS, NOT, SLASH }; + private: + HASH_PROPERTY(Type, optype) + HASH_PROPERTY(Expression_Obj, operand) + size_t hash_; + public: + Unary_Expression(ParserState pstate, Type t, Expression_Obj o) + : Expression(pstate), optype_(t), operand_(o), hash_(0) + { } + Unary_Expression(const Unary_Expression* ptr) + : Expression(ptr), + optype_(ptr->optype_), + operand_(ptr->operand_), + hash_(ptr->hash_) + { } + const std::string type_name() { + switch (optype_) { + case PLUS: return "plus"; + case MINUS: return "minus"; + case SLASH: return "slash"; + case NOT: return "not"; + default: return "invalid"; + } + } + virtual bool operator==(const Expression& rhs) const + { + try + { + Unary_Expression_Ptr_Const m = Cast(&rhs); + if (m == 0) return false; + return type() == m->type() && + *operand() == *m->operand(); + } + catch (std::bad_cast&) + { + return false; + } + catch (...) { throw; } + } + virtual size_t hash() + { + if (hash_ == 0) { + hash_ = std::hash()(optype_); + hash_combine(hash_, operand()->hash()); + }; + return hash_; + } + ATTACH_AST_OPERATIONS(Unary_Expression) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////////////////// + // Individual argument objects for mixin and function calls. + //////////////////////////////////////////////////////////// + class Argument : public Expression { + HASH_PROPERTY(Expression_Obj, value) + HASH_CONSTREF(std::string, name) + ADD_PROPERTY(bool, is_rest_argument) + ADD_PROPERTY(bool, is_keyword_argument) + size_t hash_; + public: + Argument(ParserState pstate, Expression_Obj val, std::string n = "", bool rest = false, bool keyword = false) + : Expression(pstate), value_(val), name_(n), is_rest_argument_(rest), is_keyword_argument_(keyword), hash_(0) + { + if (!name_.empty() && is_rest_argument_) { + coreError("variable-length argument may not be passed by name", pstate_); + } + } + Argument(const Argument* ptr) + : Expression(ptr), + value_(ptr->value_), + name_(ptr->name_), + is_rest_argument_(ptr->is_rest_argument_), + is_keyword_argument_(ptr->is_keyword_argument_), + hash_(ptr->hash_) + { + if (!name_.empty() && is_rest_argument_) { + coreError("variable-length argument may not be passed by name", pstate_); + } + } + + virtual void set_delayed(bool delayed); + virtual bool operator==(const Expression& rhs) const + { + try + { + Argument_Ptr_Const m = Cast(&rhs); + if (!(m && name() == m->name())) return false; + return *value() == *m->value(); + } + catch (std::bad_cast&) + { + return false; + } + catch (...) { throw; } + } + + virtual size_t hash() + { + if (hash_ == 0) { + hash_ = std::hash()(name()); + hash_combine(hash_, value()->hash()); + } + return hash_; + } + + ATTACH_AST_OPERATIONS(Argument) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////////////////////////////// + // Argument lists -- in their own class to facilitate context-sensitive + // error checking (e.g., ensuring that all ordinal arguments precede all + // named arguments). 
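// Illustrative sketch (names are hypothetical): the ordering rules enforced by
// Arguments::adjust_after_pushing in ast.cpp, restated as a standalone check
// over simple argument descriptors -- ordinal arguments must precede named
// ones, at most one rest argument is allowed, and a keyword argument must
// come last.
#include <stdexcept>
#include <string>
#include <vector>
struct ArgDesc { std::string name; bool is_rest = false; bool is_keyword = false; };
inline void check_argument_order(const std::vector<ArgDesc>& args) {
  bool seen_named = false, seen_rest = false, seen_keyword = false;
  for (const ArgDesc& a : args) {
    if (!a.name.empty() && !a.is_rest && !a.is_keyword) {        // named argument
      if (seen_keyword) throw std::runtime_error("named arguments must precede variable-length argument");
      seen_named = true;
    } else if (a.is_rest) {                                      // rest argument
      if (seen_rest) throw std::runtime_error("only one variable-length argument is allowed");
      if (seen_keyword) throw std::runtime_error("only keyword arguments may follow variable arguments");
      seen_rest = true;
    } else if (a.is_keyword) {                                   // keyword argument
      if (seen_keyword) throw std::runtime_error("only one keyword argument is allowed");
      seen_keyword = true;
    } else {                                                     // ordinal argument
      if (seen_rest) throw std::runtime_error("ordinal arguments must precede variable-length arguments");
      if (seen_named) throw std::runtime_error("ordinal arguments must precede named arguments");
    }
  }
}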
+ //////////////////////////////////////////////////////////////////////// + class Arguments : public Expression, public Vectorized { + ADD_PROPERTY(bool, has_named_arguments) + ADD_PROPERTY(bool, has_rest_argument) + ADD_PROPERTY(bool, has_keyword_argument) + protected: + void adjust_after_pushing(Argument_Obj a); + public: + Arguments(ParserState pstate) + : Expression(pstate), + Vectorized(), + has_named_arguments_(false), + has_rest_argument_(false), + has_keyword_argument_(false) + { } + Arguments(const Arguments* ptr) + : Expression(ptr), + Vectorized(*ptr), + has_named_arguments_(ptr->has_named_arguments_), + has_rest_argument_(ptr->has_rest_argument_), + has_keyword_argument_(ptr->has_keyword_argument_) + { } + + virtual void set_delayed(bool delayed); + + Argument_Obj get_rest_argument(); + Argument_Obj get_keyword_argument(); + + ATTACH_AST_OPERATIONS(Arguments) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////////// + // Function reference. + //////////////////////////////////////////////////// + class Function : public Value { + public: + ADD_PROPERTY(Definition_Obj, definition) + ADD_PROPERTY(bool, is_css) + public: + Function(ParserState pstate, Definition_Obj def, bool css) + : Value(pstate), definition_(def), is_css_(css) + { concrete_type(FUNCTION_VAL); } + Function(const Function* ptr) + : Value(ptr), definition_(ptr->definition_), is_css_(ptr->is_css_) + { concrete_type(FUNCTION_VAL); } + + std::string type() const { return "function"; } + static std::string type_name() { return "function"; } + bool is_invisible() const { return true; } + + std::string name() { + if (definition_) { + return definition_->name(); + } + return ""; + } + + virtual bool operator== (const Expression& rhs) const; + + ATTACH_AST_OPERATIONS(Function) + ATTACH_OPERATIONS() + }; + + ////////////////// + // Function calls. + ////////////////// + class Function_Call : public PreValue { + HASH_CONSTREF(std::string, name) + HASH_PROPERTY(Arguments_Obj, arguments) + HASH_PROPERTY(Function_Obj, func) + ADD_PROPERTY(bool, via_call) + ADD_PROPERTY(void*, cookie) + size_t hash_; + public: + Function_Call(ParserState pstate, std::string n, Arguments_Obj args, void* cookie) + : PreValue(pstate), name_(n), arguments_(args), func_(0), via_call_(false), cookie_(cookie), hash_(0) + { concrete_type(FUNCTION); } + Function_Call(ParserState pstate, std::string n, Arguments_Obj args, Function_Obj func) + : PreValue(pstate), name_(n), arguments_(args), func_(func), via_call_(false), cookie_(0), hash_(0) + { concrete_type(FUNCTION); } + Function_Call(ParserState pstate, std::string n, Arguments_Obj args) + : PreValue(pstate), name_(n), arguments_(args), via_call_(false), cookie_(0), hash_(0) + { concrete_type(FUNCTION); } + Function_Call(const Function_Call* ptr) + : PreValue(ptr), + name_(ptr->name_), + arguments_(ptr->arguments_), + func_(ptr->func_), + via_call_(ptr->via_call_), + cookie_(ptr->cookie_), + hash_(ptr->hash_) + { concrete_type(FUNCTION); } + + bool is_css() { + if (func_) return func_->is_css(); + return false; + } + + virtual bool operator==(const Expression& rhs) const + { + try + { + Function_Call_Ptr_Const m = Cast(&rhs); + if (!(m && name() == m->name())) return false; + if (!(m && arguments()->length() == m->arguments()->length())) return false; + for (size_t i =0, L = arguments()->length(); i < L; ++i) + if (!(*(*arguments())[i] == *(*m->arguments())[i])) return false; + return true; + } + catch (std::bad_cast&) + { + return false; + } + catch (...) 
{ throw; } + } + + virtual size_t hash() + { + if (hash_ == 0) { + hash_ = std::hash()(name()); + for (auto argument : arguments()->elements()) + hash_combine(hash_, argument->hash()); + } + return hash_; + } + ATTACH_AST_OPERATIONS(Function_Call) + ATTACH_OPERATIONS() + }; + + ///////////////////////// + // Function call schemas. + ///////////////////////// + class Function_Call_Schema : public Expression { + ADD_PROPERTY(String_Obj, name) + ADD_PROPERTY(Arguments_Obj, arguments) + public: + Function_Call_Schema(ParserState pstate, String_Obj n, Arguments_Obj args) + : Expression(pstate), name_(n), arguments_(args) + { concrete_type(STRING); } + Function_Call_Schema(const Function_Call_Schema* ptr) + : Expression(ptr), + name_(ptr->name_), + arguments_(ptr->arguments_) + { concrete_type(STRING); } + ATTACH_AST_OPERATIONS(Function_Call_Schema) + ATTACH_OPERATIONS() + }; + + /////////////////////// + // Variable references. + /////////////////////// + class Variable : public PreValue { + ADD_CONSTREF(std::string, name) + public: + Variable(ParserState pstate, std::string n) + : PreValue(pstate), name_(n) + { concrete_type(VARIABLE); } + Variable(const Variable* ptr) + : PreValue(ptr), name_(ptr->name_) + { concrete_type(VARIABLE); } + + virtual bool operator==(const Expression& rhs) const + { + try + { + Variable_Ptr_Const e = Cast(&rhs); + return e && name() == e->name(); + } + catch (std::bad_cast&) + { + return false; + } + catch (...) { throw; } + } + + virtual size_t hash() + { + return std::hash()(name()); + } + + ATTACH_AST_OPERATIONS(Variable) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////// + // Numbers, percentages, dimensions, and colors. + //////////////////////////////////////////////// + class Number : public Value, public Units { + HASH_PROPERTY(double, value) + ADD_PROPERTY(bool, zero) + size_t hash_; + public: + Number(ParserState pstate, double val, std::string u = "", bool zero = true); + + Number(const Number* ptr) + : Value(ptr), + Units(ptr), + value_(ptr->value_), zero_(ptr->zero_), + hash_(ptr->hash_) + { concrete_type(NUMBER); } + + bool zero() { return zero_; } + std::string type() const { return "number"; } + static std::string type_name() { return "number"; } + + void reduce(); + void normalize(); + + virtual size_t hash() + { + if (hash_ == 0) { + hash_ = std::hash()(value_); + for (const auto numerator : numerators) + hash_combine(hash_, std::hash()(numerator)); + for (const auto denominator : denominators) + hash_combine(hash_, std::hash()(denominator)); + } + return hash_; + } + + virtual bool operator< (const Number& rhs) const; + virtual bool operator== (const Number& rhs) const; + virtual bool operator== (const Expression& rhs) const; + ATTACH_AST_OPERATIONS(Number) + ATTACH_OPERATIONS() + }; + + ////////// + // Colors. 
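Number carries its unit bookkeeping through the Units base class (defined elsewhere in the libsass sources), which keeps separate numerator and denominator unit lists so that reduce() and normalize() can cancel them after arithmetic. The following is a simplified, self-contained sketch of that cancelling idea only, not the library's actual Units implementation.

    #include <algorithm>
    #include <iostream>
    #include <string>
    #include <vector>

    struct UnitsSketch {
      std::vector<std::string> numerators;
      std::vector<std::string> denominators;

      // Cancel units that appear both above and below the fraction bar,
      // roughly what a reduce() step has to achieve (simplified).
      void reduce() {
        for (auto n = numerators.begin(); n != numerators.end(); ) {
          auto d = std::find(denominators.begin(), denominators.end(), *n);
          if (d != denominators.end()) { denominators.erase(d); n = numerators.erase(n); }
          else { ++n; }
        }
      }
    };

    int main() {
      // 10px * 3px / 2px  ->  numerators {px, px}, denominators {px}
      UnitsSketch u;
      u.numerators = { "px", "px" };
      u.denominators = { "px" };
      u.reduce();
      std::cout << u.numerators.size() << " numerator(s) left\n"; // 1 -> plain "px"
    }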
+ ////////// + class Color : public Value { + HASH_PROPERTY(double, r) + HASH_PROPERTY(double, g) + HASH_PROPERTY(double, b) + HASH_PROPERTY(double, a) + ADD_CONSTREF(std::string, disp) + size_t hash_; + public: + Color(ParserState pstate, double r, double g, double b, double a = 1, const std::string disp = "") + : Value(pstate), r_(r), g_(g), b_(b), a_(a), disp_(disp), + hash_(0) + { concrete_type(COLOR); } + Color(const Color* ptr) + : Value(ptr), + r_(ptr->r_), + g_(ptr->g_), + b_(ptr->b_), + a_(ptr->a_), + disp_(ptr->disp_), + hash_(ptr->hash_) + { concrete_type(COLOR); } + std::string type() const { return "color"; } + static std::string type_name() { return "color"; } + + virtual size_t hash() + { + if (hash_ == 0) { + hash_ = std::hash()(a_); + hash_combine(hash_, std::hash()(r_)); + hash_combine(hash_, std::hash()(g_)); + hash_combine(hash_, std::hash()(b_)); + } + return hash_; + } + + virtual bool operator== (const Expression& rhs) const; + + ATTACH_AST_OPERATIONS(Color) + ATTACH_OPERATIONS() + }; + + ////////////////////////////// + // Errors from Sass_Values. + ////////////////////////////// + class Custom_Error : public Value { + ADD_CONSTREF(std::string, message) + public: + Custom_Error(ParserState pstate, std::string msg) + : Value(pstate), message_(msg) + { concrete_type(C_ERROR); } + Custom_Error(const Custom_Error* ptr) + : Value(ptr), message_(ptr->message_) + { concrete_type(C_ERROR); } + virtual bool operator== (const Expression& rhs) const; + ATTACH_AST_OPERATIONS(Custom_Error) + ATTACH_OPERATIONS() + }; + + ////////////////////////////// + // Warnings from Sass_Values. + ////////////////////////////// + class Custom_Warning : public Value { + ADD_CONSTREF(std::string, message) + public: + Custom_Warning(ParserState pstate, std::string msg) + : Value(pstate), message_(msg) + { concrete_type(C_WARNING); } + Custom_Warning(const Custom_Warning* ptr) + : Value(ptr), message_(ptr->message_) + { concrete_type(C_WARNING); } + virtual bool operator== (const Expression& rhs) const; + ATTACH_AST_OPERATIONS(Custom_Warning) + ATTACH_OPERATIONS() + }; + + //////////// + // Booleans. + //////////// + class Boolean : public Value { + HASH_PROPERTY(bool, value) + size_t hash_; + public: + Boolean(ParserState pstate, bool val) + : Value(pstate), value_(val), + hash_(0) + { concrete_type(BOOLEAN); } + Boolean(const Boolean* ptr) + : Value(ptr), + value_(ptr->value_), + hash_(ptr->hash_) + { concrete_type(BOOLEAN); } + virtual operator bool() { return value_; } + std::string type() const { return "bool"; } + static std::string type_name() { return "bool"; } + virtual bool is_false() { return !value_; } + + virtual size_t hash() + { + if (hash_ == 0) { + hash_ = std::hash()(value_); + } + return hash_; + } + + virtual bool operator== (const Expression& rhs) const; + + ATTACH_AST_OPERATIONS(Boolean) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////////////////////////////// + // Abstract base class for Sass string values. Includes interpolated and + // "flat" strings. 
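Boolean::is_false simply negates the stored value, and the Null value defined further down reports is_false() as true as well; together they model Sass's rule that only false and null are falsey, while every other value (including 0 and the empty string) is truthy. A tiny sketch of that rule, with illustrative names rather than the real class hierarchy:

    #include <iostream>

    enum class SassKind { Null, Boolean, Number, String };

    // Mirrors the is_false() overrides: only null and `false` are falsey.
    bool is_truthy(SassKind kind, bool bool_value = true) {
      if (kind == SassKind::Null) return false;
      if (kind == SassKind::Boolean) return bool_value;
      return true; // numbers, strings, lists, ... are always truthy in Sass
    }

    int main() {
      std::cout << is_truthy(SassKind::Number) << "\n";          // 1: even 0 is truthy
      std::cout << is_truthy(SassKind::Boolean, false) << "\n";  // 0
      std::cout << is_truthy(SassKind::Null) << "\n";            // 0
    }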
+ //////////////////////////////////////////////////////////////////////// + class String : public Value { + public: + String(ParserState pstate, bool delayed = false) + : Value(pstate, delayed) + { concrete_type(STRING); } + String(const String* ptr) + : Value(ptr) + { concrete_type(STRING); } + static std::string type_name() { return "string"; } + virtual ~String() = 0; + virtual void rtrim() = 0; + virtual bool operator==(const Expression& rhs) const = 0; + virtual bool operator<(const Expression& rhs) const { + return this->to_string() < rhs.to_string(); + }; + ATTACH_VIRTUAL_AST_OPERATIONS(String); + ATTACH_OPERATIONS() + }; + inline String::~String() { }; + + /////////////////////////////////////////////////////////////////////// + // Interpolated strings. Meant to be reduced to flat strings during the + // evaluation phase. + /////////////////////////////////////////////////////////////////////// + class String_Schema : public String, public Vectorized { + ADD_PROPERTY(bool, css) + size_t hash_; + public: + String_Schema(ParserState pstate, size_t size = 0, bool css = true) + : String(pstate), Vectorized(size), css_(css), hash_(0) + { concrete_type(STRING); } + String_Schema(const String_Schema* ptr) + : String(ptr), + Vectorized(*ptr), + css_(ptr->css_), + hash_(ptr->hash_) + { concrete_type(STRING); } + + std::string type() const { return "string"; } + static std::string type_name() { return "string"; } + + bool is_left_interpolant(void) const; + bool is_right_interpolant(void) const; + // void has_interpolants(bool tc) { } + bool has_interpolants() { + for (auto el : elements()) { + if (el->is_interpolant()) return true; + } + return false; + } + virtual void rtrim(); + + virtual size_t hash() + { + if (hash_ == 0) { + for (auto string : elements()) + hash_combine(hash_, string->hash()); + } + return hash_; + } + + virtual void set_delayed(bool delayed) { + is_delayed(delayed); + } + + virtual bool operator==(const Expression& rhs) const; + ATTACH_AST_OPERATIONS(String_Schema) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////////////// + // Flat strings -- the lowest level of raw textual data. 
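String_Schema is the parse-time form of an interpolated string: a vector of pieces in which interpolated expressions are flagged as interpolants, and has_interpolants() just scans for such a flag. A stripped-down sketch of the same shape, using illustrative types rather than the real Expression hierarchy:

    #include <iostream>
    #include <string>
    #include <vector>

    struct Piece {
      std::string text;     // literal text, or the source of the expression
      bool is_interpolant;  // true for #{...} parts
    };

    struct StringSchemaSketch {
      std::vector<Piece> elements;
      bool has_interpolants() const {
        for (const Piece& p : elements)
          if (p.is_interpolant) return true;
        return false;
      }
    };

    int main() {
      // "Hello #{$name}!" parses into three pieces; the middle one is an interpolant.
      StringSchemaSketch s;
      s.elements = { {"Hello ", false}, {"$name", true}, {"!", false} };
      std::cout << s.has_interpolants() << "\n"; // 1
    }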
+ //////////////////////////////////////////////////////// + class String_Constant : public String { + ADD_PROPERTY(char, quote_mark) + ADD_PROPERTY(bool, can_compress_whitespace) + HASH_CONSTREF(std::string, value) + protected: + size_t hash_; + public: + String_Constant(const String_Constant* ptr) + : String(ptr), + quote_mark_(ptr->quote_mark_), + can_compress_whitespace_(ptr->can_compress_whitespace_), + value_(ptr->value_), + hash_(ptr->hash_) + { } + String_Constant(ParserState pstate, std::string val, bool css = true) + : String(pstate), quote_mark_(0), can_compress_whitespace_(false), value_(read_css_string(val, css)), hash_(0) + { } + String_Constant(ParserState pstate, const char* beg, bool css = true) + : String(pstate), quote_mark_(0), can_compress_whitespace_(false), value_(read_css_string(std::string(beg), css)), hash_(0) + { } + String_Constant(ParserState pstate, const char* beg, const char* end, bool css = true) + : String(pstate), quote_mark_(0), can_compress_whitespace_(false), value_(read_css_string(std::string(beg, end-beg), css)), hash_(0) + { } + String_Constant(ParserState pstate, const Token& tok, bool css = true) + : String(pstate), quote_mark_(0), can_compress_whitespace_(false), value_(read_css_string(std::string(tok.begin, tok.end), css)), hash_(0) + { } + std::string type() const { return "string"; } + static std::string type_name() { return "string"; } + virtual bool is_invisible() const; + virtual void rtrim(); + + virtual size_t hash() + { + if (hash_ == 0) { + hash_ = std::hash()(value_); + } + return hash_; + } + + virtual bool operator==(const Expression& rhs) const; + virtual std::string inspect() const; // quotes are forced on inspection + + // static char auto_quote() { return '*'; } + static char double_quote() { return '"'; } + static char single_quote() { return '\''; } + + ATTACH_AST_OPERATIONS(String_Constant) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////////////// + // Possibly quoted string (unquote on instantiation) + //////////////////////////////////////////////////////// + class String_Quoted : public String_Constant { + public: + String_Quoted(ParserState pstate, std::string val, char q = 0, + bool keep_utf8_escapes = false, bool skip_unquoting = false, + bool strict_unquoting = true, bool css = true) + : String_Constant(pstate, val, css) + { + if (skip_unquoting == false) { + value_ = unquote(value_, "e_mark_, keep_utf8_escapes, strict_unquoting); + } + if (q && quote_mark_) quote_mark_ = q; + } + String_Quoted(const String_Quoted* ptr) + : String_Constant(ptr) + { } + virtual bool operator==(const Expression& rhs) const; + virtual std::string inspect() const; // quotes are forced on inspection + ATTACH_AST_OPERATIONS(String_Quoted) + ATTACH_OPERATIONS() + }; + + ///////////////// + // Media queries. 
+ ///////////////// + class Media_Query : public Expression, + public Vectorized { + ADD_PROPERTY(String_Obj, media_type) + ADD_PROPERTY(bool, is_negated) + ADD_PROPERTY(bool, is_restricted) + public: + Media_Query(ParserState pstate, + String_Obj t = 0, size_t s = 0, bool n = false, bool r = false) + : Expression(pstate), Vectorized(s), + media_type_(t), is_negated_(n), is_restricted_(r) + { } + Media_Query(const Media_Query* ptr) + : Expression(ptr), + Vectorized(*ptr), + media_type_(ptr->media_type_), + is_negated_(ptr->is_negated_), + is_restricted_(ptr->is_restricted_) + { } + ATTACH_AST_OPERATIONS(Media_Query) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////////// + // Media expressions (for use inside media queries). + //////////////////////////////////////////////////// + class Media_Query_Expression : public Expression { + ADD_PROPERTY(Expression_Obj, feature) + ADD_PROPERTY(Expression_Obj, value) + ADD_PROPERTY(bool, is_interpolated) + public: + Media_Query_Expression(ParserState pstate, + Expression_Obj f, Expression_Obj v, bool i = false) + : Expression(pstate), feature_(f), value_(v), is_interpolated_(i) + { } + Media_Query_Expression(const Media_Query_Expression* ptr) + : Expression(ptr), + feature_(ptr->feature_), + value_(ptr->value_), + is_interpolated_(ptr->is_interpolated_) + { } + ATTACH_AST_OPERATIONS(Media_Query_Expression) + ATTACH_OPERATIONS() + }; + + //////////////////// + // `@supports` rule. + //////////////////// + class Supports_Block : public Has_Block { + ADD_PROPERTY(Supports_Condition_Obj, condition) + public: + Supports_Block(ParserState pstate, Supports_Condition_Obj condition, Block_Obj block = 0) + : Has_Block(pstate, block), condition_(condition) + { statement_type(SUPPORTS); } + Supports_Block(const Supports_Block* ptr) + : Has_Block(ptr), condition_(ptr->condition_) + { statement_type(SUPPORTS); } + bool bubbles() { return true; } + ATTACH_AST_OPERATIONS(Supports_Block) + ATTACH_OPERATIONS() + }; + + ////////////////////////////////////////////////////// + // The abstract superclass of all Supports conditions. + ////////////////////////////////////////////////////// + class Supports_Condition : public Expression { + public: + Supports_Condition(ParserState pstate) + : Expression(pstate) + { } + Supports_Condition(const Supports_Condition* ptr) + : Expression(ptr) + { } + virtual bool needs_parens(Supports_Condition_Obj cond) const { return false; } + ATTACH_AST_OPERATIONS(Supports_Condition) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////////////////// + // An operator condition (e.g. `CONDITION1 and CONDITION2`). + //////////////////////////////////////////////////////////// + class Supports_Operator : public Supports_Condition { + public: + enum Operand { AND, OR }; + private: + ADD_PROPERTY(Supports_Condition_Obj, left); + ADD_PROPERTY(Supports_Condition_Obj, right); + ADD_PROPERTY(Operand, operand); + public: + Supports_Operator(ParserState pstate, Supports_Condition_Obj l, Supports_Condition_Obj r, Operand o) + : Supports_Condition(pstate), left_(l), right_(r), operand_(o) + { } + Supports_Operator(const Supports_Operator* ptr) + : Supports_Condition(ptr), + left_(ptr->left_), + right_(ptr->right_), + operand_(ptr->operand_) + { } + virtual bool needs_parens(Supports_Condition_Obj cond) const; + ATTACH_AST_OPERATIONS(Supports_Operator) + ATTACH_OPERATIONS() + }; + + ////////////////////////////////////////// + // A negation condition (`not CONDITION`). 
+ ////////////////////////////////////////// + class Supports_Negation : public Supports_Condition { + private: + ADD_PROPERTY(Supports_Condition_Obj, condition); + public: + Supports_Negation(ParserState pstate, Supports_Condition_Obj c) + : Supports_Condition(pstate), condition_(c) + { } + Supports_Negation(const Supports_Negation* ptr) + : Supports_Condition(ptr), condition_(ptr->condition_) + { } + virtual bool needs_parens(Supports_Condition_Obj cond) const; + ATTACH_AST_OPERATIONS(Supports_Negation) + ATTACH_OPERATIONS() + }; + + ///////////////////////////////////////////////////// + // A declaration condition (e.g. `(feature: value)`). + ///////////////////////////////////////////////////// + class Supports_Declaration : public Supports_Condition { + private: + ADD_PROPERTY(Expression_Obj, feature); + ADD_PROPERTY(Expression_Obj, value); + public: + Supports_Declaration(ParserState pstate, Expression_Obj f, Expression_Obj v) + : Supports_Condition(pstate), feature_(f), value_(v) + { } + Supports_Declaration(const Supports_Declaration* ptr) + : Supports_Condition(ptr), + feature_(ptr->feature_), + value_(ptr->value_) + { } + virtual bool needs_parens(Supports_Condition_Obj cond) const { return false; } + ATTACH_AST_OPERATIONS(Supports_Declaration) + ATTACH_OPERATIONS() + }; + + /////////////////////////////////////////////// + // An interpolation condition (e.g. `#{$var}`). + /////////////////////////////////////////////// + class Supports_Interpolation : public Supports_Condition { + private: + ADD_PROPERTY(Expression_Obj, value); + public: + Supports_Interpolation(ParserState pstate, Expression_Obj v) + : Supports_Condition(pstate), value_(v) + { } + Supports_Interpolation(const Supports_Interpolation* ptr) + : Supports_Condition(ptr), + value_(ptr->value_) + { } + virtual bool needs_parens(Supports_Condition_Obj cond) const { return false; } + ATTACH_AST_OPERATIONS(Supports_Interpolation) + ATTACH_OPERATIONS() + }; + + ///////////////////////////////////////////////// + // At root expressions (for use inside @at-root). + ///////////////////////////////////////////////// + class At_Root_Query : public Expression { + private: + ADD_PROPERTY(Expression_Obj, feature) + ADD_PROPERTY(Expression_Obj, value) + public: + At_Root_Query(ParserState pstate, Expression_Obj f = 0, Expression_Obj v = 0, bool i = false) + : Expression(pstate), feature_(f), value_(v) + { } + At_Root_Query(const At_Root_Query* ptr) + : Expression(ptr), + feature_(ptr->feature_), + value_(ptr->value_) + { } + bool exclude(std::string str); + ATTACH_AST_OPERATIONS(At_Root_Query) + ATTACH_OPERATIONS() + }; + + /////////// + // At-root. 
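At_Root_Query carries the optional (with: ...) / (without: ...) feature of an @at-root rule; its exclude(str) method (defined elsewhere) is what At_Root_Block::exclude_node, shown next, consults with keywords such as "media", "rule", "supports" and "keyframes". The sketch below assumes the usual @at-root semantics, with a plain @at-root behaving like (without: rule); it is an illustration, not the library's implementation.

    #include <iostream>
    #include <set>
    #include <string>

    // Simplified stand-in for At_Root_Query::exclude(): `without` lists what to
    // bubble out of, `with` inverts the test. Real libsass parses these from the
    // query expression; this sketch just takes them directly.
    struct AtRootQuerySketch {
      bool is_with;                    // true for (with: ...), false for (without: ...)
      std::set<std::string> keywords;  // e.g. {"media"} or {"rule"}

      bool exclude(const std::string& directive) const {
        bool listed = keywords.count(directive) > 0;
        return is_with ? !listed : listed;
      }
    };

    int main() {
      AtRootQuerySketch plain{false, {"rule"}};    // @at-root { ... }
      AtRootQuerySketch media{false, {"media"}};   // @at-root (without: media) { ... }
      std::cout << plain.exclude("rule") << "\n";  // 1: escape the enclosing ruleset
      std::cout << media.exclude("rule") << "\n";  // 0: rulesets are kept in place
    }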
+ /////////// + class At_Root_Block : public Has_Block { + ADD_PROPERTY(At_Root_Query_Obj, expression) + public: + At_Root_Block(ParserState pstate, Block_Obj b = 0, At_Root_Query_Obj e = 0) + : Has_Block(pstate, b), expression_(e) + { statement_type(ATROOT); } + At_Root_Block(const At_Root_Block* ptr) + : Has_Block(ptr), expression_(ptr->expression_) + { statement_type(ATROOT); } + bool bubbles() { return true; } + bool exclude_node(Statement_Obj s) { + if (expression() == 0) + { + return s->statement_type() == Statement::RULESET; + } + + if (s->statement_type() == Statement::DIRECTIVE) + { + if (Directive_Obj dir = Cast(s)) + { + std::string keyword(dir->keyword()); + if (keyword.length() > 0) keyword.erase(0, 1); + return expression()->exclude(keyword); + } + } + if (s->statement_type() == Statement::MEDIA) + { + return expression()->exclude("media"); + } + if (s->statement_type() == Statement::RULESET) + { + return expression()->exclude("rule"); + } + if (s->statement_type() == Statement::SUPPORTS) + { + return expression()->exclude("supports"); + } + if (Directive_Obj dir = Cast(s)) + { + if (dir->is_keyframes()) return expression()->exclude("keyframes"); + } + return false; + } + ATTACH_AST_OPERATIONS(At_Root_Block) + ATTACH_OPERATIONS() + }; + + ////////////////// + // The null value. + ////////////////// + class Null : public Value { + public: + Null(ParserState pstate) : Value(pstate) { concrete_type(NULL_VAL); } + Null(const Null* ptr) : Value(ptr) { concrete_type(NULL_VAL); } + std::string type() const { return "null"; } + static std::string type_name() { return "null"; } + bool is_invisible() const { return true; } + operator bool() { return false; } + bool is_false() { return true; } + + virtual size_t hash() + { + return -1; + } + + virtual bool operator== (const Expression& rhs) const; + + ATTACH_AST_OPERATIONS(Null) + ATTACH_OPERATIONS() + }; + + ///////////////////////////////// + // Thunks for delayed evaluation. + ///////////////////////////////// + class Thunk : public Expression { + ADD_PROPERTY(Expression_Obj, expression) + ADD_PROPERTY(Env*, environment) + public: + Thunk(ParserState pstate, Expression_Obj exp, Env* env = 0) + : Expression(pstate), expression_(exp), environment_(env) + { } + }; + + ///////////////////////////////////////////////////////// + // Individual parameter objects for mixins and functions. + ///////////////////////////////////////////////////////// + class Parameter : public AST_Node { + ADD_CONSTREF(std::string, name) + ADD_PROPERTY(Expression_Obj, default_value) + ADD_PROPERTY(bool, is_rest_parameter) + public: + Parameter(ParserState pstate, + std::string n, Expression_Obj def = 0, bool rest = false) + : AST_Node(pstate), name_(n), default_value_(def), is_rest_parameter_(rest) + { + // tried to come up with a spec test for this, but it does no longer + // get past the parser (it error out earlier). A spec test was added! + // if (default_value_ && is_rest_parameter_) { + // error("variable-length parameter may not have a default value", pstate_); + // } + } + Parameter(const Parameter* ptr) + : AST_Node(ptr), + name_(ptr->name_), + default_value_(ptr->default_value_), + is_rest_parameter_(ptr->is_rest_parameter_) + { + // tried to come up with a spec test for this, but it does no longer + // get past the parser (it error out earlier). A spec test was added! 
+ // if (default_value_ && is_rest_parameter_) { + // error("variable-length parameter may not have a default value", pstate_); + // } + } + ATTACH_AST_OPERATIONS(Parameter) + ATTACH_OPERATIONS() + }; + + ///////////////////////////////////////////////////////////////////////// + // Parameter lists -- in their own class to facilitate context-sensitive + // error checking (e.g., ensuring that all optional parameters follow all + // required parameters). + ///////////////////////////////////////////////////////////////////////// + class Parameters : public AST_Node, public Vectorized { + ADD_PROPERTY(bool, has_optional_parameters) + ADD_PROPERTY(bool, has_rest_parameter) + protected: + void adjust_after_pushing(Parameter_Obj p) + { + if (p->default_value()) { + if (has_rest_parameter()) { + coreError("optional parameters may not be combined with variable-length parameters", p->pstate()); + } + has_optional_parameters(true); + } + else if (p->is_rest_parameter()) { + if (has_rest_parameter()) { + coreError("functions and mixins cannot have more than one variable-length parameter", p->pstate()); + } + has_rest_parameter(true); + } + else { + if (has_rest_parameter()) { + coreError("required parameters must precede variable-length parameters", p->pstate()); + } + if (has_optional_parameters()) { + coreError("required parameters must precede optional parameters", p->pstate()); + } + } + } + public: + Parameters(ParserState pstate) + : AST_Node(pstate), + Vectorized(), + has_optional_parameters_(false), + has_rest_parameter_(false) + { } + Parameters(const Parameters* ptr) + : AST_Node(ptr), + Vectorized(*ptr), + has_optional_parameters_(ptr->has_optional_parameters_), + has_rest_parameter_(ptr->has_rest_parameter_) + { } + ATTACH_AST_OPERATIONS(Parameters) + ATTACH_OPERATIONS() + }; + + ///////////////////////////////////////// + // Abstract base class for CSS selectors. 
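adjust_after_pushing above is where the parameter-list rules are enforced as each Parameter is appended: optional (defaulted) parameters must follow required ones, at most one rest parameter is allowed, and it must come last. The checks below mirror those rules in a standalone form, using illustrative types rather than the AST classes:

    #include <iostream>
    #include <stdexcept>
    #include <string>
    #include <vector>

    struct ParamSketch { std::string name; bool has_default; bool is_rest; };

    // Same ordering checks as Parameters::adjust_after_pushing, without the AST.
    void check(const std::vector<ParamSketch>& params) {
      bool has_optional = false, has_rest = false;
      for (const ParamSketch& p : params) {
        if (p.has_default) {
          if (has_rest) throw std::runtime_error("optional parameters may not be combined with variable-length parameters");
          has_optional = true;
        } else if (p.is_rest) {
          if (has_rest) throw std::runtime_error("functions and mixins cannot have more than one variable-length parameter");
          has_rest = true;
        } else {
          if (has_rest) throw std::runtime_error("required parameters must precede variable-length parameters");
          if (has_optional) throw std::runtime_error("required parameters must precede optional parameters");
        }
      }
    }

    int main() {
      // ($a, $b: 1, $rest...) is fine; ($a: 1, $b) is rejected.
      check({ {"$a", false, false}, {"$b", true, false}, {"$rest", false, true} });
      try { check({ {"$a", true, false}, {"$b", false, false} }); }
      catch (const std::exception& e) { std::cout << e.what() << "\n"; }
    }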
+ ///////////////////////////////////////// + class Selector : public Expression { + // ADD_PROPERTY(bool, has_reference) + // line break before list separator + ADD_PROPERTY(bool, has_line_feed) + // line break after list separator + ADD_PROPERTY(bool, has_line_break) + // maybe we have optional flag + ADD_PROPERTY(bool, is_optional) + // parent block pointers + + // must not be a reference counted object + // otherwise we create circular references + ADD_PROPERTY(Media_Block_Ptr, media_block) + protected: + size_t hash_; + public: + Selector(ParserState pstate) + : Expression(pstate), + has_line_feed_(false), + has_line_break_(false), + is_optional_(false), + media_block_(0), + hash_(0) + { concrete_type(SELECTOR); } + Selector(const Selector* ptr) + : Expression(ptr), + // has_reference_(ptr->has_reference_), + has_line_feed_(ptr->has_line_feed_), + has_line_break_(ptr->has_line_break_), + is_optional_(ptr->is_optional_), + media_block_(ptr->media_block_), + hash_(ptr->hash_) + { concrete_type(SELECTOR); } + virtual ~Selector() = 0; + virtual size_t hash() = 0; + virtual unsigned long specificity() const = 0; + virtual void set_media_block(Media_Block_Ptr mb) { + media_block(mb); + } + virtual bool has_parent_ref() const { + return false; + } + virtual bool has_real_parent_ref() const { + return false; + } + // dispatch to correct handlers + virtual bool operator<(const Selector& rhs) const = 0; + virtual bool operator==(const Selector& rhs) const = 0; + ATTACH_VIRTUAL_AST_OPERATIONS(Selector); + }; + inline Selector::~Selector() { } + + ///////////////////////////////////////////////////////////////////////// + // Interpolated selectors -- the interpolated String will be expanded and + // re-parsed into a normal selector class. + ///////////////////////////////////////////////////////////////////////// + class Selector_Schema : public AST_Node { + ADD_PROPERTY(String_Obj, contents) + ADD_PROPERTY(bool, connect_parent); + // must not be a reference counted object + // otherwise we create circular references + ADD_PROPERTY(Media_Block_Ptr, media_block) + // store computed hash + size_t hash_; + public: + Selector_Schema(ParserState pstate, String_Obj c) + : AST_Node(pstate), + contents_(c), + connect_parent_(true), + media_block_(NULL), + hash_(0) + { } + Selector_Schema(const Selector_Schema* ptr) + : AST_Node(ptr), + contents_(ptr->contents_), + connect_parent_(ptr->connect_parent_), + media_block_(ptr->media_block_), + hash_(ptr->hash_) + { } + virtual bool has_parent_ref() const; + virtual bool has_real_parent_ref() const; + virtual bool operator<(const Selector& rhs) const; + virtual bool operator==(const Selector& rhs) const; + // selector schema is not yet a final selector, so we do not + // have a specificity for it yet. We need to + virtual unsigned long specificity() const { return 0; } + virtual size_t hash() { + if (hash_ == 0) { + hash_combine(hash_, contents_->hash()); + } + return hash_; + } + ATTACH_AST_OPERATIONS(Selector_Schema) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////// + // Abstract base class for simple selectors. 
+ //////////////////////////////////////////// + class Simple_Selector : public Selector { + ADD_CONSTREF(std::string, ns) + ADD_CONSTREF(std::string, name) + ADD_PROPERTY(Simple_Type, simple_type) + ADD_PROPERTY(bool, has_ns) + public: + Simple_Selector(ParserState pstate, std::string n = "") + : Selector(pstate), ns_(""), name_(n), has_ns_(false) + { + simple_type(SIMPLE); + size_t pos = n.find('|'); + // found some namespace + if (pos != std::string::npos) { + has_ns_ = true; + ns_ = n.substr(0, pos); + name_ = n.substr(pos + 1); + } + } + Simple_Selector(const Simple_Selector* ptr) + : Selector(ptr), + ns_(ptr->ns_), + name_(ptr->name_), + has_ns_(ptr->has_ns_) + { simple_type(SIMPLE); } + virtual std::string ns_name() const + { + std::string name(""); + if (has_ns_) + name += ns_ + "|"; + return name + name_; + } + virtual size_t hash() + { + if (hash_ == 0) { + hash_combine(hash_, std::hash()(SELECTOR)); + hash_combine(hash_, std::hash()(ns())); + hash_combine(hash_, std::hash()(name())); + } + return hash_; + } + // namespace compare functions + bool is_ns_eq(const Simple_Selector& r) const; + // namespace query functions + bool is_universal_ns() const + { + return has_ns_ && ns_ == "*"; + } + bool has_universal_ns() const + { + return !has_ns_ || ns_ == "*"; + } + bool is_empty_ns() const + { + return !has_ns_ || ns_ == ""; + } + bool has_empty_ns() const + { + return has_ns_ && ns_ == ""; + } + bool has_qualified_ns() const + { + return has_ns_ && ns_ != "" && ns_ != "*"; + } + // name query functions + bool is_universal() const + { + return name_ == "*"; + } + + virtual bool has_placeholder() { + return false; + } + + virtual ~Simple_Selector() = 0; + virtual Compound_Selector_Ptr unify_with(Compound_Selector_Ptr); + virtual bool has_parent_ref() const { return false; }; + virtual bool has_real_parent_ref() const { return false; }; + virtual bool is_pseudo_element() const { return false; } + + virtual bool is_superselector_of(Compound_Selector_Obj sub) { return false; } + + virtual bool operator==(const Selector& rhs) const; + virtual bool operator==(const Simple_Selector& rhs) const; + inline bool operator!=(const Simple_Selector& rhs) const { return !(*this == rhs); } + + bool operator<(const Selector& rhs) const; + bool operator<(const Simple_Selector& rhs) const; + // default implementation should work for most of the simple selectors (otherwise overload) + ATTACH_VIRTUAL_AST_OPERATIONS(Simple_Selector); + ATTACH_OPERATIONS(); + }; + inline Simple_Selector::~Simple_Selector() { } + + + ////////////////////////////////// + // The Parent Selector Expression. + ////////////////////////////////// + // parent selectors can occur in selectors but also + // inside strings in declarations (Compound_Selector). + // only one simple parent selector means the first case. 
+ class Parent_Selector : public Simple_Selector { + ADD_PROPERTY(bool, real) + public: + Parent_Selector(ParserState pstate, bool r = true) + : Simple_Selector(pstate, "&"), real_(r) + { /* has_reference(true); */ } + Parent_Selector(const Parent_Selector* ptr) + : Simple_Selector(ptr), real_(ptr->real_) + { /* has_reference(true); */ } + bool is_real_parent_ref() const { return real(); }; + virtual bool has_parent_ref() const { return true; }; + virtual bool has_real_parent_ref() const { return is_real_parent_ref(); }; + virtual unsigned long specificity() const + { + return 0; + } + std::string type() const { return "selector"; } + static std::string type_name() { return "selector"; } + ATTACH_AST_OPERATIONS(Parent_Selector) + ATTACH_OPERATIONS() + }; + + ///////////////////////////////////////////////////////////////////////// + // Placeholder selectors (e.g., "%foo") for use in extend-only selectors. + ///////////////////////////////////////////////////////////////////////// + class Placeholder_Selector : public Simple_Selector { + public: + Placeholder_Selector(ParserState pstate, std::string n) + : Simple_Selector(pstate, n) + { } + Placeholder_Selector(const Placeholder_Selector* ptr) + : Simple_Selector(ptr) + { } + virtual unsigned long specificity() const + { + return Constants::Specificity_Base; + } + virtual bool has_placeholder() { + return true; + } + virtual ~Placeholder_Selector() {}; + ATTACH_AST_OPERATIONS(Placeholder_Selector) + ATTACH_OPERATIONS() + }; + + ///////////////////////////////////////////////////////////////////// + // Element selectors (and the universal selector) -- e.g., div, span, *. + ///////////////////////////////////////////////////////////////////// + class Element_Selector : public Simple_Selector { + public: + Element_Selector(ParserState pstate, std::string n) + : Simple_Selector(pstate, n) + { } + Element_Selector(const Element_Selector* ptr) + : Simple_Selector(ptr) + { } + virtual unsigned long specificity() const + { + if (name() == "*") return 0; + else return Constants::Specificity_Element; + } + virtual Simple_Selector_Ptr unify_with(Simple_Selector_Ptr); + virtual Compound_Selector_Ptr unify_with(Compound_Selector_Ptr); + virtual bool operator==(const Simple_Selector& rhs) const; + virtual bool operator==(const Element_Selector& rhs) const; + virtual bool operator<(const Simple_Selector& rhs) const; + virtual bool operator<(const Element_Selector& rhs) const; + ATTACH_AST_OPERATIONS(Element_Selector) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////// + // Class selectors -- i.e., .foo. + //////////////////////////////////////////////// + class Class_Selector : public Simple_Selector { + public: + Class_Selector(ParserState pstate, std::string n) + : Simple_Selector(pstate, n) + { } + Class_Selector(const Class_Selector* ptr) + : Simple_Selector(ptr) + { } + virtual unsigned long specificity() const + { + return Constants::Specificity_Class; + } + virtual Compound_Selector_Ptr unify_with(Compound_Selector_Ptr); + ATTACH_AST_OPERATIONS(Class_Selector) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////// + // ID selectors -- i.e., #foo. 
+ //////////////////////////////////////////////// + class Id_Selector : public Simple_Selector { + public: + Id_Selector(ParserState pstate, std::string n) + : Simple_Selector(pstate, n) + { } + Id_Selector(const Id_Selector* ptr) + : Simple_Selector(ptr) + { } + virtual unsigned long specificity() const + { + return Constants::Specificity_ID; + } + virtual Compound_Selector_Ptr unify_with(Compound_Selector_Ptr); + ATTACH_AST_OPERATIONS(Id_Selector) + ATTACH_OPERATIONS() + }; + + /////////////////////////////////////////////////// + // Attribute selectors -- e.g., [src*=".jpg"], etc. + /////////////////////////////////////////////////// + class Attribute_Selector : public Simple_Selector { + ADD_CONSTREF(std::string, matcher) + // this cannot be changed to obj atm!!!!!!????!!!!!!! + ADD_PROPERTY(String_Obj, value) // might be interpolated + ADD_PROPERTY(char, modifier); + public: + Attribute_Selector(ParserState pstate, std::string n, std::string m, String_Obj v, char o = 0) + : Simple_Selector(pstate, n), matcher_(m), value_(v), modifier_(o) + { simple_type(ATTR_SEL); } + Attribute_Selector(const Attribute_Selector* ptr) + : Simple_Selector(ptr), + matcher_(ptr->matcher_), + value_(ptr->value_), + modifier_(ptr->modifier_) + { simple_type(ATTR_SEL); } + virtual size_t hash() + { + if (hash_ == 0) { + hash_combine(hash_, Simple_Selector::hash()); + hash_combine(hash_, std::hash()(matcher())); + if (value_) hash_combine(hash_, value_->hash()); + } + return hash_; + } + virtual unsigned long specificity() const + { + return Constants::Specificity_Attr; + } + virtual bool operator==(const Simple_Selector& rhs) const; + virtual bool operator==(const Attribute_Selector& rhs) const; + virtual bool operator<(const Simple_Selector& rhs) const; + virtual bool operator<(const Attribute_Selector& rhs) const; + ATTACH_AST_OPERATIONS(Attribute_Selector) + ATTACH_OPERATIONS() + }; + + ////////////////////////////////////////////////////////////////// + // Pseudo selectors -- e.g., :first-child, :nth-of-type(...), etc. + ////////////////////////////////////////////////////////////////// + /* '::' starts a pseudo-element, ':' a pseudo-class */ + /* Except :first-line, :first-letter, :before and :after */ + /* Note that pseudo-elements are restricted to one per selector */ + /* and occur only in the last simple_selector_sequence. */ + inline bool is_pseudo_class_element(const std::string& name) + { + return name == ":before" || + name == ":after" || + name == ":first-line" || + name == ":first-letter"; + } + + // Pseudo Selector cannot have any namespace? + class Pseudo_Selector : public Simple_Selector { + ADD_PROPERTY(String_Obj, expression) + public: + Pseudo_Selector(ParserState pstate, std::string n, String_Obj expr = 0) + : Simple_Selector(pstate, n), expression_(expr) + { simple_type(PSEUDO_SEL); } + Pseudo_Selector(const Pseudo_Selector* ptr) + : Simple_Selector(ptr), expression_(ptr->expression_) + { simple_type(PSEUDO_SEL); } + + // A pseudo-element is made of two colons (::) followed by the name. + // The `::` notation is introduced by the current document in order to + // establish a discrimination between pseudo-classes and pseudo-elements. + // For compatibility with existing style sheets, user agents must also + // accept the previous one-colon notation for pseudo-elements introduced + // in CSS levels 1 and 2 (namely, :first-line, :first-letter, :before and + // :after). This compatibility is not allowed for the new pseudo-elements + // introduced in this specification. 
+ virtual bool is_pseudo_element() const + { + return (name_[0] == ':' && name_[1] == ':') + || is_pseudo_class_element(name_); + } + virtual size_t hash() + { + if (hash_ == 0) { + hash_combine(hash_, Simple_Selector::hash()); + if (expression_) hash_combine(hash_, expression_->hash()); + } + return hash_; + } + virtual unsigned long specificity() const + { + if (is_pseudo_element()) + return Constants::Specificity_Element; + return Constants::Specificity_Pseudo; + } + virtual bool operator==(const Simple_Selector& rhs) const; + virtual bool operator==(const Pseudo_Selector& rhs) const; + virtual bool operator<(const Simple_Selector& rhs) const; + virtual bool operator<(const Pseudo_Selector& rhs) const; + virtual Compound_Selector_Ptr unify_with(Compound_Selector_Ptr); + ATTACH_AST_OPERATIONS(Pseudo_Selector) + ATTACH_OPERATIONS() + }; + + ///////////////////////////////////////////////// + // Wrapped selector -- pseudo selector that takes a list of selectors as argument(s) e.g., :not(:first-of-type), :-moz-any(ol p.blah, ul, menu, dir) + ///////////////////////////////////////////////// + class Wrapped_Selector : public Simple_Selector { + ADD_PROPERTY(Selector_List_Obj, selector) + public: + Wrapped_Selector(ParserState pstate, std::string n, Selector_List_Obj sel) + : Simple_Selector(pstate, n), selector_(sel) + { simple_type(WRAPPED_SEL); } + Wrapped_Selector(const Wrapped_Selector* ptr) + : Simple_Selector(ptr), selector_(ptr->selector_) + { simple_type(WRAPPED_SEL); } + virtual bool is_superselector_of(Wrapped_Selector_Obj sub); + // Selectors inside the negation pseudo-class are counted like any + // other, but the negation itself does not count as a pseudo-class. + virtual size_t hash(); + virtual bool has_parent_ref() const; + virtual bool has_real_parent_ref() const; + virtual unsigned long specificity() const; + virtual bool find ( bool (*f)(AST_Node_Obj) ); + virtual bool operator==(const Simple_Selector& rhs) const; + virtual bool operator==(const Wrapped_Selector& rhs) const; + virtual bool operator<(const Simple_Selector& rhs) const; + virtual bool operator<(const Wrapped_Selector& rhs) const; + virtual void cloneChildren(); + ATTACH_AST_OPERATIONS(Wrapped_Selector) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////////////////////////////////// + // Simple selector sequences. Maintains flags indicating whether it contains + // any parent references or placeholders, to simplify expansion. 
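is_pseudo_element() above combines the two rules spelled out in the comment: a name written with the double-colon syntax is always a pseudo-element, and four legacy single-colon names (:before, :after, :first-line, :first-letter) are still accepted as pseudo-elements for CSS 1/2 compatibility. The same check, lifted out as a plain function purely for illustration:

    #include <iostream>
    #include <string>

    // Mirrors Pseudo_Selector::is_pseudo_element() / is_pseudo_class_element().
    bool is_pseudo_element(const std::string& name) {
      if (name.size() >= 2 && name[0] == ':' && name[1] == ':') return true; // ::before, ::selection, ...
      return name == ":before" || name == ":after" ||
             name == ":first-line" || name == ":first-letter";               // legacy one-colon forms
    }

    int main() {
      std::cout << is_pseudo_element("::before") << "\n";  // 1
      std::cout << is_pseudo_element(":before")  << "\n";  // 1 (compatibility)
      std::cout << is_pseudo_element(":hover")   << "\n";  // 0, a pseudo-class
    }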
+ //////////////////////////////////////////////////////////////////////////// + class Compound_Selector : public Selector, public Vectorized { + private: + ComplexSelectorSet sources_; + ADD_PROPERTY(bool, extended); + ADD_PROPERTY(bool, has_parent_reference); + protected: + void adjust_after_pushing(Simple_Selector_Obj s) + { + // if (s->has_reference()) has_reference(true); + // if (s->has_placeholder()) has_placeholder(true); + } + public: + Compound_Selector(ParserState pstate, size_t s = 0) + : Selector(pstate), + Vectorized(s), + extended_(false), + has_parent_reference_(false) + { } + Compound_Selector(const Compound_Selector* ptr) + : Selector(ptr), + Vectorized(*ptr), + extended_(ptr->extended_), + has_parent_reference_(ptr->has_parent_reference_) + { } + bool contains_placeholder() { + for (size_t i = 0, L = length(); i < L; ++i) { + if ((*this)[i]->has_placeholder()) return true; + } + return false; + }; + + void append(Simple_Selector_Ptr element); + + bool is_universal() const + { + return length() == 1 && (*this)[0]->is_universal(); + } + + Complex_Selector_Obj to_complex(); + Compound_Selector_Ptr unify_with(Compound_Selector_Ptr rhs); + // virtual Placeholder_Selector_Ptr find_placeholder(); + virtual bool has_parent_ref() const; + virtual bool has_real_parent_ref() const; + Simple_Selector_Ptr base() const { + if (length() == 0) return 0; + // ToDo: why is this needed? + if (Cast((*this)[0])) + return (*this)[0]; + return 0; + } + virtual bool is_superselector_of(Compound_Selector_Obj sub, std::string wrapped = ""); + virtual bool is_superselector_of(Complex_Selector_Obj sub, std::string wrapped = ""); + virtual bool is_superselector_of(Selector_List_Obj sub, std::string wrapped = ""); + virtual size_t hash() + { + if (Selector::hash_ == 0) { + hash_combine(Selector::hash_, std::hash()(SELECTOR)); + if (length()) hash_combine(Selector::hash_, Vectorized::hash()); + } + return Selector::hash_; + } + virtual unsigned long specificity() const + { + int sum = 0; + for (size_t i = 0, L = length(); i < L; ++i) + { sum += (*this)[i]->specificity(); } + return sum; + } + + virtual bool has_placeholder() + { + if (length() == 0) return false; + if (Simple_Selector_Obj ss = elements().front()) { + if (ss->has_placeholder()) return true; + } + return false; + } + + bool is_empty_reference() + { + return length() == 1 && + Cast((*this)[0]); + } + + virtual bool find ( bool (*f)(AST_Node_Obj) ); + virtual bool operator<(const Selector& rhs) const; + virtual bool operator==(const Selector& rhs) const; + virtual bool operator<(const Compound_Selector& rhs) const; + virtual bool operator==(const Compound_Selector& rhs) const; + inline bool operator!=(const Compound_Selector& rhs) const { return !(*this == rhs); } + + ComplexSelectorSet& sources() { return sources_; } + void clearSources() { sources_.clear(); } + void mergeSources(ComplexSelectorSet& sources); + + Compound_Selector_Ptr minus(Compound_Selector_Ptr rhs); + virtual void cloneChildren(); + ATTACH_AST_OPERATIONS(Compound_Selector) + ATTACH_OPERATIONS() + }; + + //////////////////////////////////////////////////////////////////////////// + // General selectors -- i.e., simple sequences combined with one of the four + // CSS selector combinators (">", "+", "~", and whitespace). Essentially a + // linked list. 
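Compound_Selector::specificity() just sums the specificity of its simple selectors, each of which returns one of the Constants::Specificity_* weights seen earlier (IDs heaviest, then classes, attributes and pseudo-classes, then element names, with * and the parent reference contributing nothing). The weights in the sketch below are illustrative only, chosen so the tiers stay ordered; they are not the library's actual constant values.

    #include <iostream>
    #include <vector>

    enum class SimpleKind { Universal, Element, Class, Attribute, PseudoClass, Id };

    // Illustrative weights in the same tier order as Constants::Specificity_*;
    // the real numeric values live in libsass's constants and may differ.
    unsigned long specificity(SimpleKind k) {
      switch (k) {
        case SimpleKind::Universal:   return 0;
        case SimpleKind::Element:     return 1;
        case SimpleKind::Class:
        case SimpleKind::Attribute:
        case SimpleKind::PseudoClass: return 1000;
        case SimpleKind::Id:          return 1000000;
      }
      return 0;
    }

    int main() {
      // a.foo#bar[href]  ->  element + class + id + attribute
      std::vector<SimpleKind> compound = { SimpleKind::Element, SimpleKind::Class,
                                           SimpleKind::Id, SimpleKind::Attribute };
      unsigned long sum = 0;
      for (SimpleKind k : compound) sum += specificity(k); // same summing as above
      std::cout << sum << "\n"; // 1002001 with these illustrative weights
    }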
+ //////////////////////////////////////////////////////////////////////////// + class Complex_Selector : public Selector { + public: + enum Combinator { ANCESTOR_OF, PARENT_OF, PRECEDES, ADJACENT_TO, REFERENCE }; + private: + HASH_CONSTREF(Combinator, combinator) + HASH_PROPERTY(Compound_Selector_Obj, head) + HASH_PROPERTY(Complex_Selector_Obj, tail) + HASH_PROPERTY(String_Obj, reference); + public: + bool contains_placeholder() { + if (head() && head()->contains_placeholder()) return true; + if (tail() && tail()->contains_placeholder()) return true; + return false; + }; + Complex_Selector(ParserState pstate, + Combinator c = ANCESTOR_OF, + Compound_Selector_Obj h = 0, + Complex_Selector_Obj t = 0, + String_Obj r = 0) + : Selector(pstate), + combinator_(c), + head_(h), tail_(t), + reference_(r) + {} + Complex_Selector(const Complex_Selector* ptr) + : Selector(ptr), + combinator_(ptr->combinator_), + head_(ptr->head_), tail_(ptr->tail_), + reference_(ptr->reference_) + {}; + virtual bool has_parent_ref() const; + virtual bool has_real_parent_ref() const; + + Complex_Selector_Obj skip_empty_reference() + { + if ((!head_ || !head_->length() || head_->is_empty_reference()) && + combinator() == Combinator::ANCESTOR_OF) + { + if (!tail_) return 0; + tail_->has_line_feed_ = this->has_line_feed_; + // tail_->has_line_break_ = this->has_line_break_; + return tail_->skip_empty_reference(); + } + return this; + } + + // can still have a tail + bool is_empty_ancestor() const + { + return (!head() || head()->length() == 0) && + combinator() == Combinator::ANCESTOR_OF; + } + + Selector_List_Ptr tails(Selector_List_Ptr tails); + + // front returns the first real tail + // skips over parent and empty ones + Complex_Selector_Obj first(); + // last returns the last real tail + Complex_Selector_Obj last(); + + // some shortcuts that should be removed + Complex_Selector_Obj innermost() { return last(); }; + + size_t length() const; + Selector_List_Ptr resolve_parent_refs(std::vector& pstack, Backtraces& traces, bool implicit_parent = true); + virtual bool is_superselector_of(Compound_Selector_Obj sub, std::string wrapping = ""); + virtual bool is_superselector_of(Complex_Selector_Obj sub, std::string wrapping = ""); + virtual bool is_superselector_of(Selector_List_Obj sub, std::string wrapping = ""); + Selector_List_Ptr unify_with(Complex_Selector_Ptr rhs); + Combinator clear_innermost(); + void append(Complex_Selector_Obj, Backtraces& traces); + void set_innermost(Complex_Selector_Obj, Combinator); + virtual size_t hash() + { + if (hash_ == 0) { + hash_combine(hash_, std::hash()(SELECTOR)); + hash_combine(hash_, std::hash()(combinator_)); + if (head_) hash_combine(hash_, head_->hash()); + if (tail_) hash_combine(hash_, tail_->hash()); + } + return hash_; + } + virtual unsigned long specificity() const + { + int sum = 0; + if (head()) sum += head()->specificity(); + if (tail()) sum += tail()->specificity(); + return sum; + } + virtual void set_media_block(Media_Block_Ptr mb) { + media_block(mb); + if (tail_) tail_->set_media_block(mb); + if (head_) head_->set_media_block(mb); + } + virtual bool has_placeholder() { + if (head_ && head_->has_placeholder()) return true; + if (tail_ && tail_->has_placeholder()) return true; + return false; + } + virtual bool find ( bool (*f)(AST_Node_Obj) ); + virtual bool operator<(const Selector& rhs) const; + virtual bool operator==(const Selector& rhs) const; + virtual bool operator<(const Complex_Selector& rhs) const; + virtual bool operator==(const Complex_Selector& rhs) 
const; + inline bool operator!=(const Complex_Selector& rhs) const { return !(*this == rhs); } + const ComplexSelectorSet sources() + { + //s = Set.new + //seq.map {|sseq_or_op| s.merge sseq_or_op.sources if sseq_or_op.is_a?(SimpleSequence)} + //s + + ComplexSelectorSet srcs; + + Compound_Selector_Obj pHead = head(); + Complex_Selector_Obj pTail = tail(); + + if (pHead) { + const ComplexSelectorSet& headSources = pHead->sources(); + srcs.insert(headSources.begin(), headSources.end()); + } + + if (pTail) { + const ComplexSelectorSet& tailSources = pTail->sources(); + srcs.insert(tailSources.begin(), tailSources.end()); + } + + return srcs; + } + void addSources(ComplexSelectorSet& sources) { + // members.map! {|m| m.is_a?(SimpleSequence) ? m.with_more_sources(sources) : m} + Complex_Selector_Ptr pIter = this; + while (pIter) { + Compound_Selector_Ptr pHead = pIter->head(); + + if (pHead) { + pHead->mergeSources(sources); + } + + pIter = pIter->tail(); + } + } + void clearSources() { + Complex_Selector_Ptr pIter = this; + while (pIter) { + Compound_Selector_Ptr pHead = pIter->head(); + + if (pHead) { + pHead->clearSources(); + } + + pIter = pIter->tail(); + } + } + + virtual void cloneChildren(); + ATTACH_AST_OPERATIONS(Complex_Selector) + ATTACH_OPERATIONS() + }; + + /////////////////////////////////// + // Comma-separated selector groups. + /////////////////////////////////// + class Selector_List : public Selector, public Vectorized { + ADD_PROPERTY(Selector_Schema_Obj, schema) + ADD_CONSTREF(std::vector, wspace) + protected: + void adjust_after_pushing(Complex_Selector_Obj c); + public: + Selector_List(ParserState pstate, size_t s = 0) + : Selector(pstate), + Vectorized(s), + schema_(NULL), + wspace_(0) + { } + Selector_List(const Selector_List* ptr) + : Selector(ptr), + Vectorized(*ptr), + schema_(ptr->schema_), + wspace_(ptr->wspace_) + { } + std::string type() const { return "list"; } + // remove parent selector references + // basically unwraps parsed selectors + virtual bool has_parent_ref() const; + virtual bool has_real_parent_ref() const; + void remove_parent_selectors(); + Selector_List_Ptr resolve_parent_refs(std::vector& pstack, Backtraces& traces, bool implicit_parent = true); + virtual bool is_superselector_of(Compound_Selector_Obj sub, std::string wrapping = ""); + virtual bool is_superselector_of(Complex_Selector_Obj sub, std::string wrapping = ""); + virtual bool is_superselector_of(Selector_List_Obj sub, std::string wrapping = ""); + Selector_List_Ptr unify_with(Selector_List_Ptr); + void populate_extends(Selector_List_Obj, Subset_Map&); + Selector_List_Obj eval(Eval& eval); + virtual size_t hash() + { + if (Selector::hash_ == 0) { + hash_combine(Selector::hash_, std::hash()(SELECTOR)); + hash_combine(Selector::hash_, Vectorized::hash()); + } + return Selector::hash_; + } + virtual unsigned long specificity() const + { + unsigned long sum = 0; + unsigned long specificity; + for (size_t i = 0, L = length(); i < L; ++i) + { + specificity = (*this)[i]->specificity(); + if (sum < specificity) sum = specificity; + } + return sum; + } + virtual void set_media_block(Media_Block_Ptr mb) { + media_block(mb); + for (Complex_Selector_Obj cs : elements()) { + cs->set_media_block(mb); + } + } + virtual bool has_placeholder() { + for (Complex_Selector_Obj cs : elements()) { + if (cs->has_placeholder()) return true; + } + return false; + } + virtual bool find ( bool (*f)(AST_Node_Obj) ); + virtual bool operator<(const Selector& rhs) const; + virtual bool operator==(const Selector& rhs) 
const; + virtual bool operator<(const Selector_List& rhs) const; + virtual bool operator==(const Selector_List& rhs) const; + // Selector Lists can be compared to comma lists + virtual bool operator==(const Expression& rhs) const; + virtual void cloneChildren(); + ATTACH_AST_OPERATIONS(Selector_List) + ATTACH_OPERATIONS() + }; + + // compare function for sorting and probably other other uses + struct cmp_complex_selector { inline bool operator() (const Complex_Selector_Obj l, const Complex_Selector_Obj r) { return (*l < *r); } }; + struct cmp_compound_selector { inline bool operator() (const Compound_Selector_Obj l, const Compound_Selector_Obj r) { return (*l < *r); } }; + struct cmp_simple_selector { inline bool operator() (const Simple_Selector_Obj l, const Simple_Selector_Obj r) { return (*l < *r); } }; + +} + +#ifdef __clang__ + +#pragma clang diagnostic pop + +#endif + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/ast_def_macros.hpp b/mybulma/node_modules/node-sass/src/libsass/src/ast_def_macros.hpp new file mode 100644 index 0000000..b3a7f8d --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/ast_def_macros.hpp @@ -0,0 +1,80 @@ +#ifndef SASS_AST_DEF_MACROS_H +#define SASS_AST_DEF_MACROS_H + +// Helper class to switch a flag and revert once we go out of scope +template +class LocalOption { + private: + T* var; // pointer to original variable + T orig; // copy of the original option + public: + LocalOption(T& var) + { + this->var = &var; + this->orig = var; + } + LocalOption(T& var, T orig) + { + this->var = &var; + this->orig = var; + *(this->var) = orig; + } + void reset() + { + *(this->var) = this->orig; + } + ~LocalOption() { + *(this->var) = this->orig; + } +}; + +#define LOCAL_FLAG(name,opt) LocalOption flag_##name(name, opt) +#define LOCAL_COUNT(name,opt) LocalOption cnt_##name(name, opt) + +#define NESTING_GUARD(name) \ + LocalOption cnt_##name(name, name + 1); \ + if (name > MAX_NESTING) throw Exception::NestingLimitError(pstate, traces); \ + +#define ATTACH_OPERATIONS()\ +virtual void perform(Operation* op) { (*op)(this); }\ +virtual AST_Node_Ptr perform(Operation* op) { return (*op)(this); }\ +virtual Statement_Ptr perform(Operation* op) { return (*op)(this); }\ +virtual Expression_Ptr perform(Operation* op) { return (*op)(this); }\ +virtual Selector_Ptr perform(Operation* op) { return (*op)(this); }\ +virtual std::string perform(Operation* op) { return (*op)(this); }\ +virtual union Sass_Value* perform(Operation* op) { return (*op)(this); }\ +virtual Value_Ptr perform(Operation* op) { return (*op)(this); } + +#define ADD_PROPERTY(type, name)\ +protected:\ + type name##_;\ +public:\ + type name() const { return name##_; }\ + type name(type name##__) { return name##_ = name##__; }\ +private: + +#define HASH_PROPERTY(type, name)\ +protected:\ + type name##_;\ +public:\ + type name() const { return name##_; }\ + type name(type name##__) { hash_ = 0; return name##_ = name##__; }\ +private: + +#define ADD_CONSTREF(type, name) \ +protected: \ + type name##_; \ +public: \ + const type& name() const { return name##_; } \ + void name(type name##__) { name##_ = name##__; } \ +private: + +#define HASH_CONSTREF(type, name) \ +protected: \ + type name##_; \ +public: \ + const type& name() const { return name##_; } \ + void name(type name##__) { hash_ = 0; name##_ = name##__; } \ +private: + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/ast_fwd_decl.cpp b/mybulma/node_modules/node-sass/src/libsass/src/ast_fwd_decl.cpp new file 
mode 100644 index 0000000..c9c7672 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/ast_fwd_decl.cpp @@ -0,0 +1,29 @@ +#include "ast.hpp" + +namespace Sass { + + #define IMPLEMENT_BASE_CAST(T) \ + template<> \ + T* Cast(AST_Node* ptr) { \ + return dynamic_cast(ptr); \ + }; \ + \ + template<> \ + const T* Cast(const AST_Node* ptr) { \ + return dynamic_cast(ptr); \ + }; \ + + IMPLEMENT_BASE_CAST(AST_Node) + IMPLEMENT_BASE_CAST(Expression) + IMPLEMENT_BASE_CAST(Statement) + IMPLEMENT_BASE_CAST(Has_Block) + IMPLEMENT_BASE_CAST(PreValue) + IMPLEMENT_BASE_CAST(Value) + IMPLEMENT_BASE_CAST(List) + IMPLEMENT_BASE_CAST(String) + IMPLEMENT_BASE_CAST(String_Constant) + IMPLEMENT_BASE_CAST(Supports_Condition) + IMPLEMENT_BASE_CAST(Selector) + IMPLEMENT_BASE_CAST(Simple_Selector) + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/ast_fwd_decl.hpp b/mybulma/node_modules/node-sass/src/libsass/src/ast_fwd_decl.hpp new file mode 100644 index 0000000..5145a09 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/ast_fwd_decl.hpp @@ -0,0 +1,463 @@ +#ifndef SASS_AST_FWD_DECL_H +#define SASS_AST_FWD_DECL_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "memory/SharedPtr.hpp" +#include "sass/functions.h" + +///////////////////////////////////////////// +// Forward declarations for the AST visitors. +///////////////////////////////////////////// +namespace Sass { + + class AST_Node; + typedef AST_Node* AST_Node_Ptr; + typedef AST_Node const* AST_Node_Ptr_Const; + + class Has_Block; + typedef Has_Block* Has_Block_Ptr; + typedef Has_Block const* Has_Block_Ptr_Const; + + class Simple_Selector; + typedef Simple_Selector* Simple_Selector_Ptr; + typedef Simple_Selector const* Simple_Selector_Ptr_Const; + + class PreValue; + typedef PreValue* PreValue_Ptr; + typedef PreValue const* PreValue_Ptr_Const; + class Thunk; + typedef Thunk* Thunk_Ptr; + typedef Thunk const* Thunk_Ptr_Const; + class Block; + typedef Block* Block_Ptr; + typedef Block const* Block_Ptr_Const; + class Expression; + typedef Expression* Expression_Ptr; + typedef Expression const* Expression_Ptr_Const; + class Statement; + typedef Statement* Statement_Ptr; + typedef Statement const* Statement_Ptr_Const; + class Value; + typedef Value* Value_Ptr; + typedef Value const* Value_Ptr_Const; + class Declaration; + typedef Declaration* Declaration_Ptr; + typedef Declaration const* Declaration_Ptr_Const; + class Ruleset; + typedef Ruleset* Ruleset_Ptr; + typedef Ruleset const* Ruleset_Ptr_Const; + class Bubble; + typedef Bubble* Bubble_Ptr; + typedef Bubble const* Bubble_Ptr_Const; + class Trace; + typedef Trace* Trace_Ptr; + typedef Trace const* Trace_Ptr_Const; + + class Media_Block; + typedef Media_Block* Media_Block_Ptr; + typedef Media_Block const* Media_Block_Ptr_Const; + class Supports_Block; + typedef Supports_Block* Supports_Block_Ptr; + typedef Supports_Block const* Supports_Block_Ptr_Const; + class Directive; + typedef Directive* Directive_Ptr; + typedef Directive const* Directive_Ptr_Const; + + + class Keyframe_Rule; + typedef Keyframe_Rule* Keyframe_Rule_Ptr; + typedef Keyframe_Rule const* Keyframe_Rule_Ptr_Const; + class At_Root_Block; + typedef At_Root_Block* At_Root_Block_Ptr; + typedef At_Root_Block const* At_Root_Block_Ptr_Const; + class Assignment; + typedef Assignment* Assignment_Ptr; + typedef Assignment const* Assignment_Ptr_Const; + + class Import; + typedef Import* Import_Ptr; + typedef Import const* Import_Ptr_Const; + class Import_Stub; 
+ typedef Import_Stub* Import_Stub_Ptr; + typedef Import_Stub const* Import_Stub_Ptr_Const; + class Warning; + typedef Warning* Warning_Ptr; + typedef Warning const* Warning_Ptr_Const; + + class Error; + typedef Error* Error_Ptr; + typedef Error const* Error_Ptr_Const; + class Debug; + typedef Debug* Debug_Ptr; + typedef Debug const* Debug_Ptr_Const; + class Comment; + typedef Comment* Comment_Ptr; + typedef Comment const* Comment_Ptr_Const; + + class If; + typedef If* If_Ptr; + typedef If const* If_Ptr_Const; + class For; + typedef For* For_Ptr; + typedef For const* For_Ptr_Const; + class Each; + typedef Each* Each_Ptr; + typedef Each const* Each_Ptr_Const; + class While; + typedef While* While_Ptr; + typedef While const* While_Ptr_Const; + class Return; + typedef Return* Return_Ptr; + typedef Return const* Return_Ptr_Const; + class Content; + typedef Content* Content_Ptr; + typedef Content const* Content_Ptr_Const; + class Extension; + typedef Extension* Extension_Ptr; + typedef Extension const* Extension_Ptr_Const; + class Definition; + typedef Definition* Definition_Ptr; + typedef Definition const* Definition_Ptr_Const; + + class List; + typedef List* List_Ptr; + typedef List const* List_Ptr_Const; + class Map; + typedef Map* Map_Ptr; + typedef Map const* Map_Ptr_Const; + class Function; + typedef Function* Function_Ptr; + typedef Function const* Function_Ptr_Const; + + class Mixin_Call; + typedef Mixin_Call* Mixin_Call_Ptr; + typedef Mixin_Call const* Mixin_Call_Ptr_Const; + class Binary_Expression; + typedef Binary_Expression* Binary_Expression_Ptr; + typedef Binary_Expression const* Binary_Expression_Ptr_Const; + class Unary_Expression; + typedef Unary_Expression* Unary_Expression_Ptr; + typedef Unary_Expression const* Unary_Expression_Ptr_Const; + class Function_Call; + typedef Function_Call* Function_Call_Ptr; + typedef Function_Call const* Function_Call_Ptr_Const; + class Function_Call_Schema; + typedef Function_Call_Schema* Function_Call_Schema_Ptr; + typedef Function_Call_Schema const* Function_Call_Schema_Ptr_Const; + class Custom_Warning; + typedef Custom_Warning* Custom_Warning_Ptr; + typedef Custom_Warning const* Custom_Warning_Ptr_Const; + class Custom_Error; + typedef Custom_Error* Custom_Error_Ptr; + typedef Custom_Error const* Custom_Error_Ptr_Const; + + class Variable; + typedef Variable* Variable_Ptr; + typedef Variable const* Variable_Ptr_Const; + class Number; + typedef Number* Number_Ptr; + typedef Number const* Number_Ptr_Const; + class Color; + typedef Color* Color_Ptr; + typedef Color const* Color_Ptr_Const; + class Boolean; + typedef Boolean* Boolean_Ptr; + typedef Boolean const* Boolean_Ptr_Const; + class String; + typedef String* String_Ptr; + typedef String const* String_Ptr_Const; + + class String_Schema; + typedef String_Schema* String_Schema_Ptr; + typedef String_Schema const* String_Schema_Ptr_Const; + class String_Constant; + typedef String_Constant* String_Constant_Ptr; + typedef String_Constant const* String_Constant_Ptr_Const; + class String_Quoted; + typedef String_Quoted* String_Quoted_Ptr; + typedef String_Quoted const* String_Quoted_Ptr_Const; + + class Media_Query; + typedef Media_Query* Media_Query_Ptr; + typedef Media_Query const* Media_Query_Ptr_Const; + class Media_Query_Expression; + typedef Media_Query_Expression* Media_Query_Expression_Ptr; + typedef Media_Query_Expression const* Media_Query_Expression_Ptr_Const; + class Supports_Condition; + typedef Supports_Condition* Supports_Condition_Ptr; + typedef Supports_Condition const* 
Supports_Condition_Ptr_Const; + class Supports_Operator; + typedef Supports_Operator* Supports_Operator_Ptr; + typedef Supports_Operator const* Supports_Operator_Ptr_Const; + class Supports_Negation; + typedef Supports_Negation* Supports_Negation_Ptr; + typedef Supports_Negation const* Supports_Negation_Ptr_Const; + class Supports_Declaration; + typedef Supports_Declaration* Supports_Declaration_Ptr; + typedef Supports_Declaration const* Supports_Declaration_Ptr_Const; + class Supports_Interpolation; + typedef Supports_Interpolation* Supports_Interpolation_Ptr; + typedef Supports_Interpolation const* Supports_Interpolation_Ptr_Const; + + + class Null; + typedef Null* Null_Ptr; + typedef Null const* Null_Ptr_Const; + + class At_Root_Query; + typedef At_Root_Query* At_Root_Query_Ptr; + typedef At_Root_Query const* At_Root_Query_Ptr_Const; + class Parent_Selector; + typedef Parent_Selector* Parent_Selector_Ptr; + typedef Parent_Selector const* Parent_Selector_Ptr_Const; + class Parameter; + typedef Parameter* Parameter_Ptr; + typedef Parameter const* Parameter_Ptr_Const; + class Parameters; + typedef Parameters* Parameters_Ptr; + typedef Parameters const* Parameters_Ptr_Const; + class Argument; + typedef Argument* Argument_Ptr; + typedef Argument const* Argument_Ptr_Const; + class Arguments; + typedef Arguments* Arguments_Ptr; + typedef Arguments const* Arguments_Ptr_Const; + class Selector; + typedef Selector* Selector_Ptr; + typedef Selector const* Selector_Ptr_Const; + + + class Selector_Schema; + typedef Selector_Schema* Selector_Schema_Ptr; + typedef Selector_Schema const* Selector_Schema_Ptr_Const; + class Placeholder_Selector; + typedef Placeholder_Selector* Placeholder_Selector_Ptr; + typedef Placeholder_Selector const* Placeholder_Selector_Ptr_Const; + class Element_Selector; + typedef Element_Selector* Element_Selector_Ptr; + typedef Element_Selector const* Element_Selector_Ptr_Const; + class Class_Selector; + typedef Class_Selector* Class_Selector_Ptr; + typedef Class_Selector const* Class_Selector_Ptr_Const; + class Id_Selector; + typedef Id_Selector* Id_Selector_Ptr; + typedef Id_Selector const* Id_Selector_Ptr_Const; + class Attribute_Selector; + typedef Attribute_Selector* Attribute_Selector_Ptr; + typedef Attribute_Selector const* Attribute_Selector_Ptr_Const; + + class Pseudo_Selector; + typedef Pseudo_Selector* Pseudo_Selector_Ptr; + typedef Pseudo_Selector const * Pseudo_Selector_Ptr_Const; + class Wrapped_Selector; + typedef Wrapped_Selector* Wrapped_Selector_Ptr; + typedef Wrapped_Selector const * Wrapped_Selector_Ptr_Const; + class Compound_Selector; + typedef Compound_Selector* Compound_Selector_Ptr; + typedef Compound_Selector const * Compound_Selector_Ptr_Const; + class Complex_Selector; + typedef Complex_Selector* Complex_Selector_Ptr; + typedef Complex_Selector const * Complex_Selector_Ptr_Const; + class Selector_List; + typedef Selector_List* Selector_List_Ptr; + typedef Selector_List const * Selector_List_Ptr_Const; + + + // common classes + class Context; + class Expand; + class Eval; + + // declare classes that are instances of memory nodes + // #define IMPL_MEM_OBJ(type) using type##_Obj = SharedImpl + #define IMPL_MEM_OBJ(type) typedef SharedImpl type##_Obj + + IMPL_MEM_OBJ(AST_Node); + IMPL_MEM_OBJ(Statement); + IMPL_MEM_OBJ(Block); + IMPL_MEM_OBJ(Ruleset); + IMPL_MEM_OBJ(Bubble); + IMPL_MEM_OBJ(Trace); + IMPL_MEM_OBJ(Media_Block); + IMPL_MEM_OBJ(Supports_Block); + IMPL_MEM_OBJ(Directive); + IMPL_MEM_OBJ(Keyframe_Rule); + IMPL_MEM_OBJ(At_Root_Block); + 
IMPL_MEM_OBJ(Declaration); + IMPL_MEM_OBJ(Assignment); + IMPL_MEM_OBJ(Import); + IMPL_MEM_OBJ(Import_Stub); + IMPL_MEM_OBJ(Warning); + IMPL_MEM_OBJ(Error); + IMPL_MEM_OBJ(Debug); + IMPL_MEM_OBJ(Comment); + IMPL_MEM_OBJ(PreValue); + IMPL_MEM_OBJ(Has_Block); + IMPL_MEM_OBJ(Thunk); + IMPL_MEM_OBJ(If); + IMPL_MEM_OBJ(For); + IMPL_MEM_OBJ(Each); + IMPL_MEM_OBJ(While); + IMPL_MEM_OBJ(Return); + IMPL_MEM_OBJ(Content); + IMPL_MEM_OBJ(Extension); + IMPL_MEM_OBJ(Definition); + IMPL_MEM_OBJ(Mixin_Call); + IMPL_MEM_OBJ(Value); + IMPL_MEM_OBJ(Expression); + IMPL_MEM_OBJ(List); + IMPL_MEM_OBJ(Map); + IMPL_MEM_OBJ(Function); + IMPL_MEM_OBJ(Binary_Expression); + IMPL_MEM_OBJ(Unary_Expression); + IMPL_MEM_OBJ(Function_Call); + IMPL_MEM_OBJ(Function_Call_Schema); + IMPL_MEM_OBJ(Custom_Warning); + IMPL_MEM_OBJ(Custom_Error); + IMPL_MEM_OBJ(Variable); + IMPL_MEM_OBJ(Number); + IMPL_MEM_OBJ(Color); + IMPL_MEM_OBJ(Boolean); + IMPL_MEM_OBJ(String_Schema); + IMPL_MEM_OBJ(String); + IMPL_MEM_OBJ(String_Constant); + IMPL_MEM_OBJ(String_Quoted); + IMPL_MEM_OBJ(Media_Query); + IMPL_MEM_OBJ(Media_Query_Expression); + IMPL_MEM_OBJ(Supports_Condition); + IMPL_MEM_OBJ(Supports_Operator); + IMPL_MEM_OBJ(Supports_Negation); + IMPL_MEM_OBJ(Supports_Declaration); + IMPL_MEM_OBJ(Supports_Interpolation); + IMPL_MEM_OBJ(At_Root_Query); + IMPL_MEM_OBJ(Null); + IMPL_MEM_OBJ(Parent_Selector); + IMPL_MEM_OBJ(Parameter); + IMPL_MEM_OBJ(Parameters); + IMPL_MEM_OBJ(Argument); + IMPL_MEM_OBJ(Arguments); + IMPL_MEM_OBJ(Selector); + IMPL_MEM_OBJ(Selector_Schema); + IMPL_MEM_OBJ(Simple_Selector); + IMPL_MEM_OBJ(Placeholder_Selector); + IMPL_MEM_OBJ(Element_Selector); + IMPL_MEM_OBJ(Class_Selector); + IMPL_MEM_OBJ(Id_Selector); + IMPL_MEM_OBJ(Attribute_Selector); + IMPL_MEM_OBJ(Pseudo_Selector); + IMPL_MEM_OBJ(Wrapped_Selector); + IMPL_MEM_OBJ(Compound_Selector); + IMPL_MEM_OBJ(Complex_Selector); + IMPL_MEM_OBJ(Selector_List); + + // ########################################################################### + // Implement compare, order and hashing operations for AST Nodes + // ########################################################################### + + struct HashNodes { + template + size_t operator() (const T& ex) const { + return ex.isNull() ? 0 : ex->hash(); + } + }; + struct OrderNodes { + template + bool operator() (const T& lhs, const T& rhs) const { + return !lhs.isNull() && !rhs.isNull() && *lhs < *rhs; + } + }; + struct CompareNodes { + template + bool operator() (const T& lhs, const T& rhs) const { + // code around sass logic issue. 
1px == 1 is true + // but both items are still different keys in maps + if (dynamic_cast(lhs.ptr())) + if (dynamic_cast(rhs.ptr())) + return lhs->hash() == rhs->hash(); + return !lhs.isNull() && !rhs.isNull() && *lhs == *rhs; + } + }; + + // ########################################################################### + // some often used typedefs + // ########################################################################### + + typedef std::unordered_map< + Expression_Obj, // key + Expression_Obj, // value + HashNodes, // hasher + CompareNodes // compare + > ExpressionMap; + typedef std::unordered_set< + Expression_Obj, // value + HashNodes, // hasher + CompareNodes // compare + > ExpressionSet; + + typedef std::string SubSetMapKey; + typedef std::vector SubSetMapKeys; + + typedef std::pair SubSetMapPair; + typedef std::pair SubSetMapLookup; + typedef std::vector SubSetMapPairs; + typedef std::vector SubSetMapLookups; + + typedef std::pair SubSetMapResult; + typedef std::vector SubSetMapResults; + + typedef std::deque ComplexSelectorDeque; + typedef std::set SimpleSelectorSet; + typedef std::set ComplexSelectorSet; + typedef std::set CompoundSelectorSet; + typedef std::unordered_set SimpleSelectorDict; + + typedef std::vector* ImporterStack; + + // only to switch implementations for testing + #define environment_map std::map + + // ########################################################################### + // explicit type conversion functions + // ########################################################################### + + template + T* Cast(AST_Node* ptr); + + template + const T* Cast(const AST_Node* ptr); + + // sometimes you know the class you want to cast to is final + // in this case a simple typeid check is faster and safe to use + + #define DECLARE_BASE_CAST(T) \ + template<> T* Cast(AST_Node* ptr); \ + template<> const T* Cast(const AST_Node* ptr); \ + + // ########################################################################### + // implement specialization for final classes + // ########################################################################### + + DECLARE_BASE_CAST(AST_Node) + DECLARE_BASE_CAST(Expression) + DECLARE_BASE_CAST(Statement) + DECLARE_BASE_CAST(Has_Block) + DECLARE_BASE_CAST(PreValue) + DECLARE_BASE_CAST(Value) + DECLARE_BASE_CAST(List) + DECLARE_BASE_CAST(String) + DECLARE_BASE_CAST(String_Constant) + DECLARE_BASE_CAST(Supports_Condition) + DECLARE_BASE_CAST(Selector) + DECLARE_BASE_CAST(Simple_Selector) + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/b64/cencode.h b/mybulma/node_modules/node-sass/src/libsass/src/b64/cencode.h new file mode 100644 index 0000000..1d71e83 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/b64/cencode.h @@ -0,0 +1,32 @@ +/* +cencode.h - c header for a base64 encoding algorithm + +This is part of the libb64 project, and has been placed in the public domain. 
+For details, see http://sourceforge.net/projects/libb64 +*/ + +#ifndef BASE64_CENCODE_H +#define BASE64_CENCODE_H + +typedef enum +{ + step_A, step_B, step_C +} base64_encodestep; + +typedef struct +{ + base64_encodestep step; + char result; + int stepcount; +} base64_encodestate; + +void base64_init_encodestate(base64_encodestate* state_in); + +char base64_encode_value(char value_in); + +int base64_encode_block(const char* plaintext_in, int length_in, char* code_out, base64_encodestate* state_in); + +int base64_encode_blockend(char* code_out, base64_encodestate* state_in); + +#endif /* BASE64_CENCODE_H */ + diff --git a/mybulma/node_modules/node-sass/src/libsass/src/b64/encode.h b/mybulma/node_modules/node-sass/src/libsass/src/b64/encode.h new file mode 100644 index 0000000..92df8ec --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/b64/encode.h @@ -0,0 +1,79 @@ +// :mode=c++: +/* +encode.h - c++ wrapper for a base64 encoding algorithm + +This is part of the libb64 project, and has been placed in the public domain. +For details, see http://sourceforge.net/projects/libb64 +*/ +#ifndef BASE64_ENCODE_H +#define BASE64_ENCODE_H + +#include + +namespace base64 +{ + extern "C" + { + #include "cencode.h" + } + + struct encoder + { + base64_encodestate _state; + int _buffersize; + + encoder(int buffersize_in = BUFFERSIZE) + : _buffersize(buffersize_in) + { + base64_init_encodestate(&_state); + } + + int encode(char value_in) + { + return base64_encode_value(value_in); + } + + int encode(const char* code_in, const int length_in, char* plaintext_out) + { + return base64_encode_block(code_in, length_in, plaintext_out, &_state); + } + + int encode_end(char* plaintext_out) + { + return base64_encode_blockend(plaintext_out, &_state); + } + + void encode(std::istream& istream_in, std::ostream& ostream_in) + { + base64_init_encodestate(&_state); + // + const int N = _buffersize; + char* plaintext = new char[N]; + char* code = new char[2*N]; + int plainlength; + int codelength; + + do + { + istream_in.read(plaintext, N); + plainlength = static_cast(istream_in.gcount()); + // + codelength = encode(plaintext, plainlength, code); + ostream_in.write(code, codelength); + } + while (istream_in.good() && plainlength > 0); + + codelength = encode_end(code); + ostream_in.write(code, codelength); + // + base64_init_encodestate(&_state); + + delete [] code; + delete [] plaintext; + } + }; + +} // namespace base64 + +#endif // BASE64_ENCODE_H + diff --git a/mybulma/node_modules/node-sass/src/libsass/src/backtrace.cpp b/mybulma/node_modules/node-sass/src/libsass/src/backtrace.cpp new file mode 100644 index 0000000..8da963a --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/backtrace.cpp @@ -0,0 +1,46 @@ +#include "backtrace.hpp" + +namespace Sass { + + const std::string traces_to_string(Backtraces traces, std::string indent) { + + std::stringstream ss; + std::string cwd(File::get_cwd()); + + bool first = true; + size_t i_beg = traces.size() - 1; + size_t i_end = std::string::npos; + for (size_t i = i_beg; i != i_end; i --) { + + const Backtrace& trace = traces[i]; + + // make path relative to the current directory + std::string rel_path(File::abs2rel(trace.pstate.path, cwd, cwd)); + + // skip functions on error cases (unsure why ruby sass does this) + // if (trace.caller.substr(0, 6) == ", in f") continue; + + if (first) { + ss << indent; + ss << "on line "; + ss << trace.pstate.line + 1; + ss << " of " << rel_path; + // ss << trace.caller; + first = false; + } else { + ss << 
trace.caller; + ss << std::endl; + ss << indent; + ss << "from line "; + ss << trace.pstate.line + 1; + ss << " of " << rel_path; + } + + } + + ss << std::endl; + return ss.str(); + + } + +}; diff --git a/mybulma/node_modules/node-sass/src/libsass/src/backtrace.hpp b/mybulma/node_modules/node-sass/src/libsass/src/backtrace.hpp new file mode 100644 index 0000000..72d5fe5 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/backtrace.hpp @@ -0,0 +1,29 @@ +#ifndef SASS_BACKTRACE_H +#define SASS_BACKTRACE_H + +#include +#include +#include "file.hpp" +#include "position.hpp" + +namespace Sass { + + struct Backtrace { + + ParserState pstate; + std::string caller; + + Backtrace(ParserState pstate, std::string c = "") + : pstate(pstate), + caller(c) + { } + + }; + + typedef std::vector Backtraces; + + const std::string traces_to_string(Backtraces traces, std::string indent = "\t"); + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/base64vlq.cpp b/mybulma/node_modules/node-sass/src/libsass/src/base64vlq.cpp new file mode 100644 index 0000000..be2fb49 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/base64vlq.cpp @@ -0,0 +1,44 @@ +#include "sass.hpp" +#include "base64vlq.hpp" + +namespace Sass { + + std::string Base64VLQ::encode(const int number) const + { + std::string encoded = ""; + + int vlq = to_vlq_signed(number); + + do { + int digit = vlq & VLQ_BASE_MASK; + vlq >>= VLQ_BASE_SHIFT; + if (vlq > 0) { + digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64_encode(digit); + } while (vlq > 0); + + return encoded; + } + + char Base64VLQ::base64_encode(const int number) const + { + int index = number; + if (index < 0) index = 0; + if (index > 63) index = 63; + return CHARACTERS[index]; + } + + int Base64VLQ::to_vlq_signed(const int number) const + { + return (number < 0) ? 
((-number) << 1) + 1 : (number << 1) + 0; + } + + const char* Base64VLQ::CHARACTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; + + const int Base64VLQ::VLQ_BASE_SHIFT = 5; + const int Base64VLQ::VLQ_BASE = 1 << VLQ_BASE_SHIFT; + const int Base64VLQ::VLQ_BASE_MASK = VLQ_BASE - 1; + const int Base64VLQ::VLQ_CONTINUATION_BIT = VLQ_BASE; + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/base64vlq.hpp b/mybulma/node_modules/node-sass/src/libsass/src/base64vlq.hpp new file mode 100644 index 0000000..aca315a --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/base64vlq.hpp @@ -0,0 +1,30 @@ +#ifndef SASS_BASE64VLQ_H +#define SASS_BASE64VLQ_H + +#include + +namespace Sass { + + class Base64VLQ { + + public: + + std::string encode(const int number) const; + + private: + + char base64_encode(const int number) const; + + int to_vlq_signed(const int number) const; + + static const char* CHARACTERS; + + static const int VLQ_BASE_SHIFT; + static const int VLQ_BASE; + static const int VLQ_BASE_MASK; + static const int VLQ_CONTINUATION_BIT; + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/bind.cpp b/mybulma/node_modules/node-sass/src/libsass/src/bind.cpp new file mode 100644 index 0000000..ec20ac8 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/bind.cpp @@ -0,0 +1,311 @@ +#include "sass.hpp" +#include "bind.hpp" +#include "ast.hpp" +#include "context.hpp" +#include "expand.hpp" +#include "eval.hpp" +#include +#include +#include + +namespace Sass { + + void bind(std::string type, std::string name, Parameters_Obj ps, Arguments_Obj as, Context* ctx, Env* env, Eval* eval) + { + std::string callee(type + " " + name); + + std::map param_map; + List_Obj varargs = SASS_MEMORY_NEW(List, as->pstate()); + varargs->is_arglist(true); // enable keyword size handling + + for (size_t i = 0, L = as->length(); i < L; ++i) { + if (auto str = Cast((*as)[i]->value())) { + // force optional quotes (only if needed) + if (str->quote_mark()) { + str->quote_mark('*'); + } + } + } + + // Set up a map to ensure named arguments refer to actual parameters. Also + // eval each default value left-to-right, wrt env, populating env as we go. 
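Before the binding code continues below, a worked example of the Base64VLQ encoder defined in base64vlq.cpp just above (a standalone sketch, not part of the diff): the sign is folded into the lowest bit, the value is emitted in 5-bit digits starting with the least significant, and bit 0x20 marks that another digit follows.

#include <cassert>
#include <string>

static const char* CHARS =
  "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";

std::string vlq_encode(int number) {
  int vlq = number < 0 ? ((-number) << 1) + 1 : (number << 1);  // sign into bit 0
  std::string out;
  do {
    int digit = vlq & 0x1F;      // take the low 5 bits
    vlq >>= 5;
    if (vlq > 0) digit |= 0x20;  // continuation bit: more digits follow
    out += CHARS[digit];
  } while (vlq > 0);
  return out;
}

int main() {
  assert(vlq_encode(0)  == "A");   //  0 -> signed 0  -> single digit 'A'
  assert(vlq_encode(-1) == "D");   // -1 -> signed 3  -> single digit 'D'
  assert(vlq_encode(16) == "gB");  // 16 -> signed 32 -> digit 0 with continuation ('g'), then 1 ('B')
  return 0;
}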
+ for (size_t i = 0, L = ps->length(); i < L; ++i) { + Parameter_Obj p = ps->at(i); + param_map[p->name()] = p; + // if (p->default_value()) { + // env->local_frame()[p->name()] = p->default_value()->perform(eval->with(env)); + // } + } + + // plug in all args; if we have leftover params, deal with it later + size_t ip = 0, LP = ps->length(); + size_t ia = 0, LA = as->length(); + while (ia < LA) { + Argument_Obj a = as->at(ia); + if (ip >= LP) { + // skip empty rest arguments + if (a->is_rest_argument()) { + if (List_Obj l = Cast(a->value())) { + if (l->length() == 0) { + ++ ia; continue; + } + } + } + std::stringstream msg; + msg << "wrong number of arguments (" << LA << " for " << LP << ")"; + msg << " for `" << name << "'"; + return error(msg.str(), as->pstate(), eval->exp.traces); + } + Parameter_Obj p = ps->at(ip); + + // If the current parameter is the rest parameter, process and break the loop + if (p->is_rest_parameter()) { + // The next argument by coincidence provides a rest argument + if (a->is_rest_argument()) { + + // We should always get a list for rest arguments + if (List_Obj rest = Cast(a->value())) { + // create a new list object for wrapped items + List_Ptr arglist = SASS_MEMORY_NEW(List, + p->pstate(), + 0, + rest->separator(), + true); + // wrap each item from list as an argument + for (Expression_Obj item : rest->elements()) { + if (Argument_Obj arg = Cast(item)) { + arglist->append(SASS_MEMORY_COPY(arg)); // copy + } else { + arglist->append(SASS_MEMORY_NEW(Argument, + item->pstate(), + item, + "", + false, + false)); + } + } + // assign new arglist to environment + env->local_frame()[p->name()] = arglist; + } + // invalid state + else { + throw std::runtime_error("invalid state"); + } + } else if (a->is_keyword_argument()) { + + // expand keyword arguments into their parameters + List_Ptr arglist = SASS_MEMORY_NEW(List, p->pstate(), 0, SASS_COMMA, true); + env->local_frame()[p->name()] = arglist; + Map_Obj argmap = Cast(a->value()); + for (auto key : argmap->keys()) { + if (String_Constant_Obj str = Cast(key)) { + std::string param = unquote(str->value()); + arglist->append(SASS_MEMORY_NEW(Argument, + key->pstate(), + argmap->at(key), + "$" + param, + false, + false)); + } else { + eval->exp.traces.push_back(Backtrace(key->pstate())); + throw Exception::InvalidVarKwdType(key->pstate(), eval->exp.traces, key->inspect(), a); + } + } + + } else { + + // create a new list object for wrapped items + List_Obj arglist = SASS_MEMORY_NEW(List, + p->pstate(), + 0, + SASS_COMMA, + true); + // consume the next args + while (ia < LA) { + // get and post inc + a = (*as)[ia++]; + // maybe we have another list as argument + List_Obj ls = Cast(a->value()); + // skip any list completely if empty + if (ls && ls->empty() && a->is_rest_argument()) continue; + + Expression_Obj value = a->value(); + if (Argument_Obj arg = Cast(value)) { + arglist->append(arg); + } + // check if we have rest argument + else if (a->is_rest_argument()) { + // preserve the list separator from rest args + if (List_Obj rest = Cast(a->value())) { + arglist->separator(rest->separator()); + + for (size_t i = 0, L = rest->length(); i < L; ++i) { + Expression_Obj obj = rest->value_at_index(i); + arglist->append(SASS_MEMORY_NEW(Argument, + obj->pstate(), + obj, + "", + false, + false)); + } + } + // no more arguments + break; + } + // wrap all other value types into Argument + else { + arglist->append(SASS_MEMORY_NEW(Argument, + a->pstate(), + a->value(), + a->name(), + false, + false)); + } + } + // assign new 
arglist to environment + env->local_frame()[p->name()] = arglist; + } + // consumed parameter + ++ip; + // no more paramaters + break; + } + + // If the current argument is the rest argument, extract a value for processing + else if (a->is_rest_argument()) { + // normal param and rest arg + List_Obj arglist = Cast(a->value()); + if (!arglist) { + if (Expression_Obj arg = Cast(a->value())) { + arglist = SASS_MEMORY_NEW(List, a->pstate(), 1); + arglist->append(arg); + } + } + + // empty rest arg - treat all args as default values + if (!arglist || !arglist->length()) { + break; + } else { + if (arglist->length() > LP - ip && !ps->has_rest_parameter()) { + size_t arg_count = (arglist->length() + LA - 1); + std::stringstream msg; + msg << callee << " takes " << LP; + msg << (LP == 1 ? " argument" : " arguments"); + msg << " but " << arg_count; + msg << (arg_count == 1 ? " was passed" : " were passed."); + deprecated_bind(msg.str(), as->pstate()); + + while (arglist->length() > LP - ip) { + arglist->elements().erase(arglist->elements().end() - 1); + } + } + } + // otherwise move one of the rest args into the param, converting to argument if necessary + Expression_Obj obj = arglist->at(0); + if (!(a = Cast(obj))) { + Expression_Ptr a_to_convert = obj; + a = SASS_MEMORY_NEW(Argument, + a_to_convert->pstate(), + a_to_convert, + "", + false, + false); + } + arglist->elements().erase(arglist->elements().begin()); + if (!arglist->length() || (!arglist->is_arglist() && ip + 1 == LP)) { + ++ia; + } + + } else if (a->is_keyword_argument()) { + Map_Obj argmap = Cast(a->value()); + + for (auto key : argmap->keys()) { + String_Constant_Ptr val = Cast(key); + if (val == NULL) { + eval->exp.traces.push_back(Backtrace(key->pstate())); + throw Exception::InvalidVarKwdType(key->pstate(), eval->exp.traces, key->inspect(), a); + } + std::string param = "$" + unquote(val->value()); + + if (!param_map.count(param)) { + std::stringstream msg; + msg << callee << " has no parameter named " << param; + error(msg.str(), a->pstate(), eval->exp.traces); + } + env->local_frame()[param] = argmap->at(key); + } + ++ia; + continue; + } else { + ++ia; + } + + if (a->name().empty()) { + if (env->has_local(p->name())) { + std::stringstream msg; + msg << "parameter " << p->name() + << " provided more than once in call to " << callee; + error(msg.str(), a->pstate(), eval->exp.traces); + } + // ordinal arg -- bind it to the next param + env->local_frame()[p->name()] = a->value(); + ++ip; + } + else { + // named arg -- bind it to the appropriately named param + if (!param_map.count(a->name())) { + if (ps->has_rest_parameter()) { + varargs->append(a); + } else { + std::stringstream msg; + msg << callee << " has no parameter named " << a->name(); + error(msg.str(), a->pstate(), eval->exp.traces); + } + } + if (param_map[a->name()]) { + if (param_map[a->name()]->is_rest_parameter()) { + std::stringstream msg; + msg << "argument " << a->name() << " of " << callee + << "cannot be used as named argument"; + error(msg.str(), a->pstate(), eval->exp.traces); + } + } + if (env->has_local(a->name())) { + std::stringstream msg; + msg << "parameter " << p->name() + << "provided more than once in call to " << callee; + error(msg.str(), a->pstate(), eval->exp.traces); + } + env->local_frame()[a->name()] = a->value(); + } + } + // EO while ia + + // If we make it here, we're out of args but may have leftover params. + // That's only okay if they have default values, or were already bound by + // named arguments, or if it's a single rest-param. 
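The loop that follows implements this leftover-parameter policy. As a tiny standalone illustration (hypothetical names, not part of the diff): a parameter that was not bound by any argument is resolved from the rest/varargs list, from its default value, or reported as a missing argument.

#include <iostream>
#include <map>
#include <stdexcept>
#include <string>
#include <vector>

struct Param {
  std::string name;
  std::string default_value;  // empty string stands for "no default"
  bool is_rest;
};

std::string resolve(const Param& p, const std::map<std::string, std::string>& frame) {
  auto it = frame.find(p.name);
  if (it != frame.end()) return it->second;               // already bound by an argument
  if (p.is_rest) return "()";                             // rest param gets the (empty) varargs
  if (!p.default_value.empty()) return p.default_value;   // fall back to the default expression
  throw std::runtime_error("missing argument " + p.name); // cf. Exception::MissingArgument
}

int main() {
  std::map<std::string, std::string> frame{{"$color", "red"}};  // bound by the caller
  std::vector<Param> params{
    {"$color", "", false}, {"$width", "1px", false}, {"$rest", "", true}};
  for (const auto& p : params)
    std::cout << p.name << " -> " << resolve(p, frame) << "\n";
  return 0;
}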
+ for (size_t i = ip; i < LP; ++i) { + Parameter_Obj leftover = ps->at(i); + // cerr << "env for default params:" << endl; + // env->print(); + // cerr << "********" << endl; + if (!env->has_local(leftover->name())) { + if (leftover->is_rest_parameter()) { + env->local_frame()[leftover->name()] = varargs; + } + else if (leftover->default_value()) { + Expression_Ptr dv = leftover->default_value()->perform(eval); + env->local_frame()[leftover->name()] = dv; + } + else { + // param is unbound and has no default value -- error + throw Exception::MissingArgument(as->pstate(), eval->exp.traces, name, leftover->name(), type); + } + } + } + + return; + } + + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/bind.hpp b/mybulma/node_modules/node-sass/src/libsass/src/bind.hpp new file mode 100644 index 0000000..93a503a --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/bind.hpp @@ -0,0 +1,13 @@ +#ifndef SASS_BIND_H +#define SASS_BIND_H + +#include +#include "environment.hpp" +#include "ast_fwd_decl.hpp" + +namespace Sass { + + void bind(std::string type, std::string name, Parameters_Obj, Arguments_Obj, Context*, Env*, Eval*); +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/c99func.c b/mybulma/node_modules/node-sass/src/libsass/src/c99func.c new file mode 100644 index 0000000..f846eee --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/c99func.c @@ -0,0 +1,54 @@ +/* + Copyright (C) 2011 Joseph A. Adams (joeyadams3.14159@gmail.com) + All rights reserved. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +*/ + +#if defined(_MSC_VER) && _MSC_VER < 1900 + +#include +#include +#include + +static int c99_vsnprintf(char* str, size_t size, const char* format, va_list ap) +{ + int count = -1; + + if (size != 0) + count = _vsnprintf_s(str, size, _TRUNCATE, format, ap); + if (count == -1) + count = _vscprintf(format, ap); + + return count; +} + +int snprintf(char* str, size_t size, const char* format, ...) 
+{ + int count; + va_list ap; + + va_start(ap, format); + count = c99_vsnprintf(str, size, format, ap); + va_end(ap); + + return count; +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/cencode.c b/mybulma/node_modules/node-sass/src/libsass/src/cencode.c new file mode 100644 index 0000000..9109f4b --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/cencode.c @@ -0,0 +1,108 @@ +/* +cencoder.c - c source to a base64 encoding algorithm implementation + +This is part of the libb64 project, and has been placed in the public domain. +For details, see http://sourceforge.net/projects/libb64 +*/ + +#include "b64/cencode.h" + +void base64_init_encodestate(base64_encodestate* state_in) +{ + state_in->step = step_A; + state_in->result = 0; + state_in->stepcount = 0; +} + +char base64_encode_value(char value_in) +{ + static const char* encoding = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; + if (value_in > 63) return '='; + return encoding[(int)value_in]; +} + +int base64_encode_block(const char* plaintext_in, int length_in, char* code_out, base64_encodestate* state_in) +{ + const char* plainchar = plaintext_in; + const char* const plaintextend = plaintext_in + length_in; + char* codechar = code_out; + char result; + char fragment; + + result = state_in->result; + + switch (state_in->step) + { + while (1) + { + case step_A: + if (plainchar == plaintextend) + { + state_in->result = result; + state_in->step = step_A; + return (int)(codechar - code_out); + } + fragment = *plainchar++; + result = (fragment & 0x0fc) >> 2; + *codechar++ = base64_encode_value(result); + result = (fragment & 0x003) << 4; + #ifndef _MSC_VER + /* fall through */ + #endif + case step_B: + if (plainchar == plaintextend) + { + state_in->result = result; + state_in->step = step_B; + return (int)(codechar - code_out); + } + fragment = *plainchar++; + result |= (fragment & 0x0f0) >> 4; + *codechar++ = base64_encode_value(result); + result = (fragment & 0x00f) << 2; + #ifndef _MSC_VER + /* fall through */ + #endif + case step_C: + if (plainchar == plaintextend) + { + state_in->result = result; + state_in->step = step_C; + return (int)(codechar - code_out); + } + fragment = *plainchar++; + result |= (fragment & 0x0c0) >> 6; + *codechar++ = base64_encode_value(result); + result = (fragment & 0x03f) >> 0; + *codechar++ = base64_encode_value(result); + + ++(state_in->stepcount); + } + } + /* control should not reach here */ + return (int)(codechar - code_out); +} + +int base64_encode_blockend(char* code_out, base64_encodestate* state_in) +{ + char* codechar = code_out; + + switch (state_in->step) + { + case step_B: + *codechar++ = base64_encode_value(state_in->result); + *codechar++ = '='; + *codechar++ = '='; + break; + case step_C: + *codechar++ = base64_encode_value(state_in->result); + *codechar++ = '='; + break; + case step_A: + break; + } + *codechar++ = '\n'; + + return (int)(codechar - code_out); +} + diff --git a/mybulma/node_modules/node-sass/src/libsass/src/check_nesting.cpp b/mybulma/node_modules/node-sass/src/libsass/src/check_nesting.cpp new file mode 100644 index 0000000..880bcca --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/check_nesting.cpp @@ -0,0 +1,398 @@ +#include "sass.hpp" +#include + +#include "check_nesting.hpp" + +namespace Sass { + + CheckNesting::CheckNesting() + : parents(std::vector()), + traces(std::vector()), + parent(0), current_mixin_definition(0) + { } + + void error(AST_Node_Ptr node, Backtraces traces, std::string msg) { 
+ traces.push_back(Backtrace(node->pstate())); + throw Exception::InvalidSass(node->pstate(), traces, msg); + } + + Statement_Ptr CheckNesting::visit_children(Statement_Ptr parent) + { + Statement_Ptr old_parent = this->parent; + + if (At_Root_Block_Ptr root = Cast(parent)) { + std::vector old_parents = this->parents; + std::vector new_parents; + + for (size_t i = 0, L = this->parents.size(); i < L; i++) { + Statement_Ptr p = this->parents.at(i); + if (!root->exclude_node(p)) { + new_parents.push_back(p); + } + } + this->parents = new_parents; + + for (size_t i = this->parents.size(); i > 0; i--) { + Statement_Ptr p = 0; + Statement_Ptr gp = 0; + if (i > 0) p = this->parents.at(i - 1); + if (i > 1) gp = this->parents.at(i - 2); + + if (!this->is_transparent_parent(p, gp)) { + this->parent = p; + break; + } + } + + At_Root_Block_Ptr ar = Cast(parent); + Block_Ptr ret = ar->block(); + + if (ret != NULL) { + for (auto n : ret->elements()) { + n->perform(this); + } + } + + this->parent = old_parent; + this->parents = old_parents; + + return ret; + } + + if (!this->is_transparent_parent(parent, old_parent)) { + this->parent = parent; + } + + this->parents.push_back(parent); + + Block_Ptr b = Cast(parent); + + if (Trace_Ptr trace = Cast(parent)) { + if (trace->type() == 'i') { + this->traces.push_back(Backtrace(trace->pstate())); + } + } + + if (!b) { + if (Has_Block_Ptr bb = Cast(parent)) { + b = bb->block(); + } + } + + if (b) { + for (auto n : b->elements()) { + n->perform(this); + } + } + + this->parent = old_parent; + this->parents.pop_back(); + + if (Trace_Ptr trace = Cast(parent)) { + if (trace->type() == 'i') { + this->traces.pop_back(); + } + } + + return b; + } + + + Statement_Ptr CheckNesting::operator()(Block_Ptr b) + { + return this->visit_children(b); + } + + Statement_Ptr CheckNesting::operator()(Definition_Ptr n) + { + if (!this->should_visit(n)) return NULL; + if (!is_mixin(n)) { + visit_children(n); + return n; + } + + Definition_Ptr old_mixin_definition = this->current_mixin_definition; + this->current_mixin_definition = n; + + visit_children(n); + + this->current_mixin_definition = old_mixin_definition; + + return n; + } + + Statement_Ptr CheckNesting::operator()(If_Ptr i) + { + this->visit_children(i); + + if (Block_Ptr b = Cast(i->alternative())) { + for (auto n : b->elements()) n->perform(this); + } + + return i; + } + + Statement_Ptr CheckNesting::fallback_impl(Statement_Ptr s) + { + Block_Ptr b1 = Cast(s); + Has_Block_Ptr b2 = Cast(s); + return b1 || b2 ? 
visit_children(s) : s; + } + + bool CheckNesting::should_visit(Statement_Ptr node) + { + if (!this->parent) return true; + + if (Cast(node)) + { this->invalid_content_parent(this->parent, node); } + + if (is_charset(node)) + { this->invalid_charset_parent(this->parent, node); } + + if (Cast(node)) + { this->invalid_extend_parent(this->parent, node); } + + // if (Cast(node)) + // { this->invalid_import_parent(this->parent); } + + if (this->is_mixin(node)) + { this->invalid_mixin_definition_parent(this->parent, node); } + + if (this->is_function(node)) + { this->invalid_function_parent(this->parent, node); } + + if (this->is_function(this->parent)) + { this->invalid_function_child(node); } + + if (Declaration_Ptr d = Cast(node)) + { + this->invalid_prop_parent(this->parent, node); + this->invalid_value_child(d->value()); + } + + if (Cast(this->parent)) + { this->invalid_prop_child(node); } + + if (Cast(node)) + { this->invalid_return_parent(this->parent, node); } + + return true; + } + + void CheckNesting::invalid_content_parent(Statement_Ptr parent, AST_Node_Ptr node) + { + if (!this->current_mixin_definition) { + error(node, traces, "@content may only be used within a mixin."); + } + } + + void CheckNesting::invalid_charset_parent(Statement_Ptr parent, AST_Node_Ptr node) + { + if (!( + is_root_node(parent) + )) { + error(node, traces, "@charset may only be used at the root of a document."); + } + } + + void CheckNesting::invalid_extend_parent(Statement_Ptr parent, AST_Node_Ptr node) + { + if (!( + Cast(parent) || + Cast(parent) || + is_mixin(parent) + )) { + error(node, traces, "Extend directives may only be used within rules."); + } + } + + // void CheckNesting::invalid_import_parent(Statement_Ptr parent, AST_Node_Ptr node) + // { + // for (auto pp : this->parents) { + // if ( + // Cast(pp) || + // Cast(pp) || + // Cast(pp) || + // Cast(pp) || + // Cast(pp) || + // Cast(pp) || + // is_mixin(pp) + // ) { + // error(node, traces, "Import directives may not be defined within control directives or other mixins."); + // } + // } + + // if (this->is_root_node(parent)) { + // return; + // } + + // if (false/*n.css_import?*/) { + // error(node, traces, "CSS import directives may only be used at the root of a document."); + // } + // } + + void CheckNesting::invalid_mixin_definition_parent(Statement_Ptr parent, AST_Node_Ptr node) + { + for (Statement_Ptr pp : this->parents) { + if ( + Cast(pp) || + Cast(pp) || + Cast(pp) || + Cast(pp) || + Cast(pp) || + Cast(pp) || + is_mixin(pp) + ) { + error(node, traces, "Mixins may not be defined within control directives or other mixins."); + } + } + } + + void CheckNesting::invalid_function_parent(Statement_Ptr parent, AST_Node_Ptr node) + { + for (Statement_Ptr pp : this->parents) { + if ( + Cast(pp) || + Cast(pp) || + Cast(pp) || + Cast(pp) || + Cast(pp) || + Cast(pp) || + is_mixin(pp) + ) { + error(node, traces, "Functions may not be defined within control directives or other mixins."); + } + } + } + + void CheckNesting::invalid_function_child(Statement_Ptr child) + { + if (!( + Cast(child) || + Cast(child) || + Cast(child) || + Cast(child) || + Cast(child) || + Cast(child) || + Cast(child) || + Cast(child) || + Cast(child) || + // Ruby Sass doesn't distinguish variables and assignments + Cast(child) || + Cast(child) || + Cast(child) + )) { + error(child, traces, "Functions can only contain variable declarations and control directives."); + } + } + + void CheckNesting::invalid_prop_child(Statement_Ptr child) + { + if (!( + Cast(child) || + Cast(child) || 
+ Cast(child) || + Cast(child) || + Cast(child) || + Cast(child) || + Cast(child) || + Cast(child) + )) { + error(child, traces, "Illegal nesting: Only properties may be nested beneath properties."); + } + } + + void CheckNesting::invalid_prop_parent(Statement_Ptr parent, AST_Node_Ptr node) + { + if (!( + is_mixin(parent) || + is_directive_node(parent) || + Cast(parent) || + Cast(parent) || + Cast(parent) || + Cast(parent) + )) { + error(node, traces, "Properties are only allowed within rules, directives, mixin includes, or other properties."); + } + } + + void CheckNesting::invalid_value_child(AST_Node_Ptr d) + { + if (Map_Ptr m = Cast(d)) { + traces.push_back(Backtrace(m->pstate())); + throw Exception::InvalidValue(traces, *m); + } + if (Number_Ptr n = Cast(d)) { + if (!n->is_valid_css_unit()) { + traces.push_back(Backtrace(n->pstate())); + throw Exception::InvalidValue(traces, *n); + } + } + + // error(dbg + " isn't a valid CSS value.", m->pstate(),); + + } + + void CheckNesting::invalid_return_parent(Statement_Ptr parent, AST_Node_Ptr node) + { + if (!this->is_function(parent)) { + error(node, traces, "@return may only be used within a function."); + } + } + + bool CheckNesting::is_transparent_parent(Statement_Ptr parent, Statement_Ptr grandparent) + { + bool parent_bubbles = parent && parent->bubbles(); + + bool valid_bubble_node = parent_bubbles && + !is_root_node(grandparent) && + !is_at_root_node(grandparent); + + return Cast(parent) || + Cast(parent) || + Cast(parent) || + Cast(parent) || + Cast(parent) || + Cast(parent) || + valid_bubble_node; + } + + bool CheckNesting::is_charset(Statement_Ptr n) + { + Directive_Ptr d = Cast(n); + return d && d->keyword() == "charset"; + } + + bool CheckNesting::is_mixin(Statement_Ptr n) + { + Definition_Ptr def = Cast(n); + return def && def->type() == Definition::MIXIN; + } + + bool CheckNesting::is_function(Statement_Ptr n) + { + Definition_Ptr def = Cast(n); + return def && def->type() == Definition::FUNCTION; + } + + bool CheckNesting::is_root_node(Statement_Ptr n) + { + if (Cast(n)) return false; + + Block_Ptr b = Cast(n); + return b && b->is_root(); + } + + bool CheckNesting::is_at_root_node(Statement_Ptr n) + { + return Cast(n) != NULL; + } + + bool CheckNesting::is_directive_node(Statement_Ptr n) + { + return Cast(n) || + Cast(n) || + Cast(n) || + Cast(n); + } +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/check_nesting.hpp b/mybulma/node_modules/node-sass/src/libsass/src/check_nesting.hpp new file mode 100644 index 0000000..62c38d9 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/check_nesting.hpp @@ -0,0 +1,65 @@ +#ifndef SASS_CHECK_NESTING_H +#define SASS_CHECK_NESTING_H + +#include "ast.hpp" +#include "operation.hpp" + +namespace Sass { + + class CheckNesting : public Operation_CRTP { + + std::vector parents; + Backtraces traces; + Statement_Ptr parent; + Definition_Ptr current_mixin_definition; + + Statement_Ptr fallback_impl(Statement_Ptr); + Statement_Ptr before(Statement_Ptr); + Statement_Ptr visit_children(Statement_Ptr); + + public: + CheckNesting(); + ~CheckNesting() { } + + Statement_Ptr operator()(Block_Ptr); + Statement_Ptr operator()(Definition_Ptr); + Statement_Ptr operator()(If_Ptr); + + template + Statement_Ptr fallback(U x) { + Statement_Ptr n = Cast(x); + if (this->should_visit(n)) { + return fallback_impl(n); + } + return NULL; + } + + private: + void invalid_content_parent(Statement_Ptr, AST_Node_Ptr); + void invalid_charset_parent(Statement_Ptr, AST_Node_Ptr); + void 
invalid_extend_parent(Statement_Ptr, AST_Node_Ptr); + // void invalid_import_parent(Statement_Ptr); + void invalid_mixin_definition_parent(Statement_Ptr, AST_Node_Ptr); + void invalid_function_parent(Statement_Ptr, AST_Node_Ptr); + + void invalid_function_child(Statement_Ptr); + void invalid_prop_child(Statement_Ptr); + void invalid_prop_parent(Statement_Ptr, AST_Node_Ptr); + void invalid_return_parent(Statement_Ptr, AST_Node_Ptr); + void invalid_value_child(AST_Node_Ptr); + + bool is_transparent_parent(Statement_Ptr, Statement_Ptr); + + bool should_visit(Statement_Ptr); + + bool is_charset(Statement_Ptr); + bool is_mixin(Statement_Ptr); + bool is_function(Statement_Ptr); + bool is_root_node(Statement_Ptr); + bool is_at_root_node(Statement_Ptr); + bool is_directive_node(Statement_Ptr); + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/color_maps.cpp b/mybulma/node_modules/node-sass/src/libsass/src/color_maps.cpp new file mode 100644 index 0000000..129e47c --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/color_maps.cpp @@ -0,0 +1,648 @@ +#include "sass.hpp" +#include "ast.hpp" +#include "color_maps.hpp" + +namespace Sass { + + namespace ColorNames + { + const char aliceblue [] = "aliceblue"; + const char antiquewhite [] = "antiquewhite"; + const char cyan [] = "cyan"; + const char aqua [] = "aqua"; + const char aquamarine [] = "aquamarine"; + const char azure [] = "azure"; + const char beige [] = "beige"; + const char bisque [] = "bisque"; + const char black [] = "black"; + const char blanchedalmond [] = "blanchedalmond"; + const char blue [] = "blue"; + const char blueviolet [] = "blueviolet"; + const char brown [] = "brown"; + const char burlywood [] = "burlywood"; + const char cadetblue [] = "cadetblue"; + const char chartreuse [] = "chartreuse"; + const char chocolate [] = "chocolate"; + const char coral [] = "coral"; + const char cornflowerblue [] = "cornflowerblue"; + const char cornsilk [] = "cornsilk"; + const char crimson [] = "crimson"; + const char darkblue [] = "darkblue"; + const char darkcyan [] = "darkcyan"; + const char darkgoldenrod [] = "darkgoldenrod"; + const char darkgray [] = "darkgray"; + const char darkgrey [] = "darkgrey"; + const char darkgreen [] = "darkgreen"; + const char darkkhaki [] = "darkkhaki"; + const char darkmagenta [] = "darkmagenta"; + const char darkolivegreen [] = "darkolivegreen"; + const char darkorange [] = "darkorange"; + const char darkorchid [] = "darkorchid"; + const char darkred [] = "darkred"; + const char darksalmon [] = "darksalmon"; + const char darkseagreen [] = "darkseagreen"; + const char darkslateblue [] = "darkslateblue"; + const char darkslategray [] = "darkslategray"; + const char darkslategrey [] = "darkslategrey"; + const char darkturquoise [] = "darkturquoise"; + const char darkviolet [] = "darkviolet"; + const char deeppink [] = "deeppink"; + const char deepskyblue [] = "deepskyblue"; + const char dimgray [] = "dimgray"; + const char dimgrey [] = "dimgrey"; + const char dodgerblue [] = "dodgerblue"; + const char firebrick [] = "firebrick"; + const char floralwhite [] = "floralwhite"; + const char forestgreen [] = "forestgreen"; + const char magenta [] = "magenta"; + const char fuchsia [] = "fuchsia"; + const char gainsboro [] = "gainsboro"; + const char ghostwhite [] = "ghostwhite"; + const char gold [] = "gold"; + const char goldenrod [] = "goldenrod"; + const char gray [] = "gray"; + const char grey [] = "grey"; + const char green [] = "green"; + const char greenyellow [] = 
"greenyellow"; + const char honeydew [] = "honeydew"; + const char hotpink [] = "hotpink"; + const char indianred [] = "indianred"; + const char indigo [] = "indigo"; + const char ivory [] = "ivory"; + const char khaki [] = "khaki"; + const char lavender [] = "lavender"; + const char lavenderblush [] = "lavenderblush"; + const char lawngreen [] = "lawngreen"; + const char lemonchiffon [] = "lemonchiffon"; + const char lightblue [] = "lightblue"; + const char lightcoral [] = "lightcoral"; + const char lightcyan [] = "lightcyan"; + const char lightgoldenrodyellow [] = "lightgoldenrodyellow"; + const char lightgray [] = "lightgray"; + const char lightgrey [] = "lightgrey"; + const char lightgreen [] = "lightgreen"; + const char lightpink [] = "lightpink"; + const char lightsalmon [] = "lightsalmon"; + const char lightseagreen [] = "lightseagreen"; + const char lightskyblue [] = "lightskyblue"; + const char lightslategray [] = "lightslategray"; + const char lightslategrey [] = "lightslategrey"; + const char lightsteelblue [] = "lightsteelblue"; + const char lightyellow [] = "lightyellow"; + const char lime [] = "lime"; + const char limegreen [] = "limegreen"; + const char linen [] = "linen"; + const char maroon [] = "maroon"; + const char mediumaquamarine [] = "mediumaquamarine"; + const char mediumblue [] = "mediumblue"; + const char mediumorchid [] = "mediumorchid"; + const char mediumpurple [] = "mediumpurple"; + const char mediumseagreen [] = "mediumseagreen"; + const char mediumslateblue [] = "mediumslateblue"; + const char mediumspringgreen [] = "mediumspringgreen"; + const char mediumturquoise [] = "mediumturquoise"; + const char mediumvioletred [] = "mediumvioletred"; + const char midnightblue [] = "midnightblue"; + const char mintcream [] = "mintcream"; + const char mistyrose [] = "mistyrose"; + const char moccasin [] = "moccasin"; + const char navajowhite [] = "navajowhite"; + const char navy [] = "navy"; + const char oldlace [] = "oldlace"; + const char olive [] = "olive"; + const char olivedrab [] = "olivedrab"; + const char orange [] = "orange"; + const char orangered [] = "orangered"; + const char orchid [] = "orchid"; + const char palegoldenrod [] = "palegoldenrod"; + const char palegreen [] = "palegreen"; + const char paleturquoise [] = "paleturquoise"; + const char palevioletred [] = "palevioletred"; + const char papayawhip [] = "papayawhip"; + const char peachpuff [] = "peachpuff"; + const char peru [] = "peru"; + const char pink [] = "pink"; + const char plum [] = "plum"; + const char powderblue [] = "powderblue"; + const char purple [] = "purple"; + const char red [] = "red"; + const char rosybrown [] = "rosybrown"; + const char royalblue [] = "royalblue"; + const char saddlebrown [] = "saddlebrown"; + const char salmon [] = "salmon"; + const char sandybrown [] = "sandybrown"; + const char seagreen [] = "seagreen"; + const char seashell [] = "seashell"; + const char sienna [] = "sienna"; + const char silver [] = "silver"; + const char skyblue [] = "skyblue"; + const char slateblue [] = "slateblue"; + const char slategray [] = "slategray"; + const char slategrey [] = "slategrey"; + const char snow [] = "snow"; + const char springgreen [] = "springgreen"; + const char steelblue [] = "steelblue"; + const char tan [] = "tan"; + const char teal [] = "teal"; + const char thistle [] = "thistle"; + const char tomato [] = "tomato"; + const char turquoise [] = "turquoise"; + const char violet [] = "violet"; + const char wheat [] = "wheat"; + const char white [] = "white"; + const 
char whitesmoke [] = "whitesmoke"; + const char yellow [] = "yellow"; + const char yellowgreen [] = "yellowgreen"; + const char rebeccapurple [] = "rebeccapurple"; + const char transparent [] = "transparent"; + } + + namespace Colors { + const ParserState color_table("[COLOR TABLE]"); + const Color aliceblue(color_table, 240, 248, 255, 1); + const Color antiquewhite(color_table, 250, 235, 215, 1); + const Color cyan(color_table, 0, 255, 255, 1); + const Color aqua(color_table, 0, 255, 255, 1); + const Color aquamarine(color_table, 127, 255, 212, 1); + const Color azure(color_table, 240, 255, 255, 1); + const Color beige(color_table, 245, 245, 220, 1); + const Color bisque(color_table, 255, 228, 196, 1); + const Color black(color_table, 0, 0, 0, 1); + const Color blanchedalmond(color_table, 255, 235, 205, 1); + const Color blue(color_table, 0, 0, 255, 1); + const Color blueviolet(color_table, 138, 43, 226, 1); + const Color brown(color_table, 165, 42, 42, 1); + const Color burlywood(color_table, 222, 184, 135, 1); + const Color cadetblue(color_table, 95, 158, 160, 1); + const Color chartreuse(color_table, 127, 255, 0, 1); + const Color chocolate(color_table, 210, 105, 30, 1); + const Color coral(color_table, 255, 127, 80, 1); + const Color cornflowerblue(color_table, 100, 149, 237, 1); + const Color cornsilk(color_table, 255, 248, 220, 1); + const Color crimson(color_table, 220, 20, 60, 1); + const Color darkblue(color_table, 0, 0, 139, 1); + const Color darkcyan(color_table, 0, 139, 139, 1); + const Color darkgoldenrod(color_table, 184, 134, 11, 1); + const Color darkgray(color_table, 169, 169, 169, 1); + const Color darkgrey(color_table, 169, 169, 169, 1); + const Color darkgreen(color_table, 0, 100, 0, 1); + const Color darkkhaki(color_table, 189, 183, 107, 1); + const Color darkmagenta(color_table, 139, 0, 139, 1); + const Color darkolivegreen(color_table, 85, 107, 47, 1); + const Color darkorange(color_table, 255, 140, 0, 1); + const Color darkorchid(color_table, 153, 50, 204, 1); + const Color darkred(color_table, 139, 0, 0, 1); + const Color darksalmon(color_table, 233, 150, 122, 1); + const Color darkseagreen(color_table, 143, 188, 143, 1); + const Color darkslateblue(color_table, 72, 61, 139, 1); + const Color darkslategray(color_table, 47, 79, 79, 1); + const Color darkslategrey(color_table, 47, 79, 79, 1); + const Color darkturquoise(color_table, 0, 206, 209, 1); + const Color darkviolet(color_table, 148, 0, 211, 1); + const Color deeppink(color_table, 255, 20, 147, 1); + const Color deepskyblue(color_table, 0, 191, 255, 1); + const Color dimgray(color_table, 105, 105, 105, 1); + const Color dimgrey(color_table, 105, 105, 105, 1); + const Color dodgerblue(color_table, 30, 144, 255, 1); + const Color firebrick(color_table, 178, 34, 34, 1); + const Color floralwhite(color_table, 255, 250, 240, 1); + const Color forestgreen(color_table, 34, 139, 34, 1); + const Color magenta(color_table, 255, 0, 255, 1); + const Color fuchsia(color_table, 255, 0, 255, 1); + const Color gainsboro(color_table, 220, 220, 220, 1); + const Color ghostwhite(color_table, 248, 248, 255, 1); + const Color gold(color_table, 255, 215, 0, 1); + const Color goldenrod(color_table, 218, 165, 32, 1); + const Color gray(color_table, 128, 128, 128, 1); + const Color grey(color_table, 128, 128, 128, 1); + const Color green(color_table, 0, 128, 0, 1); + const Color greenyellow(color_table, 173, 255, 47, 1); + const Color honeydew(color_table, 240, 255, 240, 1); + const Color hotpink(color_table, 255, 105, 180, 1); + 
const Color indianred(color_table, 205, 92, 92, 1); + const Color indigo(color_table, 75, 0, 130, 1); + const Color ivory(color_table, 255, 255, 240, 1); + const Color khaki(color_table, 240, 230, 140, 1); + const Color lavender(color_table, 230, 230, 250, 1); + const Color lavenderblush(color_table, 255, 240, 245, 1); + const Color lawngreen(color_table, 124, 252, 0, 1); + const Color lemonchiffon(color_table, 255, 250, 205, 1); + const Color lightblue(color_table, 173, 216, 230, 1); + const Color lightcoral(color_table, 240, 128, 128, 1); + const Color lightcyan(color_table, 224, 255, 255, 1); + const Color lightgoldenrodyellow(color_table, 250, 250, 210, 1); + const Color lightgray(color_table, 211, 211, 211, 1); + const Color lightgrey(color_table, 211, 211, 211, 1); + const Color lightgreen(color_table, 144, 238, 144, 1); + const Color lightpink(color_table, 255, 182, 193, 1); + const Color lightsalmon(color_table, 255, 160, 122, 1); + const Color lightseagreen(color_table, 32, 178, 170, 1); + const Color lightskyblue(color_table, 135, 206, 250, 1); + const Color lightslategray(color_table, 119, 136, 153, 1); + const Color lightslategrey(color_table, 119, 136, 153, 1); + const Color lightsteelblue(color_table, 176, 196, 222, 1); + const Color lightyellow(color_table, 255, 255, 224, 1); + const Color lime(color_table, 0, 255, 0, 1); + const Color limegreen(color_table, 50, 205, 50, 1); + const Color linen(color_table, 250, 240, 230, 1); + const Color maroon(color_table, 128, 0, 0, 1); + const Color mediumaquamarine(color_table, 102, 205, 170, 1); + const Color mediumblue(color_table, 0, 0, 205, 1); + const Color mediumorchid(color_table, 186, 85, 211, 1); + const Color mediumpurple(color_table, 147, 112, 219, 1); + const Color mediumseagreen(color_table, 60, 179, 113, 1); + const Color mediumslateblue(color_table, 123, 104, 238, 1); + const Color mediumspringgreen(color_table, 0, 250, 154, 1); + const Color mediumturquoise(color_table, 72, 209, 204, 1); + const Color mediumvioletred(color_table, 199, 21, 133, 1); + const Color midnightblue(color_table, 25, 25, 112, 1); + const Color mintcream(color_table, 245, 255, 250, 1); + const Color mistyrose(color_table, 255, 228, 225, 1); + const Color moccasin(color_table, 255, 228, 181, 1); + const Color navajowhite(color_table, 255, 222, 173, 1); + const Color navy(color_table, 0, 0, 128, 1); + const Color oldlace(color_table, 253, 245, 230, 1); + const Color olive(color_table, 128, 128, 0, 1); + const Color olivedrab(color_table, 107, 142, 35, 1); + const Color orange(color_table, 255, 165, 0, 1); + const Color orangered(color_table, 255, 69, 0, 1); + const Color orchid(color_table, 218, 112, 214, 1); + const Color palegoldenrod(color_table, 238, 232, 170, 1); + const Color palegreen(color_table, 152, 251, 152, 1); + const Color paleturquoise(color_table, 175, 238, 238, 1); + const Color palevioletred(color_table, 219, 112, 147, 1); + const Color papayawhip(color_table, 255, 239, 213, 1); + const Color peachpuff(color_table, 255, 218, 185, 1); + const Color peru(color_table, 205, 133, 63, 1); + const Color pink(color_table, 255, 192, 203, 1); + const Color plum(color_table, 221, 160, 221, 1); + const Color powderblue(color_table, 176, 224, 230, 1); + const Color purple(color_table, 128, 0, 128, 1); + const Color red(color_table, 255, 0, 0, 1); + const Color rosybrown(color_table, 188, 143, 143, 1); + const Color royalblue(color_table, 65, 105, 225, 1); + const Color saddlebrown(color_table, 139, 69, 19, 1); + const Color salmon(color_table, 
250, 128, 114, 1); + const Color sandybrown(color_table, 244, 164, 96, 1); + const Color seagreen(color_table, 46, 139, 87, 1); + const Color seashell(color_table, 255, 245, 238, 1); + const Color sienna(color_table, 160, 82, 45, 1); + const Color silver(color_table, 192, 192, 192, 1); + const Color skyblue(color_table, 135, 206, 235, 1); + const Color slateblue(color_table, 106, 90, 205, 1); + const Color slategray(color_table, 112, 128, 144, 1); + const Color slategrey(color_table, 112, 128, 144, 1); + const Color snow(color_table, 255, 250, 250, 1); + const Color springgreen(color_table, 0, 255, 127, 1); + const Color steelblue(color_table, 70, 130, 180, 1); + const Color tan(color_table, 210, 180, 140, 1); + const Color teal(color_table, 0, 128, 128, 1); + const Color thistle(color_table, 216, 191, 216, 1); + const Color tomato(color_table, 255, 99, 71, 1); + const Color turquoise(color_table, 64, 224, 208, 1); + const Color violet(color_table, 238, 130, 238, 1); + const Color wheat(color_table, 245, 222, 179, 1); + const Color white(color_table, 255, 255, 255, 1); + const Color whitesmoke(color_table, 245, 245, 245, 1); + const Color yellow(color_table, 255, 255, 0, 1); + const Color yellowgreen(color_table, 154, 205, 50, 1); + const Color rebeccapurple(color_table, 102, 51, 153, 1); + const Color transparent(color_table, 0, 0, 0, 0); + } + + const std::map colors_to_names { + { 240 * 0x10000 + 248 * 0x100 + 255, ColorNames::aliceblue }, + { 250 * 0x10000 + 235 * 0x100 + 215, ColorNames::antiquewhite }, + { 0 * 0x10000 + 255 * 0x100 + 255, ColorNames::cyan }, + { 127 * 0x10000 + 255 * 0x100 + 212, ColorNames::aquamarine }, + { 240 * 0x10000 + 255 * 0x100 + 255, ColorNames::azure }, + { 245 * 0x10000 + 245 * 0x100 + 220, ColorNames::beige }, + { 255 * 0x10000 + 228 * 0x100 + 196, ColorNames::bisque }, + { 0 * 0x10000 + 0 * 0x100 + 0, ColorNames::black }, + { 255 * 0x10000 + 235 * 0x100 + 205, ColorNames::blanchedalmond }, + { 0 * 0x10000 + 0 * 0x100 + 255, ColorNames::blue }, + { 138 * 0x10000 + 43 * 0x100 + 226, ColorNames::blueviolet }, + { 165 * 0x10000 + 42 * 0x100 + 42, ColorNames::brown }, + { 222 * 0x10000 + 184 * 0x100 + 135, ColorNames::burlywood }, + { 95 * 0x10000 + 158 * 0x100 + 160, ColorNames::cadetblue }, + { 127 * 0x10000 + 255 * 0x100 + 0, ColorNames::chartreuse }, + { 210 * 0x10000 + 105 * 0x100 + 30, ColorNames::chocolate }, + { 255 * 0x10000 + 127 * 0x100 + 80, ColorNames::coral }, + { 100 * 0x10000 + 149 * 0x100 + 237, ColorNames::cornflowerblue }, + { 255 * 0x10000 + 248 * 0x100 + 220, ColorNames::cornsilk }, + { 220 * 0x10000 + 20 * 0x100 + 60, ColorNames::crimson }, + { 0 * 0x10000 + 0 * 0x100 + 139, ColorNames::darkblue }, + { 0 * 0x10000 + 139 * 0x100 + 139, ColorNames::darkcyan }, + { 184 * 0x10000 + 134 * 0x100 + 11, ColorNames::darkgoldenrod }, + { 169 * 0x10000 + 169 * 0x100 + 169, ColorNames::darkgray }, + { 0 * 0x10000 + 100 * 0x100 + 0, ColorNames::darkgreen }, + { 189 * 0x10000 + 183 * 0x100 + 107, ColorNames::darkkhaki }, + { 139 * 0x10000 + 0 * 0x100 + 139, ColorNames::darkmagenta }, + { 85 * 0x10000 + 107 * 0x100 + 47, ColorNames::darkolivegreen }, + { 255 * 0x10000 + 140 * 0x100 + 0, ColorNames::darkorange }, + { 153 * 0x10000 + 50 * 0x100 + 204, ColorNames::darkorchid }, + { 139 * 0x10000 + 0 * 0x100 + 0, ColorNames::darkred }, + { 233 * 0x10000 + 150 * 0x100 + 122, ColorNames::darksalmon }, + { 143 * 0x10000 + 188 * 0x100 + 143, ColorNames::darkseagreen }, + { 72 * 0x10000 + 61 * 0x100 + 139, ColorNames::darkslateblue }, + { 47 * 0x10000 + 79 * 
0x100 + 79, ColorNames::darkslategray }, + { 0 * 0x10000 + 206 * 0x100 + 209, ColorNames::darkturquoise }, + { 148 * 0x10000 + 0 * 0x100 + 211, ColorNames::darkviolet }, + { 255 * 0x10000 + 20 * 0x100 + 147, ColorNames::deeppink }, + { 0 * 0x10000 + 191 * 0x100 + 255, ColorNames::deepskyblue }, + { 105 * 0x10000 + 105 * 0x100 + 105, ColorNames::dimgray }, + { 30 * 0x10000 + 144 * 0x100 + 255, ColorNames::dodgerblue }, + { 178 * 0x10000 + 34 * 0x100 + 34, ColorNames::firebrick }, + { 255 * 0x10000 + 250 * 0x100 + 240, ColorNames::floralwhite }, + { 34 * 0x10000 + 139 * 0x100 + 34, ColorNames::forestgreen }, + { 255 * 0x10000 + 0 * 0x100 + 255, ColorNames::magenta }, + { 220 * 0x10000 + 220 * 0x100 + 220, ColorNames::gainsboro }, + { 248 * 0x10000 + 248 * 0x100 + 255, ColorNames::ghostwhite }, + { 255 * 0x10000 + 215 * 0x100 + 0, ColorNames::gold }, + { 218 * 0x10000 + 165 * 0x100 + 32, ColorNames::goldenrod }, + { 128 * 0x10000 + 128 * 0x100 + 128, ColorNames::gray }, + { 0 * 0x10000 + 128 * 0x100 + 0, ColorNames::green }, + { 173 * 0x10000 + 255 * 0x100 + 47, ColorNames::greenyellow }, + { 240 * 0x10000 + 255 * 0x100 + 240, ColorNames::honeydew }, + { 255 * 0x10000 + 105 * 0x100 + 180, ColorNames::hotpink }, + { 205 * 0x10000 + 92 * 0x100 + 92, ColorNames::indianred }, + { 75 * 0x10000 + 0 * 0x100 + 130, ColorNames::indigo }, + { 255 * 0x10000 + 255 * 0x100 + 240, ColorNames::ivory }, + { 240 * 0x10000 + 230 * 0x100 + 140, ColorNames::khaki }, + { 230 * 0x10000 + 230 * 0x100 + 250, ColorNames::lavender }, + { 255 * 0x10000 + 240 * 0x100 + 245, ColorNames::lavenderblush }, + { 124 * 0x10000 + 252 * 0x100 + 0, ColorNames::lawngreen }, + { 255 * 0x10000 + 250 * 0x100 + 205, ColorNames::lemonchiffon }, + { 173 * 0x10000 + 216 * 0x100 + 230, ColorNames::lightblue }, + { 240 * 0x10000 + 128 * 0x100 + 128, ColorNames::lightcoral }, + { 224 * 0x10000 + 255 * 0x100 + 255, ColorNames::lightcyan }, + { 250 * 0x10000 + 250 * 0x100 + 210, ColorNames::lightgoldenrodyellow }, + { 211 * 0x10000 + 211 * 0x100 + 211, ColorNames::lightgray }, + { 144 * 0x10000 + 238 * 0x100 + 144, ColorNames::lightgreen }, + { 255 * 0x10000 + 182 * 0x100 + 193, ColorNames::lightpink }, + { 255 * 0x10000 + 160 * 0x100 + 122, ColorNames::lightsalmon }, + { 32 * 0x10000 + 178 * 0x100 + 170, ColorNames::lightseagreen }, + { 135 * 0x10000 + 206 * 0x100 + 250, ColorNames::lightskyblue }, + { 119 * 0x10000 + 136 * 0x100 + 153, ColorNames::lightslategray }, + { 176 * 0x10000 + 196 * 0x100 + 222, ColorNames::lightsteelblue }, + { 255 * 0x10000 + 255 * 0x100 + 224, ColorNames::lightyellow }, + { 0 * 0x10000 + 255 * 0x100 + 0, ColorNames::lime }, + { 50 * 0x10000 + 205 * 0x100 + 50, ColorNames::limegreen }, + { 250 * 0x10000 + 240 * 0x100 + 230, ColorNames::linen }, + { 128 * 0x10000 + 0 * 0x100 + 0, ColorNames::maroon }, + { 102 * 0x10000 + 205 * 0x100 + 170, ColorNames::mediumaquamarine }, + { 0 * 0x10000 + 0 * 0x100 + 205, ColorNames::mediumblue }, + { 186 * 0x10000 + 85 * 0x100 + 211, ColorNames::mediumorchid }, + { 147 * 0x10000 + 112 * 0x100 + 219, ColorNames::mediumpurple }, + { 60 * 0x10000 + 179 * 0x100 + 113, ColorNames::mediumseagreen }, + { 123 * 0x10000 + 104 * 0x100 + 238, ColorNames::mediumslateblue }, + { 0 * 0x10000 + 250 * 0x100 + 154, ColorNames::mediumspringgreen }, + { 72 * 0x10000 + 209 * 0x100 + 204, ColorNames::mediumturquoise }, + { 199 * 0x10000 + 21 * 0x100 + 133, ColorNames::mediumvioletred }, + { 25 * 0x10000 + 25 * 0x100 + 112, ColorNames::midnightblue }, + { 245 * 0x10000 + 255 * 0x100 + 250, 
ColorNames::mintcream }, + { 255 * 0x10000 + 228 * 0x100 + 225, ColorNames::mistyrose }, + { 255 * 0x10000 + 228 * 0x100 + 181, ColorNames::moccasin }, + { 255 * 0x10000 + 222 * 0x100 + 173, ColorNames::navajowhite }, + { 0 * 0x10000 + 0 * 0x100 + 128, ColorNames::navy }, + { 253 * 0x10000 + 245 * 0x100 + 230, ColorNames::oldlace }, + { 128 * 0x10000 + 128 * 0x100 + 0, ColorNames::olive }, + { 107 * 0x10000 + 142 * 0x100 + 35, ColorNames::olivedrab }, + { 255 * 0x10000 + 165 * 0x100 + 0, ColorNames::orange }, + { 255 * 0x10000 + 69 * 0x100 + 0, ColorNames::orangered }, + { 218 * 0x10000 + 112 * 0x100 + 214, ColorNames::orchid }, + { 238 * 0x10000 + 232 * 0x100 + 170, ColorNames::palegoldenrod }, + { 152 * 0x10000 + 251 * 0x100 + 152, ColorNames::palegreen }, + { 175 * 0x10000 + 238 * 0x100 + 238, ColorNames::paleturquoise }, + { 219 * 0x10000 + 112 * 0x100 + 147, ColorNames::palevioletred }, + { 255 * 0x10000 + 239 * 0x100 + 213, ColorNames::papayawhip }, + { 255 * 0x10000 + 218 * 0x100 + 185, ColorNames::peachpuff }, + { 205 * 0x10000 + 133 * 0x100 + 63, ColorNames::peru }, + { 255 * 0x10000 + 192 * 0x100 + 203, ColorNames::pink }, + { 221 * 0x10000 + 160 * 0x100 + 221, ColorNames::plum }, + { 176 * 0x10000 + 224 * 0x100 + 230, ColorNames::powderblue }, + { 128 * 0x10000 + 0 * 0x100 + 128, ColorNames::purple }, + { 255 * 0x10000 + 0 * 0x100 + 0, ColorNames::red }, + { 188 * 0x10000 + 143 * 0x100 + 143, ColorNames::rosybrown }, + { 65 * 0x10000 + 105 * 0x100 + 225, ColorNames::royalblue }, + { 139 * 0x10000 + 69 * 0x100 + 19, ColorNames::saddlebrown }, + { 250 * 0x10000 + 128 * 0x100 + 114, ColorNames::salmon }, + { 244 * 0x10000 + 164 * 0x100 + 96, ColorNames::sandybrown }, + { 46 * 0x10000 + 139 * 0x100 + 87, ColorNames::seagreen }, + { 255 * 0x10000 + 245 * 0x100 + 238, ColorNames::seashell }, + { 160 * 0x10000 + 82 * 0x100 + 45, ColorNames::sienna }, + { 192 * 0x10000 + 192 * 0x100 + 192, ColorNames::silver }, + { 135 * 0x10000 + 206 * 0x100 + 235, ColorNames::skyblue }, + { 106 * 0x10000 + 90 * 0x100 + 205, ColorNames::slateblue }, + { 112 * 0x10000 + 128 * 0x100 + 144, ColorNames::slategray }, + { 255 * 0x10000 + 250 * 0x100 + 250, ColorNames::snow }, + { 0 * 0x10000 + 255 * 0x100 + 127, ColorNames::springgreen }, + { 70 * 0x10000 + 130 * 0x100 + 180, ColorNames::steelblue }, + { 210 * 0x10000 + 180 * 0x100 + 140, ColorNames::tan }, + { 0 * 0x10000 + 128 * 0x100 + 128, ColorNames::teal }, + { 216 * 0x10000 + 191 * 0x100 + 216, ColorNames::thistle }, + { 255 * 0x10000 + 99 * 0x100 + 71, ColorNames::tomato }, + { 64 * 0x10000 + 224 * 0x100 + 208, ColorNames::turquoise }, + { 238 * 0x10000 + 130 * 0x100 + 238, ColorNames::violet }, + { 245 * 0x10000 + 222 * 0x100 + 179, ColorNames::wheat }, + { 255 * 0x10000 + 255 * 0x100 + 255, ColorNames::white }, + { 245 * 0x10000 + 245 * 0x100 + 245, ColorNames::whitesmoke }, + { 255 * 0x10000 + 255 * 0x100 + 0, ColorNames::yellow }, + { 154 * 0x10000 + 205 * 0x100 + 50, ColorNames::yellowgreen }, + { 102 * 0x10000 + 51 * 0x100 + 153, ColorNames::rebeccapurple } + }; + + const std::map names_to_colors + { + { ColorNames::aliceblue, &Colors::aliceblue }, + { ColorNames::antiquewhite, &Colors::antiquewhite }, + { ColorNames::cyan, &Colors::cyan }, + { ColorNames::aqua, &Colors::aqua }, + { ColorNames::aquamarine, &Colors::aquamarine }, + { ColorNames::azure, &Colors::azure }, + { ColorNames::beige, &Colors::beige }, + { ColorNames::bisque, &Colors::bisque }, + { ColorNames::black, &Colors::black }, + { ColorNames::blanchedalmond, 
&Colors::blanchedalmond }, + { ColorNames::blue, &Colors::blue }, + { ColorNames::blueviolet, &Colors::blueviolet }, + { ColorNames::brown, &Colors::brown }, + { ColorNames::burlywood, &Colors::burlywood }, + { ColorNames::cadetblue, &Colors::cadetblue }, + { ColorNames::chartreuse, &Colors::chartreuse }, + { ColorNames::chocolate, &Colors::chocolate }, + { ColorNames::coral, &Colors::coral }, + { ColorNames::cornflowerblue, &Colors::cornflowerblue }, + { ColorNames::cornsilk, &Colors::cornsilk }, + { ColorNames::crimson, &Colors::crimson }, + { ColorNames::darkblue, &Colors::darkblue }, + { ColorNames::darkcyan, &Colors::darkcyan }, + { ColorNames::darkgoldenrod, &Colors::darkgoldenrod }, + { ColorNames::darkgray, &Colors::darkgray }, + { ColorNames::darkgrey, &Colors::darkgrey }, + { ColorNames::darkgreen, &Colors::darkgreen }, + { ColorNames::darkkhaki, &Colors::darkkhaki }, + { ColorNames::darkmagenta, &Colors::darkmagenta }, + { ColorNames::darkolivegreen, &Colors::darkolivegreen }, + { ColorNames::darkorange, &Colors::darkorange }, + { ColorNames::darkorchid, &Colors::darkorchid }, + { ColorNames::darkred, &Colors::darkred }, + { ColorNames::darksalmon, &Colors::darksalmon }, + { ColorNames::darkseagreen, &Colors::darkseagreen }, + { ColorNames::darkslateblue, &Colors::darkslateblue }, + { ColorNames::darkslategray, &Colors::darkslategray }, + { ColorNames::darkslategrey, &Colors::darkslategrey }, + { ColorNames::darkturquoise, &Colors::darkturquoise }, + { ColorNames::darkviolet, &Colors::darkviolet }, + { ColorNames::deeppink, &Colors::deeppink }, + { ColorNames::deepskyblue, &Colors::deepskyblue }, + { ColorNames::dimgray, &Colors::dimgray }, + { ColorNames::dimgrey, &Colors::dimgrey }, + { ColorNames::dodgerblue, &Colors::dodgerblue }, + { ColorNames::firebrick, &Colors::firebrick }, + { ColorNames::floralwhite, &Colors::floralwhite }, + { ColorNames::forestgreen, &Colors::forestgreen }, + { ColorNames::magenta, &Colors::magenta }, + { ColorNames::fuchsia, &Colors::fuchsia }, + { ColorNames::gainsboro, &Colors::gainsboro }, + { ColorNames::ghostwhite, &Colors::ghostwhite }, + { ColorNames::gold, &Colors::gold }, + { ColorNames::goldenrod, &Colors::goldenrod }, + { ColorNames::gray, &Colors::gray }, + { ColorNames::grey, &Colors::grey }, + { ColorNames::green, &Colors::green }, + { ColorNames::greenyellow, &Colors::greenyellow }, + { ColorNames::honeydew, &Colors::honeydew }, + { ColorNames::hotpink, &Colors::hotpink }, + { ColorNames::indianred, &Colors::indianred }, + { ColorNames::indigo, &Colors::indigo }, + { ColorNames::ivory, &Colors::ivory }, + { ColorNames::khaki, &Colors::khaki }, + { ColorNames::lavender, &Colors::lavender }, + { ColorNames::lavenderblush, &Colors::lavenderblush }, + { ColorNames::lawngreen, &Colors::lawngreen }, + { ColorNames::lemonchiffon, &Colors::lemonchiffon }, + { ColorNames::lightblue, &Colors::lightblue }, + { ColorNames::lightcoral, &Colors::lightcoral }, + { ColorNames::lightcyan, &Colors::lightcyan }, + { ColorNames::lightgoldenrodyellow, &Colors::lightgoldenrodyellow }, + { ColorNames::lightgray, &Colors::lightgray }, + { ColorNames::lightgrey, &Colors::lightgrey }, + { ColorNames::lightgreen, &Colors::lightgreen }, + { ColorNames::lightpink, &Colors::lightpink }, + { ColorNames::lightsalmon, &Colors::lightsalmon }, + { ColorNames::lightseagreen, &Colors::lightseagreen }, + { ColorNames::lightskyblue, &Colors::lightskyblue }, + { ColorNames::lightslategray, &Colors::lightslategray }, + { ColorNames::lightslategrey, &Colors::lightslategrey 
}, + { ColorNames::lightsteelblue, &Colors::lightsteelblue }, + { ColorNames::lightyellow, &Colors::lightyellow }, + { ColorNames::lime, &Colors::lime }, + { ColorNames::limegreen, &Colors::limegreen }, + { ColorNames::linen, &Colors::linen }, + { ColorNames::maroon, &Colors::maroon }, + { ColorNames::mediumaquamarine, &Colors::mediumaquamarine }, + { ColorNames::mediumblue, &Colors::mediumblue }, + { ColorNames::mediumorchid, &Colors::mediumorchid }, + { ColorNames::mediumpurple, &Colors::mediumpurple }, + { ColorNames::mediumseagreen, &Colors::mediumseagreen }, + { ColorNames::mediumslateblue, &Colors::mediumslateblue }, + { ColorNames::mediumspringgreen, &Colors::mediumspringgreen }, + { ColorNames::mediumturquoise, &Colors::mediumturquoise }, + { ColorNames::mediumvioletred, &Colors::mediumvioletred }, + { ColorNames::midnightblue, &Colors::midnightblue }, + { ColorNames::mintcream, &Colors::mintcream }, + { ColorNames::mistyrose, &Colors::mistyrose }, + { ColorNames::moccasin, &Colors::moccasin }, + { ColorNames::navajowhite, &Colors::navajowhite }, + { ColorNames::navy, &Colors::navy }, + { ColorNames::oldlace, &Colors::oldlace }, + { ColorNames::olive, &Colors::olive }, + { ColorNames::olivedrab, &Colors::olivedrab }, + { ColorNames::orange, &Colors::orange }, + { ColorNames::orangered, &Colors::orangered }, + { ColorNames::orchid, &Colors::orchid }, + { ColorNames::palegoldenrod, &Colors::palegoldenrod }, + { ColorNames::palegreen, &Colors::palegreen }, + { ColorNames::paleturquoise, &Colors::paleturquoise }, + { ColorNames::palevioletred, &Colors::palevioletred }, + { ColorNames::papayawhip, &Colors::papayawhip }, + { ColorNames::peachpuff, &Colors::peachpuff }, + { ColorNames::peru, &Colors::peru }, + { ColorNames::pink, &Colors::pink }, + { ColorNames::plum, &Colors::plum }, + { ColorNames::powderblue, &Colors::powderblue }, + { ColorNames::purple, &Colors::purple }, + { ColorNames::red, &Colors::red }, + { ColorNames::rosybrown, &Colors::rosybrown }, + { ColorNames::royalblue, &Colors::royalblue }, + { ColorNames::saddlebrown, &Colors::saddlebrown }, + { ColorNames::salmon, &Colors::salmon }, + { ColorNames::sandybrown, &Colors::sandybrown }, + { ColorNames::seagreen, &Colors::seagreen }, + { ColorNames::seashell, &Colors::seashell }, + { ColorNames::sienna, &Colors::sienna }, + { ColorNames::silver, &Colors::silver }, + { ColorNames::skyblue, &Colors::skyblue }, + { ColorNames::slateblue, &Colors::slateblue }, + { ColorNames::slategray, &Colors::slategray }, + { ColorNames::slategrey, &Colors::slategrey }, + { ColorNames::snow, &Colors::snow }, + { ColorNames::springgreen, &Colors::springgreen }, + { ColorNames::steelblue, &Colors::steelblue }, + { ColorNames::tan, &Colors::tan }, + { ColorNames::teal, &Colors::teal }, + { ColorNames::thistle, &Colors::thistle }, + { ColorNames::tomato, &Colors::tomato }, + { ColorNames::turquoise, &Colors::turquoise }, + { ColorNames::violet, &Colors::violet }, + { ColorNames::wheat, &Colors::wheat }, + { ColorNames::white, &Colors::white }, + { ColorNames::whitesmoke, &Colors::whitesmoke }, + { ColorNames::yellow, &Colors::yellow }, + { ColorNames::yellowgreen, &Colors::yellowgreen }, + { ColorNames::rebeccapurple, &Colors::rebeccapurple }, + { ColorNames::transparent, &Colors::transparent } + }; + + Color_Ptr_Const name_to_color(const char* key) + { + return name_to_color(std::string(key)); + } + + Color_Ptr_Const name_to_color(const std::string& key) + { + // case insensitive lookup. 
See #2462 + std::string lower{key}; + std::transform(lower.begin(), lower.end(), lower.begin(), ::tolower); + + auto p = names_to_colors.find(lower.c_str()); + if (p != names_to_colors.end()) { + return p->second; + } + return 0; + } + + const char* color_to_name(const int key) + { + auto p = colors_to_names.find(key); + if (p != colors_to_names.end()) { + return p->second; + } + return 0; + } + + const char* color_to_name(const double key) + { + return color_to_name((int)key); + } + + const char* color_to_name(const Color& c) + { + double key = c.r() * 0x10000 + + c.g() * 0x100 + + c.b(); + return color_to_name(key); + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/color_maps.hpp b/mybulma/node_modules/node-sass/src/libsass/src/color_maps.hpp new file mode 100644 index 0000000..d4fd416 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/color_maps.hpp @@ -0,0 +1,331 @@ + +#ifndef SASS_COLOR_MAPS_H +#define SASS_COLOR_MAPS_H + +#include +#include "ast.hpp" + +namespace Sass { + + struct map_cmp_str + { + bool operator()(char const *a, char const *b) const + { + return std::strcmp(a, b) < 0; + } + }; + + namespace ColorNames + { + extern const char aliceblue[]; + extern const char antiquewhite[]; + extern const char cyan[]; + extern const char aqua[]; + extern const char aquamarine[]; + extern const char azure[]; + extern const char beige[]; + extern const char bisque[]; + extern const char black[]; + extern const char blanchedalmond[]; + extern const char blue[]; + extern const char blueviolet[]; + extern const char brown[]; + extern const char burlywood[]; + extern const char cadetblue[]; + extern const char chartreuse[]; + extern const char chocolate[]; + extern const char coral[]; + extern const char cornflowerblue[]; + extern const char cornsilk[]; + extern const char crimson[]; + extern const char darkblue[]; + extern const char darkcyan[]; + extern const char darkgoldenrod[]; + extern const char darkgray[]; + extern const char darkgrey[]; + extern const char darkgreen[]; + extern const char darkkhaki[]; + extern const char darkmagenta[]; + extern const char darkolivegreen[]; + extern const char darkorange[]; + extern const char darkorchid[]; + extern const char darkred[]; + extern const char darksalmon[]; + extern const char darkseagreen[]; + extern const char darkslateblue[]; + extern const char darkslategray[]; + extern const char darkslategrey[]; + extern const char darkturquoise[]; + extern const char darkviolet[]; + extern const char deeppink[]; + extern const char deepskyblue[]; + extern const char dimgray[]; + extern const char dimgrey[]; + extern const char dodgerblue[]; + extern const char firebrick[]; + extern const char floralwhite[]; + extern const char forestgreen[]; + extern const char magenta[]; + extern const char fuchsia[]; + extern const char gainsboro[]; + extern const char ghostwhite[]; + extern const char gold[]; + extern const char goldenrod[]; + extern const char gray[]; + extern const char grey[]; + extern const char green[]; + extern const char greenyellow[]; + extern const char honeydew[]; + extern const char hotpink[]; + extern const char indianred[]; + extern const char indigo[]; + extern const char ivory[]; + extern const char khaki[]; + extern const char lavender[]; + extern const char lavenderblush[]; + extern const char lawngreen[]; + extern const char lemonchiffon[]; + extern const char lightblue[]; + extern const char lightcoral[]; + extern const char lightcyan[]; + extern const char lightgoldenrodyellow[]; + extern 
const char lightgray[]; + extern const char lightgrey[]; + extern const char lightgreen[]; + extern const char lightpink[]; + extern const char lightsalmon[]; + extern const char lightseagreen[]; + extern const char lightskyblue[]; + extern const char lightslategray[]; + extern const char lightslategrey[]; + extern const char lightsteelblue[]; + extern const char lightyellow[]; + extern const char lime[]; + extern const char limegreen[]; + extern const char linen[]; + extern const char maroon[]; + extern const char mediumaquamarine[]; + extern const char mediumblue[]; + extern const char mediumorchid[]; + extern const char mediumpurple[]; + extern const char mediumseagreen[]; + extern const char mediumslateblue[]; + extern const char mediumspringgreen[]; + extern const char mediumturquoise[]; + extern const char mediumvioletred[]; + extern const char midnightblue[]; + extern const char mintcream[]; + extern const char mistyrose[]; + extern const char moccasin[]; + extern const char navajowhite[]; + extern const char navy[]; + extern const char oldlace[]; + extern const char olive[]; + extern const char olivedrab[]; + extern const char orange[]; + extern const char orangered[]; + extern const char orchid[]; + extern const char palegoldenrod[]; + extern const char palegreen[]; + extern const char paleturquoise[]; + extern const char palevioletred[]; + extern const char papayawhip[]; + extern const char peachpuff[]; + extern const char peru[]; + extern const char pink[]; + extern const char plum[]; + extern const char powderblue[]; + extern const char purple[]; + extern const char red[]; + extern const char rosybrown[]; + extern const char royalblue[]; + extern const char saddlebrown[]; + extern const char salmon[]; + extern const char sandybrown[]; + extern const char seagreen[]; + extern const char seashell[]; + extern const char sienna[]; + extern const char silver[]; + extern const char skyblue[]; + extern const char slateblue[]; + extern const char slategray[]; + extern const char slategrey[]; + extern const char snow[]; + extern const char springgreen[]; + extern const char steelblue[]; + extern const char tan[]; + extern const char teal[]; + extern const char thistle[]; + extern const char tomato[]; + extern const char turquoise[]; + extern const char violet[]; + extern const char wheat[]; + extern const char white[]; + extern const char whitesmoke[]; + extern const char yellow[]; + extern const char yellowgreen[]; + extern const char rebeccapurple[]; + extern const char transparent[]; + } + + namespace Colors { + extern const Color aliceblue; + extern const Color antiquewhite; + extern const Color cyan; + extern const Color aqua; + extern const Color aquamarine; + extern const Color azure; + extern const Color beige; + extern const Color bisque; + extern const Color black; + extern const Color blanchedalmond; + extern const Color blue; + extern const Color blueviolet; + extern const Color brown; + extern const Color burlywood; + extern const Color cadetblue; + extern const Color chartreuse; + extern const Color chocolate; + extern const Color coral; + extern const Color cornflowerblue; + extern const Color cornsilk; + extern const Color crimson; + extern const Color darkblue; + extern const Color darkcyan; + extern const Color darkgoldenrod; + extern const Color darkgray; + extern const Color darkgrey; + extern const Color darkgreen; + extern const Color darkkhaki; + extern const Color darkmagenta; + extern const Color darkolivegreen; + extern const Color darkorange; + extern const 
Color darkorchid; + extern const Color darkred; + extern const Color darksalmon; + extern const Color darkseagreen; + extern const Color darkslateblue; + extern const Color darkslategray; + extern const Color darkslategrey; + extern const Color darkturquoise; + extern const Color darkviolet; + extern const Color deeppink; + extern const Color deepskyblue; + extern const Color dimgray; + extern const Color dimgrey; + extern const Color dodgerblue; + extern const Color firebrick; + extern const Color floralwhite; + extern const Color forestgreen; + extern const Color magenta; + extern const Color fuchsia; + extern const Color gainsboro; + extern const Color ghostwhite; + extern const Color gold; + extern const Color goldenrod; + extern const Color gray; + extern const Color grey; + extern const Color green; + extern const Color greenyellow; + extern const Color honeydew; + extern const Color hotpink; + extern const Color indianred; + extern const Color indigo; + extern const Color ivory; + extern const Color khaki; + extern const Color lavender; + extern const Color lavenderblush; + extern const Color lawngreen; + extern const Color lemonchiffon; + extern const Color lightblue; + extern const Color lightcoral; + extern const Color lightcyan; + extern const Color lightgoldenrodyellow; + extern const Color lightgray; + extern const Color lightgrey; + extern const Color lightgreen; + extern const Color lightpink; + extern const Color lightsalmon; + extern const Color lightseagreen; + extern const Color lightskyblue; + extern const Color lightslategray; + extern const Color lightslategrey; + extern const Color lightsteelblue; + extern const Color lightyellow; + extern const Color lime; + extern const Color limegreen; + extern const Color linen; + extern const Color maroon; + extern const Color mediumaquamarine; + extern const Color mediumblue; + extern const Color mediumorchid; + extern const Color mediumpurple; + extern const Color mediumseagreen; + extern const Color mediumslateblue; + extern const Color mediumspringgreen; + extern const Color mediumturquoise; + extern const Color mediumvioletred; + extern const Color midnightblue; + extern const Color mintcream; + extern const Color mistyrose; + extern const Color moccasin; + extern const Color navajowhite; + extern const Color navy; + extern const Color oldlace; + extern const Color olive; + extern const Color olivedrab; + extern const Color orange; + extern const Color orangered; + extern const Color orchid; + extern const Color palegoldenrod; + extern const Color palegreen; + extern const Color paleturquoise; + extern const Color palevioletred; + extern const Color papayawhip; + extern const Color peachpuff; + extern const Color peru; + extern const Color pink; + extern const Color plum; + extern const Color powderblue; + extern const Color purple; + extern const Color red; + extern const Color rosybrown; + extern const Color royalblue; + extern const Color saddlebrown; + extern const Color salmon; + extern const Color sandybrown; + extern const Color seagreen; + extern const Color seashell; + extern const Color sienna; + extern const Color silver; + extern const Color skyblue; + extern const Color slateblue; + extern const Color slategray; + extern const Color slategrey; + extern const Color snow; + extern const Color springgreen; + extern const Color steelblue; + extern const Color tan; + extern const Color teal; + extern const Color thistle; + extern const Color tomato; + extern const Color turquoise; + extern const Color violet; + 
extern const Color wheat; + extern const Color white; + extern const Color whitesmoke; + extern const Color yellow; + extern const Color yellowgreen; + extern const Color rebeccapurple; + extern const Color transparent; + } + + Color_Ptr_Const name_to_color(const char*); + Color_Ptr_Const name_to_color(const std::string&); + const char* color_to_name(const int); + const char* color_to_name(const Color&); + const char* color_to_name(const double); + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/constants.cpp b/mybulma/node_modules/node-sass/src/libsass/src/constants.cpp new file mode 100644 index 0000000..0ba28e2 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/constants.cpp @@ -0,0 +1,179 @@ +#include "sass.hpp" +#include "constants.hpp" + +namespace Sass { + namespace Constants { + + extern const unsigned long MaxCallStack = 1024; + + // https://github.com/sass/libsass/issues/592 + // https://developer.mozilla.org/en-US/docs/Web/CSS/Specificity + // https://github.com/sass/sass/issues/1495#issuecomment-61189114 + extern const unsigned long Specificity_Star = 0; + extern const unsigned long Specificity_Universal = 0; + extern const unsigned long Specificity_Element = 1; + extern const unsigned long Specificity_Base = 1000; + extern const unsigned long Specificity_Class = 1000; + extern const unsigned long Specificity_Attr = 1000; + extern const unsigned long Specificity_Pseudo = 1000; + extern const unsigned long Specificity_ID = 1000000; + + // sass keywords + extern const char at_root_kwd[] = "@at-root"; + extern const char import_kwd[] = "@import"; + extern const char mixin_kwd[] = "@mixin"; + extern const char function_kwd[] = "@function"; + extern const char return_kwd[] = "@return"; + extern const char include_kwd[] = "@include"; + extern const char content_kwd[] = "@content"; + extern const char extend_kwd[] = "@extend"; + extern const char if_kwd[] = "@if"; + extern const char else_kwd[] = "@else"; + extern const char if_after_else_kwd[] = "if"; + extern const char for_kwd[] = "@for"; + extern const char from_kwd[] = "from"; + extern const char to_kwd[] = "to"; + extern const char through_kwd[] = "through"; + extern const char each_kwd[] = "@each"; + extern const char in_kwd[] = "in"; + extern const char while_kwd[] = "@while"; + extern const char warn_kwd[] = "@warn"; + extern const char error_kwd[] = "@error"; + extern const char debug_kwd[] = "@debug"; + extern const char default_kwd[] = "default"; + extern const char global_kwd[] = "global"; + extern const char null_kwd[] = "null"; + extern const char optional_kwd[] = "optional"; + extern const char with_kwd[] = "with"; + extern const char without_kwd[] = "without"; + extern const char all_kwd[] = "all"; + extern const char rule_kwd[] = "rule"; + + // css standard units + extern const char em_kwd[] = "em"; + extern const char ex_kwd[] = "ex"; + extern const char px_kwd[] = "px"; + extern const char cm_kwd[] = "cm"; + extern const char mm_kwd[] = "mm"; + extern const char pt_kwd[] = "pt"; + extern const char pc_kwd[] = "pc"; + extern const char deg_kwd[] = "deg"; + extern const char rad_kwd[] = "rad"; + extern const char grad_kwd[] = "grad"; + extern const char turn_kwd[] = "turn"; + extern const char ms_kwd[] = "ms"; + extern const char s_kwd[] = "s"; + extern const char Hz_kwd[] = "Hz"; + extern const char kHz_kwd[] = "kHz"; + + // vendor prefixes + extern const char vendor_opera_kwd[] = "-o-"; + extern const char vendor_webkit_kwd[] = "-webkit-"; + extern const char 
vendor_mozilla_kwd[] = "-moz-"; + extern const char vendor_ms_kwd[] = "-ms-"; + extern const char vendor_khtml_kwd[] = "-khtml-"; + + // css functions and keywords + extern const char charset_kwd[] = "@charset"; + extern const char media_kwd[] = "@media"; + extern const char supports_kwd[] = "@supports"; + extern const char keyframes_kwd[] = "keyframes"; + extern const char only_kwd[] = "only"; + extern const char rgb_fn_kwd[] = "rgb("; + extern const char url_fn_kwd[] = "url("; + extern const char url_kwd[] = "url"; + // extern const char url_prefix_fn_kwd[] = "url-prefix("; + extern const char important_kwd[] = "important"; + extern const char pseudo_not_fn_kwd[] = ":not("; + extern const char even_kwd[] = "even"; + extern const char odd_kwd[] = "odd"; + extern const char progid_kwd[] = "progid"; + extern const char expression_kwd[] = "expression"; + extern const char calc_fn_kwd[] = "calc"; + + extern const char almost_any_value_class[] = "\"'#!;{}"; + + // css selector keywords + extern const char sel_deep_kwd[] = "/deep/"; + + // css attribute-matching operators + extern const char tilde_equal[] = "~="; + extern const char pipe_equal[] = "|="; + extern const char caret_equal[] = "^="; + extern const char dollar_equal[] = "$="; + extern const char star_equal[] = "*="; + + // relational & logical operators and constants + extern const char and_kwd[] = "and"; + extern const char or_kwd[] = "or"; + extern const char not_kwd[] = "not"; + extern const char gt[] = ">"; + extern const char gte[] = ">="; + extern const char lt[] = "<"; + extern const char lte[] = "<="; + extern const char eq[] = "=="; + extern const char neq[] = "!="; + extern const char true_kwd[] = "true"; + extern const char false_kwd[] = "false"; + + // miscellaneous punctuation and delimiters + extern const char percent_str[] = "%"; + extern const char empty_str[] = ""; + extern const char slash_slash[] = "//"; + extern const char slash_star[] = "/*"; + extern const char star_slash[] = "*/"; + extern const char hash_lbrace[] = "#{"; + extern const char rbrace[] = "}"; + extern const char rparen[] = ")"; + extern const char sign_chars[] = "-+"; + extern const char op_chars[] = "-+"; + extern const char hyphen[] = "-"; + extern const char ellipsis[] = "..."; + // extern const char url_space_chars[] = " \t\r\n\f"; + // type names + extern const char numeric_name[] = "numeric value"; + extern const char number_name[] = "number"; + extern const char percentage_name[] = "percentage"; + extern const char dimension_name[] = "numeric dimension"; + extern const char string_name[] = "string"; + extern const char bool_name[] = "bool"; + extern const char color_name[] = "color"; + extern const char list_name[] = "list"; + extern const char map_name[] = "map"; + extern const char arglist_name[] = "arglist"; + + // constants for uri parsing (RFC 3986 Appendix A.) 
+ extern const char uri_chars[] = ":;/?!%&#@|[]{}'`^\"*+-.,_=~"; + extern const char real_uri_chars[] = "#%&"; + + // some specific constant character classes + // they must be static to be useable by lexer + extern const char static_ops[] = "*/%"; + // some character classes for the parser + extern const char selector_list_delims[] = "){};!"; + extern const char complex_selector_delims[] = ",){};!"; + extern const char selector_combinator_ops[] = "+~>"; + // optional modifiers for alternative compare context + extern const char attribute_compare_modifiers[] = "~|^$*"; + extern const char selector_lookahead_ops[] = "*&%,()[]"; + + // byte order marks + // (taken from http://en.wikipedia.org/wiki/Byte_order_mark) + extern const unsigned char utf_8_bom[] = { 0xEF, 0xBB, 0xBF }; + extern const unsigned char utf_16_bom_be[] = { 0xFE, 0xFF }; + extern const unsigned char utf_16_bom_le[] = { 0xFF, 0xFE }; + extern const unsigned char utf_32_bom_be[] = { 0x00, 0x00, 0xFE, 0xFF }; + extern const unsigned char utf_32_bom_le[] = { 0xFF, 0xFE, 0x00, 0x00 }; + extern const unsigned char utf_7_bom_1[] = { 0x2B, 0x2F, 0x76, 0x38 }; + extern const unsigned char utf_7_bom_2[] = { 0x2B, 0x2F, 0x76, 0x39 }; + extern const unsigned char utf_7_bom_3[] = { 0x2B, 0x2F, 0x76, 0x2B }; + extern const unsigned char utf_7_bom_4[] = { 0x2B, 0x2F, 0x76, 0x2F }; + extern const unsigned char utf_7_bom_5[] = { 0x2B, 0x2F, 0x76, 0x38, 0x2D }; + extern const unsigned char utf_1_bom[] = { 0xF7, 0x64, 0x4C }; + extern const unsigned char utf_ebcdic_bom[] = { 0xDD, 0x73, 0x66, 0x73 }; + extern const unsigned char scsu_bom[] = { 0x0E, 0xFE, 0xFF }; + extern const unsigned char bocu_1_bom[] = { 0xFB, 0xEE, 0x28 }; + extern const unsigned char gb_18030_bom[] = { 0x84, 0x31, 0x95, 0x33 }; + + } +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/constants.hpp b/mybulma/node_modules/node-sass/src/libsass/src/constants.hpp new file mode 100644 index 0000000..4fe9357 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/constants.hpp @@ -0,0 +1,181 @@ +#ifndef SASS_CONSTANTS_H +#define SASS_CONSTANTS_H + +namespace Sass { + namespace Constants { + + // The maximum call stack that can be created + extern const unsigned long MaxCallStack; + + // https://developer.mozilla.org/en-US/docs/Web/CSS/Specificity + // The following list of selectors is by increasing specificity: + extern const unsigned long Specificity_Star; + extern const unsigned long Specificity_Universal; + extern const unsigned long Specificity_Element; + extern const unsigned long Specificity_Base; + extern const unsigned long Specificity_Class; + extern const unsigned long Specificity_Attr; + extern const unsigned long Specificity_Pseudo; + extern const unsigned long Specificity_ID; + + // sass keywords + extern const char at_root_kwd[]; + extern const char import_kwd[]; + extern const char mixin_kwd[]; + extern const char function_kwd[]; + extern const char return_kwd[]; + extern const char include_kwd[]; + extern const char content_kwd[]; + extern const char extend_kwd[]; + extern const char if_kwd[]; + extern const char else_kwd[]; + extern const char if_after_else_kwd[]; + extern const char for_kwd[]; + extern const char from_kwd[]; + extern const char to_kwd[]; + extern const char through_kwd[]; + extern const char each_kwd[]; + extern const char in_kwd[]; + extern const char while_kwd[]; + extern const char warn_kwd[]; + extern const char error_kwd[]; + extern const char debug_kwd[]; + extern const char default_kwd[]; + extern const char 
global_kwd[]; + extern const char null_kwd[]; + extern const char optional_kwd[]; + extern const char with_kwd[]; + extern const char without_kwd[]; + extern const char all_kwd[]; + extern const char rule_kwd[]; + + // css standard units + extern const char em_kwd[]; + extern const char ex_kwd[]; + extern const char px_kwd[]; + extern const char cm_kwd[]; + extern const char mm_kwd[]; + extern const char pt_kwd[]; + extern const char pc_kwd[]; + extern const char deg_kwd[]; + extern const char rad_kwd[]; + extern const char grad_kwd[]; + extern const char turn_kwd[]; + extern const char ms_kwd[]; + extern const char s_kwd[]; + extern const char Hz_kwd[]; + extern const char kHz_kwd[]; + + // vendor prefixes + extern const char vendor_opera_kwd[]; + extern const char vendor_webkit_kwd[]; + extern const char vendor_mozilla_kwd[]; + extern const char vendor_ms_kwd[]; + extern const char vendor_khtml_kwd[]; + + // css functions and keywords + extern const char charset_kwd[]; + extern const char media_kwd[]; + extern const char supports_kwd[]; + extern const char keyframes_kwd[]; + extern const char only_kwd[]; + extern const char rgb_fn_kwd[]; + extern const char url_fn_kwd[]; + extern const char url_kwd[]; + // extern const char url_prefix_fn_kwd[]; + extern const char important_kwd[]; + extern const char pseudo_not_fn_kwd[]; + extern const char even_kwd[]; + extern const char odd_kwd[]; + extern const char progid_kwd[]; + extern const char expression_kwd[]; + extern const char calc_fn_kwd[]; + + // char classes for "regular expressions" + extern const char almost_any_value_class[]; + + // css selector keywords + extern const char sel_deep_kwd[]; + + // css attribute-matching operators + extern const char tilde_equal[]; + extern const char pipe_equal[]; + extern const char caret_equal[]; + extern const char dollar_equal[]; + extern const char star_equal[]; + + // relational & logical operators and constants + extern const char and_kwd[]; + extern const char or_kwd[]; + extern const char not_kwd[]; + extern const char gt[]; + extern const char gte[]; + extern const char lt[]; + extern const char lte[]; + extern const char eq[]; + extern const char neq[]; + extern const char true_kwd[]; + extern const char false_kwd[]; + + // miscellaneous punctuation and delimiters + extern const char percent_str[]; + extern const char empty_str[]; + extern const char slash_slash[]; + extern const char slash_star[]; + extern const char star_slash[]; + extern const char hash_lbrace[]; + extern const char rbrace[]; + extern const char rparen[]; + extern const char sign_chars[]; + extern const char op_chars[]; + extern const char hyphen[]; + extern const char ellipsis[]; + // extern const char url_space_chars[]; + + // type names + extern const char numeric_name[]; + extern const char number_name[]; + extern const char percentage_name[]; + extern const char dimension_name[]; + extern const char string_name[]; + extern const char bool_name[]; + extern const char color_name[]; + extern const char list_name[]; + extern const char map_name[]; + extern const char arglist_name[]; + + // constants for uri parsing (RFC 3986 Appendix A.) 
+ extern const char uri_chars[]; + extern const char real_uri_chars[]; + + // some specific constant character classes + // they must be static to be useable by lexer + extern const char static_ops[]; + extern const char selector_list_delims[]; + extern const char complex_selector_delims[]; + extern const char selector_combinator_ops[]; + extern const char attribute_compare_modifiers[]; + extern const char selector_lookahead_ops[]; + + // byte order marks + // (taken from http://en.wikipedia.org/wiki/Byte_order_mark) + extern const unsigned char utf_8_bom[]; + extern const unsigned char utf_16_bom_be[]; + extern const unsigned char utf_16_bom_le[]; + extern const unsigned char utf_32_bom_be[]; + extern const unsigned char utf_32_bom_le[]; + extern const unsigned char utf_7_bom_1[]; + extern const unsigned char utf_7_bom_2[]; + extern const unsigned char utf_7_bom_3[]; + extern const unsigned char utf_7_bom_4[]; + extern const unsigned char utf_7_bom_5[]; + extern const unsigned char utf_1_bom[]; + extern const unsigned char utf_ebcdic_bom[]; + extern const unsigned char scsu_bom[]; + extern const unsigned char bocu_1_bom[]; + extern const unsigned char gb_18030_bom[]; + + } +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/context.cpp b/mybulma/node_modules/node-sass/src/libsass/src/context.cpp new file mode 100644 index 0000000..dae2cbd --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/context.cpp @@ -0,0 +1,880 @@ +#include "sass.hpp" +#include +#include +#include +#include +#include +#include + +#include "ast.hpp" +#include "util.hpp" +#include "sass.h" +#include "context.hpp" +#include "plugins.hpp" +#include "constants.hpp" +#include "parser.hpp" +#include "file.hpp" +#include "inspect.hpp" +#include "output.hpp" +#include "expand.hpp" +#include "eval.hpp" +#include "check_nesting.hpp" +#include "cssize.hpp" +#include "listize.hpp" +#include "extend.hpp" +#include "remove_placeholders.hpp" +#include "functions.hpp" +#include "sass_functions.hpp" +#include "backtrace.hpp" +#include "sass2scss.h" +#include "prelexer.hpp" +#include "emitter.hpp" + +namespace Sass { + using namespace Constants; + using namespace File; + using namespace Sass; + + inline bool sort_importers (const Sass_Importer_Entry& i, const Sass_Importer_Entry& j) + { return sass_importer_get_priority(i) > sass_importer_get_priority(j); } + + static std::string safe_input(const char* in_path) + { + // enforce some safe defaults + // used to create relative file links + std::string safe_path(in_path ? in_path : ""); + return safe_path == "" ? "stdin" : safe_path; + } + + static std::string safe_output(const char* out_path, const std::string& input_path = "") + { + std::string safe_path(out_path ? out_path : ""); + // maybe we can extract an output path from input path + if (safe_path == "" && input_path != "") { + int lastindex = static_cast(input_path.find_last_of(".")); + return (lastindex > -1 ? input_path.substr(0, lastindex) : input_path) + ".css"; + } + // enforce some safe defaults + // used to create relative file links + return safe_path == "" ? 
"stdout" : safe_path; + } + + Context::Context(struct Sass_Context& c_ctx) + : CWD(File::get_cwd()), + c_options(c_ctx), + entry_path(""), + head_imports(0), + plugins(), + emitter(c_options), + + ast_gc(), + strings(), + resources(), + sheets(), + subset_map(), + import_stack(), + callee_stack(), + traces(), + c_compiler(NULL), + + c_headers (std::vector()), + c_importers (std::vector()), + c_functions (std::vector()), + + indent (safe_str(c_options.indent, " ")), + linefeed (safe_str(c_options.linefeed, "\n")), + + input_path (make_canonical_path(safe_input(c_options.input_path))), + output_path (make_canonical_path(safe_output(c_options.output_path, input_path))), + source_map_file (make_canonical_path(safe_str(c_options.source_map_file, ""))), + source_map_root (make_canonical_path(safe_str(c_options.source_map_root, ""))) + + { + + // Sass 3.4: The current working directory will no longer be placed onto the Sass load path by default. + // If you need the current working directory to be available, set SASS_PATH=. in your shell's environment. + // include_paths.push_back(CWD); + + // collect more paths from different options + collect_include_paths(c_options.include_path); + collect_include_paths(c_options.include_paths); + collect_plugin_paths(c_options.plugin_path); + collect_plugin_paths(c_options.plugin_paths); + + // load plugins and register custom behaviors + for(auto plug : plugin_paths) plugins.load_plugins(plug); + for(auto fn : plugins.get_headers()) c_headers.push_back(fn); + for(auto fn : plugins.get_importers()) c_importers.push_back(fn); + for(auto fn : plugins.get_functions()) c_functions.push_back(fn); + + // sort the items by priority (lowest first) + sort (c_headers.begin(), c_headers.end(), sort_importers); + sort (c_importers.begin(), c_importers.end(), sort_importers); + + emitter.set_filename(abs2rel(output_path, source_map_file, CWD)); + + } + + void Context::add_c_function(Sass_Function_Entry function) + { + c_functions.push_back(function); + } + void Context::add_c_header(Sass_Importer_Entry header) + { + c_headers.push_back(header); + // need to sort the array afterwards (no big deal) + sort (c_headers.begin(), c_headers.end(), sort_importers); + } + void Context::add_c_importer(Sass_Importer_Entry importer) + { + c_importers.push_back(importer); + // need to sort the array afterwards (no big deal) + sort (c_importers.begin(), c_importers.end(), sort_importers); + } + + Context::~Context() + { + // resources were allocated by malloc + for (size_t i = 0; i < resources.size(); ++i) { + free(resources[i].contents); + free(resources[i].srcmap); + } + // free all strings we kept alive during compiler execution + for (size_t n = 0; n < strings.size(); ++n) free(strings[n]); + // everything that gets put into sources will be freed by us + // this shouldn't have anything in it anyway!? + for (size_t m = 0; m < import_stack.size(); ++m) { + sass_import_take_source(import_stack[m]); + sass_import_take_srcmap(import_stack[m]); + sass_delete_import(import_stack[m]); + } + // clear inner structures (vectors) and input source + resources.clear(); import_stack.clear(); + subset_map.clear(), sheets.clear(); + } + + Data_Context::~Data_Context() + { + // --> this will be freed by resources + // make sure we free the source even if not processed! 
+ // if (resources.size() == 0 && source_c_str) free(source_c_str); + // if (resources.size() == 0 && srcmap_c_str) free(srcmap_c_str); + // source_c_str = 0; srcmap_c_str = 0; + } + + File_Context::~File_Context() + { + } + + void Context::collect_include_paths(const char* paths_str) + { + if (paths_str) { + const char* beg = paths_str; + const char* end = Prelexer::find_first(beg); + + while (end) { + std::string path(beg, end - beg); + if (!path.empty()) { + if (*path.rbegin() != '/') path += '/'; + include_paths.push_back(path); + } + beg = end + 1; + end = Prelexer::find_first(beg); + } + + std::string path(beg); + if (!path.empty()) { + if (*path.rbegin() != '/') path += '/'; + include_paths.push_back(path); + } + } + } + + void Context::collect_include_paths(string_list* paths_array) + { + while (paths_array) + { + collect_include_paths(paths_array->string); + paths_array = paths_array->next; + } + } + + void Context::collect_plugin_paths(const char* paths_str) + { + if (paths_str) { + const char* beg = paths_str; + const char* end = Prelexer::find_first(beg); + + while (end) { + std::string path(beg, end - beg); + if (!path.empty()) { + if (*path.rbegin() != '/') path += '/'; + plugin_paths.push_back(path); + } + beg = end + 1; + end = Prelexer::find_first(beg); + } + + std::string path(beg); + if (!path.empty()) { + if (*path.rbegin() != '/') path += '/'; + plugin_paths.push_back(path); + } + } + } + + void Context::collect_plugin_paths(string_list* paths_array) + { + while (paths_array) + { + collect_plugin_paths(paths_array->string); + paths_array = paths_array->next; + } + } + + // resolve the imp_path in base_path or include_paths + // looks for alternatives and returns a list from one directory + std::vector Context::find_includes(const Importer& import) + { + // make sure we resolve against an absolute path + std::string base_path(rel2abs(import.base_path)); + // first try to resolve the load path relative to the base path + std::vector vec(resolve_includes(base_path, import.imp_path)); + // then search in every include path (but only if nothing found yet) + for (size_t i = 0, S = include_paths.size(); vec.size() == 0 && i < S; ++i) + { + // call resolve_includes and individual base path and append all results + std::vector resolved(resolve_includes(include_paths[i], import.imp_path)); + if (resolved.size()) vec.insert(vec.end(), resolved.begin(), resolved.end()); + } + // return vector + return vec; + } + + // register include with resolved path and its content + // memory of the resources will be freed by us on exit + void Context::register_resource(const Include& inc, const Resource& res) + { + + // do not parse same resource twice + // maybe raise an error in this case + // if (sheets.count(inc.abs_path)) { + // free(res.contents); free(res.srcmap); + // throw std::runtime_error("duplicate resource registered"); + // return; + // } + + // get index for this resource + size_t idx = resources.size(); + + // tell emitter about new resource + emitter.add_source_index(idx); + + // put resources under our control + // the memory will be freed later + resources.push_back(res); + + // add a relative link to the working directory + included_files.push_back(inc.abs_path); + // add a relative link to the source map output file + srcmap_links.push_back(abs2rel(inc.abs_path, source_map_file, CWD)); + + // get pointer to the loaded content + Sass_Import_Entry import = sass_make_import( + inc.imp_path.c_str(), + inc.abs_path.c_str(), + res.contents, + res.srcmap + ); + // add the entry 
to the stack + import_stack.push_back(import); + + // get pointer to the loaded content + const char* contents = resources[idx].contents; + // keep a copy of the path around (for parserstates) + // ToDo: we clean it, but still not very elegant!? + strings.push_back(sass_copy_c_string(inc.abs_path.c_str())); + // create the initial parser state from resource + ParserState pstate(strings.back(), contents, idx); + + // check existing import stack for possible recursion + for (size_t i = 0; i < import_stack.size() - 2; ++i) { + auto parent = import_stack[i]; + if (std::strcmp(parent->abs_path, import->abs_path) == 0) { + std::string cwd(File::get_cwd()); + // make path relative to the current directory + std::string stack("An @import loop has been found:"); + for (size_t n = 1; n < i + 2; ++n) { + stack += "\n " + std::string(File::abs2rel(import_stack[n]->abs_path, cwd, cwd)) + + " imports " + std::string(File::abs2rel(import_stack[n+1]->abs_path, cwd, cwd)); + } + // implement error throw directly until we + // decided how to handle full stack traces + throw Exception::InvalidSyntax(pstate, traces, stack); + // error(stack, prstate ? *prstate : pstate, import_stack); + } + } + + // create a parser instance from the given c_str buffer + Parser p(Parser::from_c_str(contents, *this, traces, pstate)); + // do not yet dispose these buffers + sass_import_take_source(import); + sass_import_take_srcmap(import); + // then parse the root block + Block_Obj root = p.parse(); + // delete memory of current stack frame + sass_delete_import(import_stack.back()); + // remove current stack frame + import_stack.pop_back(); + // create key/value pair for ast node + std::pair + ast_pair(inc.abs_path, { res, root }); + // register resulting resource + sheets.insert(ast_pair); + } + + // register include with resolved path and its content + // memory of the resources will be freed by us on exit + void Context::register_resource(const Include& inc, const Resource& res, ParserState& prstate) + { + traces.push_back(Backtrace(prstate)); + register_resource(inc, res); + traces.pop_back(); + } + + // Add a new import to the context (called from `import_url`) + Include Context::load_import(const Importer& imp, ParserState pstate) + { + + // search for valid imports (ie. partials) on the filesystem + // this may return more than one valid result (ambiguous imp_path) + const std::vector resolved(find_includes(imp)); + + // error nicely on ambiguous imp_path + if (resolved.size() > 1) { + std::stringstream msg_stream; + msg_stream << "It's not clear which file to import for "; + msg_stream << "'@import \"" << imp.imp_path << "\"'." << "\n"; + msg_stream << "Candidates:" << "\n"; + for (size_t i = 0, L = resolved.size(); i < L; ++i) + { msg_stream << " " << resolved[i].imp_path << "\n"; } + msg_stream << "Please delete or rename all but one of these files." << "\n"; + error(msg_stream.str(), pstate, traces); + } + + // process the resolved entry + else if (resolved.size() == 1) { + bool use_cache = c_importers.size() == 0; + // use cache for the resource loading + if (use_cache && sheets.count(resolved[0].abs_path)) return resolved[0]; + // try to read the content of the resolved file entry + // the memory buffer returned must be freed by us! 
+ if (char* contents = read_file(resolved[0].abs_path)) { + // register the newly resolved file resource + register_resource(resolved[0], { contents, 0 }, pstate); + // return resolved entry + return resolved[0]; + } + } + + // nothing found + return { imp, "" }; + + } + + void Context::import_url (Import_Ptr imp, std::string load_path, const std::string& ctx_path) { + + ParserState pstate(imp->pstate()); + std::string imp_path(unquote(load_path)); + std::string protocol("file"); + + using namespace Prelexer; + if (const char* proto = sequence< identifier, exactly<':'>, exactly<'/'>, exactly<'/'> >(imp_path.c_str())) { + + protocol = std::string(imp_path.c_str(), proto - 3); + // if (protocol.compare("file") && true) { } + } + + // add urls (protocol other than file) and urls without procotol to `urls` member + // ToDo: if ctx_path is already a file resource, we should not add it here? + if (imp->import_queries() || protocol != "file" || imp_path.substr(0, 2) == "//") { + imp->urls().push_back(SASS_MEMORY_NEW(String_Quoted, imp->pstate(), load_path)); + } + else if (imp_path.length() > 4 && imp_path.substr(imp_path.length() - 4, 4) == ".css") { + String_Constant_Ptr loc = SASS_MEMORY_NEW(String_Constant, pstate, unquote(load_path)); + Argument_Obj loc_arg = SASS_MEMORY_NEW(Argument, pstate, loc); + Arguments_Obj loc_args = SASS_MEMORY_NEW(Arguments, pstate); + loc_args->append(loc_arg); + Function_Call_Ptr new_url = SASS_MEMORY_NEW(Function_Call, pstate, "url", loc_args); + imp->urls().push_back(new_url); + } + else { + const Importer importer(imp_path, ctx_path); + Include include(load_import(importer, pstate)); + if (include.abs_path.empty()) { + error("File to import not found or unreadable: " + imp_path + ".", pstate, traces); + } + imp->incs().push_back(include); + } + + } + + + // call custom importers on the given (unquoted) load_path and eventually parse the resulting style_sheet + bool Context::call_loader(const std::string& load_path, const char* ctx_path, ParserState& pstate, Import_Ptr imp, std::vector importers, bool only_one) + { + // unique counter + size_t count = 0; + // need one correct import + bool has_import = false; + // process all custom importers (or custom headers) + for (Sass_Importer_Entry& importer_ent : importers) { + // int priority = sass_importer_get_priority(importer); + Sass_Importer_Fn fn = sass_importer_get_function(importer_ent); + // skip importer if it returns NULL + if (Sass_Import_List includes = + fn(load_path.c_str(), importer_ent, c_compiler) + ) { + // get c pointer copy to iterate over + Sass_Import_List it_includes = includes; + while (*it_includes) { ++count; + // create unique path to use as key + std::string uniq_path = load_path; + if (!only_one && count) { + std::stringstream path_strm; + path_strm << uniq_path << ":" << count; + uniq_path = path_strm.str(); + } + // create the importer struct + Importer importer(uniq_path, ctx_path); + // query data from the current include + Sass_Import_Entry include_ent = *it_includes; + char* source = sass_import_take_source(include_ent); + char* srcmap = sass_import_take_srcmap(include_ent); + size_t line = sass_import_get_error_line(include_ent); + size_t column = sass_import_get_error_column(include_ent); + const char *abs_path = sass_import_get_abs_path(include_ent); + // handle error message passed back from custom importer + // it may (or may not) override the line and column info + if (const char* err_message = sass_import_get_error_message(include_ent)) { + if (source || srcmap) 
register_resource({ importer, uniq_path }, { source, srcmap }, pstate); + if (line == std::string::npos && column == std::string::npos) error(err_message, pstate, traces); + else error(err_message, ParserState(ctx_path, source, Position(line, column)), traces); + } + // content for import was set + else if (source) { + // resolved abs_path should be set by custom importer + // use the created uniq_path as fallback (maybe enforce) + std::string path_key(abs_path ? abs_path : uniq_path); + // create the importer struct + Include include(importer, path_key); + // attach information to AST node + imp->incs().push_back(include); + // register the resource buffers + register_resource(include, { source, srcmap }, pstate); + } + // only a path was retuned + // try to load it like normal + else if(abs_path) { + // checks some urls to preserve + // `http://`, `https://` and `//` + // or dispatchs to `import_file` + // which will check for a `.css` extension + // or resolves the file on the filesystem + // added and resolved via `add_file` + // finally stores everything on `imp` + import_url(imp, abs_path, ctx_path); + } + // move to next + ++it_includes; + } + // deallocate the returned memory + sass_delete_import_list(includes); + // set success flag + has_import = true; + // break out of loop + if (only_one) break; + } + } + // return result + return has_import; + } + + void register_function(Context&, Signature sig, Native_Function f, Env* env); + void register_function(Context&, Signature sig, Native_Function f, size_t arity, Env* env); + void register_overload_stub(Context&, std::string name, Env* env); + void register_built_in_functions(Context&, Env* env); + void register_c_functions(Context&, Env* env, Sass_Function_List); + void register_c_function(Context&, Env* env, Sass_Function_Entry); + + char* Context::render(Block_Obj root) + { + // check for valid block + if (!root) return 0; + // start the render process + root->perform(&emitter); + // finish emitter stream + emitter.finalize(); + // get the resulting buffer from stream + OutputBuffer emitted = emitter.get_buffer(); + // should we append a source map url? 
+ if (!c_options.omit_source_map_url) { + // generate an embeded source map + if (c_options.source_map_embed) { + emitted.buffer += linefeed; + emitted.buffer += format_embedded_source_map(); + } + // or just link the generated one + else if (source_map_file != "") { + emitted.buffer += linefeed; + emitted.buffer += format_source_mapping_url(source_map_file); + } + } + // create a copy of the resulting buffer string + // this must be freed or taken over by implementor + return sass_copy_c_string(emitted.buffer.c_str()); + } + + void Context::apply_custom_headers(Block_Obj root, const char* ctx_path, ParserState pstate) + { + // create a custom import to resolve headers + Import_Obj imp = SASS_MEMORY_NEW(Import, pstate); + // dispatch headers which will add custom functions + // custom headers are added to the import instance + call_headers(entry_path, ctx_path, pstate, imp); + // increase head count to skip later + head_imports += resources.size() - 1; + // add the statement if we have urls + if (!imp->urls().empty()) root->append(imp); + // process all other resources (add Import_Stub nodes) + for (size_t i = 0, S = imp->incs().size(); i < S; ++i) { + root->append(SASS_MEMORY_NEW(Import_Stub, pstate, imp->incs()[i])); + } + } + + Block_Obj File_Context::parse() + { + + // check if entry file is given + if (input_path.empty()) return 0; + + // create absolute path from input filename + // ToDo: this should be resolved via custom importers + std::string abs_path(rel2abs(input_path, CWD)); + + // try to load the entry file + char* contents = read_file(abs_path); + + // alternatively also look inside each include path folder + // I think this differs from ruby sass (IMO too late to remove) + for (size_t i = 0, S = include_paths.size(); contents == 0 && i < S; ++i) { + // build absolute path for this include path entry + abs_path = rel2abs(input_path, include_paths[i]); + // try to load the resulting path + contents = read_file(abs_path); + } + + // abort early if no content could be loaded (various reasons) + if (!contents) throw std::runtime_error("File to read not found or unreadable: " + input_path); + + // store entry path + entry_path = abs_path; + + // create entry only for import stack + Sass_Import_Entry import = sass_make_import( + input_path.c_str(), + entry_path.c_str(), + contents, + 0 + ); + // add the entry to the stack + import_stack.push_back(import); + + // create the source entry for file entry + register_resource({{ input_path, "." }, abs_path }, { contents, 0 }); + + // create root ast tree node + return compile(); + + } + + Block_Obj Data_Context::parse() + { + + // check if source string is given + if (!source_c_str) return 0; + + // convert indented sass syntax + if(c_options.is_indented_syntax_src) { + // call sass2scss to convert the string + char * converted = sass2scss(source_c_str, + // preserve the structure as much as possible + SASS2SCSS_PRETTIFY_1 | SASS2SCSS_KEEP_COMMENT); + // replace old source_c_str with converted + free(source_c_str); source_c_str = converted; + } + + // remember entry path (defaults to stdin for string) + entry_path = input_path.empty() ? 
"stdin" : input_path; + + // ToDo: this may be resolved via custom importers + std::string abs_path(rel2abs(entry_path)); + char* abs_path_c_str = sass_copy_c_string(abs_path.c_str()); + strings.push_back(abs_path_c_str); + + // create entry only for the import stack + Sass_Import_Entry import = sass_make_import( + entry_path.c_str(), + abs_path_c_str, + source_c_str, + srcmap_c_str + ); + // add the entry to the stack + import_stack.push_back(import); + + // register a synthetic resource (path does not really exist, skip in includes) + register_resource({{ input_path, "." }, input_path }, { source_c_str, srcmap_c_str }); + + // create root ast tree node + return compile(); + } + + + + // parse root block from includes + Block_Obj Context::compile() + { + // abort if there is no data + if (resources.size() == 0) return 0; + // get root block from the first style sheet + Block_Obj root = sheets.at(entry_path).root; + // abort on invalid root + if (root.isNull()) return 0; + Env global; // create root environment + // register built-in functions on env + register_built_in_functions(*this, &global); + // register custom functions (defined via C-API) + for (size_t i = 0, S = c_functions.size(); i < S; ++i) + { register_c_function(*this, &global, c_functions[i]); } + // create initial backtrace entry + // create crtp visitor objects + Expand expand(*this, &global); + Cssize cssize(*this); + CheckNesting check_nesting; + // check nesting in all files + for (auto sheet : sheets) { + auto styles = sheet.second; + check_nesting(styles.root); + } + // expand and eval the tree + root = expand(root); + // check nesting + check_nesting(root); + // merge and bubble certain rules + root = cssize(root); + // should we extend something? + if (!subset_map.empty()) { + // create crtp visitor object + Extend extend(subset_map); + extend.setEval(expand.eval); + // extend tree nodes + extend(root); + } + + // clean up by removing empty placeholders + // ToDo: maybe we can do this somewhere else? + Remove_Placeholders remove_placeholders; + root->perform(&remove_placeholders); + // return processed tree + return root; + } + // EO compile + + std::string Context::format_embedded_source_map() + { + std::string map = emitter.render_srcmap(*this); + std::istringstream is( map ); + std::ostringstream buffer; + base64::encoder E; + E.encode(is, buffer); + std::string url = "data:application/json;base64," + buffer.str(); + url.erase(url.size() - 1); + return "/*# sourceMappingURL=" + url + " */"; + } + + std::string Context::format_source_mapping_url(const std::string& file) + { + std::string url = abs2rel(file, output_path, CWD); + return "/*# sourceMappingURL=" + url + " */"; + } + + char* Context::render_srcmap() + { + if (source_map_file == "") return 0; + std::string map = emitter.render_srcmap(*this); + return sass_copy_c_string(map.c_str()); + } + + + // for data context we want to start after "stdin" + // we probably always want to skip the header includes? + std::vector Context::get_included_files(bool skip, size_t headers) + { + // create a copy of the vector for manipulations + std::vector includes = included_files; + if (includes.size() == 0) return includes; + if (skip) { includes.erase( includes.begin(), includes.begin() + 1 + headers); } + else { includes.erase( includes.begin() + 1, includes.begin() + 1 + headers); } + includes.erase( std::unique( includes.begin(), includes.end() ), includes.end() ); + std::sort( includes.begin() + (skip ? 
0 : 1), includes.end() ); + return includes; + } + + void register_function(Context& ctx, Signature sig, Native_Function f, Env* env) + { + Definition_Ptr def = make_native_function(sig, f, ctx); + def->environment(env); + (*env)[def->name() + "[f]"] = def; + } + + void register_function(Context& ctx, Signature sig, Native_Function f, size_t arity, Env* env) + { + Definition_Ptr def = make_native_function(sig, f, ctx); + std::stringstream ss; + ss << def->name() << "[f]" << arity; + def->environment(env); + (*env)[ss.str()] = def; + } + + void register_overload_stub(Context& ctx, std::string name, Env* env) + { + Definition_Ptr stub = SASS_MEMORY_NEW(Definition, + ParserState("[built-in function]"), + 0, + name, + 0, + 0, + true); + (*env)[name + "[f]"] = stub; + } + + + void register_built_in_functions(Context& ctx, Env* env) + { + using namespace Functions; + // RGB Functions + register_function(ctx, rgb_sig, rgb, env); + register_overload_stub(ctx, "rgba", env); + register_function(ctx, rgba_4_sig, rgba_4, 4, env); + register_function(ctx, rgba_2_sig, rgba_2, 2, env); + register_function(ctx, red_sig, red, env); + register_function(ctx, green_sig, green, env); + register_function(ctx, blue_sig, blue, env); + register_function(ctx, mix_sig, mix, env); + // HSL Functions + register_function(ctx, hsl_sig, hsl, env); + register_function(ctx, hsla_sig, hsla, env); + register_function(ctx, hue_sig, hue, env); + register_function(ctx, saturation_sig, saturation, env); + register_function(ctx, lightness_sig, lightness, env); + register_function(ctx, adjust_hue_sig, adjust_hue, env); + register_function(ctx, lighten_sig, lighten, env); + register_function(ctx, darken_sig, darken, env); + register_function(ctx, saturate_sig, saturate, env); + register_function(ctx, desaturate_sig, desaturate, env); + register_function(ctx, grayscale_sig, grayscale, env); + register_function(ctx, complement_sig, complement, env); + register_function(ctx, invert_sig, invert, env); + // Opacity Functions + register_function(ctx, alpha_sig, alpha, env); + register_function(ctx, opacity_sig, alpha, env); + register_function(ctx, opacify_sig, opacify, env); + register_function(ctx, fade_in_sig, opacify, env); + register_function(ctx, transparentize_sig, transparentize, env); + register_function(ctx, fade_out_sig, transparentize, env); + // Other Color Functions + register_function(ctx, adjust_color_sig, adjust_color, env); + register_function(ctx, scale_color_sig, scale_color, env); + register_function(ctx, change_color_sig, change_color, env); + register_function(ctx, ie_hex_str_sig, ie_hex_str, env); + // String Functions + register_function(ctx, unquote_sig, sass_unquote, env); + register_function(ctx, quote_sig, sass_quote, env); + register_function(ctx, str_length_sig, str_length, env); + register_function(ctx, str_insert_sig, str_insert, env); + register_function(ctx, str_index_sig, str_index, env); + register_function(ctx, str_slice_sig, str_slice, env); + register_function(ctx, to_upper_case_sig, to_upper_case, env); + register_function(ctx, to_lower_case_sig, to_lower_case, env); + // Number Functions + register_function(ctx, percentage_sig, percentage, env); + register_function(ctx, round_sig, round, env); + register_function(ctx, ceil_sig, ceil, env); + register_function(ctx, floor_sig, floor, env); + register_function(ctx, abs_sig, abs, env); + register_function(ctx, min_sig, min, env); + register_function(ctx, max_sig, max, env); + register_function(ctx, random_sig, random, env); + // List Functions + 
register_function(ctx, length_sig, length, env); + register_function(ctx, nth_sig, nth, env); + register_function(ctx, set_nth_sig, set_nth, env); + register_function(ctx, index_sig, index, env); + register_function(ctx, join_sig, join, env); + register_function(ctx, append_sig, append, env); + register_function(ctx, zip_sig, zip, env); + register_function(ctx, list_separator_sig, list_separator, env); + register_function(ctx, is_bracketed_sig, is_bracketed, env); + // Map Functions + register_function(ctx, map_get_sig, map_get, env); + register_function(ctx, map_merge_sig, map_merge, env); + register_function(ctx, map_remove_sig, map_remove, env); + register_function(ctx, map_keys_sig, map_keys, env); + register_function(ctx, map_values_sig, map_values, env); + register_function(ctx, map_has_key_sig, map_has_key, env); + register_function(ctx, keywords_sig, keywords, env); + // Introspection Functions + register_function(ctx, type_of_sig, type_of, env); + register_function(ctx, unit_sig, unit, env); + register_function(ctx, unitless_sig, unitless, env); + register_function(ctx, comparable_sig, comparable, env); + register_function(ctx, variable_exists_sig, variable_exists, env); + register_function(ctx, global_variable_exists_sig, global_variable_exists, env); + register_function(ctx, function_exists_sig, function_exists, env); + register_function(ctx, mixin_exists_sig, mixin_exists, env); + register_function(ctx, feature_exists_sig, feature_exists, env); + register_function(ctx, call_sig, call, env); + register_function(ctx, content_exists_sig, content_exists, env); + register_function(ctx, get_function_sig, get_function, env); + // Boolean Functions + register_function(ctx, not_sig, sass_not, env); + register_function(ctx, if_sig, sass_if, env); + // Misc Functions + register_function(ctx, inspect_sig, inspect, env); + register_function(ctx, unique_id_sig, unique_id, env); + // Selector functions + register_function(ctx, selector_nest_sig, selector_nest, env); + register_function(ctx, selector_append_sig, selector_append, env); + register_function(ctx, selector_extend_sig, selector_extend, env); + register_function(ctx, selector_replace_sig, selector_replace, env); + register_function(ctx, selector_unify_sig, selector_unify, env); + register_function(ctx, is_superselector_sig, is_superselector, env); + register_function(ctx, simple_selectors_sig, simple_selectors, env); + register_function(ctx, selector_parse_sig, selector_parse, env); + } + + void register_c_functions(Context& ctx, Env* env, Sass_Function_List descrs) + { + while (descrs && *descrs) { + register_c_function(ctx, env, *descrs); + ++descrs; + } + } + void register_c_function(Context& ctx, Env* env, Sass_Function_Entry descr) + { + Definition_Ptr def = make_c_function(descr, ctx); + def->environment(env); + (*env)[def->name() + "[f]"] = def; + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/context.hpp b/mybulma/node_modules/node-sass/src/libsass/src/context.hpp new file mode 100644 index 0000000..d3caba1 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/context.hpp @@ -0,0 +1,152 @@ +#ifndef SASS_CONTEXT_H +#define SASS_CONTEXT_H + +#include +#include +#include + +#define BUFFERSIZE 255 +#include "b64/encode.h" + +#include "ast_fwd_decl.hpp" +#include "kwd_arg_macros.hpp" +#include "ast_fwd_decl.hpp" +#include "sass_context.hpp" +#include "environment.hpp" +#include "source_map.hpp" +#include "subset_map.hpp" +#include "backtrace.hpp" +#include "output.hpp" +#include "plugins.hpp" 
+#include "file.hpp"
+
+
+struct Sass_Function;
+
+namespace Sass {
+
+  class Context {
+  public:
+    void import_url (Import_Ptr imp, std::string load_path, const std::string& ctx_path);
+    bool call_headers(const std::string& load_path, const char* ctx_path, ParserState& pstate, Import_Ptr imp)
+    { return call_loader(load_path, ctx_path, pstate, imp, c_headers, false); };
+    bool call_importers(const std::string& load_path, const char* ctx_path, ParserState& pstate, Import_Ptr imp)
+    { return call_loader(load_path, ctx_path, pstate, imp, c_importers, true); };
+
+  private:
+    bool call_loader(const std::string& load_path, const char* ctx_path, ParserState& pstate, Import_Ptr imp, std::vector<Sass_Importer_Entry> importers, bool only_one = true);
+
+  public:
+    const std::string CWD;
+    struct Sass_Options& c_options;
+    std::string entry_path;
+    size_t head_imports;
+    Plugins plugins;
+    Output emitter;
+
+    // generic ast node garbage container
+    // used to avoid possible circular refs
+    std::vector<AST_Node_Obj> ast_gc;
+    // resources add under our control
+    // these are guaranteed to be freed
+    std::vector<char*> strings;
+    std::vector<Resource> resources;
+    std::map<const std::string, StyleSheet> sheets;
+    Subset_Map subset_map;
+    std::vector<Sass_Import_Entry> import_stack;
+    std::vector<Sass_Callee> callee_stack;
+    std::vector<Backtrace> traces;
+
+    struct Sass_Compiler* c_compiler;
+
+    // absolute paths to includes
+    std::vector<std::string> included_files;
+    // relative includes for sourcemap
+    std::vector<std::string> srcmap_links;
+    // vectors above have same size
+
+    std::vector<std::string> plugin_paths; // relative paths to load plugins
+    std::vector<std::string> include_paths; // lookup paths for includes
+
+
+
+
+
+    void apply_custom_headers(Block_Obj root, const char* path, ParserState pstate);
+
+    std::vector<Sass_Importer_Entry> c_headers;
+    std::vector<Sass_Importer_Entry> c_importers;
+    std::vector<Sass_Function_Entry> c_functions;
+
+    void add_c_header(Sass_Importer_Entry header);
+    void add_c_importer(Sass_Importer_Entry importer);
+    void add_c_function(Sass_Function_Entry function);
+
+    const std::string indent; // String to be used for indentation
+    const std::string linefeed; // String to be used for line feeds
+    const std::string input_path; // for relative paths in src-map
+    const std::string output_path; // for relative paths to the output
+    const std::string source_map_file; // path to source map file (enables feature)
+    const std::string source_map_root; // path for sourceRoot property (pass-through)
+
+    virtual ~Context();
+    Context(struct Sass_Context&);
+    virtual Block_Obj parse() = 0;
+    virtual Block_Obj compile();
+    virtual char* render(Block_Obj root);
+    virtual char* render_srcmap();
+
+    void register_resource(const Include&, const Resource&);
+    void register_resource(const Include&, const Resource&, ParserState&);
+    std::vector<Include> find_includes(const Importer& import);
+    Include load_import(const Importer&, ParserState pstate);
+
+    Sass_Output_Style output_style() { return c_options.output_style; };
+    std::vector<std::string> get_included_files(bool skip = false, size_t headers = 0);
+
+  private:
+    void collect_plugin_paths(const char* paths_str);
+    void collect_plugin_paths(string_list* paths_array);
+    void collect_include_paths(const char* paths_str);
+    void collect_include_paths(string_list* paths_array);
+    std::string format_embedded_source_map();
+    std::string format_source_mapping_url(const std::string& out_path);
+
+
+    // void register_built_in_functions(Env* env);
+    // void register_function(Signature sig, Native_Function f, Env* env);
+    // void register_function(Signature sig, Native_Function f, size_t arity, Env* env);
+    // void register_overload_stub(std::string name, Env* env);
+
+  public:
+    const
std::string& cwd() { return CWD; }; + }; + + class File_Context : public Context { + public: + File_Context(struct Sass_File_Context& ctx) + : Context(ctx) + { } + virtual ~File_Context(); + virtual Block_Obj parse(); + }; + + class Data_Context : public Context { + public: + char* source_c_str; + char* srcmap_c_str; + Data_Context(struct Sass_Data_Context& ctx) + : Context(ctx) + { + source_c_str = ctx.source_string; + srcmap_c_str = ctx.srcmap_string; + ctx.source_string = 0; // passed away + ctx.srcmap_string = 0; // passed away + } + virtual ~Data_Context(); + virtual Block_Obj parse(); + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/cssize.cpp b/mybulma/node_modules/node-sass/src/libsass/src/cssize.cpp new file mode 100644 index 0000000..6a12fdf --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/cssize.cpp @@ -0,0 +1,606 @@ +#include "sass.hpp" +#include +#include +#include + +#include "cssize.hpp" +#include "context.hpp" + +namespace Sass { + + Cssize::Cssize(Context& ctx) + : ctx(ctx), + traces(ctx.traces), + block_stack(std::vector()), + p_stack(std::vector()) + { } + + Statement_Ptr Cssize::parent() + { + return p_stack.size() ? p_stack.back() : block_stack.front(); + } + + Block_Ptr Cssize::operator()(Block_Ptr b) + { + Block_Obj bb = SASS_MEMORY_NEW(Block, b->pstate(), b->length(), b->is_root()); + // bb->tabs(b->tabs()); + block_stack.push_back(bb); + append_block(b, bb); + block_stack.pop_back(); + return bb.detach(); + } + + Statement_Ptr Cssize::operator()(Trace_Ptr t) + { + traces.push_back(Backtrace(t->pstate())); + auto result = t->block()->perform(this); + traces.pop_back(); + return result; + } + + Statement_Ptr Cssize::operator()(Declaration_Ptr d) + { + String_Obj property = Cast(d->property()); + + if (Declaration_Ptr dd = Cast(parent())) { + String_Obj parent_property = Cast(dd->property()); + property = SASS_MEMORY_NEW(String_Constant, + d->property()->pstate(), + parent_property->to_string() + "-" + property->to_string()); + if (!dd->value()) { + d->tabs(dd->tabs() + 1); + } + } + + Declaration_Obj dd = SASS_MEMORY_NEW(Declaration, + d->pstate(), + property, + d->value(), + d->is_important(), + d->is_custom_property()); + dd->is_indented(d->is_indented()); + dd->tabs(d->tabs()); + + p_stack.push_back(dd); + Block_Obj bb = d->block() ? operator()(d->block()) : NULL; + p_stack.pop_back(); + + if (bb && bb->length()) { + if (dd->value() && !dd->value()->is_invisible()) { + bb->unshift(dd); + } + return bb.detach(); + } + else if (dd->value() && !dd->value()->is_invisible()) { + return dd.detach(); + } + + return 0; + } + + Statement_Ptr Cssize::operator()(Directive_Ptr r) + { + if (!r->block() || !r->block()->length()) return r; + + if (parent()->statement_type() == Statement::RULESET) + { + return (r->is_keyframes()) ? SASS_MEMORY_NEW(Bubble, r->pstate(), r) : bubble(r); + } + + p_stack.push_back(r); + Directive_Obj rr = SASS_MEMORY_NEW(Directive, + r->pstate(), + r->keyword(), + r->selector(), + r->block() ? operator()(r->block()) : 0); + if (r->value()) rr->value(r->value()); + p_stack.pop_back(); + + bool directive_exists = false; + size_t L = rr->block() ? 
rr->block()->length() : 0; + for (size_t i = 0; i < L && !directive_exists; ++i) { + Statement_Obj s = r->block()->at(i); + if (s->statement_type() != Statement::BUBBLE) directive_exists = true; + else { + Bubble_Obj s_obj = Cast(s); + s = s_obj->node(); + if (s->statement_type() != Statement::DIRECTIVE) directive_exists = false; + else directive_exists = (Cast(s)->keyword() == rr->keyword()); + } + + } + + Block_Ptr result = SASS_MEMORY_NEW(Block, rr->pstate()); + if (!(directive_exists || rr->is_keyframes())) + { + Directive_Ptr empty_node = Cast(rr); + empty_node->block(SASS_MEMORY_NEW(Block, rr->block() ? rr->block()->pstate() : rr->pstate())); + result->append(empty_node); + } + + Block_Obj db = rr->block(); + if (db.isNull()) db = SASS_MEMORY_NEW(Block, rr->pstate()); + Block_Obj ss = debubble(db, rr); + for (size_t i = 0, L = ss->length(); i < L; ++i) { + result->append(ss->at(i)); + } + + return result; + } + + Statement_Ptr Cssize::operator()(Keyframe_Rule_Ptr r) + { + if (!r->block() || !r->block()->length()) return r; + + Keyframe_Rule_Obj rr = SASS_MEMORY_NEW(Keyframe_Rule, + r->pstate(), + operator()(r->block())); + if (!r->name().isNull()) rr->name(r->name()); + + return debubble(rr->block(), rr); + } + + Statement_Ptr Cssize::operator()(Ruleset_Ptr r) + { + p_stack.push_back(r); + // this can return a string schema + // string schema is not a statement! + // r->block() is already a string schema + // and that is comming from propset expand + Block_Ptr bb = operator()(r->block()); + // this should protect us (at least a bit) from our mess + // fixing this properly is harder that it should be ... + if (Cast(bb) == NULL) { + error("Illegal nesting: Only properties may be nested beneath properties.", r->block()->pstate(), traces); + } + Ruleset_Obj rr = SASS_MEMORY_NEW(Ruleset, + r->pstate(), + r->selector(), + bb); + + rr->is_root(r->is_root()); + // rr->tabs(r->block()->tabs()); + p_stack.pop_back(); + + if (!rr->block()) { + error("Illegal nesting: Only properties may be nested beneath properties.", r->block()->pstate(), traces); + } + + Block_Obj props = SASS_MEMORY_NEW(Block, rr->block()->pstate()); + Block_Ptr rules = SASS_MEMORY_NEW(Block, rr->block()->pstate()); + for (size_t i = 0, L = rr->block()->length(); i < L; i++) + { + Statement_Ptr s = rr->block()->at(i); + if (bubblable(s)) rules->append(s); + if (!bubblable(s)) props->append(s); + } + + if (props->length()) + { + Block_Obj pb = SASS_MEMORY_NEW(Block, rr->block()->pstate()); + pb->concat(props); + rr->block(pb); + + for (size_t i = 0, L = rules->length(); i < L; i++) + { + Statement_Ptr stm = rules->at(i); + stm->tabs(stm->tabs() + 1); + } + + rules->unshift(rr); + } + + Block_Ptr ptr = rules; + rules = debubble(rules); + void* lp = ptr; + void* rp = rules; + if (lp != rp) { + Block_Obj obj = ptr; + } + + if (!(!rules->length() || + !bubblable(rules->last()) || + parent()->statement_type() == Statement::RULESET)) + { + rules->last()->group_end(true); + } + return rules; + } + + Statement_Ptr Cssize::operator()(Null_Ptr m) + { + return 0; + } + + Statement_Ptr Cssize::operator()(Media_Block_Ptr m) + { + if (parent()->statement_type() == Statement::RULESET) + { return bubble(m); } + + if (parent()->statement_type() == Statement::MEDIA) + { return SASS_MEMORY_NEW(Bubble, m->pstate(), m); } + + p_stack.push_back(m); + + Media_Block_Obj mm = SASS_MEMORY_NEW(Media_Block, + m->pstate(), + m->media_queries(), + operator()(m->block())); + mm->tabs(m->tabs()); + + p_stack.pop_back(); + + return debubble(mm->block(), mm); 
+ } + + Statement_Ptr Cssize::operator()(Supports_Block_Ptr m) + { + if (!m->block()->length()) + { return m; } + + if (parent()->statement_type() == Statement::RULESET) + { return bubble(m); } + + p_stack.push_back(m); + + Supports_Block_Obj mm = SASS_MEMORY_NEW(Supports_Block, + m->pstate(), + m->condition(), + operator()(m->block())); + mm->tabs(m->tabs()); + + p_stack.pop_back(); + + return debubble(mm->block(), mm); + } + + Statement_Ptr Cssize::operator()(At_Root_Block_Ptr m) + { + bool tmp = false; + for (size_t i = 0, L = p_stack.size(); i < L; ++i) { + Statement_Ptr s = p_stack[i]; + tmp |= m->exclude_node(s); + } + + if (!tmp && m->block()) + { + Block_Ptr bb = operator()(m->block()); + for (size_t i = 0, L = bb->length(); i < L; ++i) { + // (bb->elements())[i]->tabs(m->tabs()); + Statement_Obj stm = bb->at(i); + if (bubblable(stm)) stm->tabs(stm->tabs() + m->tabs()); + } + if (bb->length() && bubblable(bb->last())) bb->last()->group_end(m->group_end()); + return bb; + } + + if (m->exclude_node(parent())) + { + return SASS_MEMORY_NEW(Bubble, m->pstate(), m); + } + + return bubble(m); + } + + Statement_Ptr Cssize::bubble(Directive_Ptr m) + { + Block_Ptr bb = SASS_MEMORY_NEW(Block, this->parent()->pstate()); + Has_Block_Obj new_rule = Cast(SASS_MEMORY_COPY(this->parent())); + new_rule->block(bb); + new_rule->tabs(this->parent()->tabs()); + new_rule->block()->concat(m->block()); + + Block_Obj wrapper_block = SASS_MEMORY_NEW(Block, m->block() ? m->block()->pstate() : m->pstate()); + wrapper_block->append(new_rule); + Directive_Obj mm = SASS_MEMORY_NEW(Directive, + m->pstate(), + m->keyword(), + m->selector(), + wrapper_block); + if (m->value()) mm->value(m->value()); + + Bubble_Ptr bubble = SASS_MEMORY_NEW(Bubble, mm->pstate(), mm); + return bubble; + } + + Statement_Ptr Cssize::bubble(At_Root_Block_Ptr m) + { + if (!m || !m->block()) return NULL; + Block_Ptr bb = SASS_MEMORY_NEW(Block, this->parent()->pstate()); + Has_Block_Obj new_rule = Cast(SASS_MEMORY_COPY(this->parent())); + Block_Ptr wrapper_block = SASS_MEMORY_NEW(Block, m->block()->pstate()); + if (new_rule) { + new_rule->block(bb); + new_rule->tabs(this->parent()->tabs()); + new_rule->block()->concat(m->block()); + wrapper_block->append(new_rule); + } + + At_Root_Block_Ptr mm = SASS_MEMORY_NEW(At_Root_Block, + m->pstate(), + wrapper_block, + m->expression()); + Bubble_Ptr bubble = SASS_MEMORY_NEW(Bubble, mm->pstate(), mm); + return bubble; + } + + Statement_Ptr Cssize::bubble(Supports_Block_Ptr m) + { + Ruleset_Obj parent = Cast(SASS_MEMORY_COPY(this->parent())); + + Block_Ptr bb = SASS_MEMORY_NEW(Block, parent->block()->pstate()); + Ruleset_Ptr new_rule = SASS_MEMORY_NEW(Ruleset, + parent->pstate(), + parent->selector(), + bb); + new_rule->tabs(parent->tabs()); + new_rule->block()->concat(m->block()); + + Block_Ptr wrapper_block = SASS_MEMORY_NEW(Block, m->block()->pstate()); + wrapper_block->append(new_rule); + Supports_Block_Ptr mm = SASS_MEMORY_NEW(Supports_Block, + m->pstate(), + m->condition(), + wrapper_block); + + mm->tabs(m->tabs()); + + Bubble_Ptr bubble = SASS_MEMORY_NEW(Bubble, mm->pstate(), mm); + return bubble; + } + + Statement_Ptr Cssize::bubble(Media_Block_Ptr m) + { + Ruleset_Obj parent = Cast(SASS_MEMORY_COPY(this->parent())); + + Block_Ptr bb = SASS_MEMORY_NEW(Block, parent->block()->pstate()); + Ruleset_Ptr new_rule = SASS_MEMORY_NEW(Ruleset, + parent->pstate(), + parent->selector(), + bb); + new_rule->tabs(parent->tabs()); + new_rule->block()->concat(m->block()); + + Block_Ptr wrapper_block = 
SASS_MEMORY_NEW(Block, m->block()->pstate()); + wrapper_block->append(new_rule); + Media_Block_Obj mm = SASS_MEMORY_NEW(Media_Block, + m->pstate(), + m->media_queries(), + wrapper_block); + + mm->tabs(m->tabs()); + + return SASS_MEMORY_NEW(Bubble, mm->pstate(), mm); + } + + bool Cssize::bubblable(Statement_Ptr s) + { + return Cast(s) || s->bubbles(); + } + + Block_Ptr Cssize::flatten(Block_Ptr b) + { + Block_Ptr result = SASS_MEMORY_NEW(Block, b->pstate(), 0, b->is_root()); + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Ptr ss = b->at(i); + if (Block_Ptr bb = Cast(ss)) { + Block_Obj bs = flatten(bb); + for (size_t j = 0, K = bs->length(); j < K; ++j) { + result->append(bs->at(j)); + } + } + else { + result->append(ss); + } + } + return result; + } + + std::vector> Cssize::slice_by_bubble(Block_Ptr b) + { + std::vector> results; + + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Obj value = b->at(i); + bool key = Cast(value) != NULL; + + if (!results.empty() && results.back().first == key) + { + Block_Obj wrapper_block = results.back().second; + wrapper_block->append(value); + } + else + { + Block_Ptr wrapper_block = SASS_MEMORY_NEW(Block, value->pstate()); + wrapper_block->append(value); + results.push_back(std::make_pair(key, wrapper_block)); + } + } + return results; + } + + Block_Ptr Cssize::debubble(Block_Ptr children, Statement_Ptr parent) + { + Has_Block_Obj previous_parent = 0; + std::vector> baz = slice_by_bubble(children); + Block_Obj result = SASS_MEMORY_NEW(Block, children->pstate()); + + for (size_t i = 0, L = baz.size(); i < L; ++i) { + bool is_bubble = baz[i].first; + Block_Obj slice = baz[i].second; + + if (!is_bubble) { + if (!parent) { + result->append(slice); + } + else if (previous_parent) { + previous_parent->block()->concat(slice); + } + else { + previous_parent = Cast(SASS_MEMORY_COPY(parent)); + previous_parent->block(slice); + previous_parent->tabs(parent->tabs()); + + result->append(previous_parent); + } + continue; + } + + for (size_t j = 0, K = slice->length(); j < K; ++j) + { + Statement_Ptr ss; + Statement_Obj stm = slice->at(j); + // this has to go now here (too bad) + Bubble_Obj node = Cast(stm); + Media_Block_Ptr m1 = NULL; + Media_Block_Ptr m2 = NULL; + if (parent) m1 = Cast(parent); + if (node) m2 = Cast(node->node()); + if (!parent || + parent->statement_type() != Statement::MEDIA || + node->node()->statement_type() != Statement::MEDIA || + (m1 && m2 && *m1->media_queries() == *m2->media_queries()) + ) + { + ss = node->node(); + } + else + { + List_Obj mq = merge_media_queries( + Cast(node->node()), + Cast(parent) + ); + if (!mq->length()) continue; + if (Media_Block* b = Cast(node->node())) { + b->media_queries(mq); + } + ss = node->node(); + } + + if (!ss) continue; + + ss->tabs(ss->tabs() + node->tabs()); + ss->group_end(node->group_end()); + + Block_Obj bb = SASS_MEMORY_NEW(Block, + children->pstate(), + children->length(), + children->is_root()); + bb->append(ss->perform(this)); + + Block_Obj wrapper_block = SASS_MEMORY_NEW(Block, + children->pstate(), + children->length(), + children->is_root()); + + Block_Ptr wrapper = flatten(bb); + wrapper_block->append(wrapper); + + if (wrapper->length()) { + previous_parent = NULL; + } + + if (wrapper_block) { + result->append(wrapper_block); + } + } + } + + return flatten(result); + } + + Statement_Ptr Cssize::fallback_impl(AST_Node_Ptr n) + { + return static_cast(n); + } + + void Cssize::append_block(Block_Ptr b, Block_Ptr cur) + { + for (size_t i = 0, L = b->length(); i < L; 
++i) { + Statement_Obj ith = b->at(i)->perform(this); + if (Block_Ptr bb = Cast(ith)) { + for (size_t j = 0, K = bb->length(); j < K; ++j) { + cur->append(bb->at(j)); + } + } + else if (ith) { + cur->append(ith); + } + } + } + + List_Ptr Cssize::merge_media_queries(Media_Block_Ptr m1, Media_Block_Ptr m2) + { + List_Ptr qq = SASS_MEMORY_NEW(List, + m1->media_queries()->pstate(), + m1->media_queries()->length(), + SASS_COMMA); + + for (size_t i = 0, L = m1->media_queries()->length(); i < L; i++) { + for (size_t j = 0, K = m2->media_queries()->length(); j < K; j++) { + Expression_Obj l1 = m1->media_queries()->at(i); + Expression_Obj l2 = m2->media_queries()->at(j); + Media_Query_Ptr mq1 = Cast(l1); + Media_Query_Ptr mq2 = Cast(l2); + Media_Query_Ptr mq = merge_media_query(mq1, mq2); + if (mq) qq->append(mq); + } + } + + return qq; + } + + + Media_Query_Ptr Cssize::merge_media_query(Media_Query_Ptr mq1, Media_Query_Ptr mq2) + { + + std::string type; + std::string mod; + + std::string m1 = std::string(mq1->is_restricted() ? "only" : mq1->is_negated() ? "not" : ""); + std::string t1 = mq1->media_type() ? mq1->media_type()->to_string(ctx.c_options) : ""; + std::string m2 = std::string(mq2->is_restricted() ? "only" : mq2->is_negated() ? "not" : ""); + std::string t2 = mq2->media_type() ? mq2->media_type()->to_string(ctx.c_options) : ""; + + + if (t1.empty()) t1 = t2; + if (t2.empty()) t2 = t1; + + if ((m1 == "not") ^ (m2 == "not")) { + if (t1 == t2) { + return 0; + } + type = m1 == "not" ? t2 : t1; + mod = m1 == "not" ? m2 : m1; + } + else if (m1 == "not" && m2 == "not") { + if (t1 != t2) { + return 0; + } + type = t1; + mod = "not"; + } + else if (t1 != t2) { + return 0; + } else { + type = t1; + mod = m1.empty() ? m2 : m1; + } + + Media_Query_Ptr mm = SASS_MEMORY_NEW(Media_Query, + mq1->pstate(), + 0, + mq1->length() + mq2->length(), + mod == "not", + mod == "only"); + + if (!type.empty()) { + mm->media_type(SASS_MEMORY_NEW(String_Quoted, mq1->pstate(), type)); + } + + mm->concat(mq2); + mm->concat(mq1); + + return mm; + } +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/cssize.hpp b/mybulma/node_modules/node-sass/src/libsass/src/cssize.hpp new file mode 100644 index 0000000..5a6c704 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/cssize.hpp @@ -0,0 +1,77 @@ +#ifndef SASS_CSSIZE_H +#define SASS_CSSIZE_H + +#include "ast.hpp" +#include "context.hpp" +#include "operation.hpp" +#include "environment.hpp" + +namespace Sass { + + struct Backtrace; + + class Cssize : public Operation_CRTP { + + Context& ctx; + Backtraces& traces; + std::vector block_stack; + std::vector p_stack; + + Statement_Ptr fallback_impl(AST_Node_Ptr n); + + public: + Cssize(Context&); + ~Cssize() { } + + Selector_List_Ptr selector(); + + Block_Ptr operator()(Block_Ptr); + Statement_Ptr operator()(Ruleset_Ptr); + // Statement_Ptr operator()(Bubble_Ptr); + Statement_Ptr operator()(Media_Block_Ptr); + Statement_Ptr operator()(Supports_Block_Ptr); + Statement_Ptr operator()(At_Root_Block_Ptr); + Statement_Ptr operator()(Directive_Ptr); + Statement_Ptr operator()(Keyframe_Rule_Ptr); + Statement_Ptr operator()(Trace_Ptr); + Statement_Ptr operator()(Declaration_Ptr); + // Statement_Ptr operator()(Assignment_Ptr); + // Statement_Ptr operator()(Import_Ptr); + // Statement_Ptr operator()(Import_Stub_Ptr); + // Statement_Ptr operator()(Warning_Ptr); + // Statement_Ptr operator()(Error_Ptr); + // Statement_Ptr operator()(Comment_Ptr); + // Statement_Ptr operator()(If_Ptr); + // Statement_Ptr 
operator()(For_Ptr); + // Statement_Ptr operator()(Each_Ptr); + // Statement_Ptr operator()(While_Ptr); + // Statement_Ptr operator()(Return_Ptr); + // Statement_Ptr operator()(Extension_Ptr); + // Statement_Ptr operator()(Definition_Ptr); + // Statement_Ptr operator()(Mixin_Call_Ptr); + // Statement_Ptr operator()(Content_Ptr); + Statement_Ptr operator()(Null_Ptr); + + Statement_Ptr parent(); + std::vector> slice_by_bubble(Block_Ptr); + Statement_Ptr bubble(Directive_Ptr); + Statement_Ptr bubble(At_Root_Block_Ptr); + Statement_Ptr bubble(Media_Block_Ptr); + Statement_Ptr bubble(Supports_Block_Ptr); + + Block_Ptr debubble(Block_Ptr children, Statement_Ptr parent = 0); + Block_Ptr flatten(Block_Ptr); + bool bubblable(Statement_Ptr); + + List_Ptr merge_media_queries(Media_Block_Ptr, Media_Block_Ptr); + Media_Query_Ptr merge_media_query(Media_Query_Ptr, Media_Query_Ptr); + + template + Statement_Ptr fallback(U x) { return fallback_impl(x); } + + void append_block(Block_Ptr, Block_Ptr); + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/debug.hpp b/mybulma/node_modules/node-sass/src/libsass/src/debug.hpp new file mode 100644 index 0000000..43fe05e --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/debug.hpp @@ -0,0 +1,43 @@ +#ifndef SASS_DEBUG_H +#define SASS_DEBUG_H + +#include + +#ifndef UINT32_MAX + #define UINT32_MAX 0xffffffffU +#endif + +enum dbg_lvl_t : uint32_t { + NONE = 0, + TRIM = 1, + CHUNKS = 2, + SUBWEAVE = 4, + WEAVE = 8, + EXTEND_COMPOUND = 16, + EXTEND_COMPLEX = 32, + LCS = 64, + EXTEND_OBJECT = 128, + ALL = UINT32_MAX +}; + +#ifdef DEBUG + +#ifndef DEBUG_LVL +const uint32_t debug_lvl = UINT32_MAX; +#else +const uint32_t debug_lvl = (DEBUG_LVL); +#endif // DEBUG_LVL + +#define DEBUG_PRINT(lvl, x) if((lvl) & debug_lvl) { std::cerr << x; } +#define DEBUG_PRINTLN(lvl, x) if((lvl) & debug_lvl) { std::cerr << x << std::endl; } +#define DEBUG_EXEC(lvl, x) if((lvl) & debug_lvl) { x; } + +#else // DEBUG + +#define DEBUG_PRINT(lvl, x) +#define DEBUG_PRINTLN(lvl, x) +#define DEBUG_EXEC(lvl, x) + +#endif // DEBUG + +#endif // SASS_DEBUG diff --git a/mybulma/node_modules/node-sass/src/libsass/src/debugger.hpp b/mybulma/node_modules/node-sass/src/libsass/src/debugger.hpp new file mode 100644 index 0000000..f1ceabd --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/debugger.hpp @@ -0,0 +1,801 @@ +#ifndef SASS_DEBUGGER_H +#define SASS_DEBUGGER_H + +#include +#include +#include "node.hpp" +#include "ast_fwd_decl.hpp" + +using namespace Sass; + +inline void debug_ast(AST_Node_Ptr node, std::string ind = "", Env* env = 0); + +inline void debug_ast(const AST_Node* node, std::string ind = "", Env* env = 0) { + debug_ast(const_cast(node), ind, env); +} + +inline void debug_sources_set(ComplexSelectorSet& set, std::string ind = "") +{ + if (ind == "") std::cerr << "#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n"; + for(auto const &pair : set) { + debug_ast(pair, ind + ""); + // debug_ast(set[pair], ind + "first: "); + } + if (ind == "") std::cerr << "#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n"; +} + +inline std::string str_replace(std::string str, const std::string& oldStr, const std::string& newStr) +{ + size_t pos = 0; + while((pos = str.find(oldStr, pos)) != std::string::npos) + { + str.replace(pos, oldStr.length(), newStr); + pos += newStr.length(); + } + return str; +} + +inline std::string prettyprint(const std::string& str) { + std::string clean = str_replace(str, "\n", "\\n"); + 
clean = str_replace(clean, " ", "\\t"); + clean = str_replace(clean, "\r", "\\r"); + return clean; +} + +inline std::string longToHex(long long t) { + std::stringstream is; + is << std::hex << t; + return is.str(); +} + +inline std::string pstate_source_position(AST_Node_Ptr node) +{ + std::stringstream str; + Position start(node->pstate()); + Position end(start + node->pstate().offset); + str << (start.file == std::string::npos ? -1 : start.file) + << "@[" << start.line << ":" << start.column << "]" + << "-[" << end.line << ":" << end.column << "]"; +#ifdef DEBUG_SHARED_PTR + str << "x" << node->getRefCount() << "" + << " " << node->getDbgFile() + << "@" << node->getDbgLine(); +#endif + return str.str(); +} + +inline void debug_ast(AST_Node_Ptr node, std::string ind, Env* env) +{ + if (node == 0) return; + if (ind == "") std::cerr << "####################################################################\n"; + if (Cast(node)) { + Bubble_Ptr bubble = Cast(node); + std::cerr << ind << "Bubble " << bubble; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << bubble->tabs(); + std::cerr << std::endl; + debug_ast(bubble->node(), ind + " ", env); + } else if (Cast(node)) { + Trace_Ptr trace = Cast(node); + std::cerr << ind << "Trace " << trace; + std::cerr << " (" << pstate_source_position(node) << ")" + << " [name:" << trace->name() << ", type: " << trace->type() << "]" + << std::endl; + debug_ast(trace->block(), ind + " ", env); + } else if (Cast(node)) { + At_Root_Block_Ptr root_block = Cast(node); + std::cerr << ind << "At_Root_Block " << root_block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << root_block->tabs(); + std::cerr << std::endl; + debug_ast(root_block->expression(), ind + ":", env); + debug_ast(root_block->block(), ind + " ", env); + } else if (Cast(node)) { + Selector_List_Ptr selector = Cast(node); + std::cerr << ind << "Selector_List " << selector; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " <" << selector->hash() << ">"; + std::cerr << " [@media:" << selector->media_block() << "]"; + std::cerr << (selector->is_invisible() ? " [INVISIBLE]": " -"); + std::cerr << (selector->has_placeholder() ? " [PLACEHOLDER]": " -"); + std::cerr << (selector->is_optional() ? " [is_optional]": " -"); + std::cerr << (selector->has_parent_ref() ? " [has-parent]": " -"); + std::cerr << (selector->has_line_break() ? " [line-break]": " -"); + std::cerr << (selector->has_line_feed() ? " [line-feed]": " -"); + std::cerr << std::endl; + debug_ast(selector->schema(), ind + "#{} "); + + for(const Complex_Selector_Obj& i : selector->elements()) { debug_ast(i, ind + " ", env); } + +// } else if (Cast(node)) { +// Expression_Ptr expression = Cast(node); +// std::cerr << ind << "Expression " << expression << " " << expression->concrete_type() << std::endl; + + } else if (Cast(node)) { + Parent_Selector_Ptr selector = Cast(node); + std::cerr << ind << "Parent_Selector " << selector; +// if (selector->not_selector()) cerr << " [in_declaration]"; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " <" << selector->hash() << ">"; + std::cerr << " [" << (selector->is_real_parent_ref() ? 
"REAL" : "FAKE") << "]"; + std::cerr << " <" << prettyprint(selector->pstate().token.ws_before()) << ">" << std::endl; +// debug_ast(selector->selector(), ind + "->", env); + + } else if (Cast(node)) { + Complex_Selector_Ptr selector = Cast(node); + std::cerr << ind << "Complex_Selector " << selector + << " (" << pstate_source_position(node) << ")" + << " <" << selector->hash() << ">" + << " [length:" << longToHex(selector->length()) << "]" + << " [weight:" << longToHex(selector->specificity()) << "]" + << " [@media:" << selector->media_block() << "]" + << (selector->is_invisible() ? " [INVISIBLE]": " -") + << (selector->has_placeholder() ? " [PLACEHOLDER]": " -") + << (selector->is_optional() ? " [is_optional]": " -") + << (selector->has_parent_ref() ? " [has parent]": " -") + << (selector->has_line_feed() ? " [line-feed]": " -") + << (selector->has_line_break() ? " [line-break]": " -") + << " -- "; + std::string del; + switch (selector->combinator()) { + case Complex_Selector::PARENT_OF: del = ">"; break; + case Complex_Selector::PRECEDES: del = "~"; break; + case Complex_Selector::ADJACENT_TO: del = "+"; break; + case Complex_Selector::ANCESTOR_OF: del = " "; break; + case Complex_Selector::REFERENCE: del = "//"; break; + } + // if (del = "/") del += selector->reference()->perform(&to_string) + "/"; + std::cerr << " <" << prettyprint(selector->pstate().token.ws_before()) << ">" << std::endl; + debug_ast(selector->head(), ind + " " /* + "[" + del + "]" */, env); + if (selector->tail()) { + debug_ast(selector->tail(), ind + "{" + del + "}", env); + } else if(del != " ") { + std::cerr << ind << " |" << del << "| {trailing op}" << std::endl; + } + ComplexSelectorSet set = selector->sources(); + // debug_sources_set(set, ind + " @--> "); + } else if (Cast(node)) { + Compound_Selector_Ptr selector = Cast(node); + std::cerr << ind << "Compound_Selector " << selector; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " <" << selector->hash() << ">"; + std::cerr << " [weight:" << longToHex(selector->specificity()) << "]"; + std::cerr << " [@media:" << selector->media_block() << "]"; + std::cerr << (selector->extended() ? " [extended]": " -"); + std::cerr << (selector->is_optional() ? " [is_optional]": " -"); + std::cerr << (selector->has_parent_ref() ? " [has-parent]": " -"); + std::cerr << (selector->has_line_break() ? " [line-break]": " -"); + std::cerr << (selector->has_line_feed() ? " [line-feed]": " -"); + std::cerr << " <" << prettyprint(selector->pstate().token.ws_before()) << ">" << std::endl; + for(const Simple_Selector_Obj& i : selector->elements()) { debug_ast(i, ind + " ", env); } + } else if (Cast(node)) { + Wrapped_Selector_Ptr selector = Cast(node); + std::cerr << ind << "Wrapped_Selector " << selector; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " <" << selector->hash() << ">"; + std::cerr << " <<" << selector->ns_name() << ">>"; + std::cerr << (selector->is_optional() ? " [is_optional]": " -"); + std::cerr << (selector->has_parent_ref() ? " [has-parent]": " -"); + std::cerr << (selector->has_line_break() ? " [line-break]": " -"); + std::cerr << (selector->has_line_feed() ? 
" [line-feed]": " -"); + std::cerr << std::endl; + debug_ast(selector->selector(), ind + " () ", env); + } else if (Cast(node)) { + Pseudo_Selector_Ptr selector = Cast(node); + std::cerr << ind << "Pseudo_Selector " << selector; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " <" << selector->hash() << ">"; + std::cerr << " <<" << selector->ns_name() << ">>"; + std::cerr << (selector->is_optional() ? " [is_optional]": " -"); + std::cerr << (selector->has_parent_ref() ? " [has-parent]": " -"); + std::cerr << (selector->has_line_break() ? " [line-break]": " -"); + std::cerr << (selector->has_line_feed() ? " [line-feed]": " -"); + std::cerr << std::endl; + debug_ast(selector->expression(), ind + " <= ", env); + } else if (Cast(node)) { + Attribute_Selector_Ptr selector = Cast(node); + std::cerr << ind << "Attribute_Selector " << selector; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " <" << selector->hash() << ">"; + std::cerr << " <<" << selector->ns_name() << ">>"; + std::cerr << (selector->is_optional() ? " [is_optional]": " -"); + std::cerr << (selector->has_parent_ref() ? " [has-parent]": " -"); + std::cerr << (selector->has_line_break() ? " [line-break]": " -"); + std::cerr << (selector->has_line_feed() ? " [line-feed]": " -"); + std::cerr << std::endl; + debug_ast(selector->value(), ind + "[" + selector->matcher() + "] ", env); + } else if (Cast(node)) { + Class_Selector_Ptr selector = Cast(node); + std::cerr << ind << "Class_Selector " << selector; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " <" << selector->hash() << ">"; + std::cerr << " <<" << selector->ns_name() << ">>"; + std::cerr << (selector->is_optional() ? " [is_optional]": " -"); + std::cerr << (selector->has_parent_ref() ? " [has-parent]": " -"); + std::cerr << (selector->has_line_break() ? " [line-break]": " -"); + std::cerr << (selector->has_line_feed() ? " [line-feed]": " -"); + std::cerr << std::endl; + } else if (Cast(node)) { + Id_Selector_Ptr selector = Cast(node); + std::cerr << ind << "Id_Selector " << selector; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " <" << selector->hash() << ">"; + std::cerr << " <<" << selector->ns_name() << ">>"; + std::cerr << (selector->is_optional() ? " [is_optional]": " -"); + std::cerr << (selector->has_parent_ref() ? " [has-parent]": " -"); + std::cerr << (selector->has_line_break() ? " [line-break]": " -"); + std::cerr << (selector->has_line_feed() ? " [line-feed]": " -"); + std::cerr << std::endl; + } else if (Cast(node)) { + Element_Selector_Ptr selector = Cast(node); + std::cerr << ind << "Element_Selector " << selector; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " <" << selector->hash() << ">"; + std::cerr << " <<" << selector->ns_name() << ">>"; + std::cerr << (selector->is_optional() ? " [is_optional]": " -"); + std::cerr << (selector->has_parent_ref() ? " [has-parent]": " -"); + std::cerr << (selector->has_line_break() ? " [line-break]": " -"); + std::cerr << (selector->has_line_feed() ? 
" [line-feed]": " -"); + std::cerr << " <" << prettyprint(selector->pstate().token.ws_before()) << ">"; + std::cerr << std::endl; + } else if (Cast(node)) { + + Placeholder_Selector_Ptr selector = Cast(node); + std::cerr << ind << "Placeholder_Selector [" << selector->ns_name() << "] " << selector; + std::cerr << " (" << pstate_source_position(selector) << ")" + << " <" << selector->hash() << ">" + << " [@media:" << selector->media_block() << "]" + << (selector->is_optional() ? " [is_optional]": " -") + << (selector->has_line_break() ? " [line-break]": " -") + << (selector->has_line_feed() ? " [line-feed]": " -") + << std::endl; + + } else if (Cast(node)) { + Simple_Selector* selector = Cast(node); + std::cerr << ind << "Simple_Selector " << selector; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << (selector->has_line_break() ? " [line-break]": " -") << (selector->has_line_feed() ? " [line-feed]": " -") << std::endl; + + } else if (Cast(node)) { + Selector_Schema_Ptr selector = Cast(node); + std::cerr << ind << "Selector_Schema " << selector; + std::cerr << " (" << pstate_source_position(node) << ")" + << " [@media:" << selector->media_block() << "]" + << (selector->connect_parent() ? " [connect-parent]": " -") + << std::endl; + + debug_ast(selector->contents(), ind + " "); + // for(auto i : selector->elements()) { debug_ast(i, ind + " ", env); } + + } else if (Cast(node)) { + Selector_Ptr selector = Cast(node); + std::cerr << ind << "Selector " << selector; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << (selector->has_line_break() ? " [line-break]": " -") + << (selector->has_line_feed() ? " [line-feed]": " -") + << std::endl; + + } else if (Cast(node)) { + Media_Query_Expression_Ptr block = Cast(node); + std::cerr << ind << "Media_Query_Expression " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << (block->is_interpolated() ? " [is_interpolated]": " -") + << std::endl; + debug_ast(block->feature(), ind + " feature) "); + debug_ast(block->value(), ind + " value) "); + + } else if (Cast(node)) { + Media_Query_Ptr block = Cast(node); + std::cerr << ind << "Media_Query " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << (block->is_negated() ? " [is_negated]": " -") + << (block->is_restricted() ? 
" [is_restricted]": " -") + << std::endl; + debug_ast(block->media_type(), ind + " "); + for(const auto& i : block->elements()) { debug_ast(i, ind + " ", env); } + + } else if (Cast(node)) { + Media_Block_Ptr block = Cast(node); + std::cerr << ind << "Media_Block " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << block->tabs() << std::endl; + debug_ast(block->media_queries(), ind + " =@ "); + if (block->block()) for(const Statement_Obj& i : block->block()->elements()) { debug_ast(i, ind + " ", env); } + } else if (Cast(node)) { + Supports_Block_Ptr block = Cast(node); + std::cerr << ind << "Supports_Block " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << block->tabs() << std::endl; + debug_ast(block->condition(), ind + " =@ "); + debug_ast(block->block(), ind + " <>"); + } else if (Cast(node)) { + Supports_Operator_Ptr block = Cast(node); + std::cerr << ind << "Supports_Operator " << block; + std::cerr << " (" << pstate_source_position(node) << ")" + << std::endl; + debug_ast(block->left(), ind + " left) "); + debug_ast(block->right(), ind + " right) "); + } else if (Cast(node)) { + Supports_Negation_Ptr block = Cast(node); + std::cerr << ind << "Supports_Negation " << block; + std::cerr << " (" << pstate_source_position(node) << ")" + << std::endl; + debug_ast(block->condition(), ind + " condition) "); + } else if (Cast(node)) { + At_Root_Query_Ptr block = Cast(node); + std::cerr << ind << "At_Root_Query " << block; + std::cerr << " (" << pstate_source_position(node) << ")" + << std::endl; + debug_ast(block->feature(), ind + " feature) "); + debug_ast(block->value(), ind + " value) "); + } else if (Cast(node)) { + Supports_Declaration_Ptr block = Cast(node); + std::cerr << ind << "Supports_Declaration " << block; + std::cerr << " (" << pstate_source_position(node) << ")" + << std::endl; + debug_ast(block->feature(), ind + " feature) "); + debug_ast(block->value(), ind + " value) "); + } else if (Cast(node)) { + Block_Ptr root_block = Cast(node); + std::cerr << ind << "Block " << root_block; + std::cerr << " (" << pstate_source_position(node) << ")"; + if (root_block->is_root()) std::cerr << " [root]"; + std::cerr << " " << root_block->tabs() << std::endl; + for(const Statement_Obj& i : root_block->elements()) { debug_ast(i, ind + " ", env); } + } else if (Cast(node)) { + Warning_Ptr block = Cast(node); + std::cerr << ind << "Warning " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << block->tabs() << std::endl; + debug_ast(block->message(), ind + " : "); + } else if (Cast(node)) { + Error_Ptr block = Cast(node); + std::cerr << ind << "Error " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << block->tabs() << std::endl; + } else if (Cast(node)) { + Debug_Ptr block = Cast(node); + std::cerr << ind << "Debug " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << block->tabs() << std::endl; + debug_ast(block->value(), ind + " "); + } else if (Cast(node)) { + Comment_Ptr block = Cast(node); + std::cerr << ind << "Comment " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << block->tabs() << + " <" << prettyprint(block->pstate().token.ws_before()) << ">" << std::endl; + debug_ast(block->text(), ind + "// ", env); + } else if (Cast(node)) { + If_Ptr block = Cast(node); + std::cerr << ind << "If " << block; + std::cerr << " (" << 
pstate_source_position(node) << ")"; + std::cerr << " " << block->tabs() << std::endl; + debug_ast(block->predicate(), ind + " = "); + debug_ast(block->block(), ind + " <>"); + debug_ast(block->alternative(), ind + " ><"); + } else if (Cast(node)) { + Return_Ptr block = Cast(node); + std::cerr << ind << "Return " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << block->tabs() << std::endl; + } else if (Cast(node)) { + Extension_Ptr block = Cast(node); + std::cerr << ind << "Extension " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << block->tabs() << std::endl; + debug_ast(block->selector(), ind + "-> ", env); + } else if (Cast(node)) { + Content_Ptr block = Cast(node); + std::cerr << ind << "Content " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [@media:" << block->media_block() << "]"; + std::cerr << " " << block->tabs() << std::endl; + } else if (Cast(node)) { + Import_Stub_Ptr block = Cast(node); + std::cerr << ind << "Import_Stub " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [" << block->imp_path() << "] "; + std::cerr << " " << block->tabs() << std::endl; + } else if (Cast(node)) { + Import_Ptr block = Cast(node); + std::cerr << ind << "Import " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << block->tabs() << std::endl; + // std::vector files_; + for (auto imp : block->urls()) debug_ast(imp, ind + "@: ", env); + debug_ast(block->import_queries(), ind + "@@ "); + } else if (Cast(node)) { + Assignment_Ptr block = Cast(node); + std::cerr << ind << "Assignment " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " <<" << block->variable() << ">> " << block->tabs() << std::endl; + debug_ast(block->value(), ind + "=", env); + } else if (Cast(node)) { + Declaration_Ptr block = Cast(node); + std::cerr << ind << "Declaration " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [is_custom_property: " << block->is_custom_property() << "] "; + std::cerr << " " << block->tabs() << std::endl; + debug_ast(block->property(), ind + " prop: ", env); + debug_ast(block->value(), ind + " value: ", env); + debug_ast(block->block(), ind + " ", env); + } else if (Cast(node)) { + Keyframe_Rule_Ptr has_block = Cast(node); + std::cerr << ind << "Keyframe_Rule " << has_block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << has_block->tabs() << std::endl; + if (has_block->name()) debug_ast(has_block->name(), ind + "@"); + if (has_block->block()) for(const Statement_Obj& i : has_block->block()->elements()) { debug_ast(i, ind + " ", env); } + } else if (Cast(node)) { + Directive_Ptr block = Cast(node); + std::cerr << ind << "Directive " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [" << block->keyword() << "] " << block->tabs() << std::endl; + debug_ast(block->selector(), ind + "~", env); + debug_ast(block->value(), ind + "+", env); + if (block->block()) for(const Statement_Obj& i : block->block()->elements()) { debug_ast(i, ind + " ", env); } + } else if (Cast(node)) { + Each_Ptr block = Cast(node); + std::cerr << ind << "Each " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << block->tabs() << std::endl; + if (block->block()) for(const Statement_Obj& i : block->block()->elements()) { debug_ast(i, ind + " ", env); } + 
} else if (Cast(node)) { + For_Ptr block = Cast(node); + std::cerr << ind << "For " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << block->tabs() << std::endl; + if (block->block()) for(const Statement_Obj& i : block->block()->elements()) { debug_ast(i, ind + " ", env); } + } else if (Cast(node)) { + While_Ptr block = Cast(node); + std::cerr << ind << "While " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << block->tabs() << std::endl; + if (block->block()) for(const Statement_Obj& i : block->block()->elements()) { debug_ast(i, ind + " ", env); } + } else if (Cast(node)) { + Definition_Ptr block = Cast(node); + std::cerr << ind << "Definition " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [name: " << block->name() << "] "; + std::cerr << " [type: " << (block->type() == Sass::Definition::Type::MIXIN ? "Mixin " : "Function ") << "] "; + // this seems to lead to segfaults some times? + // std::cerr << " [signature: " << block->signature() << "] "; + std::cerr << " [native: " << block->native_function() << "] "; + std::cerr << " " << block->tabs() << std::endl; + debug_ast(block->parameters(), ind + " params: ", env); + if (block->block()) debug_ast(block->block(), ind + " ", env); + } else if (Cast(node)) { + Mixin_Call_Ptr block = Cast(node); + std::cerr << ind << "Mixin_Call " << block << " " << block->tabs(); + std::cerr << " (" << pstate_source_position(block) << ")"; + std::cerr << " [" << block->name() << "]"; + std::cerr << " [has_content: " << block->has_content() << "] " << std::endl; + debug_ast(block->arguments(), ind + " args: "); + if (block->block()) debug_ast(block->block(), ind + " ", env); + } else if (Ruleset_Ptr ruleset = Cast(node)) { + std::cerr << ind << "Ruleset " << ruleset; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [indent: " << ruleset->tabs() << "]"; + std::cerr << (ruleset->is_invisible() ? " [INVISIBLE]" : ""); + std::cerr << (ruleset->is_root() ? " [root]" : ""); + std::cerr << std::endl; + debug_ast(ruleset->selector(), ind + ">"); + debug_ast(ruleset->block(), ind + " "); + } else if (Cast(node)) { + Block_Ptr block = Cast(node); + std::cerr << ind << "Block " << block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << (block->is_invisible() ? 
" [INVISIBLE]" : ""); + std::cerr << " [indent: " << block->tabs() << "]" << std::endl; + for(const Statement_Obj& i : block->elements()) { debug_ast(i, ind + " ", env); } + } else if (Cast(node)) { + Variable_Ptr expression = Cast(node); + std::cerr << ind << "Variable " << expression; + std::cerr << " [interpolant: " << expression->is_interpolant() << "] "; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [" << expression->name() << "]" << std::endl; + std::string name(expression->name()); + if (env && env->has(name)) debug_ast(Cast((*env)[name]), ind + " -> ", env); + } else if (Cast(node)) { + Function_Call_Schema_Ptr expression = Cast(node); + std::cerr << ind << "Function_Call_Schema " << expression; + std::cerr << " [interpolant: " << expression->is_interpolant() << "] "; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << "" << std::endl; + debug_ast(expression->name(), ind + "name: ", env); + debug_ast(expression->arguments(), ind + " args: ", env); + } else if (Cast(node)) { + Function_Call_Ptr expression = Cast(node); + std::cerr << ind << "Function_Call " << expression; + std::cerr << " [interpolant: " << expression->is_interpolant() << "] "; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [" << expression->name() << "]"; + if (expression->is_delayed()) std::cerr << " [delayed]"; + if (expression->is_interpolant()) std::cerr << " [interpolant]"; + if (expression->is_css()) std::cerr << " [css]"; + std::cerr << std::endl; + debug_ast(expression->arguments(), ind + " args: ", env); + debug_ast(expression->func(), ind + " func: ", env); + } else if (Cast(node)) { + Function_Ptr expression = Cast(node); + std::cerr << ind << "Function " << expression; + std::cerr << " (" << pstate_source_position(node) << ")"; + if (expression->is_css()) std::cerr << " [css]"; + std::cerr << std::endl; + debug_ast(expression->definition(), ind + " definition: ", env); + } else if (Cast(node)) { + Arguments_Ptr expression = Cast(node); + std::cerr << ind << "Arguments " << expression; + if (expression->is_delayed()) std::cerr << " [delayed]"; + std::cerr << " (" << pstate_source_position(node) << ")"; + if (expression->has_named_arguments()) std::cerr << " [has_named_arguments]"; + if (expression->has_rest_argument()) std::cerr << " [has_rest_argument]"; + if (expression->has_keyword_argument()) std::cerr << " [has_keyword_argument]"; + std::cerr << std::endl; + for(const Argument_Obj& i : expression->elements()) { debug_ast(i, ind + " ", env); } + } else if (Cast(node)) { + Argument_Ptr expression = Cast(node); + std::cerr << ind << "Argument " << expression; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [" << expression->value().ptr() << "]"; + std::cerr << " [name: " << expression->name() << "] "; + std::cerr << " [rest: " << expression->is_rest_argument() << "] "; + std::cerr << " [keyword: " << expression->is_keyword_argument() << "] " << std::endl; + debug_ast(expression->value(), ind + " value: ", env); + } else if (Cast(node)) { + Parameters_Ptr expression = Cast(node); + std::cerr << ind << "Parameters " << expression; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [has_optional: " << expression->has_optional_parameters() << "] "; + std::cerr << " [has_rest: " << expression->has_rest_parameter() << "] "; + std::cerr << std::endl; + for(const Parameter_Obj& i : expression->elements()) { debug_ast(i, ind + " ", env); } + } else if (Cast(node)) { + 
Parameter_Ptr expression = Cast(node); + std::cerr << ind << "Parameter " << expression; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [name: " << expression->name() << "] "; + std::cerr << " [default: " << expression->default_value().ptr() << "] "; + std::cerr << " [rest: " << expression->is_rest_parameter() << "] " << std::endl; + } else if (Cast(node)) { + Unary_Expression_Ptr expression = Cast(node); + std::cerr << ind << "Unary_Expression " << expression; + std::cerr << " [interpolant: " << expression->is_interpolant() << "] "; + std::cerr << " [delayed: " << expression->is_delayed() << "] "; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [" << expression->type() << "]" << std::endl; + debug_ast(expression->operand(), ind + " operand: ", env); + } else if (Cast(node)) { + Binary_Expression_Ptr expression = Cast(node); + std::cerr << ind << "Binary_Expression " << expression; + if (expression->is_interpolant()) std::cerr << " [is interpolant] "; + if (expression->is_left_interpolant()) std::cerr << " [left interpolant] "; + if (expression->is_right_interpolant()) std::cerr << " [right interpolant] "; + std::cerr << " [delayed: " << expression->is_delayed() << "] "; + std::cerr << " [ws_before: " << expression->op().ws_before << "] "; + std::cerr << " [ws_after: " << expression->op().ws_after << "] "; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [" << expression->type_name() << "]" << std::endl; + debug_ast(expression->left(), ind + " left: ", env); + debug_ast(expression->right(), ind + " right: ", env); + } else if (Cast(node)) { + Map_Ptr expression = Cast(node); + std::cerr << ind << "Map " << expression; + std::cerr << " [interpolant: " << expression->is_interpolant() << "] "; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [Hashed]" << std::endl; + for (const auto& i : expression->elements()) { + debug_ast(i.first, ind + " key: "); + debug_ast(i.second, ind + " val: "); + } + } else if (Cast(node)) { + List_Ptr expression = Cast(node); + std::cerr << ind << "List " << expression; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " (" << expression->length() << ") " << + (expression->separator() == SASS_COMMA ? "Comma " : expression->separator() == SASS_HASH ? 
"Map " : "Space ") << + " [delayed: " << expression->is_delayed() << "] " << + " [interpolant: " << expression->is_interpolant() << "] " << + " [listized: " << expression->from_selector() << "] " << + " [arglist: " << expression->is_arglist() << "] " << + " [bracketed: " << expression->is_bracketed() << "] " << + " [expanded: " << expression->is_expanded() << "] " << + " [hash: " << expression->hash() << "] " << + std::endl; + for(const auto& i : expression->elements()) { debug_ast(i, ind + " ", env); } + } else if (Cast(node)) { + Content_Ptr expression = Cast(node); + std::cerr << ind << "Content " << expression; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [@media:" << expression->media_block() << "]"; + std::cerr << " [Statement]" << std::endl; + } else if (Cast(node)) { + Boolean_Ptr expression = Cast(node); + std::cerr << ind << "Boolean " << expression; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [interpolant: " << expression->is_interpolant() << "] "; + std::cerr << " [" << expression->value() << "]" << std::endl; + } else if (Cast(node)) { + Color_Ptr expression = Cast(node); + std::cerr << ind << "Color " << expression; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [delayed: " << expression->is_delayed() << "] "; + std::cerr << " [interpolant: " << expression->is_interpolant() << "] "; + std::cerr << " [" << expression->r() << ":" << expression->g() << ":" << expression->b() << "@" << expression->a() << "]" << std::endl; + } else if (Cast(node)) { + Number_Ptr expression = Cast(node); + std::cerr << ind << "Number " << expression; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [delayed: " << expression->is_delayed() << "] "; + std::cerr << " [interpolant: " << expression->is_interpolant() << "] "; + std::cerr << " [" << expression->value() << expression->unit() << "]" << + " [hash: " << expression->hash() << "] " << + std::endl; + } else if (Cast(node)) { + Null_Ptr expression = Cast(node); + std::cerr << ind << "Null " << expression; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [interpolant: " << expression->is_interpolant() << "] " + // " [hash: " << expression->hash() << "] " + << std::endl; + } else if (Cast(node)) { + String_Quoted_Ptr expression = Cast(node); + std::cerr << ind << "String_Quoted " << expression; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [" << prettyprint(expression->value()) << "]"; + if (expression->is_delayed()) std::cerr << " [delayed]"; + if (expression->is_interpolant()) std::cerr << " [interpolant]"; + if (expression->quote_mark()) std::cerr << " [quote_mark: " << expression->quote_mark() << "]"; + std::cerr << " <" << prettyprint(expression->pstate().token.ws_before()) << ">" << std::endl; + } else if (Cast(node)) { + String_Constant_Ptr expression = Cast(node); + std::cerr << ind << "String_Constant " << expression; + if (expression->concrete_type()) { + std::cerr << " " << expression->concrete_type(); + } + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " [" << prettyprint(expression->value()) << "]"; + if (expression->is_delayed()) std::cerr << " [delayed]"; + if (expression->is_interpolant()) std::cerr << " [interpolant]"; + std::cerr << " <" << prettyprint(expression->pstate().token.ws_before()) << ">" << std::endl; + } else if (Cast(node)) { + String_Schema_Ptr expression = Cast(node); + std::cerr << ind << "String_Schema " << 
expression; + std::cerr << " (" << pstate_source_position(expression) << ")"; + std::cerr << " " << expression->concrete_type(); + std::cerr << " (" << pstate_source_position(node) << ")"; + if (expression->css()) std::cerr << " [css]"; + if (expression->is_delayed()) std::cerr << " [delayed]"; + if (expression->is_interpolant()) std::cerr << " [is interpolant]"; + if (expression->has_interpolant()) std::cerr << " [has interpolant]"; + if (expression->is_left_interpolant()) std::cerr << " [left interpolant] "; + if (expression->is_right_interpolant()) std::cerr << " [right interpolant] "; + std::cerr << " <" << prettyprint(expression->pstate().token.ws_before()) << ">" << std::endl; + for(const auto& i : expression->elements()) { debug_ast(i, ind + " ", env); } + } else if (Cast(node)) { + String_Ptr expression = Cast(node); + std::cerr << ind << "String " << expression; + std::cerr << " " << expression->concrete_type(); + std::cerr << " (" << pstate_source_position(node) << ")"; + if (expression->is_interpolant()) std::cerr << " [interpolant]"; + std::cerr << " <" << prettyprint(expression->pstate().token.ws_before()) << ">" << std::endl; + } else if (Cast(node)) { + Expression_Ptr expression = Cast(node); + std::cerr << ind << "Expression " << expression; + std::cerr << " (" << pstate_source_position(node) << ")"; + switch (expression->concrete_type()) { + case Expression::Concrete_Type::NONE: std::cerr << " [NONE]"; break; + case Expression::Concrete_Type::BOOLEAN: std::cerr << " [BOOLEAN]"; break; + case Expression::Concrete_Type::NUMBER: std::cerr << " [NUMBER]"; break; + case Expression::Concrete_Type::COLOR: std::cerr << " [COLOR]"; break; + case Expression::Concrete_Type::STRING: std::cerr << " [STRING]"; break; + case Expression::Concrete_Type::LIST: std::cerr << " [LIST]"; break; + case Expression::Concrete_Type::MAP: std::cerr << " [MAP]"; break; + case Expression::Concrete_Type::SELECTOR: std::cerr << " [SELECTOR]"; break; + case Expression::Concrete_Type::NULL_VAL: std::cerr << " [NULL_VAL]"; break; + case Expression::Concrete_Type::C_WARNING: std::cerr << " [C_WARNING]"; break; + case Expression::Concrete_Type::C_ERROR: std::cerr << " [C_ERROR]"; break; + case Expression::Concrete_Type::FUNCTION: std::cerr << " [FUNCTION]"; break; + case Expression::Concrete_Type::NUM_TYPES: std::cerr << " [NUM_TYPES]"; break; + case Expression::Concrete_Type::VARIABLE: std::cerr << " [VARIABLE]"; break; + case Expression::Concrete_Type::FUNCTION_VAL: std::cerr << " [FUNCTION_VAL]"; break; + } + std::cerr << std::endl; + } else if (Cast(node)) { + Has_Block_Ptr has_block = Cast(node); + std::cerr << ind << "Has_Block " << has_block; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << has_block->tabs() << std::endl; + if (has_block->block()) for(const Statement_Obj& i : has_block->block()->elements()) { debug_ast(i, ind + " ", env); } + } else if (Cast(node)) { + Statement_Ptr statement = Cast(node); + std::cerr << ind << "Statement " << statement; + std::cerr << " (" << pstate_source_position(node) << ")"; + std::cerr << " " << statement->tabs() << std::endl; + } + + if (ind == "") std::cerr << "####################################################################\n"; +} + +inline void debug_node(Node* node, std::string ind = "") +{ + if (ind == "") std::cerr << "#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n"; + if (node->isCombinator()) { + std::cerr << ind; + std::cerr << "Combinator "; + std::cerr << node << " "; + if 
(node->got_line_feed) std::cerr << "[LF] "; + switch (node->combinator()) { + case Complex_Selector::ADJACENT_TO: std::cerr << "{+} "; break; + case Complex_Selector::PARENT_OF: std::cerr << "{>} "; break; + case Complex_Selector::PRECEDES: std::cerr << "{~} "; break; + case Complex_Selector::REFERENCE: std::cerr << "{@} "; break; + case Complex_Selector::ANCESTOR_OF: std::cerr << "{ } "; break; + } + std::cerr << std::endl; + // debug_ast(node->combinator(), ind + " "); + } else if (node->isSelector()) { + std::cerr << ind; + std::cerr << "Selector "; + std::cerr << node << " "; + if (node->got_line_feed) std::cerr << "[LF] "; + std::cerr << std::endl; + debug_ast(node->selector(), ind + " "); + } else if (node->isCollection()) { + std::cerr << ind; + std::cerr << "Collection "; + std::cerr << node << " "; + if (node->got_line_feed) std::cerr << "[LF] "; + std::cerr << std::endl; + for(auto n : (*node->collection())) { + debug_node(&n, ind + " "); + } + } else if (node->isNil()) { + std::cerr << ind; + std::cerr << "Nil "; + std::cerr << node << " "; + if (node->got_line_feed) std::cerr << "[LF] "; + std::cerr << std::endl; + } else { + std::cerr << ind; + std::cerr << "OTHER "; + std::cerr << node << " "; + if (node->got_line_feed) std::cerr << "[LF] "; + std::cerr << std::endl; + } + if (ind == "") std::cerr << "#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n"; +} + +/* +inline void debug_ast(const AST_Node_Ptr node, std::string ind = "", Env* env = 0) +{ + debug_ast(const_cast(node), ind, env); +} +*/ +inline void debug_node(const Node* node, std::string ind = "") +{ + debug_node(const_cast(node), ind); +} + +inline void debug_subset_map(Sass::Subset_Map& map, std::string ind = "") +{ + if (ind == "") std::cerr << "#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n"; + for(auto const &it : map.values()) { + debug_ast(it.first, ind + "first: "); + debug_ast(it.second, ind + "second: "); + } + if (ind == "") std::cerr << "#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n"; +} + +inline void debug_subset_entries(SubSetMapPairs* entries, std::string ind = "") +{ + if (ind == "") std::cerr << "#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n"; + for(auto const &pair : *entries) { + debug_ast(pair.first, ind + "first: "); + debug_ast(pair.second, ind + "second: "); + } + if (ind == "") std::cerr << "#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n"; +} + +#endif // SASS_DEBUGGER diff --git a/mybulma/node_modules/node-sass/src/libsass/src/emitter.cpp b/mybulma/node_modules/node-sass/src/libsass/src/emitter.cpp new file mode 100644 index 0000000..161e689 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/emitter.cpp @@ -0,0 +1,297 @@ +#include "sass.hpp" +#include "util.hpp" +#include "context.hpp" +#include "output.hpp" +#include "emitter.hpp" +#include "utf8_string.hpp" + +namespace Sass { + + Emitter::Emitter(struct Sass_Output_Options& opt) + : wbuf(), + opt(opt), + indentation(0), + scheduled_space(0), + scheduled_linefeed(0), + scheduled_delimiter(false), + scheduled_crutch(0), + scheduled_mapping(0), + in_custom_property(false), + in_comment(false), + in_wrapped(false), + in_media_block(false), + in_declaration(false), + in_space_array(false), + in_comma_array(false) + { } + + // return buffer as string + std::string Emitter::get_buffer(void) + { + return wbuf.buffer; + } + + Sass_Output_Style Emitter::output_style(void) const + { + return opt.output_style; + } + + 
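// ---------------------------------------------------------------------------
// Aside: the Emitter constructed above defers pending output through its
// scheduled_space, scheduled_linefeed and scheduled_delimiter members and
// only writes them out immediately before the next real token (see
// flush_schedules further down). A minimal sketch of that "schedule now,
// flush before the next token" idea, independent of the libsass types; the
// class and method names here are illustrative, not libsass API.
#include <string>

namespace sketch {
  // Toy writer that defers spaces and the statement delimiter until the next
  // concrete token arrives, mirroring the scheduled_* members above.
  class DeferredWriter {
    std::string buffer;
    size_t scheduled_space = 0;
    bool scheduled_delimiter = false;

    void flush() {
      if (scheduled_delimiter) { buffer += ';'; scheduled_delimiter = false; }
      if (scheduled_space) { buffer.append(scheduled_space, ' '); scheduled_space = 0; }
    }

  public:
    void schedule_space()     { scheduled_space = 1; }
    void schedule_delimiter() { scheduled_delimiter = true; }

    // Real output flushes whatever was scheduled first, so trailing spaces
    // and semicolons at the very end of the buffer are never emitted.
    void append(const std::string& text) { flush(); buffer += text; }

    const std::string& str() const { return buffer; }
  };
}

// Usage: w.append("a:1"); w.schedule_delimiter(); w.schedule_space();
// w.append("b:2");   // buffer is now "a:1; b:2"; the delimiter and space
// were only written once the next token forced a flush.
// ---------------------------------------------------------------------------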
// PROXY METHODS FOR SOURCE MAPS + + void Emitter::add_source_index(size_t idx) + { wbuf.smap.source_index.push_back(idx); } + + std::string Emitter::render_srcmap(Context &ctx) + { return wbuf.smap.render_srcmap(ctx); } + + void Emitter::set_filename(const std::string& str) + { wbuf.smap.file = str; } + + void Emitter::schedule_mapping(const AST_Node_Ptr node) + { scheduled_mapping = node; } + void Emitter::add_open_mapping(const AST_Node_Ptr node) + { wbuf.smap.add_open_mapping(node); } + void Emitter::add_close_mapping(const AST_Node_Ptr node) + { wbuf.smap.add_close_mapping(node); } + ParserState Emitter::remap(const ParserState& pstate) + { return wbuf.smap.remap(pstate); } + + // MAIN BUFFER MANIPULATION + + // add outstanding delimiter + void Emitter::finalize(bool final) + { + scheduled_space = 0; + if (output_style() == SASS_STYLE_COMPRESSED) + if (final) scheduled_delimiter = false; + if (scheduled_linefeed) + scheduled_linefeed = 1; + flush_schedules(); + } + + // flush scheduled space/linefeed + void Emitter::flush_schedules(void) + { + // check the schedule + if (scheduled_linefeed) { + std::string linefeeds = ""; + + for (size_t i = 0; i < scheduled_linefeed; i++) + linefeeds += opt.linefeed; + scheduled_space = 0; + scheduled_linefeed = 0; + append_string(linefeeds); + + } else if (scheduled_space) { + std::string spaces(scheduled_space, ' '); + scheduled_space = 0; + append_string(spaces); + } + if (scheduled_delimiter) { + scheduled_delimiter = false; + append_string(";"); + } + } + + // prepend some text or token to the buffer + void Emitter::prepend_output(const OutputBuffer& output) + { + wbuf.smap.prepend(output); + wbuf.buffer = output.buffer + wbuf.buffer; + } + + // prepend some text or token to the buffer + void Emitter::prepend_string(const std::string& text) + { + // do not adjust mappings for utf8 bom + // seems they are not counted in any UA + if (text.compare("\xEF\xBB\xBF") != 0) { + wbuf.smap.prepend(Offset(text)); + } + wbuf.buffer = text + wbuf.buffer; + } + + char Emitter::last_char() + { + return wbuf.buffer.back(); + } + + // append a single char to the buffer + void Emitter::append_char(const char chr) + { + // write space/lf + flush_schedules(); + // add to buffer + wbuf.buffer += chr; + // account for data in source-maps + wbuf.smap.append(Offset(chr)); + } + + // append some text or token to the buffer + void Emitter::append_string(const std::string& text) + { + + // write space/lf + flush_schedules(); + + if (in_comment && output_style() == COMPACT) { + // unescape comment nodes + std::string out = comment_to_string(text); + // add to buffer + wbuf.buffer += out; + // account for data in source-maps + wbuf.smap.append(Offset(out)); + } else { + // add to buffer + wbuf.buffer += text; + // account for data in source-maps + wbuf.smap.append(Offset(text)); + } + } + + // append some white-space only text + void Emitter::append_wspace(const std::string& text) + { + if (text.empty()) return; + if (peek_linefeed(text.c_str())) { + scheduled_space = 0; + append_mandatory_linefeed(); + } + } + + // append some text or token to the buffer + // this adds source-mappings for node start and end + void Emitter::append_token(const std::string& text, const AST_Node_Ptr node) + { + flush_schedules(); + add_open_mapping(node); + // hotfix for browser issues + // this is pretty ugly indeed + if (scheduled_crutch) { + add_open_mapping(scheduled_crutch); + scheduled_crutch = 0; + } + append_string(text); + add_close_mapping(node); + } + + // HELPER METHODS + + void 
Emitter::append_indentation() + { + if (output_style() == COMPRESSED) return; + if (output_style() == COMPACT) return; + if (in_declaration && in_comma_array) return; + if (scheduled_linefeed && indentation) + scheduled_linefeed = 1; + std::string indent = ""; + for (size_t i = 0; i < indentation; i++) + indent += opt.indent; + append_string(indent); + } + + void Emitter::append_delimiter() + { + scheduled_delimiter = true; + if (output_style() == COMPACT) { + if (indentation == 0) { + append_mandatory_linefeed(); + } else { + append_mandatory_space(); + } + } else if (output_style() != COMPRESSED) { + append_optional_linefeed(); + } + } + + void Emitter::append_comma_separator() + { + // scheduled_space = 0; + append_string(","); + append_optional_space(); + } + + void Emitter::append_colon_separator() + { + scheduled_space = 0; + append_string(":"); + if (!in_custom_property) append_optional_space(); + } + + void Emitter::append_mandatory_space() + { + scheduled_space = 1; + } + + void Emitter::append_optional_space() + { + if ((output_style() != COMPRESSED) && buffer().size()) { + unsigned char lst = buffer().at(buffer().length() - 1); + if (!isspace(lst) || scheduled_delimiter) { + if (last_char() != '(') { + append_mandatory_space(); + } + } + } + } + + void Emitter::append_special_linefeed() + { + if (output_style() == COMPACT) { + append_mandatory_linefeed(); + for (size_t p = 0; p < indentation; p++) + append_string(opt.indent); + } + } + + void Emitter::append_optional_linefeed() + { + if (in_declaration && in_comma_array) return; + if (output_style() == COMPACT) { + append_mandatory_space(); + } else { + append_mandatory_linefeed(); + } + } + + void Emitter::append_mandatory_linefeed() + { + if (output_style() != COMPRESSED) { + scheduled_linefeed = 1; + scheduled_space = 0; + // flush_schedules(); + } + } + + void Emitter::append_scope_opener(AST_Node_Ptr node) + { + scheduled_linefeed = 0; + append_optional_space(); + flush_schedules(); + if (node) add_open_mapping(node); + append_string("{"); + append_optional_linefeed(); + // append_optional_space(); + ++ indentation; + } + void Emitter::append_scope_closer(AST_Node_Ptr node) + { + -- indentation; + scheduled_linefeed = 0; + if (output_style() == COMPRESSED) + scheduled_delimiter = false; + if (output_style() == EXPANDED) { + append_optional_linefeed(); + append_indentation(); + } else { + append_optional_space(); + } + append_string("}"); + if (node) add_close_mapping(node); + append_optional_linefeed(); + if (indentation != 0) return; + if (output_style() != COMPRESSED) + scheduled_linefeed = 2; + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/emitter.hpp b/mybulma/node_modules/node-sass/src/libsass/src/emitter.hpp new file mode 100644 index 0000000..3bf8f60 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/emitter.hpp @@ -0,0 +1,99 @@ +#ifndef SASS_EMITTER_H +#define SASS_EMITTER_H + +#include +#include "sass.hpp" +#include "sass/base.h" +#include "source_map.hpp" +#include "ast_fwd_decl.hpp" + +namespace Sass { + class Context; + + class Emitter { + + public: + Emitter(struct Sass_Output_Options& opt); + virtual ~Emitter() { } + + protected: + OutputBuffer wbuf; + public: + const std::string& buffer(void) { return wbuf.buffer; } + const SourceMap smap(void) { return wbuf.smap; } + const OutputBuffer output(void) { return wbuf; } + // proxy methods for source maps + void add_source_index(size_t idx); + void set_filename(const std::string& str); + void add_open_mapping(const AST_Node_Ptr 
node); + void add_close_mapping(const AST_Node_Ptr node); + void schedule_mapping(const AST_Node_Ptr node); + std::string render_srcmap(Context &ctx); + ParserState remap(const ParserState& pstate); + + public: + struct Sass_Output_Options& opt; + size_t indentation; + size_t scheduled_space; + size_t scheduled_linefeed; + bool scheduled_delimiter; + AST_Node_Ptr scheduled_crutch; + AST_Node_Ptr scheduled_mapping; + + public: + // output strings different in custom css properties + bool in_custom_property; + // output strings different in comments + bool in_comment; + // selector list does not get linefeeds + bool in_wrapped; + // lists always get a space after delimiter + bool in_media_block; + // nested list must not have parentheses + bool in_declaration; + // nested lists need parentheses + bool in_space_array; + bool in_comma_array; + + public: + // return buffer as std::string + std::string get_buffer(void); + // flush scheduled space/linefeed + Sass_Output_Style output_style(void) const; + // add outstanding linefeed + void finalize(bool final = true); + // flush scheduled space/linefeed + void flush_schedules(void); + // prepend some text or token to the buffer + void prepend_string(const std::string& text); + void prepend_output(const OutputBuffer& out); + // append some text or token to the buffer + void append_string(const std::string& text); + // append a single character to buffer + void append_char(const char chr); + // append some white-space only text + void append_wspace(const std::string& text); + // append some text or token to the buffer + // this adds source-mappings for node start and end + void append_token(const std::string& text, const AST_Node_Ptr node); + // query last appended character + char last_char(); + + public: // syntax sugar + void append_indentation(); + void append_optional_space(void); + void append_mandatory_space(void); + void append_special_linefeed(void); + void append_optional_linefeed(void); + void append_mandatory_linefeed(void); + void append_scope_opener(AST_Node_Ptr node = 0); + void append_scope_closer(AST_Node_Ptr node = 0); + void append_comma_separator(void); + void append_colon_separator(void); + void append_delimiter(void); + + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/environment.cpp b/mybulma/node_modules/node-sass/src/libsass/src/environment.cpp new file mode 100644 index 0000000..e382e7e --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/environment.cpp @@ -0,0 +1,246 @@ +#include "sass.hpp" +#include "ast.hpp" +#include "environment.hpp" + +namespace Sass { + + template + Environment::Environment(bool is_shadow) + : local_frame_(environment_map()), + parent_(0), is_shadow_(false) + { } + template + Environment::Environment(Environment* env, bool is_shadow) + : local_frame_(environment_map()), + parent_(env), is_shadow_(is_shadow) + { } + template + Environment::Environment(Environment& env, bool is_shadow) + : local_frame_(environment_map()), + parent_(&env), is_shadow_(is_shadow) + { } + + // link parent to create a stack + template + void Environment::link(Environment& env) { parent_ = &env; } + template + void Environment::link(Environment* env) { parent_ = env; } + + // this is used to find the global frame + // which is the second last on the stack + template + bool Environment::is_lexical() const + { + return !! 
parent_ && parent_->parent_; + } + + // only match the real root scope + // there is still a parent around + // not sure what it is actually use for + // I guess we store functions etc. there + template + bool Environment::is_global() const + { + return parent_ && ! parent_->parent_; + } + + template + environment_map& Environment::local_frame() { + return local_frame_; + } + + template + bool Environment::has_local(const std::string& key) const + { return local_frame_.find(key) != local_frame_.end(); } + + template EnvResult + Environment::find_local(const std::string& key) + { + auto end = local_frame_.end(); + auto it = local_frame_.find(key); + return EnvResult(it, it != end); + } + + template + T& Environment::get_local(const std::string& key) + { return local_frame_[key]; } + + template + void Environment::set_local(const std::string& key, const T& val) + { + local_frame_[key] = val; + } + template + void Environment::set_local(const std::string& key, T&& val) + { + local_frame_[key] = val; + } + + template + void Environment::del_local(const std::string& key) + { local_frame_.erase(key); } + + template + Environment* Environment::global_env() + { + Environment* cur = this; + while (cur->is_lexical()) { + cur = cur->parent_; + } + return cur; + } + + template + bool Environment::has_global(const std::string& key) + { return global_env()->has(key); } + + template + T& Environment::get_global(const std::string& key) + { return (*global_env())[key]; } + + template + void Environment::set_global(const std::string& key, const T& val) + { + global_env()->local_frame_[key] = val; + } + template + void Environment::set_global(const std::string& key, T&& val) + { + global_env()->local_frame_[key] = val; + } + + template + void Environment::del_global(const std::string& key) + { global_env()->local_frame_.erase(key); } + + template + Environment* Environment::lexical_env(const std::string& key) + { + Environment* cur = this; + while (cur) { + if (cur->has_local(key)) { + return cur; + } + cur = cur->parent_; + } + return this; + } + + // see if we have a lexical variable + // move down the stack but stop before we + // reach the global frame (is not included) + template + bool Environment::has_lexical(const std::string& key) const + { + auto cur = this; + while (cur->is_lexical()) { + if (cur->has_local(key)) return true; + cur = cur->parent_; + } + return false; + } + + // see if we have a lexical we could update + // either update already existing lexical value + // or if flag is set, we create one if no lexical found + template + void Environment::set_lexical(const std::string& key, const T& val) + { + Environment* cur = this; + bool shadow = false; + while ((cur && cur->is_lexical()) || shadow) { + EnvResult rv(cur->find_local(key)); + if (rv.found) { + rv.it->second = val; + return; + } + shadow = cur->is_shadow(); + cur = cur->parent_; + } + set_local(key, val); + } + // this one moves the value + template + void Environment::set_lexical(const std::string& key, T&& val) + { + Environment* cur = this; + bool shadow = false; + while ((cur && cur->is_lexical()) || shadow) { + EnvResult rv(cur->find_local(key)); + if (rv.found) { + rv.it->second = val; + return; + } + shadow = cur->is_shadow(); + cur = cur->parent_; + } + set_local(key, val); + } + + // look on the full stack for key + // include all scopes available + template + bool Environment::has(const std::string& key) const + { + auto cur = this; + while (cur) { + if (cur->has_local(key)) { + return true; + } + cur = cur->parent_; + } 
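// ---------------------------------------------------------------------------
// Aside: Environment::has() here (and find()/operator[] below) resolve a name
// by walking the parent_ chain from the innermost frame outward and stopping
// at the first frame that defines it; the global frame is simply the root of
// that chain. A minimal sketch of that chained-scope lookup using
// std::unordered_map frames; the names are illustrative and the sketch omits
// the global/shadow-frame distinctions handled by set_lexical above.
#include <string>
#include <unordered_map>

namespace sketch {
  // Toy scope chain: each Scope owns a local frame and points at its parent.
  struct Scope {
    std::unordered_map<std::string, int> locals;
    Scope* parent = nullptr;

    // Walk outward until some frame defines the key (like Environment::has).
    bool has(const std::string& key) const {
      for (const Scope* cur = this; cur; cur = cur->parent)
        if (cur->locals.count(key)) return true;
      return false;
    }

    // Assign into the innermost frame that already defines the key, otherwise
    // create it locally (roughly what set_lexical above does, minus the
    // shadow and global special cases).
    void set(const std::string& key, int val) {
      for (Scope* cur = this; cur; cur = cur->parent) {
        auto it = cur->locals.find(key);
        if (it != cur->locals.end()) { it->second = val; return; }
      }
      locals[key] = val;
    }
  };
}
// ---------------------------------------------------------------------------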
+ return false; + } + + // look on the full stack for key + // include all scopes available + template EnvResult + Environment::find(const std::string& key) + { + auto cur = this; + while (true) { + EnvResult rv(cur->find_local(key)); + if (rv.found) return rv; + cur = cur->parent_; + if (!cur) return rv; + } + }; + + // use array access for getter and setter functions + template + T& Environment::operator[](const std::string& key) + { + auto cur = this; + while (cur) { + if (cur->has_local(key)) { + return cur->get_local(key); + } + cur = cur->parent_; + } + return get_local(key); + } +/* + #ifdef DEBUG + template + size_t Environment::print(std::string prefix) + { + size_t indent = 0; + if (parent_) indent = parent_->print(prefix) + 1; + std::cerr << prefix << std::string(indent, ' ') << "== " << this << std::endl; + for (typename environment_map::iterator i = local_frame_.begin(); i != local_frame_.end(); ++i) { + if (!ends_with(i->first, "[f]") && !ends_with(i->first, "[f]4") && !ends_with(i->first, "[f]2")) { + std::cerr << prefix << std::string(indent, ' ') << i->first << " " << i->second; + if (Value_Ptr val = Cast(i->second)) + { std::cerr << " : " << val->to_string(); } + std::cerr << std::endl; + } + } + return indent ; + } + #endif +*/ + // compile implementation for AST_Node + template class Environment; + +} + diff --git a/mybulma/node_modules/node-sass/src/libsass/src/environment.hpp b/mybulma/node_modules/node-sass/src/libsass/src/environment.hpp new file mode 100644 index 0000000..a6939be --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/environment.hpp @@ -0,0 +1,113 @@ +#ifndef SASS_ENVIRONMENT_H +#define SASS_ENVIRONMENT_H + +#include +#include "ast_fwd_decl.hpp" +#include "ast_def_macros.hpp" + +namespace Sass { + + typedef environment_map::iterator EnvIter; + + class EnvResult { + public: + EnvIter it; + bool found; + public: + EnvResult(EnvIter it, bool found) + : it(it), found(found) {} + }; + + template + class Environment { + // TODO: test with map + environment_map local_frame_; + ADD_PROPERTY(Environment*, parent) + ADD_PROPERTY(bool, is_shadow) + + public: + Environment(bool is_shadow = false); + Environment(Environment* env, bool is_shadow = false); + Environment(Environment& env, bool is_shadow = false); + + // link parent to create a stack + void link(Environment& env); + void link(Environment* env); + + // this is used to find the global frame + // which is the second last on the stack + bool is_lexical() const; + + // only match the real root scope + // there is still a parent around + // not sure what it is actually use for + // I guess we store functions etc. 
there + bool is_global() const; + + // scope operates on the current frame + + environment_map& local_frame(); + + bool has_local(const std::string& key) const; + + EnvResult find_local(const std::string& key); + + T& get_local(const std::string& key); + + // set variable on the current frame + void set_local(const std::string& key, const T& val); + void set_local(const std::string& key, T&& val); + + void del_local(const std::string& key); + + // global operates on the global frame + // which is the second last on the stack + Environment* global_env(); + // get the env where the variable already exists + // if it does not yet exist, we return current env + Environment* lexical_env(const std::string& key); + + bool has_global(const std::string& key); + + T& get_global(const std::string& key); + + // set a variable on the global frame + void set_global(const std::string& key, const T& val); + void set_global(const std::string& key, T&& val); + + void del_global(const std::string& key); + + // see if we have a lexical variable + // move down the stack but stop before we + // reach the global frame (is not included) + bool has_lexical(const std::string& key) const; + + // see if we have a lexical we could update + // either update already existing lexical value + // or we create a new one on the current frame + void set_lexical(const std::string& key, T&& val); + void set_lexical(const std::string& key, const T& val); + + // look on the full stack for key + // include all scopes available + bool has(const std::string& key) const; + + // look on the full stack for key + // include all scopes available + EnvResult find(const std::string& key); + + // use array access for getter and setter functions + T& operator[](const std::string& key); + + #ifdef DEBUG + size_t print(std::string prefix = ""); + #endif + + }; + + // define typedef for our use case + typedef Environment Env; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/error_handling.cpp b/mybulma/node_modules/node-sass/src/libsass/src/error_handling.cpp new file mode 100644 index 0000000..745f655 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/error_handling.cpp @@ -0,0 +1,235 @@ +#include "sass.hpp" +#include "ast.hpp" +#include "prelexer.hpp" +#include "backtrace.hpp" +#include "error_handling.hpp" + +#include + +namespace Sass { + + namespace Exception { + + Base::Base(ParserState pstate, std::string msg, Backtraces traces) + : std::runtime_error(msg), msg(msg), + prefix("Error"), pstate(pstate), traces(traces) + { } + + InvalidSass::InvalidSass(ParserState pstate, Backtraces traces, std::string msg) + : Base(pstate, msg, traces) + { } + + + InvalidParent::InvalidParent(Selector_Ptr parent, Backtraces traces, Selector_Ptr selector) + : Base(selector->pstate(), def_msg, traces), parent(parent), selector(selector) + { + msg = "Invalid parent selector for \""; + msg += selector->to_string(Sass_Inspect_Options()); + msg += "\": \""; + msg += parent->to_string(Sass_Inspect_Options()); + msg += "\""; + } + + InvalidVarKwdType::InvalidVarKwdType(ParserState pstate, Backtraces traces, std::string name, const Argument_Ptr arg) + : Base(pstate, def_msg, traces), name(name), arg(arg) + { + msg = "Variable keyword argument map must have string keys.\n"; + msg += name + " is not a string in " + arg->to_string() + "."; + } + + InvalidArgumentType::InvalidArgumentType(ParserState pstate, Backtraces traces, std::string fn, std::string arg, std::string type, const Value_Ptr value) + : Base(pstate, def_msg, 
traces), fn(fn), arg(arg), type(type), value(value) + { + msg = arg + ": \""; + if (value) msg += value->to_string(Sass_Inspect_Options()); + msg += "\" is not a " + type; + msg += " for `" + fn + "'"; + } + + MissingArgument::MissingArgument(ParserState pstate, Backtraces traces, std::string fn, std::string arg, std::string fntype) + : Base(pstate, def_msg, traces), fn(fn), arg(arg), fntype(fntype) + { + msg = fntype + " " + fn; + msg += " is missing argument "; + msg += arg + "."; + } + + InvalidSyntax::InvalidSyntax(ParserState pstate, Backtraces traces, std::string msg) + : Base(pstate, msg, traces) + { } + + NestingLimitError::NestingLimitError(ParserState pstate, Backtraces traces, std::string msg) + : Base(pstate, msg, traces) + { } + + DuplicateKeyError::DuplicateKeyError(Backtraces traces, const Map& dup, const Expression& org) + : Base(org.pstate(), def_msg, traces), dup(dup), org(org) + { + msg = "Duplicate key "; + msg += dup.get_duplicate_key()->inspect(); + msg += " in map ("; + msg += org.inspect(); + msg += ")."; + } + + TypeMismatch::TypeMismatch(Backtraces traces, const Expression& var, const std::string type) + : Base(var.pstate(), def_msg, traces), var(var), type(type) + { + msg = var.to_string(); + msg += " is not an "; + msg += type; + msg += "."; + } + + InvalidValue::InvalidValue(Backtraces traces, const Expression& val) + : Base(val.pstate(), def_msg, traces), val(val) + { + msg = val.to_string(); + msg += " isn't a valid CSS value."; + } + + StackError::StackError(Backtraces traces, const AST_Node& node) + : Base(node.pstate(), def_msg, traces), node(node) + { + msg = "stack level too deep"; + } + + IncompatibleUnits::IncompatibleUnits(const Units& lhs, const Units& rhs) + { + msg = "Incompatible units: '"; + msg += rhs.unit(); + msg += "' and '"; + msg += lhs.unit(); + msg += "'."; + } + + IncompatibleUnits::IncompatibleUnits(const UnitType lhs, const UnitType rhs) + { + msg = "Incompatible units: '"; + msg += unit_to_string(rhs); + msg += "' and '"; + msg += unit_to_string(lhs); + msg += "'."; + } + + AlphaChannelsNotEqual::AlphaChannelsNotEqual(Expression_Ptr_Const lhs, Expression_Ptr_Const rhs, enum Sass_OP op) + : OperationError(), lhs(lhs), rhs(rhs), op(op) + { + msg = "Alpha channels must be equal: "; + msg += lhs->to_string({ NESTED, 5 }); + msg += " " + sass_op_to_name(op) + " "; + msg += rhs->to_string({ NESTED, 5 }); + msg += "."; + } + + ZeroDivisionError::ZeroDivisionError(const Expression& lhs, const Expression& rhs) + : OperationError(), lhs(lhs), rhs(rhs) + { + msg = "divided by 0"; + } + + UndefinedOperation::UndefinedOperation(Expression_Ptr_Const lhs, Expression_Ptr_Const rhs, enum Sass_OP op) + : OperationError(), lhs(lhs), rhs(rhs), op(op) + { + msg = def_op_msg + ": \""; + msg += lhs->to_string({ NESTED, 5 }); + msg += " " + sass_op_to_name(op) + " "; + msg += rhs->to_string({ TO_SASS, 5 }); + msg += "\"."; + } + + InvalidNullOperation::InvalidNullOperation(Expression_Ptr_Const lhs, Expression_Ptr_Const rhs, enum Sass_OP op) + : UndefinedOperation(lhs, rhs, op) + { + msg = def_op_null_msg + ": \""; + msg += lhs->inspect(); + msg += " " + sass_op_to_name(op) + " "; + msg += rhs->inspect(); + msg += "\"."; + } + + SassValueError::SassValueError(Backtraces traces, ParserState pstate, OperationError& err) + : Base(pstate, err.what(), traces) + { + msg = err.what(); + prefix = err.errtype(); + } + + } + + + void warn(std::string msg, ParserState pstate) + { + std::cerr << "Warning: " << msg << std::endl; + } + + void warning(std::string msg, 
ParserState pstate) + { + std::string cwd(Sass::File::get_cwd()); + std::string abs_path(Sass::File::rel2abs(pstate.path, cwd, cwd)); + std::string rel_path(Sass::File::abs2rel(pstate.path, cwd, cwd)); + std::string output_path(Sass::File::path_for_console(rel_path, abs_path, pstate.path)); + + std::cerr << "WARNING on line " << pstate.line+1 << ", column " << pstate.column+1 << " of " << output_path << ":" << std::endl; + std::cerr << msg << std::endl << std::endl; + } + + void warn(std::string msg, ParserState pstate, Backtrace* bt) + { + warn(msg, pstate); + } + + void deprecated_function(std::string msg, ParserState pstate) + { + std::string cwd(Sass::File::get_cwd()); + std::string abs_path(Sass::File::rel2abs(pstate.path, cwd, cwd)); + std::string rel_path(Sass::File::abs2rel(pstate.path, cwd, cwd)); + std::string output_path(Sass::File::path_for_console(rel_path, abs_path, pstate.path)); + + std::cerr << "DEPRECATION WARNING: " << msg << std::endl; + std::cerr << "will be an error in future versions of Sass." << std::endl; + std::cerr << " on line " << pstate.line+1 << " of " << output_path << std::endl; + } + + void deprecated(std::string msg, std::string msg2, bool with_column, ParserState pstate) + { + std::string cwd(Sass::File::get_cwd()); + std::string abs_path(Sass::File::rel2abs(pstate.path, cwd, cwd)); + std::string rel_path(Sass::File::abs2rel(pstate.path, cwd, cwd)); + std::string output_path(Sass::File::path_for_console(rel_path, pstate.path, pstate.path)); + + std::cerr << "DEPRECATION WARNING on line " << pstate.line + 1; + if (with_column) std::cerr << ", column " << pstate.column + pstate.offset.column + 1; + if (output_path.length()) std::cerr << " of " << output_path; + std::cerr << ":" << std::endl; + std::cerr << msg << std::endl; + if (msg2.length()) std::cerr << msg2 << std::endl; + std::cerr << std::endl; + } + + void deprecated_bind(std::string msg, ParserState pstate) + { + std::string cwd(Sass::File::get_cwd()); + std::string abs_path(Sass::File::rel2abs(pstate.path, cwd, cwd)); + std::string rel_path(Sass::File::abs2rel(pstate.path, cwd, cwd)); + std::string output_path(Sass::File::path_for_console(rel_path, abs_path, pstate.path)); + + std::cerr << "WARNING: " << msg << std::endl; + std::cerr << " on line " << pstate.line+1 << " of " << output_path << std::endl; + std::cerr << "This will be an error in future versions of Sass." 
<< std::endl; + } + + // should be replaced with error with backtraces + void coreError(std::string msg, ParserState pstate) + { + Backtraces traces; + throw Exception::InvalidSyntax(pstate, traces, msg); + } + + void error(std::string msg, ParserState pstate, Backtraces& traces) + { + traces.push_back(Backtrace(pstate)); + throw Exception::InvalidSyntax(pstate, traces, msg); + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/error_handling.hpp b/mybulma/node_modules/node-sass/src/libsass/src/error_handling.hpp new file mode 100644 index 0000000..f863792 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/error_handling.hpp @@ -0,0 +1,216 @@ +#ifndef SASS_ERROR_HANDLING_H +#define SASS_ERROR_HANDLING_H + +#include +#include +#include +#include "position.hpp" +#include "backtrace.hpp" +#include "ast_fwd_decl.hpp" +#include "sass/functions.h" + +namespace Sass { + + struct Backtrace; + + namespace Exception { + + const std::string def_msg = "Invalid sass detected"; + const std::string def_op_msg = "Undefined operation"; + const std::string def_op_null_msg = "Invalid null operation"; + const std::string def_nesting_limit = "Code too deeply neested"; + + class Base : public std::runtime_error { + protected: + std::string msg; + std::string prefix; + public: + ParserState pstate; + Backtraces traces; + public: + Base(ParserState pstate, std::string msg, Backtraces traces); + virtual const char* errtype() const { return prefix.c_str(); } + virtual const char* what() const throw() { return msg.c_str(); } + virtual ~Base() throw() {}; + }; + + class InvalidSass : public Base { + public: + InvalidSass(ParserState pstate, Backtraces traces, std::string msg); + virtual ~InvalidSass() throw() {}; + }; + + class InvalidParent : public Base { + protected: + Selector_Ptr parent; + Selector_Ptr selector; + public: + InvalidParent(Selector_Ptr parent, Backtraces traces, Selector_Ptr selector); + virtual ~InvalidParent() throw() {}; + }; + + class MissingArgument : public Base { + protected: + std::string fn; + std::string arg; + std::string fntype; + public: + MissingArgument(ParserState pstate, Backtraces traces, std::string fn, std::string arg, std::string fntype); + virtual ~MissingArgument() throw() {}; + }; + + class InvalidArgumentType : public Base { + protected: + std::string fn; + std::string arg; + std::string type; + const Value_Ptr value; + public: + InvalidArgumentType(ParserState pstate, Backtraces traces, std::string fn, std::string arg, std::string type, const Value_Ptr value = 0); + virtual ~InvalidArgumentType() throw() {}; + }; + + class InvalidVarKwdType : public Base { + protected: + std::string name; + const Argument_Ptr arg; + public: + InvalidVarKwdType(ParserState pstate, Backtraces traces, std::string name, const Argument_Ptr arg = 0); + virtual ~InvalidVarKwdType() throw() {}; + }; + + class InvalidSyntax : public Base { + public: + InvalidSyntax(ParserState pstate, Backtraces traces, std::string msg); + virtual ~InvalidSyntax() throw() {}; + }; + + class NestingLimitError : public Base { + public: + NestingLimitError(ParserState pstate, Backtraces traces, std::string msg = def_nesting_limit); + virtual ~NestingLimitError() throw() {}; + }; + + class DuplicateKeyError : public Base { + protected: + const Map& dup; + const Expression& org; + public: + DuplicateKeyError(Backtraces traces, const Map& dup, const Expression& org); + virtual const char* errtype() const { return "Error"; } + virtual ~DuplicateKeyError() throw() {}; + }; + + class 
TypeMismatch : public Base { + protected: + const Expression& var; + const std::string type; + public: + TypeMismatch(Backtraces traces, const Expression& var, const std::string type); + virtual const char* errtype() const { return "Error"; } + virtual ~TypeMismatch() throw() {}; + }; + + class InvalidValue : public Base { + protected: + const Expression& val; + public: + InvalidValue(Backtraces traces, const Expression& val); + virtual const char* errtype() const { return "Error"; } + virtual ~InvalidValue() throw() {}; + }; + + class StackError : public Base { + protected: + const AST_Node& node; + public: + StackError(Backtraces traces, const AST_Node& node); + virtual const char* errtype() const { return "SystemStackError"; } + virtual ~StackError() throw() {}; + }; + + /* common virtual base class (has no pstate or trace) */ + class OperationError : public std::runtime_error { + protected: + std::string msg; + public: + OperationError(std::string msg = def_op_msg) + : std::runtime_error(msg), msg(msg) + {}; + public: + virtual const char* errtype() const { return "Error"; } + virtual const char* what() const throw() { return msg.c_str(); } + virtual ~OperationError() throw() {}; + }; + + class ZeroDivisionError : public OperationError { + protected: + const Expression& lhs; + const Expression& rhs; + public: + ZeroDivisionError(const Expression& lhs, const Expression& rhs); + virtual const char* errtype() const { return "ZeroDivisionError"; } + virtual ~ZeroDivisionError() throw() {}; + }; + + class IncompatibleUnits : public OperationError { + protected: + // const Sass::UnitType lhs; + // const Sass::UnitType rhs; + public: + IncompatibleUnits(const Units& lhs, const Units& rhs); + IncompatibleUnits(const UnitType lhs, const UnitType rhs); + virtual ~IncompatibleUnits() throw() {}; + }; + + class UndefinedOperation : public OperationError { + protected: + Expression_Ptr_Const lhs; + Expression_Ptr_Const rhs; + const Sass_OP op; + public: + UndefinedOperation(Expression_Ptr_Const lhs, Expression_Ptr_Const rhs, enum Sass_OP op); + // virtual const char* errtype() const { return "Error"; } + virtual ~UndefinedOperation() throw() {}; + }; + + class InvalidNullOperation : public UndefinedOperation { + public: + InvalidNullOperation(Expression_Ptr_Const lhs, Expression_Ptr_Const rhs, enum Sass_OP op); + virtual ~InvalidNullOperation() throw() {}; + }; + + class AlphaChannelsNotEqual : public OperationError { + protected: + Expression_Ptr_Const lhs; + Expression_Ptr_Const rhs; + const Sass_OP op; + public: + AlphaChannelsNotEqual(Expression_Ptr_Const lhs, Expression_Ptr_Const rhs, enum Sass_OP op); + // virtual const char* errtype() const { return "Error"; } + virtual ~AlphaChannelsNotEqual() throw() {}; + }; + + class SassValueError : public Base { + public: + SassValueError(Backtraces traces, ParserState pstate, OperationError& err); + virtual ~SassValueError() throw() {}; + }; + + } + + void warn(std::string msg, ParserState pstate); + void warn(std::string msg, ParserState pstate, Backtrace* bt); + void warning(std::string msg, ParserState pstate); + + void deprecated_function(std::string msg, ParserState pstate); + void deprecated(std::string msg, std::string msg2, bool with_column, ParserState pstate); + void deprecated_bind(std::string msg, ParserState pstate); + // void deprecated(std::string msg, ParserState pstate, Backtrace* bt); + + void coreError(std::string msg, ParserState pstate); + void error(std::string msg, ParserState pstate, Backtraces& traces); + +} + +#endif diff 
--git a/mybulma/node_modules/node-sass/src/libsass/src/eval.cpp b/mybulma/node_modules/node-sass/src/libsass/src/eval.cpp new file mode 100644 index 0000000..841f727 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/eval.cpp @@ -0,0 +1,1663 @@ +#include "sass.hpp" +#include +#include +#include +#include +#include +#include + +#include "file.hpp" +#include "eval.hpp" +#include "ast.hpp" +#include "bind.hpp" +#include "util.hpp" +#include "inspect.hpp" +#include "operators.hpp" +#include "environment.hpp" +#include "position.hpp" +#include "sass/values.h" +#include "to_value.hpp" +#include "to_c.hpp" +#include "context.hpp" +#include "backtrace.hpp" +#include "lexer.hpp" +#include "prelexer.hpp" +#include "parser.hpp" +#include "expand.hpp" +#include "color_maps.hpp" +#include "sass_functions.hpp" + +namespace Sass { + + Eval::Eval(Expand& exp) + : exp(exp), + ctx(exp.ctx), + traces(exp.traces), + force(false), + is_in_comment(false), + is_in_selector_schema(false) + { + bool_true = SASS_MEMORY_NEW(Boolean, "[NA]", true); + bool_false = SASS_MEMORY_NEW(Boolean, "[NA]", false); + } + Eval::~Eval() { } + + Env* Eval::environment() + { + return exp.environment(); + } + + Selector_List_Obj Eval::selector() + { + return exp.selector(); + } + + Expression_Ptr Eval::operator()(Block_Ptr b) + { + Expression_Ptr val = 0; + for (size_t i = 0, L = b->length(); i < L; ++i) { + val = b->at(i)->perform(this); + if (val) return val; + } + return val; + } + + Expression_Ptr Eval::operator()(Assignment_Ptr a) + { + Env* env = exp.environment(); + std::string var(a->variable()); + if (a->is_global()) { + if (a->is_default()) { + if (env->has_global(var)) { + Expression_Ptr e = Cast(env->get_global(var)); + if (!e || e->concrete_type() == Expression::NULL_VAL) { + env->set_global(var, a->value()->perform(this)); + } + } + else { + env->set_global(var, a->value()->perform(this)); + } + } + else { + env->set_global(var, a->value()->perform(this)); + } + } + else if (a->is_default()) { + if (env->has_lexical(var)) { + auto cur = env; + while (cur && cur->is_lexical()) { + if (cur->has_local(var)) { + if (AST_Node_Obj node = cur->get_local(var)) { + Expression_Ptr e = Cast(node); + if (!e || e->concrete_type() == Expression::NULL_VAL) { + cur->set_local(var, a->value()->perform(this)); + } + } + else { + throw std::runtime_error("Env not in sync"); + } + return 0; + } + cur = cur->parent(); + } + throw std::runtime_error("Env not in sync"); + } + else if (env->has_global(var)) { + if (AST_Node_Obj node = env->get_global(var)) { + Expression_Ptr e = Cast(node); + if (!e || e->concrete_type() == Expression::NULL_VAL) { + env->set_global(var, a->value()->perform(this)); + } + } + } + else if (env->is_lexical()) { + env->set_local(var, a->value()->perform(this)); + } + else { + env->set_local(var, a->value()->perform(this)); + } + } + else { + env->set_lexical(var, a->value()->perform(this)); + } + return 0; + } + + Expression_Ptr Eval::operator()(If_Ptr i) + { + Expression_Obj rv = 0; + Env env(exp.environment()); + exp.env_stack.push_back(&env); + Expression_Obj cond = i->predicate()->perform(this); + if (!cond->is_false()) { + rv = i->block()->perform(this); + } + else { + Block_Obj alt = i->alternative(); + if (alt) rv = alt->perform(this); + } + exp.env_stack.pop_back(); + return rv.detach(); + } + + // For does not create a new env scope + // But iteration vars are reset afterwards + Expression_Ptr Eval::operator()(For_Ptr f) + { + std::string variable(f->variable()); + Expression_Obj low = 
f->lower_bound()->perform(this); + if (low->concrete_type() != Expression::NUMBER) { + traces.push_back(Backtrace(low->pstate())); + throw Exception::TypeMismatch(traces, *low, "integer"); + } + Expression_Obj high = f->upper_bound()->perform(this); + if (high->concrete_type() != Expression::NUMBER) { + traces.push_back(Backtrace(high->pstate())); + throw Exception::TypeMismatch(traces, *high, "integer"); + } + Number_Obj sass_start = Cast(low); + Number_Obj sass_end = Cast(high); + // check if units are valid for sequence + if (sass_start->unit() != sass_end->unit()) { + std::stringstream msg; msg << "Incompatible units: '" + << sass_end->unit() << "' and '" + << sass_start->unit() << "'."; + error(msg.str(), low->pstate(), traces); + } + double start = sass_start->value(); + double end = sass_end->value(); + // only create iterator once in this environment + Env env(environment(), true); + exp.env_stack.push_back(&env); + Block_Obj body = f->block(); + Expression_Ptr val = 0; + if (start < end) { + if (f->is_inclusive()) ++end; + for (double i = start; + i < end; + ++i) { + Number_Obj it = SASS_MEMORY_NEW(Number, low->pstate(), i, sass_end->unit()); + env.set_local(variable, it); + val = body->perform(this); + if (val) break; + } + } else { + if (f->is_inclusive()) --end; + for (double i = start; + i > end; + --i) { + Number_Obj it = SASS_MEMORY_NEW(Number, low->pstate(), i, sass_end->unit()); + env.set_local(variable, it); + val = body->perform(this); + if (val) break; + } + } + exp.env_stack.pop_back(); + return val; + } + + // Eval does not create a new env scope + // But iteration vars are reset afterwards + Expression_Ptr Eval::operator()(Each_Ptr e) + { + std::vector variables(e->variables()); + Expression_Obj expr = e->list()->perform(this); + Env env(environment(), true); + exp.env_stack.push_back(&env); + List_Obj list = 0; + Map_Ptr map = 0; + if (expr->concrete_type() == Expression::MAP) { + map = Cast(expr); + } + else if (Selector_List_Ptr ls = Cast(expr)) { + Listize listize; + Expression_Obj rv = ls->perform(&listize); + list = Cast(rv); + } + else if (expr->concrete_type() != Expression::LIST) { + list = SASS_MEMORY_NEW(List, expr->pstate(), 1, SASS_COMMA); + list->append(expr); + } + else { + list = Cast(expr); + } + + Block_Obj body = e->block(); + Expression_Obj val = 0; + + if (map) { + for (Expression_Obj key : map->keys()) { + Expression_Obj value = map->at(key); + + if (variables.size() == 1) { + List_Ptr variable = SASS_MEMORY_NEW(List, map->pstate(), 2, SASS_SPACE); + variable->append(key); + variable->append(value); + env.set_local(variables[0], variable); + } else { + env.set_local(variables[0], key); + env.set_local(variables[1], value); + } + + val = body->perform(this); + if (val) break; + } + } + else { + if (list->length() == 1 && Cast(list)) { + list = Cast(list); + } + for (size_t i = 0, L = list->length(); i < L; ++i) { + Expression_Ptr item = list->at(i); + // unwrap value if the expression is an argument + if (Argument_Ptr arg = Cast(item)) item = arg->value(); + // check if we got passed a list of args (investigate) + if (List_Ptr scalars = Cast(item)) { + if (variables.size() == 1) { + Expression_Ptr var = scalars; + env.set_local(variables[0], var); + } else { + // XXX: this is never hit via spec tests + for (size_t j = 0, K = variables.size(); j < K; ++j) { + Expression_Ptr res = j >= scalars->length() + ? 
SASS_MEMORY_NEW(Null, expr->pstate()) + : scalars->at(j); + env.set_local(variables[j], res); + } + } + } else { + if (variables.size() > 0) { + env.set_local(variables.at(0), item); + for (size_t j = 1, K = variables.size(); j < K; ++j) { + // XXX: this is never hit via spec tests + Expression_Ptr res = SASS_MEMORY_NEW(Null, expr->pstate()); + env.set_local(variables[j], res); + } + } + } + val = body->perform(this); + if (val) break; + } + } + exp.env_stack.pop_back(); + return val.detach(); + } + + Expression_Ptr Eval::operator()(While_Ptr w) + { + Expression_Obj pred = w->predicate(); + Block_Obj body = w->block(); + Env env(environment(), true); + exp.env_stack.push_back(&env); + Expression_Obj cond = pred->perform(this); + while (!cond->is_false()) { + Expression_Obj val = body->perform(this); + if (val) { + exp.env_stack.pop_back(); + return val.detach(); + } + cond = pred->perform(this); + } + exp.env_stack.pop_back(); + return 0; + } + + Expression_Ptr Eval::operator()(Return_Ptr r) + { + return r->value()->perform(this); + } + + Expression_Ptr Eval::operator()(Warning_Ptr w) + { + Sass_Output_Style outstyle = ctx.c_options.output_style; + ctx.c_options.output_style = NESTED; + Expression_Obj message = w->message()->perform(this); + Env* env = exp.environment(); + + // try to use generic function + if (env->has("@warn[f]")) { + + // add call stack entry + ctx.callee_stack.push_back({ + "@warn", + w->pstate().path, + w->pstate().line + 1, + w->pstate().column + 1, + SASS_CALLEE_FUNCTION, + { env } + }); + + Definition_Ptr def = Cast((*env)["@warn[f]"]); + // Block_Obj body = def->block(); + // Native_Function func = def->native_function(); + Sass_Function_Entry c_function = def->c_function(); + Sass_Function_Fn c_func = sass_function_get_function(c_function); + + To_C to_c; + union Sass_Value* c_args = sass_make_list(1, SASS_COMMA, false); + sass_list_set_value(c_args, 0, message->perform(&to_c)); + union Sass_Value* c_val = c_func(c_args, c_function, ctx.c_compiler); + ctx.c_options.output_style = outstyle; + ctx.callee_stack.pop_back(); + sass_delete_value(c_args); + sass_delete_value(c_val); + return 0; + + } + + std::string result(unquote(message->to_sass())); + std::cerr << "WARNING: " << result << std::endl; + traces.push_back(Backtrace(w->pstate())); + std::cerr << traces_to_string(traces, " "); + std::cerr << std::endl; + ctx.c_options.output_style = outstyle; + traces.pop_back(); + return 0; + } + + Expression_Ptr Eval::operator()(Error_Ptr e) + { + Sass_Output_Style outstyle = ctx.c_options.output_style; + ctx.c_options.output_style = NESTED; + Expression_Obj message = e->message()->perform(this); + Env* env = exp.environment(); + + // try to use generic function + if (env->has("@error[f]")) { + + // add call stack entry + ctx.callee_stack.push_back({ + "@error", + e->pstate().path, + e->pstate().line + 1, + e->pstate().column + 1, + SASS_CALLEE_FUNCTION, + { env } + }); + + Definition_Ptr def = Cast((*env)["@error[f]"]); + // Block_Obj body = def->block(); + // Native_Function func = def->native_function(); + Sass_Function_Entry c_function = def->c_function(); + Sass_Function_Fn c_func = sass_function_get_function(c_function); + + To_C to_c; + union Sass_Value* c_args = sass_make_list(1, SASS_COMMA, false); + sass_list_set_value(c_args, 0, message->perform(&to_c)); + union Sass_Value* c_val = c_func(c_args, c_function, ctx.c_compiler); + ctx.c_options.output_style = outstyle; + ctx.callee_stack.pop_back(); + sass_delete_value(c_args); + sass_delete_value(c_val); + return 
0; + + } + + std::string result(unquote(message->to_sass())); + ctx.c_options.output_style = outstyle; + error(result, e->pstate(), traces); + return 0; + } + + Expression_Ptr Eval::operator()(Debug_Ptr d) + { + Sass_Output_Style outstyle = ctx.c_options.output_style; + ctx.c_options.output_style = NESTED; + Expression_Obj message = d->value()->perform(this); + Env* env = exp.environment(); + + // try to use generic function + if (env->has("@debug[f]")) { + + // add call stack entry + ctx.callee_stack.push_back({ + "@debug", + d->pstate().path, + d->pstate().line + 1, + d->pstate().column + 1, + SASS_CALLEE_FUNCTION, + { env } + }); + + Definition_Ptr def = Cast((*env)["@debug[f]"]); + // Block_Obj body = def->block(); + // Native_Function func = def->native_function(); + Sass_Function_Entry c_function = def->c_function(); + Sass_Function_Fn c_func = sass_function_get_function(c_function); + + To_C to_c; + union Sass_Value* c_args = sass_make_list(1, SASS_COMMA, false); + sass_list_set_value(c_args, 0, message->perform(&to_c)); + union Sass_Value* c_val = c_func(c_args, c_function, ctx.c_compiler); + ctx.c_options.output_style = outstyle; + ctx.callee_stack.pop_back(); + sass_delete_value(c_args); + sass_delete_value(c_val); + return 0; + + } + + std::string cwd(ctx.cwd()); + std::string result(unquote(message->to_sass())); + std::string abs_path(Sass::File::rel2abs(d->pstate().path, cwd, cwd)); + std::string rel_path(Sass::File::abs2rel(d->pstate().path, cwd, cwd)); + std::string output_path(Sass::File::path_for_console(rel_path, abs_path, d->pstate().path)); + ctx.c_options.output_style = outstyle; + + std::cerr << output_path << ":" << d->pstate().line+1 << " DEBUG: " << result; + std::cerr << std::endl; + return 0; + } + + Expression_Ptr Eval::operator()(List_Ptr l) + { + // special case for unevaluated map + if (l->separator() == SASS_HASH) { + Map_Obj lm = SASS_MEMORY_NEW(Map, + l->pstate(), + l->length() / 2); + for (size_t i = 0, L = l->length(); i < L; i += 2) + { + Expression_Obj key = (*l)[i+0]->perform(this); + Expression_Obj val = (*l)[i+1]->perform(this); + // make sure the color key never displays its real name + key->is_delayed(true); // verified + *lm << std::make_pair(key, val); + } + if (lm->has_duplicate_key()) { + traces.push_back(Backtrace(l->pstate())); + throw Exception::DuplicateKeyError(traces, *lm, *l); + } + + lm->is_interpolant(l->is_interpolant()); + return lm->perform(this); + } + // check if we should expand it + if (l->is_expanded()) return l; + // regular case for unevaluated lists + List_Obj ll = SASS_MEMORY_NEW(List, + l->pstate(), + l->length(), + l->separator(), + l->is_arglist(), + l->is_bracketed()); + for (size_t i = 0, L = l->length(); i < L; ++i) { + ll->append((*l)[i]->perform(this)); + } + ll->is_interpolant(l->is_interpolant()); + ll->from_selector(l->from_selector()); + ll->is_expanded(true); + return ll.detach(); + } + + Expression_Ptr Eval::operator()(Map_Ptr m) + { + if (m->is_expanded()) return m; + + // make sure we're not starting with duplicate keys. + // the duplicate key state will have been set in the parser phase. 
+ if (m->has_duplicate_key()) { + traces.push_back(Backtrace(m->pstate())); + throw Exception::DuplicateKeyError(traces, *m, *m); + } + + Map_Obj mm = SASS_MEMORY_NEW(Map, + m->pstate(), + m->length()); + for (auto key : m->keys()) { + Expression_Ptr ex_key = key->perform(this); + Expression_Ptr ex_val = m->at(key); + if (ex_val == NULL) continue; + ex_val = ex_val->perform(this); + *mm << std::make_pair(ex_key, ex_val); + } + + // check the evaluated keys aren't duplicates. + if (mm->has_duplicate_key()) { + traces.push_back(Backtrace(m->pstate())); + throw Exception::DuplicateKeyError(traces, *mm, *m); + } + + mm->is_expanded(true); + return mm.detach(); + } + + Expression_Ptr Eval::operator()(Binary_Expression_Ptr b_in) + { + + Expression_Obj lhs = b_in->left(); + Expression_Obj rhs = b_in->right(); + enum Sass_OP op_type = b_in->optype(); + + if (op_type == Sass_OP::AND) { + // LOCAL_FLAG(force, true); + lhs = lhs->perform(this); + if (!*lhs) return lhs.detach(); + return rhs->perform(this); + } + else if (op_type == Sass_OP::OR) { + // LOCAL_FLAG(force, true); + lhs = lhs->perform(this); + if (*lhs) return lhs.detach(); + return rhs->perform(this); + } + + // Evaluate variables as early o + while (Variable_Ptr l_v = Cast(lhs)) { + lhs = operator()(l_v); + } + while (Variable_Ptr r_v = Cast(rhs)) { + rhs = operator()(r_v); + } + + Binary_Expression_Obj b = b_in; + + // Evaluate sub-expressions early on + while (Binary_Expression_Ptr l_b = Cast(lhs)) { + if (!force && l_b->is_delayed()) break; + lhs = operator()(l_b); + } + while (Binary_Expression_Ptr r_b = Cast(rhs)) { + if (!force && r_b->is_delayed()) break; + rhs = operator()(r_b); + } + + // don't eval delayed expressions (the '/' when used as a separator) + if (!force && op_type == Sass_OP::DIV && b->is_delayed()) { + b->right(b->right()->perform(this)); + b->left(b->left()->perform(this)); + return b.detach(); + } + + // specific types we know are final + // handle them early to avoid overhead + if (Number_Ptr l_n = Cast(lhs)) { + // lhs is number and rhs is number + if (Number_Ptr r_n = Cast(rhs)) { + try { + switch (op_type) { + case Sass_OP::EQ: return *l_n == *r_n ? bool_true : bool_false; + case Sass_OP::NEQ: return *l_n == *r_n ? bool_false : bool_true; + case Sass_OP::LT: return *l_n < *r_n ? bool_true : bool_false; + case Sass_OP::GTE: return *l_n < *r_n ? bool_false : bool_true; + case Sass_OP::LTE: return *l_n < *r_n || *l_n == *r_n ? bool_true : bool_false; + case Sass_OP::GT: return *l_n < *r_n || *l_n == *r_n ? bool_false : bool_true; + case Sass_OP::ADD: case Sass_OP::SUB: case Sass_OP::MUL: case Sass_OP::DIV: case Sass_OP::MOD: + return Operators::op_numbers(op_type, *l_n, *r_n, ctx.c_options, b_in->pstate()); + default: break; + } + } + catch (Exception::OperationError& err) + { + traces.push_back(Backtrace(b_in->pstate())); + throw Exception::SassValueError(traces, b_in->pstate(), err); + } + } + // lhs is number and rhs is color + else if (Color_Ptr r_c = Cast(rhs)) { + try { + switch (op_type) { + case Sass_OP::EQ: return *l_n == *r_c ? bool_true : bool_false; + case Sass_OP::NEQ: return *l_n == *r_c ? 
bool_false : bool_true; + case Sass_OP::ADD: case Sass_OP::SUB: case Sass_OP::MUL: case Sass_OP::DIV: case Sass_OP::MOD: + return Operators::op_number_color(op_type, *l_n, *r_c, ctx.c_options, b_in->pstate()); + default: break; + } + } + catch (Exception::OperationError& err) + { + traces.push_back(Backtrace(b_in->pstate())); + throw Exception::SassValueError(traces, b_in->pstate(), err); + } + } + } + else if (Color_Ptr l_c = Cast(lhs)) { + // lhs is color and rhs is color + if (Color_Ptr r_c = Cast(rhs)) { + try { + switch (op_type) { + case Sass_OP::EQ: return *l_c == *r_c ? bool_true : bool_false; + case Sass_OP::NEQ: return *l_c == *r_c ? bool_false : bool_true; + case Sass_OP::LT: return *l_c < *r_c ? bool_true : bool_false; + case Sass_OP::GTE: return *l_c < *r_c ? bool_false : bool_true; + case Sass_OP::LTE: return *l_c < *r_c || *l_c == *r_c ? bool_true : bool_false; + case Sass_OP::GT: return *l_c < *r_c || *l_c == *r_c ? bool_false : bool_true; + case Sass_OP::ADD: case Sass_OP::SUB: case Sass_OP::MUL: case Sass_OP::DIV: case Sass_OP::MOD: + return Operators::op_colors(op_type, *l_c, *r_c, ctx.c_options, b_in->pstate()); + default: break; + } + } + catch (Exception::OperationError& err) + { + traces.push_back(Backtrace(b_in->pstate())); + throw Exception::SassValueError(traces, b_in->pstate(), err); + } + } + // lhs is color and rhs is number + else if (Number_Ptr r_n = Cast(rhs)) { + try { + switch (op_type) { + case Sass_OP::EQ: return *l_c == *r_n ? bool_true : bool_false; + case Sass_OP::NEQ: return *l_c == *r_n ? bool_false : bool_true; + case Sass_OP::ADD: case Sass_OP::SUB: case Sass_OP::MUL: case Sass_OP::DIV: case Sass_OP::MOD: + return Operators::op_color_number(op_type, *l_c, *r_n, ctx.c_options, b_in->pstate()); + default: break; + } + } + catch (Exception::OperationError& err) + { + traces.push_back(Backtrace(b_in->pstate())); + throw Exception::SassValueError(traces, b_in->pstate(), err); + } + } + } + + String_Schema_Obj ret_schema; + + // only the last item will be used to eval the binary expression + if (String_Schema_Ptr s_l = Cast(b->left())) { + if (!s_l->has_interpolant() && (!s_l->is_right_interpolant())) { + ret_schema = SASS_MEMORY_NEW(String_Schema, b->pstate()); + Binary_Expression_Obj bin_ex = SASS_MEMORY_NEW(Binary_Expression, b->pstate(), + b->op(), s_l->last(), b->right()); + bin_ex->is_delayed(b->left()->is_delayed() || b->right()->is_delayed()); // unverified + for (size_t i = 0; i < s_l->length() - 1; ++i) { + ret_schema->append(s_l->at(i)->perform(this)); + } + ret_schema->append(bin_ex->perform(this)); + return ret_schema->perform(this); + } + } + if (String_Schema_Ptr s_r = Cast(b->right())) { + + if (!s_r->has_interpolant() && (!s_r->is_left_interpolant() || op_type == Sass_OP::DIV)) { + ret_schema = SASS_MEMORY_NEW(String_Schema, b->pstate()); + Binary_Expression_Obj bin_ex = SASS_MEMORY_NEW(Binary_Expression, b->pstate(), + b->op(), b->left(), s_r->first()); + bin_ex->is_delayed(b->left()->is_delayed() || b->right()->is_delayed()); // verified + ret_schema->append(bin_ex->perform(this)); + for (size_t i = 1; i < s_r->length(); ++i) { + ret_schema->append(s_r->at(i)->perform(this)); + } + return ret_schema->perform(this); + } + } + + // fully evaluate their values + if (op_type == Sass_OP::EQ || + op_type == Sass_OP::NEQ || + op_type == Sass_OP::GT || + op_type == Sass_OP::GTE || + op_type == Sass_OP::LT || + op_type == Sass_OP::LTE) + { + LOCAL_FLAG(force, true); + lhs->is_expanded(false); + lhs->set_delayed(false); + lhs = 
lhs->perform(this); + rhs->is_expanded(false); + rhs->set_delayed(false); + rhs = rhs->perform(this); + } + else { + lhs = lhs->perform(this); + } + + // not a logical connective, so go ahead and eval the rhs + rhs = rhs->perform(this); + AST_Node_Obj lu = lhs; + AST_Node_Obj ru = rhs; + + Expression::Concrete_Type l_type; + Expression::Concrete_Type r_type; + + // Is one of the operands an interpolant? + String_Schema_Obj s1 = Cast(b->left()); + String_Schema_Obj s2 = Cast(b->right()); + Binary_Expression_Obj b1 = Cast(b->left()); + Binary_Expression_Obj b2 = Cast(b->right()); + + bool schema_op = false; + + bool force_delay = (s2 && s2->is_left_interpolant()) || + (s1 && s1->is_right_interpolant()) || + (b1 && b1->is_right_interpolant()) || + (b2 && b2->is_left_interpolant()); + + if ((s1 && s1->has_interpolants()) || (s2 && s2->has_interpolants()) || force_delay) + { + if (op_type == Sass_OP::DIV || op_type == Sass_OP::MUL || op_type == Sass_OP::MOD || op_type == Sass_OP::ADD || op_type == Sass_OP::SUB || + op_type == Sass_OP::EQ) { + // If possible upgrade LHS to a number (for number to string compare) + if (String_Constant_Ptr str = Cast(lhs)) { + std::string value(str->value()); + const char* start = value.c_str(); + if (Prelexer::sequence < Prelexer::dimension, Prelexer::end_of_file >(start) != 0) { + lhs = Parser::lexed_dimension(b->pstate(), str->value()); + } + } + // If possible upgrade RHS to a number (for string to number compare) + if (String_Constant_Ptr str = Cast(rhs)) { + std::string value(str->value()); + const char* start = value.c_str(); + if (Prelexer::sequence < Prelexer::dimension, Prelexer::number >(start) != 0) { + rhs = Parser::lexed_dimension(b->pstate(), str->value()); + } + } + } + + To_Value to_value(ctx); + Value_Obj v_l = Cast(lhs->perform(&to_value)); + Value_Obj v_r = Cast(rhs->perform(&to_value)); + + if (force_delay) { + std::string str(""); + str += v_l->to_string(ctx.c_options); + if (b->op().ws_before) str += " "; + str += b->separator(); + if (b->op().ws_after) str += " "; + str += v_r->to_string(ctx.c_options); + String_Constant_Ptr val = SASS_MEMORY_NEW(String_Constant, b->pstate(), str); + val->is_interpolant(b->left()->has_interpolant()); + return val; + } + } + + // see if it's a relational expression + try { + switch(op_type) { + case Sass_OP::EQ: return SASS_MEMORY_NEW(Boolean, b->pstate(), Operators::eq(lhs, rhs)); + case Sass_OP::NEQ: return SASS_MEMORY_NEW(Boolean, b->pstate(), Operators::neq(lhs, rhs)); + case Sass_OP::GT: return SASS_MEMORY_NEW(Boolean, b->pstate(), Operators::gt(lhs, rhs)); + case Sass_OP::GTE: return SASS_MEMORY_NEW(Boolean, b->pstate(), Operators::gte(lhs, rhs)); + case Sass_OP::LT: return SASS_MEMORY_NEW(Boolean, b->pstate(), Operators::lt(lhs, rhs)); + case Sass_OP::LTE: return SASS_MEMORY_NEW(Boolean, b->pstate(), Operators::lte(lhs, rhs)); + default: break; + } + } + catch (Exception::OperationError& err) + { + // throw Exception::Base(b->pstate(), err.what()); + traces.push_back(Backtrace(b->pstate())); + throw Exception::SassValueError(traces, b->pstate(), err); + } + + l_type = lhs->concrete_type(); + r_type = rhs->concrete_type(); + + // ToDo: throw error in op functions + // ToDo: then catch and re-throw them + Expression_Obj rv; + try { + ParserState pstate(b->pstate()); + if (l_type == Expression::NUMBER && r_type == Expression::NUMBER) { + Number_Ptr l_n = Cast(lhs); + Number_Ptr r_n = Cast(rhs); + l_n->reduce(); r_n->reduce(); + rv = Operators::op_numbers(op_type, *l_n, *r_n, ctx.c_options, pstate); + } + 
else if (l_type == Expression::NUMBER && r_type == Expression::COLOR) { + Number_Ptr l_n = Cast(lhs); + Color_Ptr r_c = Cast(rhs); + rv = Operators::op_number_color(op_type, *l_n, *r_c, ctx.c_options, pstate); + } + else if (l_type == Expression::COLOR && r_type == Expression::NUMBER) { + Color_Ptr l_c = Cast(lhs); + Number_Ptr r_n = Cast(rhs); + rv = Operators::op_color_number(op_type, *l_c, *r_n, ctx.c_options, pstate); + } + else if (l_type == Expression::COLOR && r_type == Expression::COLOR) { + Color_Ptr l_c = Cast(lhs); + Color_Ptr r_c = Cast(rhs); + rv = Operators::op_colors(op_type, *l_c, *r_c, ctx.c_options, pstate); + } + else { + To_Value to_value(ctx); + // this will leak if perform does not return a value! + Value_Obj v_l = Cast(lhs->perform(&to_value)); + Value_Obj v_r = Cast(rhs->perform(&to_value)); + bool interpolant = b->is_right_interpolant() || + b->is_left_interpolant() || + b->is_interpolant(); + if (op_type == Sass_OP::SUB) interpolant = false; + // if (op_type == Sass_OP::DIV) interpolant = true; + // check for type violations + if (l_type == Expression::MAP || l_type == Expression::FUNCTION_VAL) { + traces.push_back(Backtrace(v_l->pstate())); + throw Exception::InvalidValue(traces, *v_l); + } + if (r_type == Expression::MAP || l_type == Expression::FUNCTION_VAL) { + traces.push_back(Backtrace(v_r->pstate())); + throw Exception::InvalidValue(traces, *v_r); + } + Value_Ptr ex = Operators::op_strings(b->op(), *v_l, *v_r, ctx.c_options, pstate, !interpolant); // pass true to compress + if (String_Constant_Ptr str = Cast(ex)) + { + if (str->concrete_type() == Expression::STRING) + { + String_Constant_Ptr lstr = Cast(lhs); + String_Constant_Ptr rstr = Cast(rhs); + if (op_type != Sass_OP::SUB) { + if (String_Constant_Ptr org = lstr ? 
lstr : rstr) + { str->quote_mark(org->quote_mark()); } + } + } + } + ex->is_interpolant(b->is_interpolant()); + rv = ex; + } + } + catch (Exception::OperationError& err) + { + traces.push_back(Backtrace(b->pstate())); + // throw Exception::Base(b->pstate(), err.what()); + throw Exception::SassValueError(traces, b->pstate(), err); + } + + if (rv) { + if (schema_op) { + // XXX: this is never hit via spec tests + (*s2)[0] = rv; + rv = s2->perform(this); + } + } + + return rv.detach(); + + } + + Expression_Ptr Eval::operator()(Unary_Expression_Ptr u) + { + Expression_Obj operand = u->operand()->perform(this); + if (u->optype() == Unary_Expression::NOT) { + Boolean_Ptr result = SASS_MEMORY_NEW(Boolean, u->pstate(), (bool)*operand); + result->value(!result->value()); + return result; + } + else if (Number_Obj nr = Cast(operand)) { + // negate value for minus unary expression + if (u->optype() == Unary_Expression::MINUS) { + Number_Obj cpy = SASS_MEMORY_COPY(nr); + cpy->value( - cpy->value() ); // negate value + return cpy.detach(); // return the copy + } + else if (u->optype() == Unary_Expression::SLASH) { + std::string str = '/' + nr->to_string(ctx.c_options); + return SASS_MEMORY_NEW(String_Constant, u->pstate(), str); + } + // nothing for positive + return nr.detach(); + } + else { + // Special cases: +/- variables which evaluate to null ouput just +/-, + // but +/- null itself outputs the string + if (operand->concrete_type() == Expression::NULL_VAL && Cast(u->operand())) { + u->operand(SASS_MEMORY_NEW(String_Quoted, u->pstate(), "")); + } + // Never apply unary opertions on colors @see #2140 + else if (Color_Ptr color = Cast(operand)) { + // Use the color name if this was eval with one + if (color->disp().length() > 0) { + operand = SASS_MEMORY_NEW(String_Constant, operand->pstate(), color->disp()); + u->operand(operand); + } + } + else { + u->operand(operand); + } + + return SASS_MEMORY_NEW(String_Quoted, + u->pstate(), + u->inspect()); + } + // unreachable + return u; + } + + Expression_Ptr Eval::operator()(Function_Call_Ptr c) + { + if (traces.size() > Constants::MaxCallStack) { + // XXX: this is never hit via spec tests + std::ostringstream stm; + stm << "Stack depth exceeded max of " << Constants::MaxCallStack; + error(stm.str(), c->pstate(), traces); + } + std::string name(Util::normalize_underscores(c->name())); + std::string full_name(name + "[f]"); + // we make a clone here, need to implement that further + Arguments_Obj args = c->arguments(); + + Env* env = environment(); + if (!env->has(full_name) || (!c->via_call() && Prelexer::re_special_fun(name.c_str()))) { + if (!env->has("*[f]")) { + for (Argument_Obj arg : args->elements()) { + if (List_Obj ls = Cast(arg->value())) { + if (ls->size() == 0) error("() isn't a valid CSS value.", c->pstate(), traces); + } + } + args = Cast(args->perform(this)); + Function_Call_Obj lit = SASS_MEMORY_NEW(Function_Call, + c->pstate(), + c->name(), + args); + if (args->has_named_arguments()) { + error("Function " + c->name() + " doesn't support keyword arguments", c->pstate(), traces); + } + String_Quoted_Ptr str = SASS_MEMORY_NEW(String_Quoted, + c->pstate(), + lit->to_string(ctx.c_options)); + str->is_interpolant(c->is_interpolant()); + return str; + } else { + // call generic function + full_name = "*[f]"; + } + } + + // further delay for calls + if (full_name != "call[f]") { + args->set_delayed(false); // verified + } + if (full_name != "if[f]") { + args = Cast(args->perform(this)); + } + Definition_Ptr def = Cast((*env)[full_name]); + + if 
(c->func()) def = c->func()->definition(); + + if (def->is_overload_stub()) { + std::stringstream ss; + size_t L = args->length(); + // account for rest arguments + if (args->has_rest_argument() && args->length() > 0) { + // get the rest arguments list + List_Ptr rest = Cast(args->last()->value()); + // arguments before rest argument plus rest + if (rest) L += rest->length() - 1; + } + ss << full_name << L; + full_name = ss.str(); + std::string resolved_name(full_name); + if (!env->has(resolved_name)) error("overloaded function `" + std::string(c->name()) + "` given wrong number of arguments", c->pstate(), traces); + def = Cast((*env)[resolved_name]); + } + + Expression_Obj result = c; + Block_Obj body = def->block(); + Native_Function func = def->native_function(); + Sass_Function_Entry c_function = def->c_function(); + + if (c->is_css()) return result.detach(); + + Parameters_Obj params = def->parameters(); + Env fn_env(def->environment()); + exp.env_stack.push_back(&fn_env); + + if (func || body) { + bind(std::string("Function"), c->name(), params, args, &ctx, &fn_env, this); + std::string msg(", in function `" + c->name() + "`"); + traces.push_back(Backtrace(c->pstate(), msg)); + ctx.callee_stack.push_back({ + c->name().c_str(), + c->pstate().path, + c->pstate().line + 1, + c->pstate().column + 1, + SASS_CALLEE_FUNCTION, + { env } + }); + + // eval the body if user-defined or special, invoke underlying CPP function if native + if (body /* && !Prelexer::re_special_fun(name.c_str()) */) { + result = body->perform(this); + } + else if (func) { + result = func(fn_env, *env, ctx, def->signature(), c->pstate(), traces, exp.selector_stack); + } + if (!result) { + error(std::string("Function ") + c->name() + " finished without @return", c->pstate(), traces); + } + ctx.callee_stack.pop_back(); + traces.pop_back(); + } + + // else if it's a user-defined c function + // convert call into C-API compatible form + else if (c_function) { + Sass_Function_Fn c_func = sass_function_get_function(c_function); + if (full_name == "*[f]") { + String_Quoted_Obj str = SASS_MEMORY_NEW(String_Quoted, c->pstate(), c->name()); + Arguments_Obj new_args = SASS_MEMORY_NEW(Arguments, c->pstate()); + new_args->append(SASS_MEMORY_NEW(Argument, c->pstate(), str)); + new_args->concat(args); + args = new_args; + } + + // populates env with default values for params + std::string ff(c->name()); + bind(std::string("Function"), c->name(), params, args, &ctx, &fn_env, this); + std::string msg(", in function `" + c->name() + "`"); + traces.push_back(Backtrace(c->pstate(), msg)); + ctx.callee_stack.push_back({ + c->name().c_str(), + c->pstate().path, + c->pstate().line + 1, + c->pstate().column + 1, + SASS_CALLEE_C_FUNCTION, + { env } + }); + + To_C to_c; + union Sass_Value* c_args = sass_make_list(params->length(), SASS_COMMA, false); + for(size_t i = 0; i < params->length(); i++) { + Parameter_Obj param = params->at(i); + std::string key = param->name(); + AST_Node_Obj node = fn_env.get_local(key); + Expression_Obj arg = Cast(node); + sass_list_set_value(c_args, i, arg->perform(&to_c)); + } + union Sass_Value* c_val = c_func(c_args, c_function, ctx.c_compiler); + if (sass_value_get_tag(c_val) == SASS_ERROR) { + error("error in C function " + c->name() + ": " + sass_error_get_message(c_val), c->pstate(), traces); + } else if (sass_value_get_tag(c_val) == SASS_WARNING) { + error("warning in C function " + c->name() + ": " + sass_warning_get_message(c_val), c->pstate(), traces); + } + result = cval_to_astnode(c_val, traces, 
c->pstate()); + + ctx.callee_stack.pop_back(); + traces.pop_back(); + sass_delete_value(c_args); + if (c_val != c_args) + sass_delete_value(c_val); + } + + // link back to function definition + // only do this for custom functions + if (result->pstate().file == std::string::npos) + result->pstate(c->pstate()); + + result = result->perform(this); + result->is_interpolant(c->is_interpolant()); + exp.env_stack.pop_back(); + return result.detach(); + } + + Expression_Ptr Eval::operator()(Function_Call_Schema_Ptr s) + { + Expression_Ptr evaluated_name = s->name()->perform(this); + Expression_Ptr evaluated_args = s->arguments()->perform(this); + String_Schema_Obj ss = SASS_MEMORY_NEW(String_Schema, s->pstate(), 2); + ss->append(evaluated_name); + ss->append(evaluated_args); + return ss->perform(this); + } + + Expression_Ptr Eval::operator()(Variable_Ptr v) + { + Expression_Obj value = 0; + Env* env = environment(); + const std::string& name(v->name()); + EnvResult rv(env->find(name)); + if (rv.found) value = static_cast(rv.it->second.ptr()); + else error("Undefined variable: \"" + v->name() + "\".", v->pstate(), traces); + if (Argument_Ptr arg = Cast(value)) value = arg->value(); + if (Number_Ptr nr = Cast(value)) nr->zero(true); // force flag + value->is_interpolant(v->is_interpolant()); + if (force) value->is_expanded(false); + value->set_delayed(false); // verified + value = value->perform(this); + if(!force) rv.it->second = value; + return value.detach(); + } + + Expression_Ptr Eval::operator()(Color_Ptr c) + { + return c; + } + + Expression_Ptr Eval::operator()(Number_Ptr n) + { + return n; + } + + Expression_Ptr Eval::operator()(Boolean_Ptr b) + { + return b; + } + + void Eval::interpolation(Context& ctx, std::string& res, Expression_Obj ex, bool into_quotes, bool was_itpl) { + + bool needs_closing_brace = false; + + if (Arguments_Ptr args = Cast(ex)) { + List_Ptr ll = SASS_MEMORY_NEW(List, args->pstate(), 0, SASS_COMMA); + for(auto arg : args->elements()) { + ll->append(arg->value()); + } + ll->is_interpolant(args->is_interpolant()); + needs_closing_brace = true; + res += "("; + ex = ll; + } + if (Number_Ptr nr = Cast(ex)) { + Number reduced(nr); + reduced.reduce(); + if (!reduced.is_valid_css_unit()) { + traces.push_back(Backtrace(nr->pstate())); + throw Exception::InvalidValue(traces, *nr); + } + } + if (Argument_Ptr arg = Cast(ex)) { + ex = arg->value(); + } + if (String_Quoted_Ptr sq = Cast(ex)) { + if (was_itpl) { + bool was_interpolant = ex->is_interpolant(); + ex = SASS_MEMORY_NEW(String_Constant, sq->pstate(), sq->value()); + ex->is_interpolant(was_interpolant); + } + } + + if (Cast(ex)) { return; } + + // parent selector needs another go + if (Cast(ex)) { + // XXX: this is never hit via spec tests + ex = ex->perform(this); + } + + if (List_Ptr l = Cast(ex)) { + List_Obj ll = SASS_MEMORY_NEW(List, l->pstate(), 0, l->separator()); + // this fixes an issue with bourbon sample, not really sure why + // if (l->size() && Cast((*l)[0])) { res += ""; } + for(Expression_Obj item : *l) { + item->is_interpolant(l->is_interpolant()); + std::string rl(""); interpolation(ctx, rl, item, into_quotes, l->is_interpolant()); + bool is_null = Cast(item) != 0; // rl != "" + if (!is_null) ll->append(SASS_MEMORY_NEW(String_Quoted, item->pstate(), rl)); + } + // Check indicates that we probably should not get a list + // here. Normally single list items are already unwrapped. 
+ if (l->size() > 1) { + // string_to_output would fail "#{'_\a' '_\a'}"; + std::string str(ll->to_string(ctx.c_options)); + str = read_hex_escapes(str); // read escapes + newline_to_space(str); // replace directly + res += str; // append to result string + } else { + res += (ll->to_string(ctx.c_options)); + } + ll->is_interpolant(l->is_interpolant()); + } + + // Value + // Function_Call + // Selector_List + // String_Quoted + // String_Constant + // Parent_Selector + // Binary_Expression + else { + // ex = ex->perform(this); + if (into_quotes && ex->is_interpolant()) { + res += evacuate_escapes(ex ? ex->to_string(ctx.c_options) : ""); + } else { + std::string str(ex ? ex->to_string(ctx.c_options) : ""); + if (into_quotes) str = read_hex_escapes(str); + res += str; // append to result string + } + } + + if (needs_closing_brace) res += ")"; + + } + + Expression_Ptr Eval::operator()(String_Schema_Ptr s) + { + size_t L = s->length(); + bool into_quotes = false; + if (L > 1) { + if (!Cast((*s)[0]) && !Cast((*s)[L - 1])) { + if (String_Constant_Ptr l = Cast((*s)[0])) { + if (String_Constant_Ptr r = Cast((*s)[L - 1])) { + if (r->value().size() > 0) { + if (l->value()[0] == '"' && r->value()[r->value().size() - 1] == '"') into_quotes = true; + if (l->value()[0] == '\'' && r->value()[r->value().size() - 1] == '\'') into_quotes = true; + } + } + } + } + } + bool was_quoted = false; + bool was_interpolant = false; + std::string res(""); + for (size_t i = 0; i < L; ++i) { + bool is_quoted = Cast((*s)[i]) != NULL; + if (was_quoted && !(*s)[i]->is_interpolant() && !was_interpolant) { res += " "; } + else if (i > 0 && is_quoted && !(*s)[i]->is_interpolant() && !was_interpolant) { res += " "; } + Expression_Obj ex = (*s)[i]->perform(this); + interpolation(ctx, res, ex, into_quotes, ex->is_interpolant()); + was_quoted = Cast((*s)[i]) != NULL; + was_interpolant = (*s)[i]->is_interpolant(); + + } + if (!s->is_interpolant()) { + if (s->length() > 1 && res == "") return SASS_MEMORY_NEW(Null, s->pstate()); + return SASS_MEMORY_NEW(String_Constant, s->pstate(), res, s->css()); + } + // string schema seems to have a special unquoting behavior (also handles "nested" quotes) + String_Quoted_Obj str = SASS_MEMORY_NEW(String_Quoted, s->pstate(), res, 0, false, false, false, s->css()); + // if (s->is_interpolant()) str->quote_mark(0); + // String_Constant_Ptr str = SASS_MEMORY_NEW(String_Constant, s->pstate(), res); + if (str->quote_mark()) str->quote_mark('*'); + else if (!is_in_comment) str->value(string_to_output(str->value())); + str->is_interpolant(s->is_interpolant()); + return str.detach(); + } + + + Expression_Ptr Eval::operator()(String_Constant_Ptr s) + { + return s; + } + + Expression_Ptr Eval::operator()(String_Quoted_Ptr s) + { + String_Quoted_Ptr str = SASS_MEMORY_NEW(String_Quoted, s->pstate(), ""); + str->value(s->value()); + str->quote_mark(s->quote_mark()); + str->is_interpolant(s->is_interpolant()); + return str; + } + + Expression_Ptr Eval::operator()(Supports_Operator_Ptr c) + { + Expression_Ptr left = c->left()->perform(this); + Expression_Ptr right = c->right()->perform(this); + Supports_Operator_Ptr cc = SASS_MEMORY_NEW(Supports_Operator, + c->pstate(), + Cast(left), + Cast(right), + c->operand()); + return cc; + } + + Expression_Ptr Eval::operator()(Supports_Negation_Ptr c) + { + Expression_Ptr condition = c->condition()->perform(this); + Supports_Negation_Ptr cc = SASS_MEMORY_NEW(Supports_Negation, + c->pstate(), + Cast(condition)); + return cc; + } + + Expression_Ptr 
Eval::operator()(Supports_Declaration_Ptr c) + { + Expression_Ptr feature = c->feature()->perform(this); + Expression_Ptr value = c->value()->perform(this); + Supports_Declaration_Ptr cc = SASS_MEMORY_NEW(Supports_Declaration, + c->pstate(), + feature, + value); + return cc; + } + + Expression_Ptr Eval::operator()(Supports_Interpolation_Ptr c) + { + Expression_Ptr value = c->value()->perform(this); + Supports_Interpolation_Ptr cc = SASS_MEMORY_NEW(Supports_Interpolation, + c->pstate(), + value); + return cc; + } + + Expression_Ptr Eval::operator()(At_Root_Query_Ptr e) + { + Expression_Obj feature = e->feature(); + feature = (feature ? feature->perform(this) : 0); + Expression_Obj value = e->value(); + value = (value ? value->perform(this) : 0); + Expression_Ptr ee = SASS_MEMORY_NEW(At_Root_Query, + e->pstate(), + Cast(feature), + value); + return ee; + } + + Media_Query_Ptr Eval::operator()(Media_Query_Ptr q) + { + String_Obj t = q->media_type(); + t = static_cast(t.isNull() ? 0 : t->perform(this)); + Media_Query_Obj qq = SASS_MEMORY_NEW(Media_Query, + q->pstate(), + t, + q->length(), + q->is_negated(), + q->is_restricted()); + for (size_t i = 0, L = q->length(); i < L; ++i) { + qq->append(static_cast((*q)[i]->perform(this))); + } + return qq.detach(); + } + + Expression_Ptr Eval::operator()(Media_Query_Expression_Ptr e) + { + Expression_Obj feature = e->feature(); + feature = (feature ? feature->perform(this) : 0); + if (feature && Cast(feature)) { + feature = SASS_MEMORY_NEW(String_Quoted, + feature->pstate(), + Cast(feature)->value()); + } + Expression_Obj value = e->value(); + value = (value ? value->perform(this) : 0); + if (value && Cast(value)) { + // XXX: this is never hit via spec tests + value = SASS_MEMORY_NEW(String_Quoted, + value->pstate(), + Cast(value)->value()); + } + return SASS_MEMORY_NEW(Media_Query_Expression, + e->pstate(), + feature, + value, + e->is_interpolated()); + } + + Expression_Ptr Eval::operator()(Null_Ptr n) + { + return n; + } + + Expression_Ptr Eval::operator()(Argument_Ptr a) + { + Expression_Obj val = a->value()->perform(this); + bool is_rest_argument = a->is_rest_argument(); + bool is_keyword_argument = a->is_keyword_argument(); + + if (a->is_rest_argument()) { + if (val->concrete_type() == Expression::MAP) { + is_rest_argument = false; + is_keyword_argument = true; + } + else if(val->concrete_type() != Expression::LIST) { + List_Obj wrapper = SASS_MEMORY_NEW(List, + val->pstate(), + 0, + SASS_COMMA, + true); + wrapper->append(val); + val = wrapper; + } + } + return SASS_MEMORY_NEW(Argument, + a->pstate(), + val, + a->name(), + is_rest_argument, + is_keyword_argument); + } + + Expression_Ptr Eval::operator()(Arguments_Ptr a) + { + Arguments_Obj aa = SASS_MEMORY_NEW(Arguments, a->pstate()); + if (a->length() == 0) return aa.detach(); + for (size_t i = 0, L = a->length(); i < L; ++i) { + Expression_Obj rv = (*a)[i]->perform(this); + Argument_Ptr arg = Cast(rv); + if (!(arg->is_rest_argument() || arg->is_keyword_argument())) { + aa->append(arg); + } + } + + if (a->has_rest_argument()) { + Expression_Obj rest = a->get_rest_argument()->perform(this); + Expression_Obj splat = Cast(rest)->value()->perform(this); + + Sass_Separator separator = SASS_COMMA; + List_Ptr ls = Cast(splat); + Map_Ptr ms = Cast(splat); + + List_Obj arglist = SASS_MEMORY_NEW(List, + splat->pstate(), + 0, + ls ? 
ls->separator() : separator, + true); + + if (ls && ls->is_arglist()) { + arglist->concat(ls); + } else if (ms) { + aa->append(SASS_MEMORY_NEW(Argument, splat->pstate(), ms, "", false, true)); + } else if (ls) { + arglist->concat(ls); + } else { + arglist->append(splat); + } + if (arglist->length()) { + aa->append(SASS_MEMORY_NEW(Argument, splat->pstate(), arglist, "", true)); + } + } + + if (a->has_keyword_argument()) { + Expression_Obj rv = a->get_keyword_argument()->perform(this); + Argument_Ptr rvarg = Cast(rv); + Expression_Obj kwarg = rvarg->value()->perform(this); + + aa->append(SASS_MEMORY_NEW(Argument, kwarg->pstate(), kwarg, "", false, true)); + } + return aa.detach(); + } + + Expression_Ptr Eval::operator()(Comment_Ptr c) + { + return 0; + } + + inline Expression_Ptr Eval::fallback_impl(AST_Node_Ptr n) + { + return static_cast(n); + } + + // All the binary helpers. + + Expression_Ptr cval_to_astnode(union Sass_Value* v, Backtraces traces, ParserState pstate) + { + using std::strlen; + using std::strcpy; + Expression_Ptr e = NULL; + switch (sass_value_get_tag(v)) { + case SASS_BOOLEAN: { + e = SASS_MEMORY_NEW(Boolean, pstate, !!sass_boolean_get_value(v)); + } break; + case SASS_NUMBER: { + e = SASS_MEMORY_NEW(Number, pstate, sass_number_get_value(v), sass_number_get_unit(v)); + } break; + case SASS_COLOR: { + e = SASS_MEMORY_NEW(Color, pstate, sass_color_get_r(v), sass_color_get_g(v), sass_color_get_b(v), sass_color_get_a(v)); + } break; + case SASS_STRING: { + if (sass_string_is_quoted(v)) + e = SASS_MEMORY_NEW(String_Quoted, pstate, sass_string_get_value(v)); + else { + e = SASS_MEMORY_NEW(String_Constant, pstate, sass_string_get_value(v)); + } + } break; + case SASS_LIST: { + List_Ptr l = SASS_MEMORY_NEW(List, pstate, sass_list_get_length(v), sass_list_get_separator(v)); + for (size_t i = 0, L = sass_list_get_length(v); i < L; ++i) { + l->append(cval_to_astnode(sass_list_get_value(v, i), traces, pstate)); + } + l->is_bracketed(sass_list_get_is_bracketed(v)); + e = l; + } break; + case SASS_MAP: { + Map_Ptr m = SASS_MEMORY_NEW(Map, pstate); + for (size_t i = 0, L = sass_map_get_length(v); i < L; ++i) { + *m << std::make_pair( + cval_to_astnode(sass_map_get_key(v, i), traces, pstate), + cval_to_astnode(sass_map_get_value(v, i), traces, pstate)); + } + e = m; + } break; + case SASS_NULL: { + e = SASS_MEMORY_NEW(Null, pstate); + } break; + case SASS_ERROR: { + error("Error in C function: " + std::string(sass_error_get_message(v)), pstate, traces); + } break; + case SASS_WARNING: { + error("Warning in C function: " + std::string(sass_warning_get_message(v)), pstate, traces); + } break; + default: break; + } + return e; + } + + Selector_List_Ptr Eval::operator()(Selector_List_Ptr s) + { + std::vector rv; + Selector_List_Obj sl = SASS_MEMORY_NEW(Selector_List, s->pstate()); + sl->is_optional(s->is_optional()); + sl->media_block(s->media_block()); + sl->is_optional(s->is_optional()); + for (size_t i = 0, iL = s->length(); i < iL; ++i) { + rv.push_back(operator()((*s)[i])); + } + + // we should actually permutate parent first + // but here we have permutated the selector first + size_t round = 0; + while (round != std::string::npos) { + bool abort = true; + for (size_t i = 0, iL = rv.size(); i < iL; ++i) { + if (rv[i]->length() > round) { + sl->append((*rv[i])[round]); + abort = false; + } + } + if (abort) { + round = std::string::npos; + } else { + ++ round; + } + + } + return sl.detach(); + } + + + Selector_List_Ptr Eval::operator()(Complex_Selector_Ptr s) + { + bool implicit_parent 
= !exp.old_at_root_without_rule; + if (is_in_selector_schema) exp.selector_stack.push_back(0); + Selector_List_Obj resolved = s->resolve_parent_refs(exp.selector_stack, traces, implicit_parent); + if (is_in_selector_schema) exp.selector_stack.pop_back(); + for (size_t i = 0; i < resolved->length(); i++) { + Complex_Selector_Ptr is = resolved->at(i)->first(); + while (is) { + if (is->head()) { + is->head(operator()(is->head())); + } + is = is->tail(); + } + } + return resolved.detach(); + } + + Compound_Selector_Ptr Eval::operator()(Compound_Selector_Ptr s) + { + for (size_t i = 0; i < s->length(); i++) { + Simple_Selector_Ptr ss = s->at(i); + // skip parents here (called via resolve_parent_refs) + if (ss == NULL || Cast(ss)) continue; + s->at(i) = Cast(ss->perform(this)); + } + return s; + } + + Selector_List_Ptr Eval::operator()(Selector_Schema_Ptr s) + { + LOCAL_FLAG(is_in_selector_schema, true); + // the parser will look for a brace to end the selector + ctx.c_options.in_selector = true; // do not compress colors + Expression_Obj sel = s->contents()->perform(this); + std::string result_str(sel->to_string(ctx.c_options)); + ctx.c_options.in_selector = false; // flag temporary only + result_str = unquote(Util::rtrim(result_str)); + char* temp_cstr = sass_copy_c_string(result_str.c_str()); + ctx.strings.push_back(temp_cstr); // attach to context + Parser p = Parser::from_c_str(temp_cstr, ctx, traces, s->pstate()); + p.last_media_block = s->media_block(); + // a selector schema may or may not connect to parent? + bool chroot = s->connect_parent() == false; + Selector_List_Obj sl = p.parse_selector_list(chroot); + auto vec_str_rend = ctx.strings.rend(); + auto vec_str_rbegin = ctx.strings.rbegin(); + // remove the first item searching from the back + // we cannot assume our item is still the last one + // order is not important, so we can optimize this + auto it = std::find(vec_str_rbegin, vec_str_rend, temp_cstr); + // undefined behavior if not found! 
+ if (it != vec_str_rend) { + // overwrite with last item + *it = ctx.strings.back(); + // remove last one from vector + ctx.strings.pop_back(); + // free temporary copy + free(temp_cstr); + } + flag_is_in_selector_schema.reset(); + return operator()(sl); + } + + Expression_Ptr Eval::operator()(Parent_Selector_Ptr p) + { + if (Selector_List_Obj pr = selector()) { + exp.selector_stack.pop_back(); + Selector_List_Obj rv = operator()(pr); + exp.selector_stack.push_back(rv); + return rv.detach(); + } else { + return SASS_MEMORY_NEW(Null, p->pstate()); + } + } + + Simple_Selector_Ptr Eval::operator()(Simple_Selector_Ptr s) + { + return s; + } + + // hotfix to avoid invalid nested `:not` selectors + // probably the wrong place, but this should ultimately + // be fixed by implement superselector correctly for `:not` + // first use of "find" (ATM only implemented for selectors) + bool hasNotSelector(AST_Node_Obj obj) { + if (Wrapped_Selector_Ptr w = Cast(obj)) { + return w->name() == ":not"; + } + return false; + } + + Wrapped_Selector_Ptr Eval::operator()(Wrapped_Selector_Ptr s) + { + + if (s->name() == ":not") { + if (exp.selector_stack.back()) { + if (s->selector()->find(hasNotSelector)) { + s->selector()->clear(); + s->name(" "); + } else if (s->selector()->length() == 1) { + Complex_Selector_Ptr cs = s->selector()->at(0); + if (cs->tail()) { + s->selector()->clear(); + s->name(" "); + } + } else if (s->selector()->length() > 1) { + s->selector()->clear(); + s->name(" "); + } + } + } + return s; + }; + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/eval.hpp b/mybulma/node_modules/node-sass/src/libsass/src/eval.hpp new file mode 100644 index 0000000..aeaada8 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/eval.hpp @@ -0,0 +1,103 @@ +#ifndef SASS_EVAL_H +#define SASS_EVAL_H + +#include "ast.hpp" +#include "context.hpp" +#include "listize.hpp" +#include "operation.hpp" +#include "environment.hpp" + +namespace Sass { + + class Expand; + class Context; + + class Eval : public Operation_CRTP { + + private: + Expression_Ptr fallback_impl(AST_Node_Ptr n); + + public: + Expand& exp; + Context& ctx; + Backtraces& traces; + Eval(Expand& exp); + ~Eval(); + + bool force; + bool is_in_comment; + bool is_in_selector_schema; + + Boolean_Obj bool_true; + Boolean_Obj bool_false; + + Env* environment(); + Selector_List_Obj selector(); + + // for evaluating function bodies + Expression_Ptr operator()(Block_Ptr); + Expression_Ptr operator()(Assignment_Ptr); + Expression_Ptr operator()(If_Ptr); + Expression_Ptr operator()(For_Ptr); + Expression_Ptr operator()(Each_Ptr); + Expression_Ptr operator()(While_Ptr); + Expression_Ptr operator()(Return_Ptr); + Expression_Ptr operator()(Warning_Ptr); + Expression_Ptr operator()(Error_Ptr); + Expression_Ptr operator()(Debug_Ptr); + + Expression_Ptr operator()(List_Ptr); + Expression_Ptr operator()(Map_Ptr); + Expression_Ptr operator()(Binary_Expression_Ptr); + Expression_Ptr operator()(Unary_Expression_Ptr); + Expression_Ptr operator()(Function_Call_Ptr); + Expression_Ptr operator()(Function_Call_Schema_Ptr); + Expression_Ptr operator()(Variable_Ptr); + Expression_Ptr operator()(Number_Ptr); + Expression_Ptr operator()(Color_Ptr); + Expression_Ptr operator()(Boolean_Ptr); + Expression_Ptr operator()(String_Schema_Ptr); + Expression_Ptr operator()(String_Quoted_Ptr); + Expression_Ptr operator()(String_Constant_Ptr); + // Expression_Ptr operator()(Selector_List_Ptr); + Media_Query_Ptr operator()(Media_Query_Ptr); + Expression_Ptr 
operator()(Media_Query_Expression_Ptr); + Expression_Ptr operator()(At_Root_Query_Ptr); + Expression_Ptr operator()(Supports_Operator_Ptr); + Expression_Ptr operator()(Supports_Negation_Ptr); + Expression_Ptr operator()(Supports_Declaration_Ptr); + Expression_Ptr operator()(Supports_Interpolation_Ptr); + Expression_Ptr operator()(Null_Ptr); + Expression_Ptr operator()(Argument_Ptr); + Expression_Ptr operator()(Arguments_Ptr); + Expression_Ptr operator()(Comment_Ptr); + + // these will return selectors + Selector_List_Ptr operator()(Selector_List_Ptr); + Selector_List_Ptr operator()(Complex_Selector_Ptr); + Compound_Selector_Ptr operator()(Compound_Selector_Ptr); + Simple_Selector_Ptr operator()(Simple_Selector_Ptr s); + Wrapped_Selector_Ptr operator()(Wrapped_Selector_Ptr s); + // they don't have any specific implementation (yet) + // Element_Selector_Ptr operator()(Element_Selector_Ptr s) { return s; }; + // Pseudo_Selector_Ptr operator()(Pseudo_Selector_Ptr s) { return s; }; + // Class_Selector_Ptr operator()(Class_Selector_Ptr s) { return s; }; + // Id_Selector_Ptr operator()(Id_Selector_Ptr s) { return s; }; + // Placeholder_Selector_Ptr operator()(Placeholder_Selector_Ptr s) { return s; }; + // actual evaluated selectors + Selector_List_Ptr operator()(Selector_Schema_Ptr); + Expression_Ptr operator()(Parent_Selector_Ptr); + + template + Expression_Ptr fallback(U x) { return fallback_impl(x); } + + private: + void interpolation(Context& ctx, std::string& res, Expression_Obj ex, bool into_quotes, bool was_itpl = false); + + }; + + Expression_Ptr cval_to_astnode(union Sass_Value* v, Backtraces traces, ParserState pstate = ParserState("[AST]")); + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/expand.cpp b/mybulma/node_modules/node-sass/src/libsass/src/expand.cpp new file mode 100644 index 0000000..d8dc03f --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/expand.cpp @@ -0,0 +1,817 @@ +#include "sass.hpp" +#include +#include + +#include "ast.hpp" +#include "expand.hpp" +#include "bind.hpp" +#include "eval.hpp" +#include "backtrace.hpp" +#include "context.hpp" +#include "parser.hpp" +#include "sass_functions.hpp" + +namespace Sass { + + // simple endless recursion protection + const size_t maxRecursion = 500; + + Expand::Expand(Context& ctx, Env* env, std::vector* stack) + : ctx(ctx), + traces(ctx.traces), + eval(Eval(*this)), + recursions(0), + in_keyframes(false), + at_root_without_rule(false), + old_at_root_without_rule(false), + env_stack(std::vector()), + block_stack(std::vector()), + call_stack(std::vector()), + selector_stack(std::vector()), + media_block_stack(std::vector()) + { + env_stack.push_back(0); + env_stack.push_back(env); + block_stack.push_back(0); + call_stack.push_back(0); + if (stack == NULL) { selector_stack.push_back(0); } + else { selector_stack.insert(selector_stack.end(), stack->begin(), stack->end()); } + media_block_stack.push_back(0); + } + + Env* Expand::environment() + { + if (env_stack.size() > 0) + return env_stack.back(); + return 0; + } + + Selector_List_Obj Expand::selector() + { + if (selector_stack.size() > 0) + return selector_stack.back(); + return 0; + } + + // blocks create new variable scopes + Block_Ptr Expand::operator()(Block_Ptr b) + { + // create new local environment + // set the current env as parent + Env env(environment()); + // copy the block object (add items later) + Block_Obj bb = SASS_MEMORY_NEW(Block, + b->pstate(), + b->length(), + b->is_root()); + // setup block and env stack + 
this->block_stack.push_back(bb); + this->env_stack.push_back(&env); + // operate on block + // this may throw up! + this->append_block(b); + // revert block and env stack + this->block_stack.pop_back(); + this->env_stack.pop_back(); + // return copy + return bb.detach(); + } + + Statement_Ptr Expand::operator()(Ruleset_Ptr r) + { + LOCAL_FLAG(old_at_root_without_rule, at_root_without_rule); + + if (in_keyframes) { + Block_Ptr bb = operator()(r->block()); + Keyframe_Rule_Obj k = SASS_MEMORY_NEW(Keyframe_Rule, r->pstate(), bb); + if (r->selector()) { + if (Selector_List_Ptr s = r->selector()) { + selector_stack.push_back(0); + k->name(s->eval(eval)); + selector_stack.pop_back(); + } + } + return k.detach(); + } + + // reset when leaving scope + LOCAL_FLAG(at_root_without_rule, false); + + // `&` is allowed in `@at-root`! + bool has_parent_selector = false; + for (size_t i = 0, L = selector_stack.size(); i < L && !has_parent_selector; i++) { + Selector_List_Obj ll = selector_stack.at(i); + has_parent_selector = ll != 0 && ll->length() > 0; + } + + Selector_List_Obj sel = r->selector(); + if (sel) sel = sel->eval(eval); + + // check for parent selectors in base level rules + if (r->is_root() || (block_stack.back() && block_stack.back()->is_root())) { + if (Selector_List_Ptr selector_list = Cast(r->selector())) { + for (Complex_Selector_Obj complex_selector : selector_list->elements()) { + Complex_Selector_Ptr tail = complex_selector; + while (tail) { + if (tail->head()) for (Simple_Selector_Obj header : tail->head()->elements()) { + Parent_Selector_Ptr ptr = Cast(header); + if (ptr == NULL || (!ptr->real() || has_parent_selector)) continue; + std::string sel_str(complex_selector->to_string(ctx.c_options)); + error("Base-level rules cannot contain the parent-selector-referencing character '&'.", header->pstate(), traces); + } + tail = tail->tail(); + } + } + } + } + else { + if (sel->length() == 0 || sel->has_parent_ref()) { + if (sel->has_real_parent_ref() && !has_parent_selector) { + error("Base-level rules cannot contain the parent-selector-referencing character '&'.", sel->pstate(), traces); + } + } + } + + // do not connect parent again + sel->remove_parent_selectors(); + selector_stack.push_back(sel); + Env env(environment()); + if (block_stack.back()->is_root()) { + env_stack.push_back(&env); + } + sel->set_media_block(media_block_stack.back()); + Block_Obj blk = 0; + if (r->block()) blk = operator()(r->block()); + Ruleset_Ptr rr = SASS_MEMORY_NEW(Ruleset, + r->pstate(), + sel, + blk); + selector_stack.pop_back(); + if (block_stack.back()->is_root()) { + env_stack.pop_back(); + } + + rr->is_root(r->is_root()); + rr->tabs(r->tabs()); + + return rr; + } + + Statement_Ptr Expand::operator()(Supports_Block_Ptr f) + { + Expression_Obj condition = f->condition()->perform(&eval); + Supports_Block_Obj ff = SASS_MEMORY_NEW(Supports_Block, + f->pstate(), + Cast(condition), + operator()(f->block())); + return ff.detach(); + } + + Statement_Ptr Expand::operator()(Media_Block_Ptr m) + { + Media_Block_Obj cpy = SASS_MEMORY_COPY(m); + // Media_Blocks are prone to have circular references + // Copy could leak memory if it does not get picked up + // Looks like we are able to reset block reference for copy + // Good as it will ensure a low memory overhead for this fix + // So this is a cheap solution with a minimal price + ctx.ast_gc.push_back(cpy); cpy->block(0); + Expression_Obj mq = eval(m->media_queries()); + std::string str_mq(mq->to_string(ctx.c_options)); + char* str = 
sass_copy_c_string(str_mq.c_str()); + ctx.strings.push_back(str); + Parser p(Parser::from_c_str(str, ctx, traces, mq->pstate())); + mq = p.parse_media_queries(); // re-assign now + cpy->media_queries(mq); + media_block_stack.push_back(cpy); + Block_Obj blk = operator()(m->block()); + Media_Block_Ptr mm = SASS_MEMORY_NEW(Media_Block, + m->pstate(), + mq, + blk); + media_block_stack.pop_back(); + mm->tabs(m->tabs()); + return mm; + } + + Statement_Ptr Expand::operator()(At_Root_Block_Ptr a) + { + Block_Obj ab = a->block(); + Expression_Obj ae = a->expression(); + + if (ae) ae = ae->perform(&eval); + else ae = SASS_MEMORY_NEW(At_Root_Query, a->pstate()); + + LOCAL_FLAG(at_root_without_rule, true); + LOCAL_FLAG(in_keyframes, false); + + ; + + Block_Obj bb = ab ? operator()(ab) : NULL; + At_Root_Block_Obj aa = SASS_MEMORY_NEW(At_Root_Block, + a->pstate(), + bb, + Cast(ae)); + return aa.detach(); + } + + Statement_Ptr Expand::operator()(Directive_Ptr a) + { + LOCAL_FLAG(in_keyframes, a->is_keyframes()); + Block_Ptr ab = a->block(); + Selector_List_Ptr as = a->selector(); + Expression_Ptr av = a->value(); + selector_stack.push_back(0); + if (av) av = av->perform(&eval); + if (as) as = eval(as); + selector_stack.pop_back(); + Block_Ptr bb = ab ? operator()(ab) : NULL; + Directive_Ptr aa = SASS_MEMORY_NEW(Directive, + a->pstate(), + a->keyword(), + as, + bb, + av); + return aa; + } + + Statement_Ptr Expand::operator()(Declaration_Ptr d) + { + Block_Obj ab = d->block(); + String_Obj old_p = d->property(); + Expression_Obj prop = old_p->perform(&eval); + String_Obj new_p = Cast(prop); + // we might get a color back + if (!new_p) { + std::string str(prop->to_string(ctx.c_options)); + new_p = SASS_MEMORY_NEW(String_Constant, old_p->pstate(), str); + } + Expression_Obj value = d->value(); + if (value) value = value->perform(&eval); + Block_Obj bb = ab ? 
operator()(ab) : NULL; + if (!bb) { + if (!value || (value->is_invisible() && !d->is_important())) return 0; + } + Declaration_Ptr decl = SASS_MEMORY_NEW(Declaration, + d->pstate(), + new_p, + value, + d->is_important(), + d->is_custom_property(), + bb); + decl->tabs(d->tabs()); + return decl; + } + + Statement_Ptr Expand::operator()(Assignment_Ptr a) + { + Env* env = environment(); + const std::string& var(a->variable()); + if (a->is_global()) { + if (a->is_default()) { + if (env->has_global(var)) { + Expression_Obj e = Cast(env->get_global(var)); + if (!e || e->concrete_type() == Expression::NULL_VAL) { + env->set_global(var, a->value()->perform(&eval)); + } + } + else { + env->set_global(var, a->value()->perform(&eval)); + } + } + else { + env->set_global(var, a->value()->perform(&eval)); + } + } + else if (a->is_default()) { + if (env->has_lexical(var)) { + auto cur = env; + while (cur && cur->is_lexical()) { + if (cur->has_local(var)) { + if (AST_Node_Obj node = cur->get_local(var)) { + Expression_Obj e = Cast(node); + if (!e || e->concrete_type() == Expression::NULL_VAL) { + cur->set_local(var, a->value()->perform(&eval)); + } + } + else { + throw std::runtime_error("Env not in sync"); + } + return 0; + } + cur = cur->parent(); + } + throw std::runtime_error("Env not in sync"); + } + else if (env->has_global(var)) { + if (AST_Node_Obj node = env->get_global(var)) { + Expression_Obj e = Cast(node); + if (!e || e->concrete_type() == Expression::NULL_VAL) { + env->set_global(var, a->value()->perform(&eval)); + } + } + } + else if (env->is_lexical()) { + env->set_local(var, a->value()->perform(&eval)); + } + else { + env->set_local(var, a->value()->perform(&eval)); + } + } + else { + env->set_lexical(var, a->value()->perform(&eval)); + } + return 0; + } + + Statement_Ptr Expand::operator()(Import_Ptr imp) + { + Import_Obj result = SASS_MEMORY_NEW(Import, imp->pstate()); + if (imp->import_queries() && imp->import_queries()->size()) { + Expression_Obj ex = imp->import_queries()->perform(&eval); + result->import_queries(Cast(ex)); + } + for ( size_t i = 0, S = imp->urls().size(); i < S; ++i) { + result->urls().push_back(imp->urls()[i]->perform(&eval)); + } + // all resources have been dropped for Input_Stubs + // for ( size_t i = 0, S = imp->incs().size(); i < S; ++i) {} + return result.detach(); + } + + Statement_Ptr Expand::operator()(Import_Stub_Ptr i) + { + traces.push_back(Backtrace(i->pstate())); + // get parent node from call stack + AST_Node_Obj parent = call_stack.back(); + if (Cast(parent) == NULL) { + error("Import directives may not be used within control directives or mixins.", i->pstate(), traces); + } + // we don't seem to need that actually afterall + Sass_Import_Entry import = sass_make_import( + i->imp_path().c_str(), + i->abs_path().c_str(), + 0, 0 + ); + ctx.import_stack.push_back(import); + + Block_Obj trace_block = SASS_MEMORY_NEW(Block, i->pstate()); + Trace_Obj trace = SASS_MEMORY_NEW(Trace, i->pstate(), i->imp_path(), trace_block, 'i'); + block_stack.back()->append(trace); + block_stack.push_back(trace_block); + + const std::string& abs_path(i->resource().abs_path); + append_block(ctx.sheets.at(abs_path).root); + sass_delete_import(ctx.import_stack.back()); + ctx.import_stack.pop_back(); + block_stack.pop_back(); + traces.pop_back(); + return 0; + } + + Statement_Ptr Expand::operator()(Warning_Ptr w) + { + // eval handles this too, because warnings may occur in functions + w->perform(&eval); + return 0; + } + + Statement_Ptr Expand::operator()(Error_Ptr e) + { + // 
eval handles this too, because errors may occur in functions + e->perform(&eval); + return 0; + } + + Statement_Ptr Expand::operator()(Debug_Ptr d) + { + // eval handles this too, because warnings may occur in functions + d->perform(&eval); + return 0; + } + + Statement_Ptr Expand::operator()(Comment_Ptr c) + { + if (ctx.output_style() == COMPRESSED) { + // comments should not be evaluated in compact + // https://github.com/sass/libsass/issues/2359 + if (!c->is_important()) return NULL; + } + eval.is_in_comment = true; + Comment_Ptr rv = SASS_MEMORY_NEW(Comment, c->pstate(), Cast(c->text()->perform(&eval)), c->is_important()); + eval.is_in_comment = false; + // TODO: eval the text, once we're parsing/storing it as a String_Schema + return rv; + } + + Statement_Ptr Expand::operator()(If_Ptr i) + { + Env env(environment(), true); + env_stack.push_back(&env); + call_stack.push_back(i); + Expression_Obj rv = i->predicate()->perform(&eval); + if (*rv) { + append_block(i->block()); + } + else { + Block_Ptr alt = i->alternative(); + if (alt) append_block(alt); + } + call_stack.pop_back(); + env_stack.pop_back(); + return 0; + } + + // For does not create a new env scope + // But iteration vars are reset afterwards + Statement_Ptr Expand::operator()(For_Ptr f) + { + std::string variable(f->variable()); + Expression_Obj low = f->lower_bound()->perform(&eval); + if (low->concrete_type() != Expression::NUMBER) { + traces.push_back(Backtrace(low->pstate())); + throw Exception::TypeMismatch(traces, *low, "integer"); + } + Expression_Obj high = f->upper_bound()->perform(&eval); + if (high->concrete_type() != Expression::NUMBER) { + traces.push_back(Backtrace(high->pstate())); + throw Exception::TypeMismatch(traces, *high, "integer"); + } + Number_Obj sass_start = Cast(low); + Number_Obj sass_end = Cast(high); + // check if units are valid for sequence + if (sass_start->unit() != sass_end->unit()) { + std::stringstream msg; msg << "Incompatible units: '" + << sass_start->unit() << "' and '" + << sass_end->unit() << "'."; + error(msg.str(), low->pstate(), traces); + } + double start = sass_start->value(); + double end = sass_end->value(); + // only create iterator once in this environment + Env env(environment(), true); + env_stack.push_back(&env); + call_stack.push_back(f); + Block_Ptr body = f->block(); + if (start < end) { + if (f->is_inclusive()) ++end; + for (double i = start; + i < end; + ++i) { + Number_Obj it = SASS_MEMORY_NEW(Number, low->pstate(), i, sass_end->unit()); + env.set_local(variable, it); + append_block(body); + } + } else { + if (f->is_inclusive()) --end; + for (double i = start; + i > end; + --i) { + Number_Obj it = SASS_MEMORY_NEW(Number, low->pstate(), i, sass_end->unit()); + env.set_local(variable, it); + append_block(body); + } + } + call_stack.pop_back(); + env_stack.pop_back(); + return 0; + } + + // Eval does not create a new env scope + // But iteration vars are reset afterwards + Statement_Ptr Expand::operator()(Each_Ptr e) + { + std::vector variables(e->variables()); + Expression_Obj expr = e->list()->perform(&eval); + List_Obj list = 0; + Map_Obj map; + if (expr->concrete_type() == Expression::MAP) { + map = Cast(expr); + } + else if (Selector_List_Ptr ls = Cast(expr)) { + Listize listize; + Expression_Obj rv = ls->perform(&listize); + list = Cast(rv); + } + else if (expr->concrete_type() != Expression::LIST) { + list = SASS_MEMORY_NEW(List, expr->pstate(), 1, SASS_COMMA); + list->append(expr); + } + else { + list = Cast(expr); + } + // remember variables and then reset 
them + Env env(environment(), true); + env_stack.push_back(&env); + call_stack.push_back(e); + Block_Ptr body = e->block(); + + if (map) { + for (auto key : map->keys()) { + Expression_Obj k = key->perform(&eval); + Expression_Obj v = map->at(key)->perform(&eval); + + if (variables.size() == 1) { + List_Obj variable = SASS_MEMORY_NEW(List, map->pstate(), 2, SASS_SPACE); + variable->append(k); + variable->append(v); + env.set_local(variables[0], variable); + } else { + env.set_local(variables[0], k); + env.set_local(variables[1], v); + } + append_block(body); + } + } + else { + // bool arglist = list->is_arglist(); + if (list->length() == 1 && Cast(list)) { + list = Cast(list); + } + for (size_t i = 0, L = list->length(); i < L; ++i) { + Expression_Obj item = list->at(i); + // unwrap value if the expression is an argument + if (Argument_Obj arg = Cast(item)) item = arg->value(); + // check if we got passed a list of args (investigate) + if (List_Obj scalars = Cast(item)) { + if (variables.size() == 1) { + List_Obj var = scalars; + // if (arglist) var = (*scalars)[0]; + env.set_local(variables[0], var); + } else { + for (size_t j = 0, K = variables.size(); j < K; ++j) { + Expression_Obj res = j >= scalars->length() + ? SASS_MEMORY_NEW(Null, expr->pstate()) + : (*scalars)[j]->perform(&eval); + env.set_local(variables[j], res); + } + } + } else { + if (variables.size() > 0) { + env.set_local(variables.at(0), item); + for (size_t j = 1, K = variables.size(); j < K; ++j) { + Expression_Obj res = SASS_MEMORY_NEW(Null, expr->pstate()); + env.set_local(variables[j], res); + } + } + } + append_block(body); + } + } + call_stack.pop_back(); + env_stack.pop_back(); + return 0; + } + + Statement_Ptr Expand::operator()(While_Ptr w) + { + Expression_Obj pred = w->predicate(); + Block_Ptr body = w->block(); + Env env(environment(), true); + env_stack.push_back(&env); + call_stack.push_back(w); + Expression_Obj cond = pred->perform(&eval); + while (!cond->is_false()) { + append_block(body); + cond = pred->perform(&eval); + } + call_stack.pop_back(); + env_stack.pop_back(); + return 0; + } + + Statement_Ptr Expand::operator()(Return_Ptr r) + { + error("@return may only be used within a function", r->pstate(), traces); + return 0; + } + + + void Expand::expand_selector_list(Selector_Obj s, Selector_List_Obj extender) { + + if (Selector_List_Obj sl = Cast(s)) { + for (Complex_Selector_Obj complex_selector : sl->elements()) { + Complex_Selector_Obj tail = complex_selector; + while (tail) { + if (tail->head()) for (Simple_Selector_Obj header : tail->head()->elements()) { + if (Cast(header) == NULL) continue; // skip all others + std::string sel_str(complex_selector->to_string(ctx.c_options)); + error("Can't extend " + sel_str + ": can't extend parent selectors", header->pstate(), traces); + } + tail = tail->tail(); + } + } + } + + + Selector_List_Obj contextualized = Cast(s->perform(&eval)); + if (contextualized == false) return; + for (auto complex_sel : contextualized->elements()) { + Complex_Selector_Obj c = complex_sel; + if (!c->head() || c->tail()) { + std::string sel_str(contextualized->to_string(ctx.c_options)); + error("Can't extend " + sel_str + ": can't extend nested selectors", c->pstate(), traces); + } + Compound_Selector_Obj target = c->head(); + if (contextualized->is_optional()) target->is_optional(true); + for (size_t i = 0, L = extender->length(); i < L; ++i) { + Complex_Selector_Obj sel = (*extender)[i]; + if (!(sel->head() && sel->head()->length() > 0 && + Cast((*sel->head())[0]))) + { + 
Compound_Selector_Obj hh = SASS_MEMORY_NEW(Compound_Selector, (*extender)[i]->pstate()); + hh->media_block((*extender)[i]->media_block()); + Complex_Selector_Obj ssel = SASS_MEMORY_NEW(Complex_Selector, (*extender)[i]->pstate()); + ssel->media_block((*extender)[i]->media_block()); + if (sel->has_line_feed()) ssel->has_line_feed(true); + Parent_Selector_Obj ps = SASS_MEMORY_NEW(Parent_Selector, (*extender)[i]->pstate()); + ps->media_block((*extender)[i]->media_block()); + hh->append(ps); + ssel->tail(sel); + ssel->head(hh); + sel = ssel; + } + // if (c->has_line_feed()) sel->has_line_feed(true); + ctx.subset_map.put(target, std::make_pair(sel, target)); + } + } + + } + + Statement* Expand::operator()(Extension_Ptr e) + { + if (Selector_List_Ptr extender = selector()) { + Selector_List_Ptr sl = e->selector(); + // abort on invalid selector + if (sl == NULL) return NULL; + if (Selector_Schema_Ptr schema = sl->schema()) { + if (schema->has_real_parent_ref()) { + // put root block on stack again (ignore parents) + // selector schema must not connect in eval! + block_stack.push_back(block_stack.at(1)); + sl = eval(sl->schema()); + block_stack.pop_back(); + } else { + selector_stack.push_back(0); + sl = eval(sl->schema()); + selector_stack.pop_back(); + } + } + for (Complex_Selector_Obj cs : sl->elements()) { + if (!cs.isNull() && !cs->head().isNull()) { + cs->head()->media_block(media_block_stack.back()); + } + } + selector_stack.push_back(0); + expand_selector_list(sl, extender); + selector_stack.pop_back(); + } + return 0; + } + + Statement_Ptr Expand::operator()(Definition_Ptr d) + { + Env* env = environment(); + Definition_Obj dd = SASS_MEMORY_COPY(d); + env->local_frame()[d->name() + + (d->type() == Definition::MIXIN ? "[m]" : "[f]")] = dd; + + if (d->type() == Definition::FUNCTION && ( + Prelexer::calc_fn_call(d->name().c_str()) || + d->name() == "element" || + d->name() == "expression" || + d->name() == "url" + )) { + deprecated( + "Naming a function \"" + d->name() + "\" is disallowed and will be an error in future versions of Sass.", + "This name conflicts with an existing CSS function with special parse rules.", + false, d->pstate() + ); + } + + // set the static link so we can have lexical scoping + dd->environment(env); + return 0; + } + + Statement_Ptr Expand::operator()(Mixin_Call_Ptr c) + { + if (recursions > maxRecursion) { + throw Exception::StackError(traces, *c); + } + + recursions ++; + + Env* env = environment(); + std::string full_name(c->name() + "[m]"); + if (!env->has(full_name)) { + error("no mixin named " + c->name(), c->pstate(), traces); + } + Definition_Obj def = Cast((*env)[full_name]); + Block_Obj body = def->block(); + Parameters_Obj params = def->parameters(); + + if (c->block() && c->name() != "@content" && !body->has_content()) { + error("Mixin \"" + c->name() + "\" does not accept a content block.", c->pstate(), traces); + } + Expression_Obj rv = c->arguments()->perform(&eval); + Arguments_Obj args = Cast(rv); + std::string msg(", in mixin `" + c->name() + "`"); + traces.push_back(Backtrace(c->pstate(), msg)); + ctx.callee_stack.push_back({ + c->name().c_str(), + c->pstate().path, + c->pstate().line + 1, + c->pstate().column + 1, + SASS_CALLEE_MIXIN, + { env } + }); + + Env new_env(def->environment()); + env_stack.push_back(&new_env); + if (c->block()) { + // represent mixin content blocks as thunks/closures + Definition_Obj thunk = SASS_MEMORY_NEW(Definition, + c->pstate(), + "@content", + SASS_MEMORY_NEW(Parameters, c->pstate()), + c->block(), + 
Definition::MIXIN); + thunk->environment(env); + new_env.local_frame()["@content[m]"] = thunk; + } + + bind(std::string("Mixin"), c->name(), params, args, &ctx, &new_env, &eval); + + Block_Obj trace_block = SASS_MEMORY_NEW(Block, c->pstate()); + Trace_Obj trace = SASS_MEMORY_NEW(Trace, c->pstate(), c->name(), trace_block); + + env->set_global("is_in_mixin", bool_true); + if (Block_Ptr pr = block_stack.back()) { + trace_block->is_root(pr->is_root()); + } + block_stack.push_back(trace_block); + for (auto bb : body->elements()) { + if (Ruleset_Ptr r = Cast(bb)) { + r->is_root(trace_block->is_root()); + } + Statement_Obj ith = bb->perform(this); + if (ith) trace->block()->append(ith); + } + block_stack.pop_back(); + env->del_global("is_in_mixin"); + + ctx.callee_stack.pop_back(); + env_stack.pop_back(); + traces.pop_back(); + + recursions --; + return trace.detach(); + } + + Statement_Ptr Expand::operator()(Content_Ptr c) + { + Env* env = environment(); + // convert @content directives into mixin calls to the underlying thunk + if (!env->has("@content[m]")) return 0; + + if (block_stack.back()->is_root()) { + selector_stack.push_back(0); + } + + Mixin_Call_Obj call = SASS_MEMORY_NEW(Mixin_Call, + c->pstate(), + "@content", + SASS_MEMORY_NEW(Arguments, c->pstate())); + + Trace_Obj trace = Cast(call->perform(this)); + + if (block_stack.back()->is_root()) { + selector_stack.pop_back(); + } + + return trace.detach(); + } + + // produce an error if something is not implemented + inline Statement_Ptr Expand::fallback_impl(AST_Node_Ptr n) + { + std::string err =std:: string("`Expand` doesn't handle ") + typeid(*n).name(); + String_Quoted_Obj msg = SASS_MEMORY_NEW(String_Quoted, ParserState("[WARN]"), err); + error("unknown internal error; please contact the LibSass maintainers", n->pstate(), traces); + return SASS_MEMORY_NEW(Warning, ParserState("[WARN]"), msg); + } + + // process and add to last block on stack + inline void Expand::append_block(Block_Ptr b) + { + if (b->is_root()) call_stack.push_back(b); + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Ptr stm = b->at(i); + Statement_Obj ith = stm->perform(this); + if (ith) block_stack.back()->append(ith); + } + if (b->is_root()) call_stack.pop_back(); + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/expand.hpp b/mybulma/node_modules/node-sass/src/libsass/src/expand.hpp new file mode 100644 index 0000000..3464c98 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/expand.hpp @@ -0,0 +1,82 @@ +#ifndef SASS_EXPAND_H +#define SASS_EXPAND_H + +#include + +#include "ast.hpp" +#include "eval.hpp" +#include "operation.hpp" +#include "environment.hpp" + +namespace Sass { + + class Listize; + class Context; + class Eval; + struct Backtrace; + + class Expand : public Operation_CRTP { + public: + + Env* environment(); + Selector_List_Obj selector(); + + Context& ctx; + Backtraces& traces; + Eval eval; + size_t recursions; + bool in_keyframes; + bool at_root_without_rule; + bool old_at_root_without_rule; + + // it's easier to work with vectors + std::vector env_stack; + std::vector block_stack; + std::vector call_stack; + std::vector selector_stack; + std::vector media_block_stack; + + Boolean_Obj bool_true; + + Statement_Ptr fallback_impl(AST_Node_Ptr n); + + private: + void expand_selector_list(Selector_Obj, Selector_List_Obj extender); + + public: + Expand(Context&, Env*, std::vector* stack = NULL); + ~Expand() { } + + Block_Ptr operator()(Block_Ptr); + Statement_Ptr operator()(Ruleset_Ptr); + Statement_Ptr 
operator()(Media_Block_Ptr); + Statement_Ptr operator()(Supports_Block_Ptr); + Statement_Ptr operator()(At_Root_Block_Ptr); + Statement_Ptr operator()(Directive_Ptr); + Statement_Ptr operator()(Declaration_Ptr); + Statement_Ptr operator()(Assignment_Ptr); + Statement_Ptr operator()(Import_Ptr); + Statement_Ptr operator()(Import_Stub_Ptr); + Statement_Ptr operator()(Warning_Ptr); + Statement_Ptr operator()(Error_Ptr); + Statement_Ptr operator()(Debug_Ptr); + Statement_Ptr operator()(Comment_Ptr); + Statement_Ptr operator()(If_Ptr); + Statement_Ptr operator()(For_Ptr); + Statement_Ptr operator()(Each_Ptr); + Statement_Ptr operator()(While_Ptr); + Statement_Ptr operator()(Return_Ptr); + Statement_Ptr operator()(Extension_Ptr); + Statement_Ptr operator()(Definition_Ptr); + Statement_Ptr operator()(Mixin_Call_Ptr); + Statement_Ptr operator()(Content_Ptr); + + template + Statement_Ptr fallback(U x) { return fallback_impl(x); } + + void append_block(Block_Ptr); + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/extend.cpp b/mybulma/node_modules/node-sass/src/libsass/src/extend.cpp new file mode 100644 index 0000000..6022698 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/extend.cpp @@ -0,0 +1,2130 @@ +#include "sass.hpp" +#include "extend.hpp" +#include "context.hpp" +#include "backtrace.hpp" +#include "paths.hpp" +#include "parser.hpp" +#include "expand.hpp" +#include "node.hpp" +#include "sass_util.hpp" +#include "remove_placeholders.hpp" +#include "debug.hpp" +#include +#include +#include + +/* + NOTES: + + - The print* functions print to cerr. This allows our testing frameworks (like sass-spec) to ignore the output, which + is very helpful when debugging. The format of the output is mainly to wrap things in square brackets to match what + ruby already outputs (to make comparisons easier). + + - For the direct porting effort, we're trying to port method-for-method until we get all the tests passing. + Where applicable, I've tried to include the ruby code above the function for reference until all our tests pass. + The ruby code isn't always directly portable, so I've tried to include any modified ruby code that was actually + used for the porting. + + - DO NOT try to optimize yet. We get a tremendous benefit out of comparing the output of each stage of the extend to the ruby + output at the same stage. This makes it much easier to determine where problems are. Try to keep as close to + the ruby code as you can until we have all the sass-spec tests passing. Then, we should optimize. However, if you see + something that could probably be optimized, let's not forget it. Add a // TODO: or // IMPROVEMENT: comment. + + - Coding conventions in this file (these may need to be changed before merging back into master) + - Very basic hungarian notation: + p prefix for pointers (pSelector) + no prefix for value types and references (selector) + - Use STL iterators where possible + - prefer verbose naming over terse naming + - use typedefs for STL container types for make maintenance easier + + - You may see a lot of comments that say "// TODO: is this the correct combinator?". See the comment referring to combinators + in extendCompoundSelector for a more extensive explanation of my confusion. I think our divergence in data model from ruby + sass causes this to be necessary. + + + GLOBAL TODOS: + + - wrap the contents of the print functions in DEBUG preprocesser conditionals so they will be optimized away in non-debug mode. 
+ + - consider making the extend* functions member functions to avoid passing around ctx and subset_map map around. This has the + drawback that the implementation details of the operator are then exposed to the outside world, which is not ideal and + can cause additional compile time dependencies. + + - mark the helper methods in this file static to given them compilation unit linkage. + + - implement parent directive matching + + - fix compilation warnings for unused Extend members if we really don't need those references anymore. + */ + + +namespace Sass { + + + +#ifdef DEBUG + + // TODO: move the ast specific ostream operators into ast.hpp/ast.cpp + std::ostream& operator<<(std::ostream& os, const Complex_Selector::Combinator combinator) { + switch (combinator) { + case Complex_Selector::ANCESTOR_OF: os << "\" \""; break; + case Complex_Selector::PARENT_OF: os << "\">\""; break; + case Complex_Selector::PRECEDES: os << "\"~\""; break; + case Complex_Selector::ADJACENT_TO: os << "\"+\""; break; + case Complex_Selector::REFERENCE: os << "\"/\""; break; + } + + return os; + } + + + std::ostream& operator<<(std::ostream& os, Compound_Selector& compoundSelector) { + for (size_t i = 0, L = compoundSelector.length(); i < L; ++i) { + if (i > 0) os << ", "; + os << compoundSelector[i]->to_string(); + } + return os; + } + + std::ostream& operator<<(std::ostream& os, Simple_Selector& simpleSelector) { + os << simpleSelector.to_string(); + return os; + } + + // Print a string representation of a Compound_Selector + static void printSimpleSelector(Simple_Selector* pSimpleSelector, const char* message=NULL, bool newline=true) { + + if (message) { + std::cerr << message; + } + + if (pSimpleSelector) { + std::cerr << "[" << *pSimpleSelector << "]"; + } else { + std::cerr << "NULL"; + } + + if (newline) { + std::cerr << std::endl; + } + } + + // Print a string representation of a Compound_Selector + static void printCompoundSelector(Compound_Selector_Ptr pCompoundSelector, const char* message=NULL, bool newline=true) { + + if (message) { + std::cerr << message; + } + + if (pCompoundSelector) { + std::cerr << "[" << *pCompoundSelector << "]"; + } else { + std::cerr << "NULL"; + } + + if (newline) { + std::cerr << std::endl; + } + } + + + std::ostream& operator<<(std::ostream& os, Complex_Selector& complexSelector) { + + os << "["; + Complex_Selector_Ptr pIter = &complexSelector; + bool first = true; + while (pIter) { + if (pIter->combinator() != Complex_Selector::ANCESTOR_OF) { + if (!first) { + os << ", "; + } + first = false; + os << pIter->combinator(); + } + + if (!first) { + os << ", "; + } + first = false; + + if (pIter->head()) { + os << pIter->head()->to_string(); + } else { + os << "NULL_HEAD"; + } + + pIter = pIter->tail(); + } + os << "]"; + + return os; + } + + + // Print a string representation of a Complex_Selector + static void printComplexSelector(Complex_Selector_Ptr pComplexSelector, const char* message=NULL, bool newline=true) { + + if (message) { + std::cerr << message; + } + + if (pComplexSelector) { + std::cerr << *pComplexSelector; + } else { + std::cerr << "NULL"; + } + + if (newline) { + std::cerr << std::endl; + } + } + + static void printSelsNewSeqPairCollection(SubSetMapLookups& collection, const char* message=NULL, bool newline=true) { + + if (message) { + std::cerr << message; + } + bool first = true; + std::cerr << "["; + for(SubSetMapLookup& pair : collection) { + if (first) { + first = false; + } else { + std::cerr << ", "; + } + std::cerr << "["; + Compound_Selector_Ptr 
pSels = pair.first; + Complex_Selector_Ptr pNewSelector = pair.second; + std::cerr << "[" << *pSels << "], "; + printComplexSelector(pNewSelector, NULL, false); + } + std::cerr << "]"; + + if (newline) { + std::cerr << std::endl; + } + } + + // Print a string representation of a ComplexSelectorSet + static void printSourcesSet(ComplexSelectorSet& sources, const char* message=NULL, bool newline=true) { + + if (message) { + std::cerr << message; + } + + // Convert to a deque of strings so we can sort since order doesn't matter in a set. This should cut down on + // the differences we see when debug printing. + typedef std::deque SourceStrings; + SourceStrings sourceStrings; + for (ComplexSelectorSet::iterator iterator = sources.begin(), iteratorEnd = sources.end(); iterator != iteratorEnd; ++iterator) { + Complex_Selector_Ptr pSource = *iterator; + std::stringstream sstream; + sstream << complexSelectorToNode(pSource); + sourceStrings.push_back(sstream.str()); + } + + // Sort to get consistent output + std::sort(sourceStrings.begin(), sourceStrings.end()); + + std::cerr << "ComplexSelectorSet["; + for (SourceStrings::iterator iterator = sourceStrings.begin(), iteratorEnd = sourceStrings.end(); iterator != iteratorEnd; ++iterator) { + std::string source = *iterator; + if (iterator != sourceStrings.begin()) { + std::cerr << ", "; + } + std::cerr << source; + } + std::cerr << "]"; + + if (newline) { + std::cerr << std::endl; + } + } + + + std::ostream& operator<<(std::ostream& os, SubSetMapPairs& entries) { + os << "SUBSET_MAP_ENTRIES["; + + for (SubSetMapPairs::iterator iterator = entries.begin(), endIterator = entries.end(); iterator != endIterator; ++iterator) { + Complex_Selector_Obj pExtComplexSelector = iterator->first; // The selector up to where the @extend is (ie, the thing to merge) + Compound_Selector_Obj pExtCompoundSelector = iterator->second; // The stuff after the @extend + + if (iterator != entries.begin()) { + os << ", "; + } + + os << "("; + + if (pExtComplexSelector) { + std::cerr << *pExtComplexSelector; + } else { + std::cerr << "NULL"; + } + + os << " -> "; + + if (pExtCompoundSelector) { + std::cerr << *pExtCompoundSelector; + } else { + std::cerr << "NULL"; + } + + os << ")"; + + } + + os << "]"; + + return os; + } +#endif + + static bool parentSuperselector(Complex_Selector_Ptr pOne, Complex_Selector_Ptr pTwo) { + // TODO: figure out a better way to create a Complex_Selector from scratch + // TODO: There's got to be a better way. This got ugly quick... 
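  // Aside (not part of the LibSass sources): the body below builds a synthetic
  // innermost ancestor -- an Element_Selector named "temp" wrapped in a
  // one-element Compound_Selector/Complex_Selector -- and splices it onto both
  // inputs via set_innermost(). With the same fake parent context appended,
  // is_superselector_of() answers "would pOne match everything pTwo matches
  // under any shared parent?", which mirrors ruby's parent_superselector?.
  // clear_innermost() then strips the fake ancestor again so neither input
  // selector is left mutated.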
+ Element_Selector_Obj fakeParent = SASS_MEMORY_NEW(Element_Selector, ParserState("[FAKE]"), "temp"); + Compound_Selector_Obj fakeHead = SASS_MEMORY_NEW(Compound_Selector, ParserState("[FAKE]"), 1 /*size*/); + fakeHead->elements().push_back(fakeParent); + Complex_Selector_Obj fakeParentContainer = SASS_MEMORY_NEW(Complex_Selector, ParserState("[FAKE]"), Complex_Selector::ANCESTOR_OF, fakeHead /*head*/, NULL /*tail*/); + + pOne->set_innermost(fakeParentContainer, Complex_Selector::ANCESTOR_OF); + pTwo->set_innermost(fakeParentContainer, Complex_Selector::ANCESTOR_OF); + + bool isSuperselector = pOne->is_superselector_of(pTwo); + + pOne->clear_innermost(); + pTwo->clear_innermost(); + + return isSuperselector; + } + + void nodeToComplexSelectorDeque(const Node& node, ComplexSelectorDeque& out) { + for (NodeDeque::iterator iter = node.collection()->begin(), iterEnd = node.collection()->end(); iter != iterEnd; iter++) { + Node& child = *iter; + out.push_back(nodeToComplexSelector(child)); + } + } + + Node complexSelectorDequeToNode(const ComplexSelectorDeque& deque) { + Node result = Node::createCollection(); + + for (ComplexSelectorDeque::const_iterator iter = deque.begin(), iterEnd = deque.end(); iter != iterEnd; iter++) { + Complex_Selector_Obj pChild = *iter; + result.collection()->push_back(complexSelectorToNode(pChild)); + } + + return result; + } + + class LcsCollectionComparator { + public: + LcsCollectionComparator() {} + + bool operator()(Complex_Selector_Obj pOne, Complex_Selector_Obj pTwo, Complex_Selector_Obj& pOut) const { + /* + This code is based on the following block from ruby sass' subweave + do |s1, s2| + next s1 if s1 == s2 + next unless s1.first.is_a?(SimpleSequence) && s2.first.is_a?(SimpleSequence) + next s2 if parent_superselector?(s1, s2) + next s1 if parent_superselector?(s2, s1) + end + */ + + if (*pOne == *pTwo) { + pOut = pOne; + return true; + } + + if (pOne->combinator() != Complex_Selector::ANCESTOR_OF || pTwo->combinator() != Complex_Selector::ANCESTOR_OF) { + return false; + } + + if (parentSuperselector(pOne, pTwo)) { + pOut = pTwo; + return true; + } + + if (parentSuperselector(pTwo, pOne)) { + pOut = pOne; + return true; + } + + return false; + } + }; + + + /* + This is the equivalent of ruby's Sass::Util.lcs_backtrace. + + # Computes a single longest common subsequence for arrays x and y. + # Algorithm from http://en.wikipedia.org/wiki/Longest_common_subsequence_problem#Reading_out_an_LCS + */ + void lcs_backtrace(const LCSTable& c, ComplexSelectorDeque& x, ComplexSelectorDeque& y, int i, int j, const LcsCollectionComparator& comparator, ComplexSelectorDeque& out) { + //DEBUG_PRINTLN(LCS, "LCSBACK: X=" << x << " Y=" << y << " I=" << i << " J=" << j) + // TODO: make printComplexSelectorDeque and use DEBUG_EXEC AND DEBUG_PRINTLN HERE to get equivalent output + + if (i == 0 || j == 0) { + DEBUG_PRINTLN(LCS, "RETURNING EMPTY") + return; + } + + + Complex_Selector_Obj pCompareOut; + if (comparator(x[i], y[j], pCompareOut)) { + DEBUG_PRINTLN(LCS, "RETURNING AFTER ELEM COMPARE") + lcs_backtrace(c, x, y, i - 1, j - 1, comparator, out); + out.push_back(pCompareOut); + return; + } + + if (c[i][j - 1] > c[i - 1][j]) { + DEBUG_PRINTLN(LCS, "RETURNING AFTER TABLE COMPARE") + lcs_backtrace(c, x, y, i, j - 1, comparator, out); + return; + } + + DEBUG_PRINTLN(LCS, "FINAL RETURN") + lcs_backtrace(c, x, y, i - 1, j, comparator, out); + return; + } + + /* + This is the equivalent of ruby's Sass::Util.lcs_table. 
+ + # Calculates the memoization table for the Least Common Subsequence algorithm. + # Algorithm from http://en.wikipedia.org/wiki/Longest_common_subsequence_problem#Computing_the_length_of_the_LCS + */ + void lcs_table(const ComplexSelectorDeque& x, const ComplexSelectorDeque& y, const LcsCollectionComparator& comparator, LCSTable& out) { + //DEBUG_PRINTLN(LCS, "LCSTABLE: X=" << x << " Y=" << y) + // TODO: make printComplexSelectorDeque and use DEBUG_EXEC AND DEBUG_PRINTLN HERE to get equivalent output + + LCSTable c(x.size(), std::vector(y.size())); + + // These shouldn't be necessary since the vector will be initialized to 0 already. + // x.size.times {|i| c[i][0] = 0} + // y.size.times {|j| c[0][j] = 0} + + for (size_t i = 1; i < x.size(); i++) { + for (size_t j = 1; j < y.size(); j++) { + Complex_Selector_Obj pCompareOut; + + if (comparator(x[i], y[j], pCompareOut)) { + c[i][j] = c[i - 1][j - 1] + 1; + } else { + c[i][j] = std::max(c[i][j - 1], c[i - 1][j]); + } + } + } + + out = c; + } + + /* + This is the equivalent of ruby's Sass::Util.lcs. + + # Computes a single longest common subsequence for `x` and `y`. + # If there are more than one longest common subsequences, + # the one returned is that which starts first in `x`. + + # @param x [NodeCollection] + # @param y [NodeCollection] + # @comparator An equality check between elements of `x` and `y`. + # @return [NodeCollection] The LCS + + http://en.wikipedia.org/wiki/Longest_common_subsequence_problem + */ + void lcs(ComplexSelectorDeque& x, ComplexSelectorDeque& y, const LcsCollectionComparator& comparator, ComplexSelectorDeque& out) { + //DEBUG_PRINTLN(LCS, "LCS: X=" << x << " Y=" << y) + // TODO: make printComplexSelectorDeque and use DEBUG_EXEC AND DEBUG_PRINTLN HERE to get equivalent output + + x.push_front(NULL); + y.push_front(NULL); + + LCSTable table; + lcs_table(x, y, comparator, table); + + return lcs_backtrace(table, x, y, static_cast(x.size()) - 1, static_cast(y.size()) - 1, comparator, out); + } + + + /* + This is the equivalent of ruby's Sequence.trim. + + The following is the modified version of the ruby code that was more portable to C++. You + should be able to drop it into ruby 3.2.19 and get the same results from ruby sass. + + # Avoid truly horrific quadratic behavior. TODO: I think there + # may be a way to get perfect trimming without going quadratic. + return seqses if seqses.size > 100 + + # Keep the results in a separate array so we can be sure we aren't + # comparing against an already-trimmed selector. This ensures that two + # identical selectors don't mutually trim one another. + result = seqses.dup + + # This is n^2 on the sequences, but only comparing between + # separate sequences should limit the quadratic behavior. + seqses.each_with_index do |seqs1, i| + tempResult = [] + + for seq1 in seqs1 do + max_spec = 0 + for seq in _sources(seq1) do + max_spec = [max_spec, seq.specificity].max + end + + + isMoreSpecificOuter = false + for seqs2 in result do + if seqs1.equal?(seqs2) then + next + end + + # Second Law of Extend: the specificity of a generated selector + # should never be less than the specificity of the extending + # selector. + # + # See https://github.com/nex3/sass/issues/324. 
+ isMoreSpecificInner = false + for seq2 in seqs2 do + isMoreSpecificInner = _specificity(seq2) >= max_spec && _superselector?(seq2, seq1) + if isMoreSpecificInner then + break + end + end + + if isMoreSpecificInner then + isMoreSpecificOuter = true + break + end + end + + if !isMoreSpecificOuter then + tempResult.push(seq1) + end + end + + result[i] = tempResult + + end + + result + */ + /* + - IMPROVEMENT: We could probably work directly in the output trimmed deque. + */ + Node Extend::trim(Node& seqses, bool isReplace) { + // See the comments in the above ruby code before embarking on understanding this function. + + // Avoid poor performance in extreme cases. + if (seqses.collection()->size() > 100) { + return seqses; + } + + + DEBUG_PRINTLN(TRIM, "TRIM: " << seqses) + + + Node result = Node::createCollection(); + result.plus(seqses); + + DEBUG_PRINTLN(TRIM, "RESULT INITIAL: " << result) + + // Normally we use the standard STL iterators, but in this case, we need to access the result collection by index since we're + // iterating the input collection, computing a value, and then setting the result in the output collection. We have to keep track + // of the index manually. + int toTrimIndex = 0; + + for (NodeDeque::iterator seqsesIter = seqses.collection()->begin(), seqsesIterEnd = seqses.collection()->end(); seqsesIter != seqsesIterEnd; ++seqsesIter) { + Node& seqs1 = *seqsesIter; + + DEBUG_PRINTLN(TRIM, "SEQS1: " << seqs1 << " " << toTrimIndex) + + Node tempResult = Node::createCollection(); + tempResult.got_line_feed = seqs1.got_line_feed; + + for (NodeDeque::iterator seqs1Iter = seqs1.collection()->begin(), seqs1EndIter = seqs1.collection()->end(); seqs1Iter != seqs1EndIter; ++seqs1Iter) { + Node& seq1 = *seqs1Iter; + + Complex_Selector_Obj pSeq1 = nodeToComplexSelector(seq1); + + // Compute the maximum specificity. This requires looking at the "sources" of the sequence. See SimpleSequence.sources in the ruby code + // for a good description of sources. + // + // TODO: I'm pretty sure there's a bug in the sources code. It was implemented for sass-spec's 182_test_nested_extend_loop test. + // While the test passes, I compared the state of each trim call to verify correctness. The last trim call had incorrect sources. We + // had an extra source that the ruby version did not have. Without a failing test case, this is going to be extra hard to find. My + // best guess at this point is that we're cloning an object somewhere and maintaining the sources when we shouldn't be. This is purely + // a guess though. + unsigned long maxSpecificity = isReplace ? 
pSeq1->specificity() : 0; + ComplexSelectorSet sources = pSeq1->sources(); + + DEBUG_PRINTLN(TRIM, "TRIM SEQ1: " << seq1) + DEBUG_EXEC(TRIM, printSourcesSet(sources, "TRIM SOURCES: ")) + + for (ComplexSelectorSet::iterator sourcesSetIterator = sources.begin(), sourcesSetIteratorEnd = sources.end(); sourcesSetIterator != sourcesSetIteratorEnd; ++sourcesSetIterator) { + const Complex_Selector_Obj& pCurrentSelector = *sourcesSetIterator; + maxSpecificity = std::max(maxSpecificity, pCurrentSelector->specificity()); + } + + DEBUG_PRINTLN(TRIM, "MAX SPECIFICITY: " << maxSpecificity) + + bool isMoreSpecificOuter = false; + + int resultIndex = 0; + + for (NodeDeque::iterator resultIter = result.collection()->begin(), resultIterEnd = result.collection()->end(); resultIter != resultIterEnd; ++resultIter) { + Node& seqs2 = *resultIter; + + DEBUG_PRINTLN(TRIM, "SEQS1: " << seqs1) + DEBUG_PRINTLN(TRIM, "SEQS2: " << seqs2) + + // Do not compare the same sequence to itself. The ruby call we're trying to + // emulate is: seqs1.equal?(seqs2). equal? is an object comparison, not an equivalency comparision. + // Since we have the same pointers in seqes and results, we can do a pointer comparision. seqs1 is + // derived from seqses and seqs2 is derived from result. + if (seqs1.collection() == seqs2.collection()) { + DEBUG_PRINTLN(TRIM, "CONTINUE") + continue; + } + + bool isMoreSpecificInner = false; + + for (NodeDeque::iterator seqs2Iter = seqs2.collection()->begin(), seqs2IterEnd = seqs2.collection()->end(); seqs2Iter != seqs2IterEnd; ++seqs2Iter) { + Node& seq2 = *seqs2Iter; + + Complex_Selector_Obj pSeq2 = nodeToComplexSelector(seq2); + + DEBUG_PRINTLN(TRIM, "SEQ2 SPEC: " << pSeq2->specificity()) + DEBUG_PRINTLN(TRIM, "IS SPEC: " << pSeq2->specificity() << " >= " << maxSpecificity << " " << (pSeq2->specificity() >= maxSpecificity ? "true" : "false")) + DEBUG_PRINTLN(TRIM, "IS SUPER: " << (pSeq2->is_superselector_of(pSeq1) ? "true" : "false")) + + isMoreSpecificInner = pSeq2->specificity() >= maxSpecificity && pSeq2->is_superselector_of(pSeq1); + + if (isMoreSpecificInner) { + DEBUG_PRINTLN(TRIM, "FOUND MORE SPECIFIC") + break; + } + } + + // If we found something more specific, we're done. Let the outer loop know and stop iterating. + if (isMoreSpecificInner) { + isMoreSpecificOuter = true; + break; + } + + resultIndex++; + } + + if (!isMoreSpecificOuter) { + DEBUG_PRINTLN(TRIM, "PUSHING: " << seq1) + tempResult.collection()->push_back(seq1); + } + + } + + DEBUG_PRINTLN(TRIM, "RESULT BEFORE ASSIGN: " << result) + DEBUG_PRINTLN(TRIM, "TEMP RESULT: " << toTrimIndex << " " << tempResult) + (*result.collection())[toTrimIndex] = tempResult; + + toTrimIndex++; + + DEBUG_PRINTLN(TRIM, "RESULT: " << result) + } + + return result; + } + + + + static bool parentSuperselector(const Node& one, const Node& two) { + // TODO: figure out a better way to create a Complex_Selector from scratch + // TODO: There's got to be a better way. This got ugly quick... 
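  // Aside (not part of the LibSass sources): this is the Node-based twin of the
  // Complex_Selector_Ptr parentSuperselector() defined earlier in this file. The
  // grouped Node sequences are first converted back to Complex_Selectors via
  // nodeToComplexSelector(), then the same synthetic-parent trick is applied.
  // ParentSuperselectorChunker below relies on it, and chunks() uses that chunker
  // during subweave() to decide where the initial subsequences of two selector
  // sequences end.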
+ Element_Selector_Obj fakeParent = SASS_MEMORY_NEW(Element_Selector, ParserState("[FAKE]"), "temp"); + Compound_Selector_Obj fakeHead = SASS_MEMORY_NEW(Compound_Selector, ParserState("[FAKE]"), 1 /*size*/); + fakeHead->elements().push_back(fakeParent); + Complex_Selector_Obj fakeParentContainer = SASS_MEMORY_NEW(Complex_Selector, ParserState("[FAKE]"), Complex_Selector::ANCESTOR_OF, fakeHead /*head*/, NULL /*tail*/); + + Complex_Selector_Obj pOneWithFakeParent = nodeToComplexSelector(one); + pOneWithFakeParent->set_innermost(fakeParentContainer, Complex_Selector::ANCESTOR_OF); + Complex_Selector_Obj pTwoWithFakeParent = nodeToComplexSelector(two); + pTwoWithFakeParent->set_innermost(fakeParentContainer, Complex_Selector::ANCESTOR_OF); + + return pOneWithFakeParent->is_superselector_of(pTwoWithFakeParent); + } + + + class ParentSuperselectorChunker { + public: + ParentSuperselectorChunker(Node& lcs) : mLcs(lcs) {} + Node& mLcs; + + bool operator()(const Node& seq) const { + // {|s| parent_superselector?(s.first, lcs.first)} + if (seq.collection()->size() == 0) return false; + return parentSuperselector(seq.collection()->front(), mLcs.collection()->front()); + } + }; + + class SubweaveEmptyChunker { + public: + bool operator()(const Node& seq) const { + // {|s| s.empty?} + + return seq.collection()->empty(); + } + }; + + /* + # Takes initial subsequences of `seq1` and `seq2` and returns all + # orderings of those subsequences. The initial subsequences are determined + # by a block. + # + # Destructively removes the initial subsequences of `seq1` and `seq2`. + # + # For example, given `(A B C | D E)` and `(1 2 | 3 4 5)` (with `|` + # denoting the boundary of the initial subsequence), this would return + # `[(A B C 1 2), (1 2 A B C)]`. The sequences would then be `(D E)` and + # `(3 4 5)`. + # + # @param seq1 [Array] + # @param seq2 [Array] + # @yield [a] Used to determine when to cut off the initial subsequences. + # Called repeatedly for each sequence until it returns true. + # @yieldparam a [Array] A final subsequence of one input sequence after + # cutting off some initial subsequence. + # @yieldreturn [Boolean] Whether or not to cut off the initial subsequence + # here. + # @return [Array] All possible orderings of the initial subsequences. + def chunks(seq1, seq2) + chunk1 = [] + chunk1 << seq1.shift until yield seq1 + chunk2 = [] + chunk2 << seq2.shift until yield seq2 + return [] if chunk1.empty? && chunk2.empty? + return [chunk2] if chunk1.empty? + return [chunk1] if chunk2.empty? 
+ [chunk1 + chunk2, chunk2 + chunk1] + end + */ + template + static Node chunks(Node& seq1, Node& seq2, const ChunkerType& chunker) { + Node chunk1 = Node::createCollection(); + while (seq1.collection()->size() && !chunker(seq1)) { + chunk1.collection()->push_back(seq1.collection()->front()); + seq1.collection()->pop_front(); + } + + Node chunk2 = Node::createCollection(); + while (!seq2.collection()->empty() && !chunker(seq2)) { + chunk2.collection()->push_back(seq2.collection()->front()); + seq2.collection()->pop_front(); + } + + if (chunk1.collection()->empty() && chunk2.collection()->empty()) { + DEBUG_PRINTLN(CHUNKS, "RETURNING BOTH EMPTY") + return Node::createCollection(); + } + + if (chunk1.collection()->empty()) { + Node chunk2Wrapper = Node::createCollection(); + chunk2Wrapper.collection()->push_back(chunk2); + DEBUG_PRINTLN(CHUNKS, "RETURNING ONE EMPTY") + return chunk2Wrapper; + } + + if (chunk2.collection()->empty()) { + Node chunk1Wrapper = Node::createCollection(); + chunk1Wrapper.collection()->push_back(chunk1); + DEBUG_PRINTLN(CHUNKS, "RETURNING TWO EMPTY") + return chunk1Wrapper; + } + + Node perms = Node::createCollection(); + + Node firstPermutation = Node::createCollection(); + firstPermutation.collection()->insert(firstPermutation.collection()->end(), chunk1.collection()->begin(), chunk1.collection()->end()); + firstPermutation.collection()->insert(firstPermutation.collection()->end(), chunk2.collection()->begin(), chunk2.collection()->end()); + perms.collection()->push_back(firstPermutation); + + Node secondPermutation = Node::createCollection(); + secondPermutation.collection()->insert(secondPermutation.collection()->end(), chunk2.collection()->begin(), chunk2.collection()->end()); + secondPermutation.collection()->insert(secondPermutation.collection()->end(), chunk1.collection()->begin(), chunk1.collection()->end()); + perms.collection()->push_back(secondPermutation); + + DEBUG_PRINTLN(CHUNKS, "RETURNING PERM") + + return perms; + } + + + static Node groupSelectors(Node& seq) { + Node newSeq = Node::createCollection(); + + Node tail = Node::createCollection(); + tail.plus(seq); + + while (!tail.collection()->empty()) { + Node head = Node::createCollection(); + + do { + head.collection()->push_back(tail.collection()->front()); + tail.collection()->pop_front(); + } while (!tail.collection()->empty() && (head.collection()->back().isCombinator() || tail.collection()->front().isCombinator())); + + newSeq.collection()->push_back(head); + } + + return newSeq; + } + + + static void getAndRemoveInitialOps(Node& seq, Node& ops) { + NodeDeque& seqCollection = *(seq.collection()); + NodeDeque& opsCollection = *(ops.collection()); + + while (seqCollection.size() > 0 && seqCollection.front().isCombinator()) { + opsCollection.push_back(seqCollection.front()); + seqCollection.pop_front(); + } + } + + + static void getAndRemoveFinalOps(Node& seq, Node& ops) { + NodeDeque& seqCollection = *(seq.collection()); + NodeDeque& opsCollection = *(ops.collection()); + + while (seqCollection.size() > 0 && seqCollection.back().isCombinator()) { + opsCollection.push_back(seqCollection.back()); // Purposefully reversed to match ruby code + seqCollection.pop_back(); + } + } + + + /* + def merge_initial_ops(seq1, seq2) + ops1, ops2 = [], [] + ops1 << seq1.shift while seq1.first.is_a?(String) + ops2 << seq2.shift while seq2.first.is_a?(String) + + newline = false + newline ||= !!ops1.shift if ops1.first == "\n" + newline ||= !!ops2.shift if ops2.first == "\n" + + # If neither sequence is a 
subsequence of the other, they cannot be + # merged successfully + lcs = Sass::Util.lcs(ops1, ops2) + return unless lcs == ops1 || lcs == ops2 + return (newline ? ["\n"] : []) + (ops1.size > ops2.size ? ops1 : ops2) + end + */ + static Node mergeInitialOps(Node& seq1, Node& seq2) { + Node ops1 = Node::createCollection(); + Node ops2 = Node::createCollection(); + + getAndRemoveInitialOps(seq1, ops1); + getAndRemoveInitialOps(seq2, ops2); + + // TODO: Do we have this information available to us? + // newline = false + // newline ||= !!ops1.shift if ops1.first == "\n" + // newline ||= !!ops2.shift if ops2.first == "\n" + + // If neither sequence is a subsequence of the other, they cannot be merged successfully + DefaultLcsComparator lcsDefaultComparator; + Node opsLcs = lcs(ops1, ops2, lcsDefaultComparator); + + if (!(opsLcs == ops1 || opsLcs == ops2)) { + return Node::createNil(); + } + + // TODO: more newline logic + // return (newline ? ["\n"] : []) + (ops1.size > ops2.size ? ops1 : ops2) + + return (ops1.collection()->size() > ops2.collection()->size() ? ops1 : ops2); + } + + + /* + def merge_final_ops(seq1, seq2, res = []) + + + # This code looks complicated, but it's actually just a bunch of special + # cases for interactions between different combinators. + op1, op2 = ops1.first, ops2.first + if op1 && op2 + sel1 = seq1.pop + sel2 = seq2.pop + if op1 == '~' && op2 == '~' + if sel1.superselector?(sel2) + res.unshift sel2, '~' + elsif sel2.superselector?(sel1) + res.unshift sel1, '~' + else + merged = sel1.unify(sel2.members, sel2.subject?) + res.unshift [ + [sel1, '~', sel2, '~'], + [sel2, '~', sel1, '~'], + ([merged, '~'] if merged) + ].compact + end + elsif (op1 == '~' && op2 == '+') || (op1 == '+' && op2 == '~') + if op1 == '~' + tilde_sel, plus_sel = sel1, sel2 + else + tilde_sel, plus_sel = sel2, sel1 + end + + if tilde_sel.superselector?(plus_sel) + res.unshift plus_sel, '+' + else + merged = plus_sel.unify(tilde_sel.members, tilde_sel.subject?) + res.unshift [ + [tilde_sel, '~', plus_sel, '+'], + ([merged, '+'] if merged) + ].compact + end + elsif op1 == '>' && %w[~ +].include?(op2) + res.unshift sel2, op2 + seq1.push sel1, op1 + elsif op2 == '>' && %w[~ +].include?(op1) + res.unshift sel1, op1 + seq2.push sel2, op2 + elsif op1 == op2 + return unless merged = sel1.unify(sel2.members, sel2.subject?) + res.unshift merged, op1 + else + # Unknown selector combinators can't be unified + return + end + return merge_final_ops(seq1, seq2, res) + elsif op1 + seq2.pop if op1 == '>' && seq2.last && seq2.last.superselector?(seq1.last) + res.unshift seq1.pop, op1 + return merge_final_ops(seq1, seq2, res) + else # op2 + seq1.pop if op2 == '>' && seq1.last && seq1.last.superselector?(seq2.last) + res.unshift seq2.pop, op2 + return merge_final_ops(seq1, seq2, res) + end + end + */ + static Node mergeFinalOps(Node& seq1, Node& seq2, Node& res) { + + Node ops1 = Node::createCollection(); + Node ops2 = Node::createCollection(); + + getAndRemoveFinalOps(seq1, ops1); + getAndRemoveFinalOps(seq2, ops2); + + // TODO: do we have newlines to remove? + // ops1.reject! {|o| o == "\n"} + // ops2.reject! {|o| o == "\n"} + + if (ops1.collection()->empty() && ops2.collection()->empty()) { + return res; + } + + if (ops1.collection()->size() > 1 || ops2.collection()->size() > 1) { + DefaultLcsComparator lcsDefaultComparator; + Node opsLcs = lcs(ops1, ops2, lcsDefaultComparator); + + // If there are multiple operators, something hacky's going on. 
If one is a supersequence of the other, use that, otherwise give up. + + if (!(opsLcs == ops1 || opsLcs == ops2)) { + return Node::createNil(); + } + + if (ops1.collection()->size() > ops2.collection()->size()) { + res.collection()->insert(res.collection()->begin(), ops1.collection()->rbegin(), ops1.collection()->rend()); + } else { + res.collection()->insert(res.collection()->begin(), ops2.collection()->rbegin(), ops2.collection()->rend()); + } + + return res; + } + + if (!ops1.collection()->empty() && !ops2.collection()->empty()) { + + Node op1 = ops1.collection()->front(); + Node op2 = ops2.collection()->front(); + + Node sel1 = seq1.collection()->back(); + seq1.collection()->pop_back(); + + Node sel2 = seq2.collection()->back(); + seq2.collection()->pop_back(); + + if (op1.combinator() == Complex_Selector::PRECEDES && op2.combinator() == Complex_Selector::PRECEDES) { + + if (sel1.selector()->is_superselector_of(sel2.selector())) { + + res.collection()->push_front(op1 /*PRECEDES - could have been op2 as well*/); + res.collection()->push_front(sel2); + + } else if (sel2.selector()->is_superselector_of(sel1.selector())) { + + res.collection()->push_front(op1 /*PRECEDES - could have been op2 as well*/); + res.collection()->push_front(sel1); + + } else { + + DEBUG_PRINTLN(ALL, "sel1: " << sel1) + DEBUG_PRINTLN(ALL, "sel2: " << sel2) + + Complex_Selector_Obj pMergedWrapper = SASS_MEMORY_CLONE(sel1.selector()); // Clone the Complex_Selector to get back to something we can transform to a node once we replace the head with the unification result + // TODO: does subject matter? Ruby: return unless merged = sel1.unify(sel2.members, sel2.subject?) + Compound_Selector_Ptr pMerged = sel1.selector()->head()->unify_with(sel2.selector()->head()); + pMergedWrapper->head(pMerged); + + DEBUG_EXEC(ALL, printCompoundSelector(pMerged, "MERGED: ")) + + Node newRes = Node::createCollection(); + + Node firstPerm = Node::createCollection(); + firstPerm.collection()->push_back(sel1); + firstPerm.collection()->push_back(Node::createCombinator(Complex_Selector::PRECEDES)); + firstPerm.collection()->push_back(sel2); + firstPerm.collection()->push_back(Node::createCombinator(Complex_Selector::PRECEDES)); + newRes.collection()->push_back(firstPerm); + + Node secondPerm = Node::createCollection(); + secondPerm.collection()->push_back(sel2); + secondPerm.collection()->push_back(Node::createCombinator(Complex_Selector::PRECEDES)); + secondPerm.collection()->push_back(sel1); + secondPerm.collection()->push_back(Node::createCombinator(Complex_Selector::PRECEDES)); + newRes.collection()->push_back(secondPerm); + + if (pMerged) { + Node mergedPerm = Node::createCollection(); + mergedPerm.collection()->push_back(Node::createSelector(pMergedWrapper)); + mergedPerm.collection()->push_back(Node::createCombinator(Complex_Selector::PRECEDES)); + newRes.collection()->push_back(mergedPerm); + } + + res.collection()->push_front(newRes); + + DEBUG_PRINTLN(ALL, "RESULT: " << res) + + } + + } else if (((op1.combinator() == Complex_Selector::PRECEDES && op2.combinator() == Complex_Selector::ADJACENT_TO)) || ((op1.combinator() == Complex_Selector::ADJACENT_TO && op2.combinator() == Complex_Selector::PRECEDES))) { + + Node tildeSel = sel1; + Node plusSel = sel2; + Node plusOp = op2; + if (op1.combinator() != Complex_Selector::PRECEDES) { + tildeSel = sel2; + plusSel = sel1; + plusOp = op1; + } + + if (tildeSel.selector()->is_superselector_of(plusSel.selector())) { + + res.collection()->push_front(plusOp); + 
res.collection()->push_front(plusSel); + + } else { + + DEBUG_PRINTLN(ALL, "PLUS SEL: " << plusSel) + DEBUG_PRINTLN(ALL, "TILDE SEL: " << tildeSel) + + Complex_Selector_Obj pMergedWrapper = SASS_MEMORY_CLONE(plusSel.selector()); // Clone the Complex_Selector to get back to something we can transform to a node once we replace the head with the unification result + // TODO: does subject matter? Ruby: merged = plus_sel.unify(tilde_sel.members, tilde_sel.subject?) + Compound_Selector_Ptr pMerged = plusSel.selector()->head()->unify_with(tildeSel.selector()->head()); + pMergedWrapper->head(pMerged); + + DEBUG_EXEC(ALL, printCompoundSelector(pMerged, "MERGED: ")) + + Node newRes = Node::createCollection(); + + Node firstPerm = Node::createCollection(); + firstPerm.collection()->push_back(tildeSel); + firstPerm.collection()->push_back(Node::createCombinator(Complex_Selector::PRECEDES)); + firstPerm.collection()->push_back(plusSel); + firstPerm.collection()->push_back(Node::createCombinator(Complex_Selector::ADJACENT_TO)); + newRes.collection()->push_back(firstPerm); + + if (pMerged) { + Node mergedPerm = Node::createCollection(); + mergedPerm.collection()->push_back(Node::createSelector(pMergedWrapper)); + mergedPerm.collection()->push_back(Node::createCombinator(Complex_Selector::ADJACENT_TO)); + newRes.collection()->push_back(mergedPerm); + } + + res.collection()->push_front(newRes); + + DEBUG_PRINTLN(ALL, "RESULT: " << res) + + } + } else if (op1.combinator() == Complex_Selector::PARENT_OF && (op2.combinator() == Complex_Selector::PRECEDES || op2.combinator() == Complex_Selector::ADJACENT_TO)) { + + res.collection()->push_front(op2); + res.collection()->push_front(sel2); + + seq1.collection()->push_back(sel1); + seq1.collection()->push_back(op1); + + } else if (op2.combinator() == Complex_Selector::PARENT_OF && (op1.combinator() == Complex_Selector::PRECEDES || op1.combinator() == Complex_Selector::ADJACENT_TO)) { + + res.collection()->push_front(op1); + res.collection()->push_front(sel1); + + seq2.collection()->push_back(sel2); + seq2.collection()->push_back(op2); + + } else if (op1.combinator() == op2.combinator()) { + + DEBUG_PRINTLN(ALL, "sel1: " << sel1) + DEBUG_PRINTLN(ALL, "sel2: " << sel2) + + Complex_Selector_Obj pMergedWrapper = SASS_MEMORY_CLONE(sel1.selector()); // Clone the Complex_Selector to get back to something we can transform to a node once we replace the head with the unification result + // TODO: does subject matter? Ruby: return unless merged = sel1.unify(sel2.members, sel2.subject?) 
+ Compound_Selector_Ptr pMerged = sel1.selector()->head()->unify_with(sel2.selector()->head()); + pMergedWrapper->head(pMerged); + + DEBUG_EXEC(ALL, printCompoundSelector(pMerged, "MERGED: ")) + + if (!pMerged) { + return Node::createNil(); + } + + res.collection()->push_front(op1); + res.collection()->push_front(Node::createSelector(pMergedWrapper)); + + DEBUG_PRINTLN(ALL, "RESULT: " << res) + + } else { + return Node::createNil(); + } + + return mergeFinalOps(seq1, seq2, res); + + } else if (!ops1.collection()->empty()) { + + Node op1 = ops1.collection()->front(); + + if (op1.combinator() == Complex_Selector::PARENT_OF && !seq2.collection()->empty() && seq2.collection()->back().selector()->is_superselector_of(seq1.collection()->back().selector())) { + seq2.collection()->pop_back(); + } + + // TODO: consider unshift(NodeCollection, Node) + res.collection()->push_front(op1); + res.collection()->push_front(seq1.collection()->back()); + seq1.collection()->pop_back(); + + return mergeFinalOps(seq1, seq2, res); + + } else { // !ops2.collection()->empty() + + Node op2 = ops2.collection()->front(); + + if (op2.combinator() == Complex_Selector::PARENT_OF && !seq1.collection()->empty() && seq1.collection()->back().selector()->is_superselector_of(seq2.collection()->back().selector())) { + seq1.collection()->pop_back(); + } + + res.collection()->push_front(op2); + res.collection()->push_front(seq2.collection()->back()); + seq2.collection()->pop_back(); + + return mergeFinalOps(seq1, seq2, res); + + } + + } + + + /* + This is the equivalent of ruby's Sequence.subweave. + + Here is the original subweave code for reference during porting. + + def subweave(seq1, seq2) + return [seq2] if seq1.empty? + return [seq1] if seq2.empty? + + seq1, seq2 = seq1.dup, seq2.dup + return unless init = merge_initial_ops(seq1, seq2) + return unless fin = merge_final_ops(seq1, seq2) + seq1 = group_selectors(seq1) + seq2 = group_selectors(seq2) + lcs = Sass::Util.lcs(seq2, seq1) do |s1, s2| + next s1 if s1 == s2 + next unless s1.first.is_a?(SimpleSequence) && s2.first.is_a?(SimpleSequence) + next s2 if parent_superselector?(s1, s2) + next s1 if parent_superselector?(s2, s1) + end + + diff = [[init]] + until lcs.empty? + diff << chunks(seq1, seq2) {|s| parent_superselector?(s.first, lcs.first)} << [lcs.shift] + seq1.shift + seq2.shift + end + diff << chunks(seq1, seq2) {|s| s.empty?} + diff += fin.map {|sel| sel.is_a?(Array) ? sel : [sel]} + diff.reject! 
{|c| c.empty?} + + result = Sass::Util.paths(diff).map {|p| p.flatten}.reject {|p| path_has_two_subjects?(p)} + + result + end + */ + Node subweave(Node& one, Node& two) { + // Check for the simple cases + if (one.collection()->size() == 0) { + Node out = Node::createCollection(); + out.collection()->push_back(two); + return out; + } + if (two.collection()->size() == 0) { + Node out = Node::createCollection(); + out.collection()->push_back(one); + return out; + } + + Node seq1 = Node::createCollection(); + seq1.plus(one); + Node seq2 = Node::createCollection(); + seq2.plus(two); + + DEBUG_PRINTLN(SUBWEAVE, "SUBWEAVE ONE: " << seq1) + DEBUG_PRINTLN(SUBWEAVE, "SUBWEAVE TWO: " << seq2) + + Node init = mergeInitialOps(seq1, seq2); + if (init.isNil()) { + return Node::createNil(); + } + + DEBUG_PRINTLN(SUBWEAVE, "INIT: " << init) + + Node res = Node::createCollection(); + Node fin = mergeFinalOps(seq1, seq2, res); + if (fin.isNil()) { + return Node::createNil(); + } + + DEBUG_PRINTLN(SUBWEAVE, "FIN: " << fin) + + + // Moving this line up since fin isn't modified between now and when it happened before + // fin.map {|sel| sel.is_a?(Array) ? sel : [sel]} + + for (NodeDeque::iterator finIter = fin.collection()->begin(), finEndIter = fin.collection()->end(); + finIter != finEndIter; ++finIter) { + + Node& childNode = *finIter; + + if (!childNode.isCollection()) { + Node wrapper = Node::createCollection(); + wrapper.collection()->push_back(childNode); + childNode = wrapper; + } + + } + + DEBUG_PRINTLN(SUBWEAVE, "FIN MAPPED: " << fin) + + + + Node groupSeq1 = groupSelectors(seq1); + DEBUG_PRINTLN(SUBWEAVE, "SEQ1: " << groupSeq1) + + Node groupSeq2 = groupSelectors(seq2); + DEBUG_PRINTLN(SUBWEAVE, "SEQ2: " << groupSeq2) + + + ComplexSelectorDeque groupSeq1Converted; + nodeToComplexSelectorDeque(groupSeq1, groupSeq1Converted); + + ComplexSelectorDeque groupSeq2Converted; + nodeToComplexSelectorDeque(groupSeq2, groupSeq2Converted); + + ComplexSelectorDeque out; + LcsCollectionComparator collectionComparator; + lcs(groupSeq2Converted, groupSeq1Converted, collectionComparator, out); + Node seqLcs = complexSelectorDequeToNode(out); + + DEBUG_PRINTLN(SUBWEAVE, "SEQLCS: " << seqLcs) + + + Node initWrapper = Node::createCollection(); + initWrapper.collection()->push_back(init); + Node diff = Node::createCollection(); + diff.collection()->push_back(initWrapper); + + DEBUG_PRINTLN(SUBWEAVE, "DIFF INIT: " << diff) + + + while (!seqLcs.collection()->empty()) { + ParentSuperselectorChunker superselectorChunker(seqLcs); + Node chunksResult = chunks(groupSeq1, groupSeq2, superselectorChunker); + diff.collection()->push_back(chunksResult); + + Node lcsWrapper = Node::createCollection(); + lcsWrapper.collection()->push_back(seqLcs.collection()->front()); + seqLcs.collection()->pop_front(); + diff.collection()->push_back(lcsWrapper); + + if (groupSeq1.collection()->size()) groupSeq1.collection()->pop_front(); + if (groupSeq2.collection()->size()) groupSeq2.collection()->pop_front(); + } + + DEBUG_PRINTLN(SUBWEAVE, "DIFF POST LCS: " << diff) + + + DEBUG_PRINTLN(SUBWEAVE, "CHUNKS: ONE=" << groupSeq1 << " TWO=" << groupSeq2) + + + SubweaveEmptyChunker emptyChunker; + Node chunksResult = chunks(groupSeq1, groupSeq2, emptyChunker); + diff.collection()->push_back(chunksResult); + + + DEBUG_PRINTLN(SUBWEAVE, "DIFF POST CHUNKS: " << diff) + + + diff.collection()->insert(diff.collection()->end(), fin.collection()->begin(), fin.collection()->end()); + + DEBUG_PRINTLN(SUBWEAVE, "DIFF POST FIN MAPPED: " << diff) + + // JMA - 
filter out the empty nodes (use a new collection, since iterator erase() invalidates the old collection) + Node diffFiltered = Node::createCollection(); + for (NodeDeque::iterator diffIter = diff.collection()->begin(), diffEndIter = diff.collection()->end(); + diffIter != diffEndIter; ++diffIter) { + Node& node = *diffIter; + if (node.collection() && !node.collection()->empty()) { + diffFiltered.collection()->push_back(node); + } + } + diff = diffFiltered; + + DEBUG_PRINTLN(SUBWEAVE, "DIFF POST REJECT: " << diff) + + + Node pathsResult = paths(diff); + + DEBUG_PRINTLN(SUBWEAVE, "PATHS: " << pathsResult) + + + // We're flattening in place + for (NodeDeque::iterator pathsIter = pathsResult.collection()->begin(), pathsEndIter = pathsResult.collection()->end(); + pathsIter != pathsEndIter; ++pathsIter) { + + Node& child = *pathsIter; + child = flatten(child); + } + + DEBUG_PRINTLN(SUBWEAVE, "FLATTENED: " << pathsResult) + + + /* + TODO: implement + rejected = mapped.reject {|p| path_has_two_subjects?(p)} + $stderr.puts "REJECTED: #{rejected}" + */ + + + return pathsResult; + + } + /* + // disabled to avoid clang warning [-Wunused-function] + static Node subweaveNaive(const Node& one, const Node& two) { + Node out = Node::createCollection(); + + // Check for the simple cases + if (one.isNil()) { + out.collection()->push_back(two.klone()); + } else if (two.isNil()) { + out.collection()->push_back(one.klone()); + } else { + // Do the naive implementation. pOne = A B and pTwo = C D ...yields... A B C D and C D A B + // See https://gist.github.com/nex3/7609394 for details. + + Node firstPerm = one.klone(); + Node twoCloned = two.klone(); + firstPerm.plus(twoCloned); + out.collection()->push_back(firstPerm); + + Node secondPerm = two.klone(); + Node oneCloned = one.klone(); + secondPerm.plus(oneCloned ); + out.collection()->push_back(secondPerm); + } + + return out; + } + */ + + + /* + This is the equivalent of ruby's Sequence.weave. + + The following is the modified version of the ruby code that was more portable to C++. You + should be able to drop it into ruby 3.2.19 and get the same results from ruby sass. + + def weave(path) + # This function works by moving through the selector path left-to-right, + # building all possible prefixes simultaneously. These prefixes are + # `befores`, while the remaining parenthesized suffixes is `afters`. + befores = [[]] + afters = path.dup + + until afters.empty? + current = afters.shift.dup + last_current = [current.pop] + + tempResult = [] + + for before in befores do + sub = subweave(before, current) + if sub.nil? + next + end + + for seqs in sub do + tempResult.push(seqs + last_current) + end + end + + befores = tempResult + + end + + return befores + end + */ + /* + def weave(path) + befores = [[]] + afters = path.dup + + until afters.empty? + current = afters.shift.dup + + last_current = [current.pop] + + + tempResult = [] + + for before in befores do + sub = subweave(before, current) + + if sub.nil? 
+ next [] + end + + + for seqs in sub do + toPush = seqs + last_current + + tempResult.push(seqs + last_current) + end + + end + + befores = tempResult + + end + + return befores + end + */ + Node Extend::weave(Node& path) { + + DEBUG_PRINTLN(WEAVE, "WEAVE: " << path) + + Node befores = Node::createCollection(); + befores.collection()->push_back(Node::createCollection()); + + Node afters = Node::createCollection(); + afters.plus(path); + + while (!afters.collection()->empty()) { + Node current = afters.collection()->front().klone(); + afters.collection()->pop_front(); + DEBUG_PRINTLN(WEAVE, "CURRENT: " << current) + if (current.collection()->size() == 0) continue; + + Node last_current = Node::createCollection(); + last_current.collection()->push_back(current.collection()->back()); + current.collection()->pop_back(); + DEBUG_PRINTLN(WEAVE, "CURRENT POST POP: " << current) + DEBUG_PRINTLN(WEAVE, "LAST CURRENT: " << last_current) + + Node tempResult = Node::createCollection(); + + for (NodeDeque::iterator beforesIter = befores.collection()->begin(), beforesEndIter = befores.collection()->end(); beforesIter != beforesEndIter; beforesIter++) { + Node& before = *beforesIter; + + Node sub = subweave(before, current); + + DEBUG_PRINTLN(WEAVE, "SUB: " << sub) + + if (sub.isNil()) { + return Node::createCollection(); + } + + for (NodeDeque::iterator subIter = sub.collection()->begin(), subEndIter = sub.collection()->end(); subIter != subEndIter; subIter++) { + Node& seqs = *subIter; + + Node toPush = Node::createCollection(); + toPush.plus(seqs); + toPush.plus(last_current); + + // move line feed from inner to outer selector (very hacky indeed) + if (last_current.collection() && last_current.collection()->front().selector()) { + toPush.got_line_feed = last_current.collection()->front().got_line_feed; + last_current.collection()->front().selector()->has_line_feed(false); + last_current.collection()->front().got_line_feed = false; + } + + tempResult.collection()->push_back(toPush); + + } + } + + befores = tempResult; + + } + + return befores; + } + + + + /* + This is the equivalent of ruby's SimpleSequence.do_extend. + + // TODO: I think I have some modified ruby code to put here. Check. + */ + /* + ISSUES: + - Previous TODO: Do we need to group the results by extender? + - What does subject do in?: next unless unified = seq.members.last.unify(self_without_sel, subject?) + - IMPROVEMENT: The search for uniqueness at the end is not ideal since it's has to loop over everything... + - IMPROVEMENT: Check if the final search for uniqueness is doing anything that extendComplexSelector isn't already doing... + */ + template + class GroupByToAFunctor { + public: + KeyType operator()(SubSetMapPair& extPair) const { + Complex_Selector_Obj pSelector = extPair.first; + return pSelector; + } + }; + Node Extend::extendCompoundSelector(Compound_Selector_Ptr pSelector, CompoundSelectorSet& seen, bool isReplace) { + + /* this turned out to be too much overhead + probably due to holding a "Node" object + // check if we already extended this selector + // we can do this since subset_map is "static" + auto memoized = memoizeCompound.find(pSelector); + if (memoized != memoizeCompound.end()) { + return memoized->second.klone(); + } + */ + + DEBUG_EXEC(EXTEND_COMPOUND, printCompoundSelector(pSelector, "EXTEND COMPOUND: ")) + // TODO: Ruby has another loop here to skip certain members? 
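+    // Illustrative aside (not part of the original libsass source): for a
+    // compound selector such as ".a.b", the subset_map lookup below returns
+    // every registered @extend whose target simple selectors form a subset of
+    // {.a, .b}. If ".c { @extend .a; }" had been registered, unifying the
+    // extender ".c" with the remainder ".b" yields an additional complex
+    // selector roughly equivalent to ".b.c".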
+ + // let RESULTS be an empty list of complex selectors + Node results = Node::createCollection(); + // extendedSelectors.got_line_feed = true; + + SubSetMapPairs entries = subset_map.get_v(pSelector); + + GroupByToAFunctor extPairKeyFunctor; + SubSetMapResults arr; + group_by_to_a(entries, extPairKeyFunctor, arr); + + SubSetMapLookups holder; + + // for each (EXTENDER, TARGET) in MAP.get(COMPOUND): + for (SubSetMapResult& groupedPair : arr) { + + Complex_Selector_Obj seq = groupedPair.first; + SubSetMapPairs& group = groupedPair.second; + + DEBUG_EXEC(EXTEND_COMPOUND, printComplexSelector(seq, "SEQ: ")) + + Compound_Selector_Obj pSels = SASS_MEMORY_NEW(Compound_Selector, pSelector->pstate()); + for (SubSetMapPair& pair : group) { + pair.second->extended(true); + pSels->concat(pair.second); + } + + DEBUG_EXEC(EXTEND_COMPOUND, printCompoundSelector(pSels, "SELS: ")) + + // The selector up to where the @extend is (ie, the thing to merge) + Complex_Selector_Ptr pExtComplexSelector = seq; + + // TODO: This can return a Compound_Selector with no elements. Should that just be returning NULL? + // RUBY: self_without_sel = Sass::Util.array_minus(members, sels) + Compound_Selector_Obj pSelectorWithoutExtendSelectors = pSelector->minus(pSels); + + DEBUG_EXEC(EXTEND_COMPOUND, printCompoundSelector(pSelector, "MEMBERS: ")) + DEBUG_EXEC(EXTEND_COMPOUND, printCompoundSelector(pSelectorWithoutExtendSelectors, "SELF_WO_SEL: ")) + + Compound_Selector_Obj pInnermostCompoundSelector = pExtComplexSelector->last()->head(); + + if (!pInnermostCompoundSelector) { + pInnermostCompoundSelector = SASS_MEMORY_NEW(Compound_Selector, pSelector->pstate()); + } + Compound_Selector_Obj pUnifiedSelector = pInnermostCompoundSelector->unify_with(pSelectorWithoutExtendSelectors); + + DEBUG_EXEC(EXTEND_COMPOUND, printCompoundSelector(pInnermostCompoundSelector, "LHS: ")) + DEBUG_EXEC(EXTEND_COMPOUND, printCompoundSelector(pSelectorWithoutExtendSelectors, "RHS: ")) + DEBUG_EXEC(EXTEND_COMPOUND, printCompoundSelector(pUnifiedSelector, "UNIFIED: ")) + + // RUBY: next unless unified + if (!pUnifiedSelector || pUnifiedSelector->length() == 0) { + continue; + } + + // TODO: implement the parent directive match (if necessary based on test failures) + // next if group.map {|e, _| check_directives_match!(e, parent_directives)}.none? + + // TODO: This seems a little fishy to me. See if it causes any problems. From the ruby, we should be able to just + // get rid of the last Compound_Selector and replace it with this one. I think the reason this code is more + // complex is that Complex_Selector contains a combinator, but in ruby combinators have already been filtered + // out and aren't operated on. + Complex_Selector_Obj pNewSelector = SASS_MEMORY_CLONE(pExtComplexSelector); // ->first(); + + Complex_Selector_Obj pNewInnerMost = SASS_MEMORY_NEW(Complex_Selector, pSelector->pstate(), Complex_Selector::ANCESTOR_OF, pUnifiedSelector, NULL); + + Complex_Selector::Combinator combinator = pNewSelector->clear_innermost(); + pNewSelector->set_innermost(pNewInnerMost, combinator); + +#ifdef DEBUG + ComplexSelectorSet debugSet; + debugSet = pNewSelector->sources(); + if (debugSet.size() > 0) { + throw std::runtime_error("The new selector should start with no sources. Something needs to be cloned to fix this."); + } + debugSet = pExtComplexSelector->sources(); + if (debugSet.size() > 0) { + throw std::runtime_error("The extension selector from our subset map should not have sources. These will bleed to the new selector. 
Something needs to be cloned to fix this."); + } +#endif + + + // if (pSelector && pSelector->has_line_feed()) pNewInnerMost->has_line_feed(true); + // Set the sources on our new Complex_Selector to the sources of this simple sequence plus the thing we're extending. + DEBUG_PRINTLN(EXTEND_COMPOUND, "SOURCES SETTING ON NEW SEQ: " << complexSelectorToNode(pNewSelector)) + + DEBUG_EXEC(EXTEND_COMPOUND, ComplexSelectorSet oldSet = pNewSelector->sources(); printSourcesSet(oldSet, "SOURCES NEW SEQ BEGIN: ")) + + // I actually want to create a copy here (performance!) + ComplexSelectorSet newSourcesSet = pSelector->sources(); // XXX + DEBUG_EXEC(EXTEND_COMPOUND, printSourcesSet(newSourcesSet, "SOURCES THIS EXTEND: ")) + + newSourcesSet.insert(pExtComplexSelector); + DEBUG_EXEC(EXTEND_COMPOUND, printSourcesSet(newSourcesSet, "SOURCES WITH NEW SOURCE: ")) + + // RUBY: new_seq.add_sources!(sources + [seq]) + pNewSelector->addSources(newSourcesSet); + + DEBUG_EXEC(EXTEND_COMPOUND, ComplexSelectorSet newSet = pNewSelector->sources(); printSourcesSet(newSet, "SOURCES ON NEW SELECTOR AFTER ADD: ")) + DEBUG_EXEC(EXTEND_COMPOUND, printSourcesSet(pSelector->sources(), "SOURCES THIS EXTEND WHICH SHOULD BE SAME STILL: ")) + + + if (pSels->has_line_feed()) pNewSelector->has_line_feed(true); + + holder.push_back(std::make_pair(pSels, pNewSelector)); + } + + + for (SubSetMapLookup& pair : holder) { + + Compound_Selector_Obj pSels = pair.first; + Complex_Selector_Obj pNewSelector = pair.second; + + + // RUBY??: next [] if seen.include?(sels) + if (seen.find(pSels) != seen.end()) { + continue; + } + + + CompoundSelectorSet recurseSeen(seen); + recurseSeen.insert(pSels); + + + DEBUG_PRINTLN(EXTEND_COMPOUND, "RECURSING DO EXTEND: " << complexSelectorToNode(pNewSelector)) + Node recurseExtendedSelectors = extendComplexSelector(pNewSelector, recurseSeen, isReplace, false); // !:isOriginal + + DEBUG_PRINTLN(EXTEND_COMPOUND, "RECURSING DO EXTEND RETURN: " << recurseExtendedSelectors) + + for (NodeDeque::iterator iterator = recurseExtendedSelectors.collection()->begin(), endIterator = recurseExtendedSelectors.collection()->end(); + iterator != endIterator; ++iterator) { + Node newSelector = *iterator; + +// DEBUG_PRINTLN(EXTEND_COMPOUND, "EXTENDED AT THIS POINT: " << results) +// DEBUG_PRINTLN(EXTEND_COMPOUND, "SELECTOR EXISTS ALREADY: " << newSelector << " " << results.contains(newSelector, false /*simpleSelectorOrderDependent*/)); + + if (!results.contains(newSelector)) { +// DEBUG_PRINTLN(EXTEND_COMPOUND, "ADDING NEW SELECTOR") + results.collection()->push_back(newSelector); + } + } + } + + DEBUG_EXEC(EXTEND_COMPOUND, printCompoundSelector(pSelector, "EXTEND COMPOUND END: ")) + + // this turned out to be too much overhead + // memory results in a map table - since extending is very expensive + // memoizeCompound.insert(std::pair(pSelector, results)); + + return results; + } + + + // check if selector has something to be extended by subset_map + bool Extend::complexSelectorHasExtension(Complex_Selector_Ptr selector, CompoundSelectorSet& seen) { + + bool hasExtension = false; + + Complex_Selector_Obj pIter = selector; + + while (!hasExtension && pIter) { + Compound_Selector_Obj pHead = pIter->head(); + + if (pHead) { + SubSetMapPairs entries = subset_map.get_v(pHead); + for (SubSetMapPair ext : entries) { + // check if both selectors have the same media block parent + // if (ext.first->media_block() == pComplexSelector->media_block()) continue; + if (ext.second->media_block() == 0) continue; + if (pHead->media_block() 
&& + ext.second->media_block()->media_queries() && + pHead->media_block()->media_queries() + ) { + std::string query_left(ext.second->media_block()->media_queries()->to_string()); + std::string query_right(pHead->media_block()->media_queries()->to_string()); + if (query_left == query_right) continue; + } + + // fail if one goes across media block boundaries + std::stringstream err; + std::string cwd(Sass::File::get_cwd()); + ParserState pstate(ext.second->pstate()); + std::string rel_path(Sass::File::abs2rel(pstate.path, cwd, cwd)); + err << "You may not @extend an outer selector from within @media.\n"; + err << "You may only @extend selectors within the same directive.\n"; + err << "From \"@extend " << ext.second->to_string() << "\""; + err << " on line " << pstate.line+1 << " of " << rel_path << "\n"; + error(err.str(), selector->pstate(), eval->exp.traces); + } + if (entries.size() > 0) hasExtension = true; + } + + pIter = pIter->tail(); + } + + return hasExtension; + } + + + /* + This is the equivalent of ruby's Sequence.do_extend. + + // TODO: I think I have some modified ruby code to put here. Check. + */ + /* + ISSUES: + - check to automatically include combinators doesn't transfer over to libsass' data model where + the combinator and compound selector are one unit + next [[sseq_or_op]] unless sseq_or_op.is_a?(SimpleSequence) + */ + Node Extend::extendComplexSelector(Complex_Selector_Ptr selector, CompoundSelectorSet& seen, bool isReplace, bool isOriginal) { + + // check if we already extended this selector + // we can do this since subset_map is "static" + auto memoized = memoizeComplex.find(selector); + if (memoized != memoizeComplex.end()) { + return memoized->second; + } + + // convert the input selector to extend node format + Node complexSelector = complexSelectorToNode(selector); + DEBUG_PRINTLN(EXTEND_COMPLEX, "EXTEND COMPLEX: " << complexSelector) + + // let CHOICES be an empty list of selector-lists + // create new collection to hold the results + Node choices = Node::createCollection(); + + // for each compound selector COMPOUND in COMPLEX: + for (Node& sseqOrOp : *complexSelector.collection()) { + + DEBUG_PRINTLN(EXTEND_COMPLEX, "LOOP: " << sseqOrOp) + + // If it's not a selector (meaning it's a combinator), just include it automatically + // RUBY: next [[sseq_or_op]] unless sseq_or_op.is_a?(SimpleSequence) + if (!sseqOrOp.isSelector()) { + // Wrap our Combinator in two collections to match ruby. This is essentially making a collection Node + // with one collection child. The collection child represents a Complex_Selector that is only a combinator. + Node outer = Node::createCollection(); + Node inner = Node::createCollection(); + outer.collection()->push_back(inner); + inner.collection()->push_back(sseqOrOp); + choices.collection()->push_back(outer); + continue; + } + + // verified now that node is a valid selector + Complex_Selector_Obj sseqSel = sseqOrOp.selector(); + Compound_Selector_Obj sseqHead = sseqSel->head(); + + // let EXTENDED be extend_compound(COMPOUND, SEEN) + // extend the compound selector against the given subset_map + // RUBY: extended = sseq_or_op.do_extend(extends, parent_directives, replace, seen) + Node extended = extendCompoundSelector(sseqHead, seen, isReplace); // slow(17%)! 
+ if (sseqOrOp.got_line_feed) extended.got_line_feed = true; + DEBUG_PRINTLN(EXTEND_COMPLEX, "EXTENDED: " << extended) + + // Prepend the Compound_Selector based on the choices logic; choices seems to be extend but with a ruby + // Array instead of a Sequence due to the member mapping: choices = extended.map {|seq| seq.members} + // RUBY: extended.first.add_sources!([self]) if original && !has_placeholder? + if (isOriginal && !selector->has_placeholder()) { + ComplexSelectorSet srcset; + srcset.insert(selector); + sseqSel->addSources(srcset); + // DEBUG_PRINTLN(EXTEND_COMPLEX, "ADD SOURCES: " << *pComplexSelector) + } + + bool isSuperselector = false; + // if no complex selector in EXTENDED is a superselector of COMPOUND: + for (Node& childNode : *extended.collection()) { + Complex_Selector_Obj pExtensionSelector = nodeToComplexSelector(childNode); + if (pExtensionSelector->is_superselector_of(sseqSel)) { + isSuperselector = true; + break; + } + } + + if (!isSuperselector) { + // add a complex selector composed only of COMPOUND to EXTENDED + if (sseqOrOp.got_line_feed) sseqSel->has_line_feed(sseqOrOp.got_line_feed); + extended.collection()->push_front(complexSelectorToNode(sseqSel)); + } + + DEBUG_PRINTLN(EXTEND_COMPLEX, "CHOICES UNSHIFTED: " << extended) + + // add EXTENDED to CHOICES + // Aggregate our current extensions + choices.collection()->push_back(extended); + } + + + DEBUG_PRINTLN(EXTEND_COMPLEX, "EXTENDED NOT EXPANDED: " << choices) + + + + // Ruby Equivalent: paths + Node paths = Sass::paths(choices); + + DEBUG_PRINTLN(EXTEND_COMPLEX, "PATHS: " << paths) + + // let WEAVES be an empty list of selector lists + Node weaves = Node::createCollection(); + + // for each list of complex selectors PATH in paths(CHOICES): + for (Node& path : *paths.collection()) { + // add weave(PATH) to WEAVES + Node weaved = weave(path); // slow(12%)! + weaved.got_line_feed = path.got_line_feed; + weaves.collection()->push_back(weaved); + } + + DEBUG_PRINTLN(EXTEND_COMPLEX, "WEAVES: " << weaves) + + // Ruby Equivalent: trim + Node trimmed(trim(weaves, isReplace)); // slow(19%)! + + DEBUG_PRINTLN(EXTEND_COMPLEX, "TRIMMED: " << trimmed) + + // Ruby Equivalent: flatten + Node flattened(flatten(trimmed, 1)); + + DEBUG_PRINTLN(EXTEND_COMPLEX, ">>>>> EXTENDED: " << extendedSelectors) + DEBUG_PRINTLN(EXTEND_COMPLEX, "EXTEND COMPLEX END: " << complexSelector) + + // memory results in a map table - since extending is very expensive + memoizeComplex.insert(std::pair(selector, flattened)); + + // return trim(WEAVES) + return flattened; + } + + + + /* + This is the equivalent of ruby's CommaSequence.do_extend. + */ + // We get a selector list with has something to extend and a subset_map with + // all extenders. Pick the ones that match our selectors in the list. + Selector_List_Ptr Extend::extendSelectorList(Selector_List_Obj pSelectorList, bool isReplace, bool& extendedSomething, CompoundSelectorSet& seen) { + + Selector_List_Obj pNewSelectors = SASS_MEMORY_NEW(Selector_List, pSelectorList->pstate(), pSelectorList->length()); + + // check if we already extended this selector + // we can do this since subset_map is "static" + auto memoized = memoizeList.find(pSelectorList); + if (memoized != memoizeList.end()) { + extendedSomething = true; + return memoized->second; + } + + extendedSomething = false; + // process each comlplex selector in the selector list. + // Find the ones that can be extended by given subset_map. 
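+    // Illustrative example (not part of the original libsass source) of the
+    // rewrite the loop below performs, stated in SCSS terms:
+    //   .error { color: red; }
+    //   .alert { @extend .error; }
+    // turns the selector list ".error" into ".error, .alert"; each complex
+    // selector in the list is checked and extended independently.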
+ for (size_t index = 0, length = pSelectorList->length(); index < length; index++) { + Complex_Selector_Obj pSelector = (*pSelectorList)[index]; + + // ruby sass seems to keep a list of things that have extensions and then only extend those. We don't currently do that. + // Since it's not that expensive to check if an extension exists in the subset map and since it can be relatively expensive to + // run through the extend code (which does a data model transformation), check if there is anything to extend before doing + // the extend. We might be able to optimize extendComplexSelector, but this approach keeps us closer to ruby sass (which helps + // when debugging). + if (!complexSelectorHasExtension(pSelector, seen)) { + pNewSelectors->append(pSelector); + continue; + } + + // complexSelectorHasExtension was true! + extendedSomething = true; + + // now do the actual extension of the complex selector + Node extendedSelectors = extendComplexSelector(pSelector, seen, isReplace, true); + + if (!pSelector->has_placeholder()) { + Node nSelector(complexSelectorToNode(pSelector)); + if (!extendedSelectors.contains(nSelector)) { + pNewSelectors->append(pSelector); + continue; + } + } + + bool doReplace = isReplace; + for (Node& childNode : *extendedSelectors.collection()) { + // When it is a replace, skip the first one, unless there is only one + if(doReplace && extendedSelectors.collection()->size() > 1 ) { + doReplace = false; + continue; + } + pNewSelectors->append(nodeToComplexSelector(childNode)); + } + } + + Remove_Placeholders remove_placeholders; + // it seems that we have to remove the place holders early here + // normally we do this as the very last step (compare to ruby sass) + pNewSelectors = remove_placeholders.remove_placeholders(pNewSelectors); + + // unwrap all wrapped selectors with inner lists + for (Complex_Selector_Obj cur : pNewSelectors->elements()) { + // process tails + while (cur) { + // process header + if (cur->head() && seen.find(cur->head()) == seen.end()) { + CompoundSelectorSet recseen(seen); + recseen.insert(cur->head()); + // create a copy since we add multiple items if stuff get unwrapped + Compound_Selector_Obj cpy_head = SASS_MEMORY_NEW(Compound_Selector, cur->pstate()); + for (Simple_Selector_Obj hs : *cur->head()) { + if (Wrapped_Selector_Obj ws = Cast(hs)) { + ws->selector(SASS_MEMORY_CLONE(ws->selector())); + if (Selector_List_Obj sl = Cast(ws->selector())) { + // special case for ruby ass + if (sl->empty()) { + // this seems inconsistent but it is how ruby sass seems to remove parentheses + cpy_head->append(SASS_MEMORY_NEW(Element_Selector, hs->pstate(), ws->name())); + } + // has wrapped not selectors + else if (ws->name() == ":not") { + // extend the inner list of wrapped selector + bool extended = false; + Selector_List_Obj ext_sl = extendSelectorList(sl, false, extended, recseen); + for (size_t i = 0; i < ext_sl->length(); i += 1) { + if (Complex_Selector_Obj ext_cs = ext_sl->at(i)) { + // create clones for wrapped selector and the inner list + Wrapped_Selector_Obj cpy_ws = SASS_MEMORY_COPY(ws); + Selector_List_Obj cpy_ws_sl = SASS_MEMORY_NEW(Selector_List, sl->pstate()); + // remove parent selectors from inner selector + Compound_Selector_Obj ext_head = NULL; + if (ext_cs->first()) ext_head = ext_cs->first()->head(); + if (ext_head && ext_head && ext_head->length() > 0) { + cpy_ws_sl->append(ext_cs->first()); + } + // assign list to clone + cpy_ws->selector(cpy_ws_sl); + // append the clone + cpy_head->append(cpy_ws); + } + } + if (eval && extended) 
{ + eval->exp.selector_stack.push_back(pNewSelectors); + cpy_head->perform(eval); + eval->exp.selector_stack.pop_back(); + } + } + // has wrapped selectors + else { + Wrapped_Selector_Obj cpy_ws = SASS_MEMORY_COPY(ws); + Selector_List_Obj ext_sl = extendSelectorList(sl, recseen); + cpy_ws->selector(ext_sl); + cpy_head->append(cpy_ws); + } + } else { + cpy_head->append(hs); + } + } else { + cpy_head->append(hs); + } + } + // replace header + cur->head(cpy_head); + } + // process tail + cur = cur->tail(); + } + } + + // memory results in a map table - since extending is very expensive + memoizeList.insert(std::pair(pSelectorList, pNewSelectors)); + + return pNewSelectors.detach(); + + } + + + bool shouldExtendBlock(Block_Obj b) { + + // If a block is empty, there's no reason to extend it since any rules placed on this block + // won't have any output. The main benefit of this is for structures like: + // + // .a { + // .b { + // x: y; + // } + // } + // + // We end up visiting two rulesets (one with the selector .a and the other with the selector .a .b). + // In this case, we don't want to try to pull rules onto .a since they won't get output anyway since + // there are no child statements. However .a .b should have extensions applied. + + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Obj stm = b->at(i); + + if (Cast(stm)) { + // Do nothing. This doesn't count as a statement that causes extension since we'll + // iterate over this rule set in a future visit and try to extend it. + } + else { + return true; + } + } + + return false; + + } + + + // Extend a ruleset by extending the selectors and updating them on the ruleset. The block's rules don't need to change. + // Every Ruleset in the whole tree is calling this function. We decide if there + // was is @extend that matches our selector. If we find one, we will go further + // and call the extend magic for our selector. The subset_map contains all blocks + // where @extend was found. Pick the ones that match our selector! + void Extend::extendObjectWithSelectorAndBlock(Ruleset_Ptr pObject) { + + DEBUG_PRINTLN(EXTEND_OBJECT, "FOUND SELECTOR: " << Cast(pObject->selector())->to_string()) + + // Ruby sass seems to filter nodes that don't have any content well before we get here. + // I'm not sure the repercussions of doing so, so for now, let's just not extend things + // that won't be output later. Profiling shows this may us 0.2% or so. 
+ if (!shouldExtendBlock(pObject->block())) { + DEBUG_PRINTLN(EXTEND_OBJECT, "RETURNING WITHOUT EXTEND ATTEMPT") + return; + } + + bool extendedSomething = false; + + CompoundSelectorSet seen; + Selector_List_Obj pNewSelectorList = extendSelectorList(pObject->selector(), false, extendedSomething, seen); + + if (extendedSomething && pNewSelectorList) { + DEBUG_PRINTLN(EXTEND_OBJECT, "EXTEND ORIGINAL SELECTORS: " << pObject->selector()->to_string()) + DEBUG_PRINTLN(EXTEND_OBJECT, "EXTEND SETTING NEW SELECTORS: " << pNewSelectorList->to_string()) + pNewSelectorList->remove_parent_selectors(); + pObject->selector(pNewSelectorList); + } else { + DEBUG_PRINTLN(EXTEND_OBJECT, "EXTEND DID NOT TRY TO EXTEND ANYTHING") + } + } + + Extend::Extend(Subset_Map& ssm) + : subset_map(ssm), eval(NULL) + { } + + void Extend::setEval(Eval& e) { + eval = &e; + } + + void Extend::operator()(Block_Ptr b) + { + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Obj stm = b->at(i); + stm->perform(this); + } + // do final check if everything was extended + // we set `extended` flag on extended selectors + if (b->is_root()) { + // debug_subset_map(subset_map); + for(auto const &it : subset_map.values()) { + Complex_Selector_Ptr sel = NULL; + Compound_Selector_Ptr ext = NULL; + if (it.first) sel = it.first->first(); + if (it.second) ext = it.second; + if (ext && (ext->extended() || ext->is_optional())) continue; + std::string str_sel(sel ? sel->to_string({ NESTED, 5 }) : "NULL"); + std::string str_ext(ext ? ext->to_string({ NESTED, 5 }) : "NULL"); + // debug_ast(sel, "sel: "); + // debug_ast(ext, "ext: "); + error("\"" + str_sel + "\" failed to @extend \"" + str_ext + "\".\n" + "The selector \"" + str_ext + "\" was not found.\n" + "Use \"@extend " + str_ext + " !optional\" if the" + " extend should be able to fail.", (ext ? 
ext->pstate() : NULL), eval->exp.traces); + } + } + + } + + void Extend::operator()(Ruleset_Ptr pRuleset) + { + extendObjectWithSelectorAndBlock( pRuleset ); + pRuleset->block()->perform(this); + } + + void Extend::operator()(Supports_Block_Ptr pFeatureBlock) + { + pFeatureBlock->block()->perform(this); + } + + void Extend::operator()(Media_Block_Ptr pMediaBlock) + { + pMediaBlock->block()->perform(this); + } + + void Extend::operator()(Directive_Ptr a) + { + // Selector_List_Ptr ls = Cast(a->selector()); + // selector_stack.push_back(ls); + if (a->block()) a->block()->perform(this); + // exp.selector_stack.pop_back(); + } +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/extend.hpp b/mybulma/node_modules/node-sass/src/libsass/src/extend.hpp new file mode 100644 index 0000000..03042f3 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/extend.hpp @@ -0,0 +1,86 @@ +#ifndef SASS_EXTEND_H +#define SASS_EXTEND_H + +#include +#include + +#include "ast.hpp" +#include "node.hpp" +#include "eval.hpp" +#include "operation.hpp" +#include "subset_map.hpp" +#include "ast_fwd_decl.hpp" + +namespace Sass { + + Node subweave(Node& one, Node& two); + + class Extend : public Operation_CRTP { + + Subset_Map& subset_map; + Eval* eval; + + void fallback_impl(AST_Node_Ptr n) { } + + private: + + std::unordered_map< + Selector_List_Obj, // key + Selector_List_Obj, // value + HashNodes, // hasher + CompareNodes // compare + > memoizeList; + + std::unordered_map< + Complex_Selector_Obj, // key + Node, // value + HashNodes, // hasher + CompareNodes // compare + > memoizeComplex; + + /* this turned out to be too much overhead + re-evaluate once we store an ast selector + std::unordered_map< + Compound_Selector_Obj, // key + Node, // value + HashNodes, // hasher + CompareNodes // compare + > memoizeCompound; + */ + + void extendObjectWithSelectorAndBlock(Ruleset_Ptr pObject); + Node extendComplexSelector(Complex_Selector_Ptr sel, CompoundSelectorSet& seen, bool isReplace, bool isOriginal); + Node extendCompoundSelector(Compound_Selector_Ptr sel, CompoundSelectorSet& seen, bool isReplace); + bool complexSelectorHasExtension(Complex_Selector_Ptr selector, CompoundSelectorSet& seen); + Node trim(Node& seqses, bool isReplace); + Node weave(Node& path); + + public: + void setEval(Eval& eval); + Selector_List_Ptr extendSelectorList(Selector_List_Obj pSelectorList, bool isReplace, bool& extendedSomething, CompoundSelectorSet& seen); + Selector_List_Ptr extendSelectorList(Selector_List_Obj pSelectorList, bool isReplace = false) { + bool extendedSomething = false; + CompoundSelectorSet seen; + return extendSelectorList(pSelectorList, isReplace, extendedSomething, seen); + } + Selector_List_Ptr extendSelectorList(Selector_List_Obj pSelectorList, CompoundSelectorSet& seen) { + bool isReplace = false; + bool extendedSomething = false; + return extendSelectorList(pSelectorList, isReplace, extendedSomething, seen); + } + Extend(Subset_Map&); + ~Extend() { } + + void operator()(Block_Ptr); + void operator()(Ruleset_Ptr); + void operator()(Supports_Block_Ptr); + void operator()(Media_Block_Ptr); + void operator()(Directive_Ptr); + + template + void fallback(U x) { return fallback_impl(x); } + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/file.cpp b/mybulma/node_modules/node-sass/src/libsass/src/file.cpp new file mode 100644 index 0000000..32d4a7c --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/file.cpp @@ -0,0 +1,485 @@ +#include "sass.hpp" +#ifdef 
_WIN32 +# ifdef __MINGW32__ +# ifndef off64_t +# define off64_t _off64_t /* Workaround for http://sourceforge.net/p/mingw/bugs/2024/ */ +# endif +# endif +# include +# define S_ISDIR(mode) (((mode) & S_IFMT) == S_IFDIR) +#else +# include +#endif +#include +#include +#include +#include +#include +#include +#include "file.hpp" +#include "context.hpp" +#include "prelexer.hpp" +#include "utf8_string.hpp" +#include "sass_functions.hpp" +#include "sass2scss.h" + +#ifdef _WIN32 +# include + +# ifdef _MSC_VER +# include +inline static std::string wstring_to_string(const std::wstring& wstr) +{ + std::wstring_convert, wchar_t> wchar_converter; + return wchar_converter.to_bytes(wstr); +} +# else // mingw(/gcc) does not support C++11's codecvt yet. +inline static std::string wstring_to_string(const std::wstring &wstr) +{ + int size_needed = WideCharToMultiByte(CP_UTF8, 0, &wstr[0], (int)wstr.size(), NULL, 0, NULL, NULL); + std::string strTo(size_needed, 0); + WideCharToMultiByte(CP_UTF8, 0, &wstr[0], (int)wstr.size(), &strTo[0], size_needed, NULL, NULL); + return strTo; +} +# endif +#endif + +namespace Sass { + namespace File { + + // return the current directory + // always with forward slashes + // always with trailing slash + std::string get_cwd() + { + const size_t wd_len = 4096; + #ifndef _WIN32 + char wd[wd_len]; + char* pwd = getcwd(wd, wd_len); + // we should check error for more detailed info (e.g. ENOENT) + // http://man7.org/linux/man-pages/man2/getcwd.2.html#ERRORS + if (pwd == NULL) throw Exception::OperationError("cwd gone missing"); + std::string cwd = pwd; + #else + wchar_t wd[wd_len]; + wchar_t* pwd = _wgetcwd(wd, wd_len); + if (pwd == NULL) throw Exception::OperationError("cwd gone missing"); + std::string cwd = wstring_to_string(pwd); + //convert backslashes to forward slashes + replace(cwd.begin(), cwd.end(), '\\', '/'); + #endif + if (cwd[cwd.length() - 1] != '/') cwd += '/'; + return cwd; + } + + // test if path exists and is a file + bool file_exists(const std::string& path) + { + #ifdef _WIN32 + wchar_t resolved[32768]; + // windows unicode filepaths are encoded in utf16 + std::string abspath(join_paths(get_cwd(), path)); + std::wstring wpath(UTF_8::convert_to_utf16("\\\\?\\" + abspath)); + std::replace(wpath.begin(), wpath.end(), '/', '\\'); + DWORD rv = GetFullPathNameW(wpath.c_str(), 32767, resolved, NULL); + if (rv > 32767) throw Exception::OperationError("Path is too long"); + if (rv == 0) throw Exception::OperationError("Path could not be resolved"); + DWORD dwAttrib = GetFileAttributesW(resolved); + return (dwAttrib != INVALID_FILE_ATTRIBUTES && + (!(dwAttrib & FILE_ATTRIBUTE_DIRECTORY))); + #else + struct stat st_buf; + return (stat (path.c_str(), &st_buf) == 0) && + (!S_ISDIR (st_buf.st_mode)); + #endif + } + + // return if given path is absolute + // works with *nix and windows paths + bool is_absolute_path(const std::string& path) + { + #ifdef _WIN32 + if (path.length() >= 2 && isalpha(path[0]) && path[1] == ':') return true; + #endif + size_t i = 0; + // check if we have a protocol + if (path[i] && Prelexer::is_alpha(path[i])) { + // skip over all alphanumeric characters + while (path[i] && Prelexer::is_alnum(path[i])) ++i; + i = i && path[i] == ':' ? 
i + 1 : 0; + } + return path[i] == '/'; + } + + // helper function to find the last directory seperator + inline size_t find_last_folder_separator(const std::string& path, size_t limit = std::string::npos) + { + size_t pos; + size_t pos_p = path.find_last_of('/', limit); + #ifdef _WIN32 + size_t pos_w = path.find_last_of('\\', limit); + #else + size_t pos_w = std::string::npos; + #endif + if (pos_p != std::string::npos && pos_w != std::string::npos) { + pos = std::max(pos_p, pos_w); + } + else if (pos_p != std::string::npos) { + pos = pos_p; + } + else { + pos = pos_w; + } + return pos; + } + + // return only the directory part of path + std::string dir_name(const std::string& path) + { + size_t pos = find_last_folder_separator(path); + if (pos == std::string::npos) return ""; + else return path.substr(0, pos+1); + } + + // return only the filename part of path + std::string base_name(const std::string& path) + { + size_t pos = find_last_folder_separator(path); + if (pos == std::string::npos) return path; + else return path.substr(pos+1); + } + + // do a logical clean up of the path + // no physical check on the filesystem + std::string make_canonical_path (std::string path) + { + + // declarations + size_t pos; + + #ifdef _WIN32 + //convert backslashes to forward slashes + replace(path.begin(), path.end(), '\\', '/'); + #endif + + pos = 0; // remove all self references inside the path string + while((pos = path.find("/./", pos)) != std::string::npos) path.erase(pos, 2); + + // remove all leading and trailing self references + while(path.length() > 1 && path.substr(0, 2) == "./") path.erase(0, 2); + while((pos = path.length()) > 1 && path.substr(pos - 2) == "/.") path.erase(pos - 2); + + + size_t proto = 0; + // check if we have a protocol + if (path[proto] && Prelexer::is_alpha(path[proto])) { + // skip over all alphanumeric characters + while (path[proto] && Prelexer::is_alnum(path[proto++])) {} + // then skip over the mandatory colon + if (proto && path[proto] == ':') ++ proto; + } + + // then skip over start slashes + while (path[proto++] == '/') {} + + pos = proto; // collapse multiple delimiters into a single one + while((pos = path.find("//", pos)) != std::string::npos) path.erase(pos, 1); + + return path; + + } + + // join two path segments cleanly together + // but only if right side is not absolute yet + std::string join_paths(std::string l, std::string r) + { + + #ifdef _WIN32 + // convert Windows backslashes to URL forward slashes + replace(l.begin(), l.end(), '\\', '/'); + replace(r.begin(), r.end(), '\\', '/'); + #endif + + if (l.empty()) return r; + if (r.empty()) return l; + + if (is_absolute_path(r)) return r; + if (l[l.length()-1] != '/') l += '/'; + + // this does a logical cleanup of the right hand path + // Note that this does collapse x/../y sections into y. + // This is by design. If /foo on your system is a symlink + // to /bar/baz, then /foo/../cd is actually /bar/cd, + // not /cd as a naive ../ removal would give you. + // will only work on leading double dot dirs on rhs + // therefore it is safe if lhs is already resolved cwd + while ((r.length() > 3) && ((r.substr(0, 3) == "../") || (r.substr(0, 3)) == "..\\")) { + size_t L = l.length(), pos = find_last_folder_separator(l, L - 2); + bool is_slash = pos + 2 == L && (l[pos+1] == '/' || l[pos+1] == '\\'); + bool is_self = pos + 3 == L && (l[pos+1] == '.'); + if (!is_self && !is_slash) r = r.substr(3); + else if (pos == std::string::npos) break; + l = l.substr(0, pos == std::string::npos ? 
pos : pos + 1); + } + + return l + r; + } + + std::string path_for_console(const std::string& rel_path, const std::string& abs_path, const std::string& orig_path) + { + // magic algorith goes here!! + + // if the file is outside this directory show the absolute path + if (rel_path.substr(0, 3) == "../") { + return orig_path; + } + // this seems to work most of the time + return abs_path == orig_path ? abs_path : rel_path; + } + + // create an absolute path by resolving relative paths with cwd + std::string rel2abs(const std::string& path, const std::string& base, const std::string& cwd) + { + return make_canonical_path(join_paths(join_paths(cwd + "/", base + "/"), path)); + } + + // create a path that is relative to the given base directory + // path and base will first be resolved against cwd to make them absolute + std::string abs2rel(const std::string& path, const std::string& base, const std::string& cwd) + { + + std::string abs_path = rel2abs(path, cwd); + std::string abs_base = rel2abs(base, cwd); + + size_t proto = 0; + // check if we have a protocol + if (path[proto] && Prelexer::is_alpha(path[proto])) { + // skip over all alphanumeric characters + while (path[proto] && Prelexer::is_alnum(path[proto++])) {} + // then skip over the mandatory colon + if (proto && path[proto] == ':') ++ proto; + } + + // distinguish between windows absolute paths and valid protocols + // we assume that protocols must at least have two chars to be valid + if (proto && path[proto++] == '/' && proto > 3) return path; + + #ifdef _WIN32 + // absolute link must have a drive letter, and we know that we + // can only create relative links if both are on the same drive + if (abs_base[0] != abs_path[0]) return abs_path; + #endif + + std::string stripped_uri = ""; + std::string stripped_base = ""; + + size_t index = 0; + size_t minSize = std::min(abs_path.size(), abs_base.size()); + for (size_t i = 0; i < minSize; ++i) { + #ifdef FS_CASE_SENSITIVE + if (abs_path[i] != abs_base[i]) break; + #else + // compare the charactes in a case insensitive manner + // windows fs is only case insensitive in ascii ranges + if (tolower(abs_path[i]) != tolower(abs_base[i])) break; + #endif + if (abs_path[i] == '/') index = i + 1; + } + for (size_t i = index; i < abs_path.size(); ++i) { + stripped_uri += abs_path[i]; + } + for (size_t i = index; i < abs_base.size(); ++i) { + stripped_base += abs_base[i]; + } + + size_t left = 0; + size_t directories = 0; + for (size_t right = 0; right < stripped_base.size(); ++right) { + if (stripped_base[right] == '/') { + if (stripped_base.substr(left, 2) != "..") { + ++directories; + } + else if (directories > 1) { + --directories; + } + else { + directories = 0; + } + left = right + 1; + } + } + + std::string result = ""; + for (size_t i = 0; i < directories; ++i) { + result += "../"; + } + result += stripped_uri; + + return result; + } + + // Resolution order for ambiguous imports: + // (1) filename as given + // (2) underscore + given + // (3) underscore + given + extension + // (4) given + extension + std::vector resolve_includes(const std::string& root, const std::string& file, const std::vector& exts) + { + std::string filename = join_paths(root, file); + // split the filename + std::string base(dir_name(file)); + std::string name(base_name(file)); + std::vector includes; + // create full path (maybe relative) + std::string rel_path(join_paths(base, name)); + std::string abs_path(join_paths(root, rel_path)); + if (file_exists(abs_path)) includes.push_back({{ rel_path, root }, abs_path 
}); + // next test variation with underscore + rel_path = join_paths(base, "_" + name); + abs_path = join_paths(root, rel_path); + if (file_exists(abs_path)) includes.push_back({{ rel_path, root }, abs_path }); + // next test exts plus underscore + for(auto ext : exts) { + rel_path = join_paths(base, "_" + name + ext); + abs_path = join_paths(root, rel_path); + if (file_exists(abs_path)) includes.push_back({{ rel_path, root }, abs_path }); + } + // next test plain name with exts + for(auto ext : exts) { + rel_path = join_paths(base, name + ext); + abs_path = join_paths(root, rel_path); + if (file_exists(abs_path)) includes.push_back({{ rel_path, root }, abs_path }); + } + // nothing found + return includes; + } + + std::vector find_files(const std::string& file, const std::vector paths) + { + std::vector includes; + for (std::string path : paths) { + std::string abs_path(join_paths(path, file)); + if (file_exists(abs_path)) includes.push_back(abs_path); + } + return includes; + } + + std::vector find_files(const std::string& file, struct Sass_Compiler* compiler) + { + // get the last import entry to get current base directory + // struct Sass_Options* options = sass_compiler_get_options(compiler); + Sass_Import_Entry import = sass_compiler_get_last_import(compiler); + const std::vector& incs = compiler->cpp_ctx->include_paths; + // create the vector with paths to lookup + std::vector paths(1 + incs.size()); + paths.push_back(dir_name(import->abs_path)); + paths.insert(paths.end(), incs.begin(), incs.end()); + // dispatch to find files in paths + return find_files(file, paths); + } + + // helper function to search one file in all include paths + // this is normally not used internally by libsass (C-API sugar) + std::string find_file(const std::string& file, const std::vector paths) + { + if (file.empty()) return file; + auto res = find_files(file, paths); + return res.empty() ? 
"" : res.front(); + } + + // helper function to resolve a filename + std::string find_include(const std::string& file, const std::vector paths) + { + // search in every include path for a match + for (size_t i = 0, S = paths.size(); i < S; ++i) + { + std::vector resolved(resolve_includes(paths[i], file)); + if (resolved.size()) return resolved[0].abs_path; + } + // nothing found + return std::string(""); + } + + // try to load the given filename + // returned memory must be freed + // will auto convert .sass files + char* read_file(const std::string& path) + { + #ifdef _WIN32 + BYTE* pBuffer; + DWORD dwBytes; + wchar_t resolved[32768]; + // windows unicode filepaths are encoded in utf16 + std::string abspath(join_paths(get_cwd(), path)); + std::wstring wpath(UTF_8::convert_to_utf16("\\\\?\\" + abspath)); + std::replace(wpath.begin(), wpath.end(), '/', '\\'); + DWORD rv = GetFullPathNameW(wpath.c_str(), 32767, resolved, NULL); + if (rv > 32767) throw Exception::OperationError("Path is too long"); + if (rv == 0) throw Exception::OperationError("Path could not be resolved"); + HANDLE hFile = CreateFileW(resolved, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, 0, NULL); + if (hFile == INVALID_HANDLE_VALUE) return 0; + DWORD dwFileLength = GetFileSize(hFile, NULL); + if (dwFileLength == INVALID_FILE_SIZE) return 0; + // allocate an extra byte for the null char + // and another one for edge-cases in lexer + pBuffer = (BYTE*)malloc((dwFileLength+2)*sizeof(BYTE)); + ReadFile(hFile, pBuffer, dwFileLength, &dwBytes, NULL); + pBuffer[dwFileLength+0] = '\0'; + pBuffer[dwFileLength+1] = '\0'; + CloseHandle(hFile); + // just convert from unsigned char* + char* contents = (char*) pBuffer; + #else + struct stat st; + if (stat(path.c_str(), &st) == -1 || S_ISDIR(st.st_mode)) return 0; + std::ifstream file(path.c_str(), std::ios::in | std::ios::binary | std::ios::ate); + char* contents = 0; + if (file.is_open()) { + size_t size = file.tellg(); + // allocate an extra byte for the null char + // and another one for edge-cases in lexer + contents = (char*) malloc((size+2)*sizeof(char)); + file.seekg(0, std::ios::beg); + file.read(contents, size); + contents[size+0] = '\0'; + contents[size+1] = '\0'; + file.close(); + } + #endif + std::string extension; + if (path.length() > 5) { + extension = path.substr(path.length() - 5, 5); + } + for(size_t i=0; i split_path_list(const char* str) + { + std::vector paths; + if (str == NULL) return paths; + // find delimiter via prelexer (return zero at end) + const char* end = Prelexer::find_first(str); + // search until null delimiter + while (end) { + // add path from current position to delimiter + paths.push_back(std::string(str, end - str)); + str = end + 1; // skip delimiter + end = Prelexer::find_first(str); + } + // add path from current position to end + paths.push_back(std::string(str)); + // return back + return paths; + } + + } +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/file.hpp b/mybulma/node_modules/node-sass/src/libsass/src/file.hpp new file mode 100644 index 0000000..279b9e9 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/file.hpp @@ -0,0 +1,133 @@ +#ifndef SASS_FILE_H +#define SASS_FILE_H + +#include +#include + +#include "sass/context.h" +#include "ast_fwd_decl.hpp" + +namespace Sass { + + namespace File { + + // return the current directory + // always with forward slashes + std::string get_cwd(); + + // test if path exists and is a file + bool file_exists(const std::string& file); + + // return if given path is 
absolute + // works with *nix and windows paths + bool is_absolute_path(const std::string& path); + + // return only the directory part of path + std::string dir_name(const std::string& path); + + // return only the filename part of path + std::string base_name(const std::string&); + + // do a locigal clean up of the path + // no physical check on the filesystem + std::string make_canonical_path (std::string path); + + // join two path segments cleanly together + // but only if right side is not absolute yet + std::string join_paths(std::string root, std::string name); + + // if the relative path is outside of the cwd we want want to + // show the absolute path in console messages + std::string path_for_console(const std::string& rel_path, const std::string& abs_path, const std::string& orig_path); + + // create an absolute path by resolving relative paths with cwd + std::string rel2abs(const std::string& path, const std::string& base = ".", const std::string& cwd = get_cwd()); + + // create a path that is relative to the given base directory + // path and base will first be resolved against cwd to make them absolute + std::string abs2rel(const std::string& path, const std::string& base = ".", const std::string& cwd = get_cwd()); + + // helper function to resolve a filename + // searching without variations in all paths + std::string find_file(const std::string& file, struct Sass_Compiler* options); + std::string find_file(const std::string& file, const std::vector paths); + + // helper function to resolve a include filename + // this has the original resolve logic for sass include + std::string find_include(const std::string& file, const std::vector paths); + + // split a path string delimited by semicolons or colons (OS dependent) + std::vector split_path_list(const char* paths); + + // try to load the given filename + // returned memory must be freed + // will auto convert .sass files + char* read_file(const std::string& file); + + } + + // requested import + class Importer { + public: + // requested import path + std::string imp_path; + // parent context path + std::string ctx_path; + // base derived from context path + // this really just acts as a cache + std::string base_path; + public: + Importer(std::string imp_path, std::string ctx_path) + : imp_path(File::make_canonical_path(imp_path)), + ctx_path(File::make_canonical_path(ctx_path)), + base_path(File::dir_name(ctx_path)) + { } + }; + + // a resolved include (final import) + class Include : public Importer { + public: + // resolved absolute path + std::string abs_path; + public: + Include(const Importer& imp, std::string abs_path) + : Importer(imp), abs_path(abs_path) + { } + }; + + // a loaded resource + class Resource { + public: + // the file contents + char* contents; + // conected sourcemap + char* srcmap; + public: + Resource(char* contents, char* srcmap) + : contents(contents), srcmap(srcmap) + { } + }; + + // parsed stylesheet from loaded resource + class StyleSheet : public Resource { + public: + // parsed root block + Block_Obj root; + public: + StyleSheet(const Resource& res, Block_Obj root) + : Resource(res), root(root) + { } + }; + + namespace File { + + static std::vector defaultExtensions = { ".scss", ".sass", ".css" }; + + std::vector resolve_includes(const std::string& root, const std::string& file, + const std::vector& exts = defaultExtensions); + + } + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/functions.cpp b/mybulma/node_modules/node-sass/src/libsass/src/functions.cpp new file 
mode 100644 index 0000000..c9999fc --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/functions.cpp @@ -0,0 +1,2234 @@ +#include "sass.hpp" +#include "functions.hpp" +#include "ast.hpp" +#include "context.hpp" +#include "backtrace.hpp" +#include "parser.hpp" +#include "constants.hpp" +#include "inspect.hpp" +#include "extend.hpp" +#include "eval.hpp" +#include "util.hpp" +#include "expand.hpp" +#include "operators.hpp" +#include "utf8_string.hpp" +#include "sass/base.h" +#include "utf8.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef __MINGW32__ +#include "windows.h" +#include "wincrypt.h" +#endif + +#define ARG(argname, argtype) get_arg(argname, env, sig, pstate, traces) +#define ARGM(argname, argtype, ctx) get_arg_m(argname, env, sig, pstate, traces, ctx) + +// return a number object (copied since we want to have reduced units) +#define ARGN(argname) get_arg_n(argname, env, sig, pstate, traces) // Number copy + +// special function for weird hsla percent (10px == 10% == 10 != 0.1) +#define ARGVAL(argname) get_arg_val(argname, env, sig, pstate, traces) // double + +// macros for common ranges (u mean unsigned or upper, r for full range) +#define DARG_U_FACT(argname) get_arg_r(argname, env, sig, pstate, traces, - 0.0, 1.0) // double +#define DARG_R_FACT(argname) get_arg_r(argname, env, sig, pstate, traces, - 1.0, 1.0) // double +#define DARG_U_BYTE(argname) get_arg_r(argname, env, sig, pstate, traces, - 0.0, 255.0) // double +#define DARG_R_BYTE(argname) get_arg_r(argname, env, sig, pstate, traces, - 255.0, 255.0) // double +#define DARG_U_PRCT(argname) get_arg_r(argname, env, sig, pstate, traces, - 0.0, 100.0) // double +#define DARG_R_PRCT(argname) get_arg_r(argname, env, sig, pstate, traces, - 100.0, 100.0) // double + +// macros for color related inputs (rbg and alpha/opacity values) +#define COLOR_NUM(argname) color_num(argname, env, sig, pstate, traces) // double +#define ALPHA_NUM(argname) alpha_num(argname, env, sig, pstate, traces) // double + +namespace Sass { + using std::stringstream; + using std::endl; + + Definition_Ptr make_native_function(Signature sig, Native_Function func, Context& ctx) + { + Parser sig_parser = Parser::from_c_str(sig, ctx, ctx.traces, ParserState("[built-in function]")); + sig_parser.lex(); + std::string name(Util::normalize_underscores(sig_parser.lexed)); + Parameters_Obj params = sig_parser.parse_parameters(); + return SASS_MEMORY_NEW(Definition, + ParserState("[built-in function]"), + sig, + name, + params, + func, + false); + } + + Definition_Ptr make_c_function(Sass_Function_Entry c_func, Context& ctx) + { + using namespace Prelexer; + + const char* sig = sass_function_get_signature(c_func); + Parser sig_parser = Parser::from_c_str(sig, ctx, ctx.traces, ParserState("[c function]")); + // allow to overload generic callback plus @warn, @error and @debug with custom functions + sig_parser.lex < alternatives < identifier, exactly <'*'>, + exactly < Constants::warn_kwd >, + exactly < Constants::error_kwd >, + exactly < Constants::debug_kwd > + > >(); + std::string name(Util::normalize_underscores(sig_parser.lexed)); + Parameters_Obj params = sig_parser.parse_parameters(); + return SASS_MEMORY_NEW(Definition, + ParserState("[c function]"), + sig, + name, + params, + c_func, + false, true); + } + + std::string function_name(Signature sig) + { + std::string str(sig); + return str.substr(0, str.find('(')); + } + + namespace Functions { + + inline void handle_utf8_error (const 
ParserState& pstate, Backtraces traces) + { + try { + throw; + } + catch (utf8::invalid_code_point) { + std::string msg("utf8::invalid_code_point"); + error(msg, pstate, traces); + } + catch (utf8::not_enough_room) { + std::string msg("utf8::not_enough_room"); + error(msg, pstate, traces); + } + catch (utf8::invalid_utf8) { + std::string msg("utf8::invalid_utf8"); + error(msg, pstate, traces); + } + catch (...) { throw; } + } + + template + T* get_arg(const std::string& argname, Env& env, Signature sig, ParserState pstate, Backtraces traces) + { + // Minimal error handling -- the expectation is that built-ins will be written correctly! + T* val = Cast(env[argname]); + if (!val) { + std::string msg("argument `"); + msg += argname; + msg += "` of `"; + msg += sig; + msg += "` must be a "; + msg += T::type_name(); + error(msg, pstate, traces); + } + return val; + } + + Map_Ptr get_arg_m(const std::string& argname, Env& env, Signature sig, ParserState pstate, Backtraces traces, Context& ctx) + { + // Minimal error handling -- the expectation is that built-ins will be written correctly! + Map_Ptr val = Cast(env[argname]); + if (val) return val; + + List_Ptr lval = Cast(env[argname]); + if (lval && lval->length() == 0) return SASS_MEMORY_NEW(Map, pstate, 0); + + // fallback on get_arg for error handling + val = get_arg(argname, env, sig, pstate, traces); + return val; + } + + double get_arg_r(const std::string& argname, Env& env, Signature sig, ParserState pstate, Backtraces traces, double lo, double hi) + { + // Minimal error handling -- the expectation is that built-ins will be written correctly! + Number_Ptr val = get_arg(argname, env, sig, pstate, traces); + Number tmpnr(val); + tmpnr.reduce(); + double v = tmpnr.value(); + if (!(lo <= v && v <= hi)) { + std::stringstream msg; + msg << "argument `" << argname << "` of `" << sig << "` must be between "; + msg << lo << " and " << hi; + error(msg.str(), pstate, traces); + } + return v; + } + + Number_Ptr get_arg_n(const std::string& argname, Env& env, Signature sig, ParserState pstate, Backtraces traces) + { + // Minimal error handling -- the expectation is that built-ins will be written correctly! + Number_Ptr val = get_arg(argname, env, sig, pstate, traces); + val = SASS_MEMORY_COPY(val); + val->reduce(); + return val; + } + + double get_arg_v(const std::string& argname, Env& env, Signature sig, ParserState pstate, Backtraces traces) + { + // Minimal error handling -- the expectation is that built-ins will be written correctly! + Number_Ptr val = get_arg(argname, env, sig, pstate, traces); + Number tmpnr(val); + tmpnr.reduce(); + /* + if (tmpnr.unit() == "%") { + tmpnr.value(tmpnr.value() / 100); + tmpnr.numerators.clear(); + } else { + if (!tmpnr.is_unitless()) error("argument " + argname + " of `" + std::string(sig) + "` must be unitless", pstate); + } + */ + return tmpnr.value(); + } + + double get_arg_val(const std::string& argname, Env& env, Signature sig, ParserState pstate, Backtraces traces) + { + // Minimal error handling -- the expectation is that built-ins will be written correctly! 
+ Number_Ptr val = get_arg(argname, env, sig, pstate, traces); + Number tmpnr(val); + tmpnr.reduce(); + return tmpnr.value(); + } + + double color_num(const std::string& argname, Env& env, Signature sig, ParserState pstate, Backtraces traces) + { + Number_Ptr val = get_arg(argname, env, sig, pstate, traces); + Number tmpnr(val); + tmpnr.reduce(); + if (tmpnr.unit() == "%") { + return std::min(std::max(tmpnr.value() * 255 / 100.0, 0.0), 255.0); + } else { + return std::min(std::max(tmpnr.value(), 0.0), 255.0); + } + } + + + inline double alpha_num(const std::string& argname, Env& env, Signature sig, ParserState pstate, Backtraces traces) { + Number_Ptr val = get_arg(argname, env, sig, pstate, traces); + Number tmpnr(val); + tmpnr.reduce(); + if (tmpnr.unit() == "%") { + return std::min(std::max(tmpnr.value(), 0.0), 100.0); + } else { + return std::min(std::max(tmpnr.value(), 0.0), 1.0); + } + } + + #define ARGSEL(argname, seltype, contextualize) get_arg_sel(argname, env, sig, pstate, traces, ctx) + + template + T get_arg_sel(const std::string& argname, Env& env, Signature sig, ParserState pstate, Backtraces traces, Context& ctx); + + template <> + Selector_List_Obj get_arg_sel(const std::string& argname, Env& env, Signature sig, ParserState pstate, Backtraces traces, Context& ctx) { + Expression_Obj exp = ARG(argname, Expression); + if (exp->concrete_type() == Expression::NULL_VAL) { + std::stringstream msg; + msg << argname << ": null is not a valid selector: it must be a string,\n"; + msg << "a list of strings, or a list of lists of strings for `" << function_name(sig) << "'"; + error(msg.str(), pstate, traces); + } + if (String_Constant_Ptr str = Cast(exp)) { + str->quote_mark(0); + } + std::string exp_src = exp->to_string(ctx.c_options); + return Parser::parse_selector(exp_src.c_str(), ctx, traces); + } + + template <> + Compound_Selector_Obj get_arg_sel(const std::string& argname, Env& env, Signature sig, ParserState pstate, Backtraces traces, Context& ctx) { + Expression_Obj exp = ARG(argname, Expression); + if (exp->concrete_type() == Expression::NULL_VAL) { + std::stringstream msg; + msg << argname << ": null is not a string for `" << function_name(sig) << "'"; + error(msg.str(), pstate, traces); + } + if (String_Constant_Ptr str = Cast(exp)) { + str->quote_mark(0); + } + std::string exp_src = exp->to_string(ctx.c_options); + Selector_List_Obj sel_list = Parser::parse_selector(exp_src.c_str(), ctx, traces); + if (sel_list->length() == 0) return NULL; + Complex_Selector_Obj first = sel_list->first(); + if (!first->tail()) return first->head(); + return first->tail()->head(); + } + + #ifdef __MINGW32__ + uint64_t GetSeed() + { + HCRYPTPROV hp = 0; + BYTE rb[8]; + CryptAcquireContext(&hp, 0, 0, PROV_RSA_FULL, CRYPT_VERIFYCONTEXT); + CryptGenRandom(hp, sizeof(rb), rb); + CryptReleaseContext(hp, 0); + + uint64_t seed; + memcpy(&seed, &rb[0], sizeof(seed)); + + return seed; + } + #else + uint64_t GetSeed() + { + std::random_device rd; + return rd(); + } + #endif + + // note: the performance of many implementations of + // random_device degrades sharply once the entropy pool + // is exhausted. For practical use, random_device is + // generally only used to seed a PRNG such as mt19937. 
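  // [editorial illustration, not libsass source] A minimal sketch of the
  // seed-once-then-reuse pattern described in the note above, assuming a
  // standalone translation unit rather than this file:
  //
  //   #include <random>
  //
  //   std::mt19937 make_engine() {
  //     std::random_device rd;        // potentially slow entropy source, used once
  //     return std::mt19937(rd());    // every later number comes from the PRNG
  //   }
  //
  //   double sample(std::mt19937& eng) {
  //     std::uniform_real_distribution<> dist(0.0, 1.0);
  //     return dist(eng);             // cheap; never drains the entropy pool again
  //   }
  //
  // The static declaration that follows applies the same idea, with GetSeed()
  // (std::random_device, or CryptGenRandom on MinGW) as the one-time seed.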
+ static std::mt19937 rand(static_cast(GetSeed())); + + // features + static std::set features { + "global-variable-shadowing", + "extend-selector-pseudoclass", + "at-error", + "units-level-3", + "custom-property" + }; + + //////////////// + // RGB FUNCTIONS + //////////////// + + inline bool special_number(String_Constant_Ptr s) { + if (s) { + std::string calc("calc("); + std::string var("var("); + std::string ss(s->value()); + return std::equal(calc.begin(), calc.end(), ss.begin()) || + std::equal(var.begin(), var.end(), ss.begin()); + } + return false; + } + + Signature rgb_sig = "rgb($red, $green, $blue)"; + BUILT_IN(rgb) + { + if ( + special_number(Cast(env["$red"])) || + special_number(Cast(env["$green"])) || + special_number(Cast(env["$blue"])) + ) { + return SASS_MEMORY_NEW(String_Constant, pstate, "rgb(" + + env["$red"]->to_string() + + ", " + + env["$green"]->to_string() + + ", " + + env["$blue"]->to_string() + + ")" + ); + } + + return SASS_MEMORY_NEW(Color, + pstate, + COLOR_NUM("$red"), + COLOR_NUM("$green"), + COLOR_NUM("$blue")); + } + + Signature rgba_4_sig = "rgba($red, $green, $blue, $alpha)"; + BUILT_IN(rgba_4) + { + if ( + special_number(Cast(env["$red"])) || + special_number(Cast(env["$green"])) || + special_number(Cast(env["$blue"])) || + special_number(Cast(env["$alpha"])) + ) { + return SASS_MEMORY_NEW(String_Constant, pstate, "rgba(" + + env["$red"]->to_string() + + ", " + + env["$green"]->to_string() + + ", " + + env["$blue"]->to_string() + + ", " + + env["$alpha"]->to_string() + + ")" + ); + } + + return SASS_MEMORY_NEW(Color, + pstate, + COLOR_NUM("$red"), + COLOR_NUM("$green"), + COLOR_NUM("$blue"), + ALPHA_NUM("$alpha")); + } + + Signature rgba_2_sig = "rgba($color, $alpha)"; + BUILT_IN(rgba_2) + { + if ( + special_number(Cast(env["$color"])) + ) { + return SASS_MEMORY_NEW(String_Constant, pstate, "rgba(" + + env["$color"]->to_string() + + ", " + + env["$alpha"]->to_string() + + ")" + ); + } + + Color_Ptr c_arg = ARG("$color", Color); + + if ( + special_number(Cast(env["$alpha"])) + ) { + std::stringstream strm; + strm << "rgba(" + << (int)c_arg->r() << ", " + << (int)c_arg->g() << ", " + << (int)c_arg->b() << ", " + << env["$alpha"]->to_string() + << ")"; + return SASS_MEMORY_NEW(String_Constant, pstate, strm.str()); + } + + Color_Ptr new_c = SASS_MEMORY_COPY(c_arg); + new_c->a(ALPHA_NUM("$alpha")); + new_c->disp(""); + return new_c; + } + + Signature red_sig = "red($color)"; + BUILT_IN(red) + { return SASS_MEMORY_NEW(Number, pstate, ARG("$color", Color)->r()); } + + Signature green_sig = "green($color)"; + BUILT_IN(green) + { return SASS_MEMORY_NEW(Number, pstate, ARG("$color", Color)->g()); } + + Signature blue_sig = "blue($color)"; + BUILT_IN(blue) + { return SASS_MEMORY_NEW(Number, pstate, ARG("$color", Color)->b()); } + + Color* colormix(Context& ctx, ParserState& pstate, Color* color1, Color* color2, double weight) { + double p = weight/100; + double w = 2*p - 1; + double a = color1->a() - color2->a(); + + double w1 = (((w * a == -1) ? 
w : (w + a)/(1 + w*a)) + 1)/2.0; + double w2 = 1 - w1; + + return SASS_MEMORY_NEW(Color, + pstate, + Sass::round(w1*color1->r() + w2*color2->r(), ctx.c_options.precision), + Sass::round(w1*color1->g() + w2*color2->g(), ctx.c_options.precision), + Sass::round(w1*color1->b() + w2*color2->b(), ctx.c_options.precision), + color1->a()*p + color2->a()*(1-p)); + } + + Signature mix_sig = "mix($color-1, $color-2, $weight: 50%)"; + BUILT_IN(mix) + { + Color_Obj color1 = ARG("$color-1", Color); + Color_Obj color2 = ARG("$color-2", Color); + double weight = DARG_U_PRCT("$weight"); + return colormix(ctx, pstate, color1, color2, weight); + + } + + //////////////// + // HSL FUNCTIONS + //////////////// + + // RGB to HSL helper function + struct HSL { double h; double s; double l; }; + HSL rgb_to_hsl(double r, double g, double b) + { + + // Algorithm from http://en.wikipedia.org/wiki/wHSL_and_HSV#Conversion_from_RGB_to_HSL_or_HSV + r /= 255.0; g /= 255.0; b /= 255.0; + + double max = std::max(r, std::max(g, b)); + double min = std::min(r, std::min(g, b)); + double delta = max - min; + + double h = 0; + double s; + double l = (max + min) / 2.0; + + if (NEAR_EQUAL(max, min)) { + h = s = 0; // achromatic + } + else { + if (l < 0.5) s = delta / (max + min); + else s = delta / (2.0 - max - min); + + if (r == max) h = (g - b) / delta + (g < b ? 6 : 0); + else if (g == max) h = (b - r) / delta + 2; + else if (b == max) h = (r - g) / delta + 4; + } + + HSL hsl_struct; + hsl_struct.h = h / 6 * 360; + hsl_struct.s = s * 100; + hsl_struct.l = l * 100; + + return hsl_struct; + } + + // hue to RGB helper function + double h_to_rgb(double m1, double m2, double h) { + while (h < 0) h += 1; + while (h > 1) h -= 1; + if (h*6.0 < 1) return m1 + (m2 - m1)*h*6; + if (h*2.0 < 1) return m2; + if (h*3.0 < 2) return m1 + (m2 - m1) * (2.0/3.0 - h)*6; + return m1; + } + + Color_Ptr hsla_impl(double h, double s, double l, double a, Context& ctx, ParserState pstate) + { + h /= 360.0; + s /= 100.0; + l /= 100.0; + + if (l < 0) l = 0; + if (s < 0) s = 0; + if (l > 1) l = 1; + if (s > 1) s = 1; + while (h < 0) h += 1; + while (h > 1) h -= 1; + + // if saturation is exacly zero, we loose + // information for hue, since it will evaluate + // to zero if converted back from rgb. Setting + // saturation to a very tiny number solves this. + if (s == 0) s = 1e-10; + + // Algorithm from the CSS3 spec: http://www.w3.org/TR/css3-color/#hsl-color. 
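  // [editorial illustration, not libsass source] Worked example of the spec
  // formula implemented below, assuming h, s, l already normalised to [0, 1]
  // as above: for hsl(0, 100%, 50%), l = 0.5 <= 0.5 gives m2 = l*(s + 1) = 1.0
  // and m1 = 2*l - m2 = 0.0. Then h_to_rgb(m1, m2, h + 1/3) returns m2 = 1
  // (so r = 255), h_to_rgb(m1, m2, h) returns m1 + (m2 - m1)*h*6 = 0 (g = 0),
  // and h_to_rgb(m1, m2, h - 1/3) returns m1 = 0 (b = 0) -- pure red, as expected.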
+ double m2; + if (l <= 0.5) m2 = l*(s+1.0); + else m2 = (l+s)-(l*s); + double m1 = (l*2.0)-m2; + // round the results -- consider moving this into the Color constructor + double r = (h_to_rgb(m1, m2, h + 1.0/3.0) * 255.0); + double g = (h_to_rgb(m1, m2, h) * 255.0); + double b = (h_to_rgb(m1, m2, h - 1.0/3.0) * 255.0); + + return SASS_MEMORY_NEW(Color, pstate, r, g, b, a); + } + + Signature hsl_sig = "hsl($hue, $saturation, $lightness)"; + BUILT_IN(hsl) + { + if ( + special_number(Cast(env["$hue"])) || + special_number(Cast(env["$saturation"])) || + special_number(Cast(env["$lightness"])) + ) { + return SASS_MEMORY_NEW(String_Constant, pstate, "hsl(" + + env["$hue"]->to_string() + + ", " + + env["$saturation"]->to_string() + + ", " + + env["$lightness"]->to_string() + + ")" + ); + } + + return hsla_impl(ARGVAL("$hue"), + ARGVAL("$saturation"), + ARGVAL("$lightness"), + 1.0, + ctx, + pstate); + } + + Signature hsla_sig = "hsla($hue, $saturation, $lightness, $alpha)"; + BUILT_IN(hsla) + { + if ( + special_number(Cast(env["$hue"])) || + special_number(Cast(env["$saturation"])) || + special_number(Cast(env["$lightness"])) || + special_number(Cast(env["$alpha"])) + ) { + return SASS_MEMORY_NEW(String_Constant, pstate, "hsla(" + + env["$hue"]->to_string() + + ", " + + env["$saturation"]->to_string() + + ", " + + env["$lightness"]->to_string() + + ", " + + env["$alpha"]->to_string() + + ")" + ); + } + + return hsla_impl(ARGVAL("$hue"), + ARGVAL("$saturation"), + ARGVAL("$lightness"), + ARGVAL("$alpha"), + ctx, + pstate); + } + + Signature hue_sig = "hue($color)"; + BUILT_IN(hue) + { + Color_Ptr rgb_color = ARG("$color", Color); + HSL hsl_color = rgb_to_hsl(rgb_color->r(), + rgb_color->g(), + rgb_color->b()); + return SASS_MEMORY_NEW(Number, pstate, hsl_color.h, "deg"); + } + + Signature saturation_sig = "saturation($color)"; + BUILT_IN(saturation) + { + Color_Ptr rgb_color = ARG("$color", Color); + HSL hsl_color = rgb_to_hsl(rgb_color->r(), + rgb_color->g(), + rgb_color->b()); + return SASS_MEMORY_NEW(Number, pstate, hsl_color.s, "%"); + } + + Signature lightness_sig = "lightness($color)"; + BUILT_IN(lightness) + { + Color_Ptr rgb_color = ARG("$color", Color); + HSL hsl_color = rgb_to_hsl(rgb_color->r(), + rgb_color->g(), + rgb_color->b()); + return SASS_MEMORY_NEW(Number, pstate, hsl_color.l, "%"); + } + + Signature adjust_hue_sig = "adjust-hue($color, $degrees)"; + BUILT_IN(adjust_hue) + { + Color_Ptr rgb_color = ARG("$color", Color); + double degrees = ARGVAL("$degrees"); + HSL hsl_color = rgb_to_hsl(rgb_color->r(), + rgb_color->g(), + rgb_color->b()); + return hsla_impl(hsl_color.h + degrees, + hsl_color.s, + hsl_color.l, + rgb_color->a(), + ctx, + pstate); + } + + Signature lighten_sig = "lighten($color, $amount)"; + BUILT_IN(lighten) + { + Color_Ptr rgb_color = ARG("$color", Color); + double amount = DARG_U_PRCT("$amount"); + HSL hsl_color = rgb_to_hsl(rgb_color->r(), + rgb_color->g(), + rgb_color->b()); + //Check lightness is not negative before lighten it + double hslcolorL = hsl_color.l; + if (hslcolorL < 0) { + hslcolorL = 0; + } + + return hsla_impl(hsl_color.h, + hsl_color.s, + hslcolorL + amount, + rgb_color->a(), + ctx, + pstate); + } + + Signature darken_sig = "darken($color, $amount)"; + BUILT_IN(darken) + { + Color_Ptr rgb_color = ARG("$color", Color); + double amount = DARG_U_PRCT("$amount"); + HSL hsl_color = rgb_to_hsl(rgb_color->r(), + rgb_color->g(), + rgb_color->b()); + + //Check lightness if not over 100, before darken it + double hslcolorL = hsl_color.l; + if (hslcolorL 
> 100) { + hslcolorL = 100; + } + + return hsla_impl(hsl_color.h, + hsl_color.s, + hslcolorL - amount, + rgb_color->a(), + ctx, + pstate); + } + + Signature saturate_sig = "saturate($color, $amount: false)"; + BUILT_IN(saturate) + { + // CSS3 filter function overload: pass literal through directly + if (!Cast(env["$amount"])) { + return SASS_MEMORY_NEW(String_Quoted, pstate, "saturate(" + env["$color"]->to_string(ctx.c_options) + ")"); + } + + double amount = DARG_U_PRCT("$amount"); + Color_Ptr rgb_color = ARG("$color", Color); + HSL hsl_color = rgb_to_hsl(rgb_color->r(), + rgb_color->g(), + rgb_color->b()); + + double hslcolorS = hsl_color.s + amount; + + // Saturation cannot be below 0 or above 100 + if (hslcolorS < 0) { + hslcolorS = 0; + } + if (hslcolorS > 100) { + hslcolorS = 100; + } + + return hsla_impl(hsl_color.h, + hslcolorS, + hsl_color.l, + rgb_color->a(), + ctx, + pstate); + } + + Signature desaturate_sig = "desaturate($color, $amount)"; + BUILT_IN(desaturate) + { + Color_Ptr rgb_color = ARG("$color", Color); + double amount = DARG_U_PRCT("$amount"); + HSL hsl_color = rgb_to_hsl(rgb_color->r(), + rgb_color->g(), + rgb_color->b()); + + double hslcolorS = hsl_color.s - amount; + + // Saturation cannot be below 0 or above 100 + if (hslcolorS <= 0) { + hslcolorS = 0; + } + if (hslcolorS > 100) { + hslcolorS = 100; + } + + return hsla_impl(hsl_color.h, + hslcolorS, + hsl_color.l, + rgb_color->a(), + ctx, + pstate); + } + + Signature grayscale_sig = "grayscale($color)"; + BUILT_IN(grayscale) + { + // CSS3 filter function overload: pass literal through directly + Number_Ptr amount = Cast(env["$color"]); + if (amount) { + return SASS_MEMORY_NEW(String_Quoted, pstate, "grayscale(" + amount->to_string(ctx.c_options) + ")"); + } + + Color_Ptr rgb_color = ARG("$color", Color); + HSL hsl_color = rgb_to_hsl(rgb_color->r(), + rgb_color->g(), + rgb_color->b()); + return hsla_impl(hsl_color.h, + 0.0, + hsl_color.l, + rgb_color->a(), + ctx, + pstate); + } + + Signature complement_sig = "complement($color)"; + BUILT_IN(complement) + { + Color_Ptr rgb_color = ARG("$color", Color); + HSL hsl_color = rgb_to_hsl(rgb_color->r(), + rgb_color->g(), + rgb_color->b()); + return hsla_impl(hsl_color.h - 180.0, + hsl_color.s, + hsl_color.l, + rgb_color->a(), + ctx, + pstate); + } + + Signature invert_sig = "invert($color, $weight: 100%)"; + BUILT_IN(invert) + { + // CSS3 filter function overload: pass literal through directly + Number_Ptr amount = Cast(env["$color"]); + if (amount) { + return SASS_MEMORY_NEW(String_Quoted, pstate, "invert(" + amount->to_string(ctx.c_options) + ")"); + } + + double weight = DARG_U_PRCT("$weight"); + Color_Ptr rgb_color = ARG("$color", Color); + Color_Obj inv = SASS_MEMORY_NEW(Color, + pstate, + 255 - rgb_color->r(), + 255 - rgb_color->g(), + 255 - rgb_color->b(), + rgb_color->a()); + return colormix(ctx, pstate, inv, rgb_color, weight); + } + + //////////////////// + // OPACITY FUNCTIONS + //////////////////// + Signature alpha_sig = "alpha($color)"; + Signature opacity_sig = "opacity($color)"; + BUILT_IN(alpha) + { + String_Constant_Ptr ie_kwd = Cast(env["$color"]); + if (ie_kwd) { + return SASS_MEMORY_NEW(String_Quoted, pstate, "alpha(" + ie_kwd->value() + ")"); + } + + // CSS3 filter function overload: pass literal through directly + Number_Ptr amount = Cast(env["$color"]); + if (amount) { + return SASS_MEMORY_NEW(String_Quoted, pstate, "opacity(" + amount->to_string(ctx.c_options) + ")"); + } + + return SASS_MEMORY_NEW(Number, pstate, ARG("$color", Color)->a()); + } + + 
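  // [editorial illustration, not libsass source] Worked example for invert()
  // above: invert(rgb(255, 0, 0)) flips each channel to rgb(0, 255, 255). With
  // the optional $weight the inverse is mixed back into the original through
  // colormix(), so invert(rgb(255, 0, 0), 50%) uses p = 0.5, hence w1 = w2 = 0.5,
  // and every channel becomes 0.5*0 + 0.5*255 = 127.5 (rounded to the configured
  // precision), i.e. a mid grey; the default $weight of 100% yields the plain
  // inverse.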
Signature opacify_sig = "opacify($color, $amount)"; + Signature fade_in_sig = "fade-in($color, $amount)"; + BUILT_IN(opacify) + { + Color_Ptr color = ARG("$color", Color); + double amount = DARG_U_FACT("$amount"); + double alpha = std::min(color->a() + amount, 1.0); + return SASS_MEMORY_NEW(Color, + pstate, + color->r(), + color->g(), + color->b(), + alpha); + } + + Signature transparentize_sig = "transparentize($color, $amount)"; + Signature fade_out_sig = "fade-out($color, $amount)"; + BUILT_IN(transparentize) + { + Color_Ptr color = ARG("$color", Color); + double amount = DARG_U_FACT("$amount"); + double alpha = std::max(color->a() - amount, 0.0); + return SASS_MEMORY_NEW(Color, + pstate, + color->r(), + color->g(), + color->b(), + alpha); + } + + //////////////////////// + // OTHER COLOR FUNCTIONS + //////////////////////// + + Signature adjust_color_sig = "adjust-color($color, $red: false, $green: false, $blue: false, $hue: false, $saturation: false, $lightness: false, $alpha: false)"; + BUILT_IN(adjust_color) + { + Color_Ptr color = ARG("$color", Color); + Number_Ptr r = Cast(env["$red"]); + Number_Ptr g = Cast(env["$green"]); + Number_Ptr b = Cast(env["$blue"]); + Number_Ptr h = Cast(env["$hue"]); + Number_Ptr s = Cast(env["$saturation"]); + Number_Ptr l = Cast(env["$lightness"]); + Number_Ptr a = Cast(env["$alpha"]); + + bool rgb = r || g || b; + bool hsl = h || s || l; + + if (rgb && hsl) { + error("Cannot specify HSL and RGB values for a color at the same time for `adjust-color'", pstate, traces); + } + if (rgb) { + double rr = r ? DARG_R_BYTE("$red") : 0; + double gg = g ? DARG_R_BYTE("$green") : 0; + double bb = b ? DARG_R_BYTE("$blue") : 0; + double aa = a ? DARG_R_FACT("$alpha") : 0; + return SASS_MEMORY_NEW(Color, + pstate, + color->r() + rr, + color->g() + gg, + color->b() + bb, + color->a() + aa); + } + if (hsl) { + HSL hsl_struct = rgb_to_hsl(color->r(), color->g(), color->b()); + double ss = s ? DARG_R_PRCT("$saturation") : 0; + double ll = l ? DARG_R_PRCT("$lightness") : 0; + double aa = a ? DARG_R_FACT("$alpha") : 0; + return hsla_impl(hsl_struct.h + (h ? h->value() : 0), + hsl_struct.s + ss, + hsl_struct.l + ll, + color->a() + aa, + ctx, + pstate); + } + if (a) { + return SASS_MEMORY_NEW(Color, + pstate, + color->r(), + color->g(), + color->b(), + color->a() + (a ? a->value() : 0)); + } + error("not enough arguments for `adjust-color'", pstate, traces); + // unreachable + return color; + } + + Signature scale_color_sig = "scale-color($color, $red: false, $green: false, $blue: false, $hue: false, $saturation: false, $lightness: false, $alpha: false)"; + BUILT_IN(scale_color) + { + Color_Ptr color = ARG("$color", Color); + Number_Ptr r = Cast(env["$red"]); + Number_Ptr g = Cast(env["$green"]); + Number_Ptr b = Cast(env["$blue"]); + Number_Ptr h = Cast(env["$hue"]); + Number_Ptr s = Cast(env["$saturation"]); + Number_Ptr l = Cast(env["$lightness"]); + Number_Ptr a = Cast(env["$alpha"]); + + bool rgb = r || g || b; + bool hsl = h || s || l; + + if (rgb && hsl) { + error("Cannot specify HSL and RGB values for a color at the same time for `scale-color'", pstate, traces); + } + if (rgb) { + double rscale = (r ? DARG_R_PRCT("$red") : 0.0) / 100.0; + double gscale = (g ? DARG_R_PRCT("$green") : 0.0) / 100.0; + double bscale = (b ? DARG_R_PRCT("$blue") : 0.0) / 100.0; + double ascale = (a ? DARG_R_PRCT("$alpha") : 0.0) / 100.0; + return SASS_MEMORY_NEW(Color, + pstate, + color->r() + rscale * (rscale > 0.0 ? 
255 - color->r() : color->r()), + color->g() + gscale * (gscale > 0.0 ? 255 - color->g() : color->g()), + color->b() + bscale * (bscale > 0.0 ? 255 - color->b() : color->b()), + color->a() + ascale * (ascale > 0.0 ? 1.0 - color->a() : color->a())); + } + if (hsl) { + double hscale = (h ? DARG_R_PRCT("$hue") : 0.0) / 100.0; + double sscale = (s ? DARG_R_PRCT("$saturation") : 0.0) / 100.0; + double lscale = (l ? DARG_R_PRCT("$lightness") : 0.0) / 100.0; + double ascale = (a ? DARG_R_PRCT("$alpha") : 0.0) / 100.0; + HSL hsl_struct = rgb_to_hsl(color->r(), color->g(), color->b()); + hsl_struct.h += hscale * (hscale > 0.0 ? 360.0 - hsl_struct.h : hsl_struct.h); + hsl_struct.s += sscale * (sscale > 0.0 ? 100.0 - hsl_struct.s : hsl_struct.s); + hsl_struct.l += lscale * (lscale > 0.0 ? 100.0 - hsl_struct.l : hsl_struct.l); + double alpha = color->a() + ascale * (ascale > 0.0 ? 1.0 - color->a() : color->a()); + return hsla_impl(hsl_struct.h, hsl_struct.s, hsl_struct.l, alpha, ctx, pstate); + } + if (a) { + double ascale = (DARG_R_PRCT("$alpha")) / 100.0; + return SASS_MEMORY_NEW(Color, + pstate, + color->r(), + color->g(), + color->b(), + color->a() + ascale * (ascale > 0.0 ? 1.0 - color->a() : color->a())); + } + error("not enough arguments for `scale-color'", pstate, traces); + // unreachable + return color; + } + + Signature change_color_sig = "change-color($color, $red: false, $green: false, $blue: false, $hue: false, $saturation: false, $lightness: false, $alpha: false)"; + BUILT_IN(change_color) + { + Color_Ptr color = ARG("$color", Color); + Number_Ptr r = Cast(env["$red"]); + Number_Ptr g = Cast(env["$green"]); + Number_Ptr b = Cast(env["$blue"]); + Number_Ptr h = Cast(env["$hue"]); + Number_Ptr s = Cast(env["$saturation"]); + Number_Ptr l = Cast(env["$lightness"]); + Number_Ptr a = Cast(env["$alpha"]); + + bool rgb = r || g || b; + bool hsl = h || s || l; + + if (rgb && hsl) { + error("Cannot specify HSL and RGB values for a color at the same time for `change-color'", pstate, traces); + } + if (rgb) { + return SASS_MEMORY_NEW(Color, + pstate, + r ? DARG_U_BYTE("$red") : color->r(), + g ? DARG_U_BYTE("$green") : color->g(), + b ? DARG_U_BYTE("$blue") : color->b(), + a ? DARG_U_BYTE("$alpha") : color->a()); + } + if (hsl) { + HSL hsl_struct = rgb_to_hsl(color->r(), color->g(), color->b()); + if (h) hsl_struct.h = std::fmod(h->value(), 360.0); + if (s) hsl_struct.s = DARG_U_PRCT("$saturation"); + if (l) hsl_struct.l = DARG_U_PRCT("$lightness"); + double alpha = a ? 
DARG_U_FACT("$alpha") : color->a(); + return hsla_impl(hsl_struct.h, hsl_struct.s, hsl_struct.l, alpha, ctx, pstate); + } + if (a) { + double alpha = DARG_U_FACT("$alpha"); + return SASS_MEMORY_NEW(Color, + pstate, + color->r(), + color->g(), + color->b(), + alpha); + } + error("not enough arguments for `change-color'", pstate, traces); + // unreachable + return color; + } + + template + static double cap_channel(double c) { + if (c > range) return range; + else if (c < 0) return 0; + else return c; + } + + Signature ie_hex_str_sig = "ie-hex-str($color)"; + BUILT_IN(ie_hex_str) + { + Color_Ptr c = ARG("$color", Color); + double r = cap_channel<0xff>(c->r()); + double g = cap_channel<0xff>(c->g()); + double b = cap_channel<0xff>(c->b()); + double a = cap_channel<1> (c->a()) * 255; + + std::stringstream ss; + ss << '#' << std::setw(2) << std::setfill('0'); + ss << std::hex << std::setw(2) << static_cast(Sass::round(a, ctx.c_options.precision)); + ss << std::hex << std::setw(2) << static_cast(Sass::round(r, ctx.c_options.precision)); + ss << std::hex << std::setw(2) << static_cast(Sass::round(g, ctx.c_options.precision)); + ss << std::hex << std::setw(2) << static_cast(Sass::round(b, ctx.c_options.precision)); + + std::string result(ss.str()); + for (size_t i = 0, L = result.length(); i < L; ++i) { + result[i] = std::toupper(result[i]); + } + return SASS_MEMORY_NEW(String_Quoted, pstate, result); + } + + /////////////////// + // STRING FUNCTIONS + /////////////////// + + Signature unquote_sig = "unquote($string)"; + BUILT_IN(sass_unquote) + { + AST_Node_Obj arg = env["$string"]; + if (String_Quoted_Ptr string_quoted = Cast(arg)) { + String_Constant_Ptr result = SASS_MEMORY_NEW(String_Constant, pstate, string_quoted->value()); + // remember if the string was quoted (color tokens) + result->is_delayed(true); // delay colors + return result; + } + else if (String_Constant_Ptr str = Cast(arg)) { + return str; + } + else if (Expression_Ptr ex = Cast(arg)) { + Sass_Output_Style oldstyle = ctx.c_options.output_style; + ctx.c_options.output_style = SASS_STYLE_NESTED; + std::string val(arg->to_string(ctx.c_options)); + val = Cast(arg) ? "null" : val; + ctx.c_options.output_style = oldstyle; + + deprecated_function("Passing " + val + ", a non-string value, to unquote()", pstate); + return ex; + } + throw std::runtime_error("Invalid Data Type for unquote"); + } + + Signature quote_sig = "quote($string)"; + BUILT_IN(sass_quote) + { + AST_Node_Obj arg = env["$string"]; + // only set quote mark to true if already a string + if (String_Quoted_Ptr qstr = Cast(arg)) { + qstr->quote_mark('*'); + return qstr; + } + // all other nodes must be converted to a string node + std::string str(quote(arg->to_string(ctx.c_options), String_Constant::double_quote())); + String_Quoted_Ptr result = SASS_MEMORY_NEW(String_Quoted, pstate, str); + result->quote_mark('*'); + return result; + } + + + Signature str_length_sig = "str-length($string)"; + BUILT_IN(str_length) + { + size_t len = std::string::npos; + try { + String_Constant_Ptr s = ARG("$string", String_Constant); + len = UTF_8::code_point_count(s->value(), 0, s->value().size()); + + } + // handle any invalid utf8 errors + // other errors will be re-thrown + catch (...) 
{ handle_utf8_error(pstate, traces); } + // return something even if we had an error (-1) + return SASS_MEMORY_NEW(Number, pstate, (double)len); + } + + Signature str_insert_sig = "str-insert($string, $insert, $index)"; + BUILT_IN(str_insert) + { + std::string str; + try { + String_Constant_Ptr s = ARG("$string", String_Constant); + str = s->value(); + str = unquote(str); + String_Constant_Ptr i = ARG("$insert", String_Constant); + std::string ins = i->value(); + ins = unquote(ins); + double index = ARGVAL("$index"); + size_t len = UTF_8::code_point_count(str, 0, str.size()); + + if (index > 0 && index <= len) { + // positive and within string length + str.insert(UTF_8::offset_at_position(str, static_cast(index) - 1), ins); + } + else if (index > len) { + // positive and past string length + str += ins; + } + else if (index == 0) { + str = ins + str; + } + else if (std::abs(index) <= len) { + // negative and within string length + index += len + 1; + str.insert(UTF_8::offset_at_position(str, static_cast(index)), ins); + } + else { + // negative and past string length + str = ins + str; + } + + if (String_Quoted_Ptr ss = Cast(s)) { + if (ss->quote_mark()) str = quote(str); + } + } + // handle any invalid utf8 errors + // other errors will be re-thrown + catch (...) { handle_utf8_error(pstate, traces); } + return SASS_MEMORY_NEW(String_Quoted, pstate, str); + } + + Signature str_index_sig = "str-index($string, $substring)"; + BUILT_IN(str_index) + { + size_t index = std::string::npos; + try { + String_Constant_Ptr s = ARG("$string", String_Constant); + String_Constant_Ptr t = ARG("$substring", String_Constant); + std::string str = s->value(); + str = unquote(str); + std::string substr = t->value(); + substr = unquote(substr); + + size_t c_index = str.find(substr); + if(c_index == std::string::npos) { + return SASS_MEMORY_NEW(Null, pstate); + } + index = UTF_8::code_point_count(str, 0, c_index) + 1; + } + // handle any invalid utf8 errors + // other errors will be re-thrown + catch (...) { handle_utf8_error(pstate, traces); } + // return something even if we had an error (-1) + return SASS_MEMORY_NEW(Number, pstate, (double)index); + } + + Signature str_slice_sig = "str-slice($string, $start-at, $end-at:-1)"; + BUILT_IN(str_slice) + { + std::string newstr; + try { + String_Constant_Ptr s = ARG("$string", String_Constant); + double start_at = ARGVAL("$start-at"); + double end_at = ARGVAL("$end-at"); + String_Quoted_Ptr ss = Cast(s); + + std::string str = unquote(s->value()); + + size_t size = utf8::distance(str.begin(), str.end()); + + if (!Cast(env["$end-at"])) { + end_at = -1; + } + + if (end_at == 0 || (end_at + size) < 0) { + if (ss && ss->quote_mark()) newstr = quote(""); + return SASS_MEMORY_NEW(String_Quoted, pstate, newstr); + } + + if (end_at < 0) { + end_at += size + 1; + if (end_at == 0) end_at = 1; + } + if (end_at > size) { end_at = (double)size; } + if (start_at < 0) { + start_at += size + 1; + if (start_at < 0) start_at = 0; + } + else if (start_at == 0) { ++ start_at; } + + if (start_at <= end_at) + { + std::string::iterator start = str.begin(); + utf8::advance(start, start_at - 1, str.end()); + std::string::iterator end = start; + utf8::advance(end, end_at - start_at + 1, str.end()); + newstr = std::string(start, end); + } + if (ss) { + if(ss->quote_mark()) newstr = quote(newstr); + } + } + // handle any invalid utf8 errors + // other errors will be re-thrown + catch (...) 
{ handle_utf8_error(pstate, traces); } + return SASS_MEMORY_NEW(String_Quoted, pstate, newstr); + } + + Signature to_upper_case_sig = "to-upper-case($string)"; + BUILT_IN(to_upper_case) + { + String_Constant_Ptr s = ARG("$string", String_Constant); + std::string str = s->value(); + + for (size_t i = 0, L = str.length(); i < L; ++i) { + if (Sass::Util::isAscii(str[i])) { + str[i] = std::toupper(str[i]); + } + } + + if (String_Quoted_Ptr ss = Cast(s)) { + String_Quoted_Ptr cpy = SASS_MEMORY_COPY(ss); + cpy->value(str); + return cpy; + } else { + return SASS_MEMORY_NEW(String_Quoted, pstate, str); + } + } + + Signature to_lower_case_sig = "to-lower-case($string)"; + BUILT_IN(to_lower_case) + { + String_Constant_Ptr s = ARG("$string", String_Constant); + std::string str = s->value(); + + for (size_t i = 0, L = str.length(); i < L; ++i) { + if (Sass::Util::isAscii(str[i])) { + str[i] = std::tolower(str[i]); + } + } + + if (String_Quoted_Ptr ss = Cast(s)) { + String_Quoted_Ptr cpy = SASS_MEMORY_COPY(ss); + cpy->value(str); + return cpy; + } else { + return SASS_MEMORY_NEW(String_Quoted, pstate, str); + } + } + + /////////////////// + // NUMBER FUNCTIONS + /////////////////// + + Signature percentage_sig = "percentage($number)"; + BUILT_IN(percentage) + { + Number_Obj n = ARGN("$number"); + if (!n->is_unitless()) error("argument $number of `" + std::string(sig) + "` must be unitless", pstate, traces); + return SASS_MEMORY_NEW(Number, pstate, n->value() * 100, "%"); + } + + Signature round_sig = "round($number)"; + BUILT_IN(round) + { + Number_Obj r = ARGN("$number"); + r->value(Sass::round(r->value(), ctx.c_options.precision)); + r->pstate(pstate); + return r.detach(); + } + + Signature ceil_sig = "ceil($number)"; + BUILT_IN(ceil) + { + Number_Obj r = ARGN("$number"); + r->value(std::ceil(r->value())); + r->pstate(pstate); + return r.detach(); + } + + Signature floor_sig = "floor($number)"; + BUILT_IN(floor) + { + Number_Obj r = ARGN("$number"); + r->value(std::floor(r->value())); + r->pstate(pstate); + return r.detach(); + } + + Signature abs_sig = "abs($number)"; + BUILT_IN(abs) + { + Number_Obj r = ARGN("$number"); + r->value(std::abs(r->value())); + r->pstate(pstate); + return r.detach(); + } + + Signature min_sig = "min($numbers...)"; + BUILT_IN(min) + { + List_Ptr arglist = ARG("$numbers", List); + Number_Obj least = NULL; + for (size_t i = 0, L = arglist->length(); i < L; ++i) { + Expression_Obj val = arglist->value_at_index(i); + Number_Obj xi = Cast(val); + if (!xi) { + error("\"" + val->to_string(ctx.c_options) + "\" is not a number for `min'", pstate, traces); + } + if (least) { + if (*xi < *least) least = xi; + } else least = xi; + } + return least.detach(); + } + + Signature max_sig = "max($numbers...)"; + BUILT_IN(max) + { + List_Ptr arglist = ARG("$numbers", List); + Number_Obj greatest = NULL; + for (size_t i = 0, L = arglist->length(); i < L; ++i) { + Expression_Obj val = arglist->value_at_index(i); + Number_Obj xi = Cast(val); + if (!xi) { + error("\"" + val->to_string(ctx.c_options) + "\" is not a number for `max'", pstate, traces); + } + if (greatest) { + if (*greatest < *xi) greatest = xi; + } else greatest = xi; + } + return greatest.detach(); + } + + Signature random_sig = "random($limit:false)"; + BUILT_IN(random) + { + AST_Node_Obj arg = env["$limit"]; + Value_Ptr v = Cast(arg); + Number_Ptr l = Cast(arg); + Boolean_Ptr b = Cast(arg); + if (l) { + double lv = l->value(); + if (lv < 1) { + stringstream err; + err << "$limit " << lv << " must be greater than or equal to 1 
for `random'"; + error(err.str(), pstate, traces); + } + bool eq_int = std::fabs(trunc(lv) - lv) < NUMBER_EPSILON; + if (!eq_int) { + stringstream err; + err << "Expected $limit to be an integer but got " << lv << " for `random'"; + error(err.str(), pstate, traces); + } + std::uniform_real_distribution<> distributor(1, lv + 1); + uint_fast32_t distributed = static_cast(distributor(rand)); + return SASS_MEMORY_NEW(Number, pstate, (double)distributed); + } + else if (b) { + std::uniform_real_distribution<> distributor(0, 1); + double distributed = static_cast(distributor(rand)); + return SASS_MEMORY_NEW(Number, pstate, distributed); + } else if (v) { + traces.push_back(Backtrace(pstate)); + throw Exception::InvalidArgumentType(pstate, traces, "random", "$limit", "number", v); + } else { + traces.push_back(Backtrace(pstate)); + throw Exception::InvalidArgumentType(pstate, traces, "random", "$limit", "number"); + } + } + + ///////////////// + // LIST FUNCTIONS + ///////////////// + + Signature length_sig = "length($list)"; + BUILT_IN(length) + { + if (Selector_List_Ptr sl = Cast(env["$list"])) { + return SASS_MEMORY_NEW(Number, pstate, (double)sl->length()); + } + Expression_Ptr v = ARG("$list", Expression); + if (v->concrete_type() == Expression::MAP) { + Map_Ptr map = Cast(env["$list"]); + return SASS_MEMORY_NEW(Number, pstate, (double)(map ? map->length() : 1)); + } + if (v->concrete_type() == Expression::SELECTOR) { + if (Compound_Selector_Ptr h = Cast(v)) { + return SASS_MEMORY_NEW(Number, pstate, (double)h->length()); + } else if (Selector_List_Ptr ls = Cast(v)) { + return SASS_MEMORY_NEW(Number, pstate, (double)ls->length()); + } else { + return SASS_MEMORY_NEW(Number, pstate, 1); + } + } + + List_Ptr list = Cast(env["$list"]); + return SASS_MEMORY_NEW(Number, + pstate, + (double)(list ? list->size() : 1)); + } + + Signature nth_sig = "nth($list, $n)"; + BUILT_IN(nth) + { + double nr = ARGVAL("$n"); + Map_Ptr m = Cast(env["$list"]); + if (Selector_List_Ptr sl = Cast(env["$list"])) { + size_t len = m ? m->length() : sl->length(); + bool empty = m ? m->empty() : sl->empty(); + if (empty) error("argument `$list` of `" + std::string(sig) + "` must not be empty", pstate, traces); + double index = std::floor(nr < 0 ? len + nr : nr - 1); + if (index < 0 || index > len - 1) error("index out of bounds for `" + std::string(sig) + "`", pstate, traces); + // return (*sl)[static_cast(index)]; + Listize listize; + return (*sl)[static_cast(index)]->perform(&listize); + } + List_Obj l = Cast(env["$list"]); + if (nr == 0) error("argument `$n` of `" + std::string(sig) + "` must be non-zero", pstate, traces); + // if the argument isn't a list, then wrap it in a singleton list + if (!m && !l) { + l = SASS_MEMORY_NEW(List, pstate, 1); + l->append(ARG("$list", Expression)); + } + size_t len = m ? m->length() : l->length(); + bool empty = m ? m->empty() : l->empty(); + if (empty) error("argument `$list` of `" + std::string(sig) + "` must not be empty", pstate, traces); + double index = std::floor(nr < 0 ? 
len + nr : nr - 1); + if (index < 0 || index > len - 1) error("index out of bounds for `" + std::string(sig) + "`", pstate, traces); + + if (m) { + l = SASS_MEMORY_NEW(List, pstate, 1); + l->append(m->keys()[static_cast(index)]); + l->append(m->at(m->keys()[static_cast(index)])); + return l.detach(); + } + else { + Expression_Obj rv = l->value_at_index(static_cast(index)); + rv->set_delayed(false); + return rv.detach(); + } + } + + Signature set_nth_sig = "set-nth($list, $n, $value)"; + BUILT_IN(set_nth) + { + Map_Obj m = Cast(env["$list"]); + List_Obj l = Cast(env["$list"]); + Number_Obj n = ARG("$n", Number); + Expression_Obj v = ARG("$value", Expression); + if (!l) { + l = SASS_MEMORY_NEW(List, pstate, 1); + l->append(ARG("$list", Expression)); + } + if (m) { + l = m->to_list(pstate); + } + if (l->empty()) error("argument `$list` of `" + std::string(sig) + "` must not be empty", pstate, traces); + double index = std::floor(n->value() < 0 ? l->length() + n->value() : n->value() - 1); + if (index < 0 || index > l->length() - 1) error("index out of bounds for `" + std::string(sig) + "`", pstate, traces); + List_Ptr result = SASS_MEMORY_NEW(List, pstate, l->length(), l->separator(), false, l->is_bracketed()); + for (size_t i = 0, L = l->length(); i < L; ++i) { + result->append(((i == index) ? v : (*l)[i])); + } + return result; + } + + Signature index_sig = "index($list, $value)"; + BUILT_IN(index) + { + Map_Obj m = Cast(env["$list"]); + List_Obj l = Cast(env["$list"]); + Expression_Obj v = ARG("$value", Expression); + if (!l) { + l = SASS_MEMORY_NEW(List, pstate, 1); + l->append(ARG("$list", Expression)); + } + if (m) { + l = m->to_list(pstate); + } + for (size_t i = 0, L = l->length(); i < L; ++i) { + if (Operators::eq(l->value_at_index(i), v)) return SASS_MEMORY_NEW(Number, pstate, (double)(i+1)); + } + return SASS_MEMORY_NEW(Null, pstate); + } + + Signature join_sig = "join($list1, $list2, $separator: auto, $bracketed: auto)"; + BUILT_IN(join) + { + Map_Obj m1 = Cast(env["$list1"]); + Map_Obj m2 = Cast(env["$list2"]); + List_Obj l1 = Cast(env["$list1"]); + List_Obj l2 = Cast(env["$list2"]); + String_Constant_Obj sep = ARG("$separator", String_Constant); + enum Sass_Separator sep_val = (l1 ? l1->separator() : SASS_SPACE); + Value* bracketed = ARG("$bracketed", Value); + bool is_bracketed = (l1 ? l1->is_bracketed() : false); + if (!l1) { + l1 = SASS_MEMORY_NEW(List, pstate, 1); + l1->append(ARG("$list1", Expression)); + sep_val = (l2 ? l2->separator() : SASS_SPACE); + is_bracketed = (l2 ? 
l2->is_bracketed() : false); + } + if (!l2) { + l2 = SASS_MEMORY_NEW(List, pstate, 1); + l2->append(ARG("$list2", Expression)); + } + if (m1) { + l1 = m1->to_list(pstate); + sep_val = SASS_COMMA; + } + if (m2) { + l2 = m2->to_list(pstate); + } + size_t len = l1->length() + l2->length(); + std::string sep_str = unquote(sep->value()); + if (sep_str == "space") sep_val = SASS_SPACE; + else if (sep_str == "comma") sep_val = SASS_COMMA; + else if (sep_str != "auto") error("argument `$separator` of `" + std::string(sig) + "` must be `space`, `comma`, or `auto`", pstate, traces); + String_Constant_Obj bracketed_as_str = Cast(bracketed); + bool bracketed_is_auto = bracketed_as_str && unquote(bracketed_as_str->value()) == "auto"; + if (!bracketed_is_auto) { + is_bracketed = !bracketed->is_false(); + } + List_Obj result = SASS_MEMORY_NEW(List, pstate, len, sep_val, false, is_bracketed); + result->concat(l1); + result->concat(l2); + return result.detach(); + } + + Signature append_sig = "append($list, $val, $separator: auto)"; + BUILT_IN(append) + { + Map_Obj m = Cast(env["$list"]); + List_Obj l = Cast(env["$list"]); + Expression_Obj v = ARG("$val", Expression); + if (Selector_List_Ptr sl = Cast(env["$list"])) { + Listize listize; + l = Cast(sl->perform(&listize)); + } + String_Constant_Obj sep = ARG("$separator", String_Constant); + if (!l) { + l = SASS_MEMORY_NEW(List, pstate, 1); + l->append(ARG("$list", Expression)); + } + if (m) { + l = m->to_list(pstate); + } + List_Ptr result = SASS_MEMORY_COPY(l); + std::string sep_str(unquote(sep->value())); + if (sep_str != "auto") { // check default first + if (sep_str == "space") result->separator(SASS_SPACE); + else if (sep_str == "comma") result->separator(SASS_COMMA); + else error("argument `$separator` of `" + std::string(sig) + "` must be `space`, `comma`, or `auto`", pstate, traces); + } + if (l->is_arglist()) { + result->append(SASS_MEMORY_NEW(Argument, + v->pstate(), + v, + "", + false, + false)); + + } else { + result->append(v); + } + return result; + } + + Signature zip_sig = "zip($lists...)"; + BUILT_IN(zip) + { + List_Obj arglist = SASS_MEMORY_COPY(ARG("$lists", List)); + size_t shortest = 0; + for (size_t i = 0, L = arglist->length(); i < L; ++i) { + List_Obj ith = Cast(arglist->value_at_index(i)); + Map_Obj mith = Cast(arglist->value_at_index(i)); + if (!ith) { + if (mith) { + ith = mith->to_list(pstate); + } else { + ith = SASS_MEMORY_NEW(List, pstate, 1); + ith->append(arglist->value_at_index(i)); + } + if (arglist->is_arglist()) { + Argument_Obj arg = (Argument_Ptr)(arglist->at(i).ptr()); // XXX + arg->value(ith); + } else { + (*arglist)[i] = ith; + } + } + shortest = (i ? std::min(shortest, ith->length()) : ith->length()); + } + List_Ptr zippers = SASS_MEMORY_NEW(List, pstate, shortest, SASS_COMMA); + size_t L = arglist->length(); + for (size_t i = 0; i < shortest; ++i) { + List_Ptr zipper = SASS_MEMORY_NEW(List, pstate, L); + for (size_t j = 0; j < L; ++j) { + zipper->append(Cast(arglist->value_at_index(j))->at(i)); + } + zippers->append(zipper); + } + return zippers; + } + + Signature list_separator_sig = "list_separator($list)"; + BUILT_IN(list_separator) + { + List_Obj l = Cast(env["$list"]); + if (!l) { + l = SASS_MEMORY_NEW(List, pstate, 1); + l->append(ARG("$list", Expression)); + } + return SASS_MEMORY_NEW(String_Quoted, + pstate, + l->separator() == SASS_COMMA ? 
"comma" : "space"); + } + + ///////////////// + // MAP FUNCTIONS + ///////////////// + + Signature map_get_sig = "map-get($map, $key)"; + BUILT_IN(map_get) + { + // leaks for "map-get((), foo)" if not Obj + // investigate why this is (unexpected) + Map_Obj m = ARGM("$map", Map, ctx); + Expression_Obj v = ARG("$key", Expression); + try { + Expression_Obj val = m->at(v); + if (!val) return SASS_MEMORY_NEW(Null, pstate); + val->set_delayed(false); + return val.detach(); + } catch (const std::out_of_range&) { + return SASS_MEMORY_NEW(Null, pstate); + } + catch (...) { throw; } + } + + Signature map_has_key_sig = "map-has-key($map, $key)"; + BUILT_IN(map_has_key) + { + Map_Obj m = ARGM("$map", Map, ctx); + Expression_Obj v = ARG("$key", Expression); + return SASS_MEMORY_NEW(Boolean, pstate, m->has(v)); + } + + Signature map_keys_sig = "map-keys($map)"; + BUILT_IN(map_keys) + { + Map_Obj m = ARGM("$map", Map, ctx); + List_Ptr result = SASS_MEMORY_NEW(List, pstate, m->length(), SASS_COMMA); + for ( auto key : m->keys()) { + result->append(key); + } + return result; + } + + Signature map_values_sig = "map-values($map)"; + BUILT_IN(map_values) + { + Map_Obj m = ARGM("$map", Map, ctx); + List_Ptr result = SASS_MEMORY_NEW(List, pstate, m->length(), SASS_COMMA); + for ( auto key : m->keys()) { + result->append(m->at(key)); + } + return result; + } + + Signature map_merge_sig = "map-merge($map1, $map2)"; + BUILT_IN(map_merge) + { + Map_Obj m1 = ARGM("$map1", Map, ctx); + Map_Obj m2 = ARGM("$map2", Map, ctx); + + size_t len = m1->length() + m2->length(); + Map_Ptr result = SASS_MEMORY_NEW(Map, pstate, len); + // concat not implemented for maps + *result += m1; + *result += m2; + return result; + } + + Signature map_remove_sig = "map-remove($map, $keys...)"; + BUILT_IN(map_remove) + { + bool remove; + Map_Obj m = ARGM("$map", Map, ctx); + List_Obj arglist = ARG("$keys", List); + Map_Ptr result = SASS_MEMORY_NEW(Map, pstate, 1); + for (auto key : m->keys()) { + remove = false; + for (size_t j = 0, K = arglist->length(); j < K && !remove; ++j) { + remove = Operators::eq(key, arglist->value_at_index(j)); + } + if (!remove) *result << std::make_pair(key, m->at(key)); + } + return result; + } + + Signature keywords_sig = "keywords($args)"; + BUILT_IN(keywords) + { + List_Obj arglist = SASS_MEMORY_COPY(ARG("$args", List)); // copy + Map_Obj result = SASS_MEMORY_NEW(Map, pstate, 1); + for (size_t i = arglist->size(), L = arglist->length(); i < L; ++i) { + Expression_Obj obj = arglist->at(i); + Argument_Obj arg = (Argument_Ptr) obj.ptr(); // XXX + std::string name = std::string(arg->name()); + name = name.erase(0, 1); // sanitize name (remove dollar sign) + *result << std::make_pair(SASS_MEMORY_NEW(String_Quoted, + pstate, name), + arg->value()); + } + return result.detach(); + } + + ////////////////////////// + // INTROSPECTION FUNCTIONS + ////////////////////////// + + Signature type_of_sig = "type-of($value)"; + BUILT_IN(type_of) + { + Expression_Ptr v = ARG("$value", Expression); + return SASS_MEMORY_NEW(String_Quoted, pstate, v->type()); + } + + Signature unit_sig = "unit($number)"; + BUILT_IN(unit) + { + Number_Obj arg = ARGN("$number"); + std::string str(quote(arg->unit(), '"')); + return SASS_MEMORY_NEW(String_Quoted, pstate, str); + } + + Signature unitless_sig = "unitless($number)"; + BUILT_IN(unitless) + { + Number_Obj arg = ARGN("$number"); + bool unitless = arg->is_unitless(); + return SASS_MEMORY_NEW(Boolean, pstate, unitless); + } + + Signature comparable_sig = "comparable($number-1, $number-2)"; 
+ BUILT_IN(comparable) + { + Number_Obj n1 = ARGN("$number-1"); + Number_Obj n2 = ARGN("$number-2"); + if (n1->is_unitless() || n2->is_unitless()) { + return SASS_MEMORY_NEW(Boolean, pstate, true); + } + // normalize into main units + n1->normalize(); n2->normalize(); + Units &lhs_unit = *n1, &rhs_unit = *n2; + bool is_comparable = (lhs_unit == rhs_unit); + return SASS_MEMORY_NEW(Boolean, pstate, is_comparable); + } + + Signature variable_exists_sig = "variable-exists($name)"; + BUILT_IN(variable_exists) + { + std::string s = Util::normalize_underscores(unquote(ARG("$name", String_Constant)->value())); + + if(d_env.has("$"+s)) { + return SASS_MEMORY_NEW(Boolean, pstate, true); + } + else { + return SASS_MEMORY_NEW(Boolean, pstate, false); + } + } + + Signature global_variable_exists_sig = "global-variable-exists($name)"; + BUILT_IN(global_variable_exists) + { + std::string s = Util::normalize_underscores(unquote(ARG("$name", String_Constant)->value())); + + if(d_env.has_global("$"+s)) { + return SASS_MEMORY_NEW(Boolean, pstate, true); + } + else { + return SASS_MEMORY_NEW(Boolean, pstate, false); + } + } + + Signature function_exists_sig = "function-exists($name)"; + BUILT_IN(function_exists) + { + String_Constant_Ptr ss = Cast(env["$name"]); + if (!ss) { + error("$name: " + (env["$name"]->to_string()) + " is not a string for `function-exists'", pstate, traces); + } + + std::string name = Util::normalize_underscores(unquote(ss->value())); + + if(d_env.has_global(name+"[f]")) { + return SASS_MEMORY_NEW(Boolean, pstate, true); + } + else { + return SASS_MEMORY_NEW(Boolean, pstate, false); + } + } + + Signature mixin_exists_sig = "mixin-exists($name)"; + BUILT_IN(mixin_exists) + { + std::string s = Util::normalize_underscores(unquote(ARG("$name", String_Constant)->value())); + + if(d_env.has_global(s+"[m]")) { + return SASS_MEMORY_NEW(Boolean, pstate, true); + } + else { + return SASS_MEMORY_NEW(Boolean, pstate, false); + } + } + + Signature feature_exists_sig = "feature-exists($name)"; + BUILT_IN(feature_exists) + { + std::string s = unquote(ARG("$name", String_Constant)->value()); + + if(features.find(s) == features.end()) { + return SASS_MEMORY_NEW(Boolean, pstate, false); + } + else { + return SASS_MEMORY_NEW(Boolean, pstate, true); + } + } + + Signature call_sig = "call($name, $args...)"; + BUILT_IN(call) + { + std::string name; + Function_Ptr ff = Cast(env["$name"]); + String_Constant_Ptr ss = Cast(env["$name"]); + + if (ss) { + name = Util::normalize_underscores(unquote(ss->value())); + std::cerr << "DEPRECATION WARNING: "; + std::cerr << "Passing a string to call() is deprecated and will be illegal" << std::endl; + std::cerr << "in Sass 4.0. Use call(get-function(" + quote(name) + ")) instead." << std::endl; + std::cerr << std::endl; + } else if (ff) { + name = ff->name(); + } + + List_Obj arglist = SASS_MEMORY_COPY(ARG("$args", List)); + + Arguments_Obj args = SASS_MEMORY_NEW(Arguments, pstate); + // std::string full_name(name + "[f]"); + // Definition_Ptr def = d_env.has(full_name) ? Cast((d_env)[full_name]) : 0; + // Parameters_Ptr params = def ? def->parameters() : 0; + // size_t param_size = params ? params->length() : 0; + for (size_t i = 0, L = arglist->length(); i < L; ++i) { + Expression_Obj expr = arglist->value_at_index(i); + // if (params && params->has_rest_parameter()) { + // Parameter_Obj p = param_size > i ? 
(*params)[i] : 0; + // List_Ptr list = Cast(expr); + // if (list && p && !p->is_rest_parameter()) expr = (*list)[0]; + // } + if (arglist->is_arglist()) { + Expression_Obj obj = arglist->at(i); + Argument_Obj arg = (Argument_Ptr) obj.ptr(); // XXX + args->append(SASS_MEMORY_NEW(Argument, + pstate, + expr, + arg ? arg->name() : "", + arg ? arg->is_rest_argument() : false, + arg ? arg->is_keyword_argument() : false)); + } else { + args->append(SASS_MEMORY_NEW(Argument, pstate, expr)); + } + } + Function_Call_Obj func = SASS_MEMORY_NEW(Function_Call, pstate, name, args); + Expand expand(ctx, &d_env, &selector_stack); + func->via_call(true); // calc invoke is allowed + if (ff) func->func(ff); + return func->perform(&expand.eval); + } + + //////////////////// + // BOOLEAN FUNCTIONS + //////////////////// + + Signature not_sig = "not($value)"; + BUILT_IN(sass_not) + { + return SASS_MEMORY_NEW(Boolean, pstate, ARG("$value", Expression)->is_false()); + } + + Signature if_sig = "if($condition, $if-true, $if-false)"; + // BUILT_IN(sass_if) + // { return ARG("$condition", Expression)->is_false() ? ARG("$if-false", Expression) : ARG("$if-true", Expression); } + BUILT_IN(sass_if) + { + Expand expand(ctx, &d_env, &selector_stack); + Expression_Obj cond = ARG("$condition", Expression)->perform(&expand.eval); + bool is_true = !cond->is_false(); + Expression_Obj res = ARG(is_true ? "$if-true" : "$if-false", Expression); + res = res->perform(&expand.eval); + res->set_delayed(false); // clone? + return res.detach(); + } + + ////////////////////////// + // MISCELLANEOUS FUNCTIONS + ////////////////////////// + + // value.check_deprecated_interp if value.is_a?(Sass::Script::Value::String) + // unquoted_string(value.to_sass) + + Signature inspect_sig = "inspect($value)"; + BUILT_IN(inspect) + { + Expression_Ptr v = ARG("$value", Expression); + if (v->concrete_type() == Expression::NULL_VAL) { + return SASS_MEMORY_NEW(String_Quoted, pstate, "null"); + } else if (v->concrete_type() == Expression::BOOLEAN && v->is_false()) { + return SASS_MEMORY_NEW(String_Quoted, pstate, "false"); + } else if (v->concrete_type() == Expression::STRING) { + return v; + } else { + // ToDo: fix to_sass for nested parentheses + Sass_Output_Style old_style; + old_style = ctx.c_options.output_style; + ctx.c_options.output_style = TO_SASS; + Emitter emitter(ctx.c_options); + Inspect i(emitter); + i.in_declaration = false; + v->perform(&i); + ctx.c_options.output_style = old_style; + return SASS_MEMORY_NEW(String_Quoted, pstate, i.get_buffer()); + } + // return v; + } + Signature selector_nest_sig = "selector-nest($selectors...)"; + BUILT_IN(selector_nest) + { + List_Ptr arglist = ARG("$selectors", List); + + // Not enough parameters + if( arglist->length() == 0 ) + error("$selectors: At least one selector must be passed for `selector-nest'", pstate, traces); + + // Parse args into vector of selectors + std::vector parsedSelectors; + for (size_t i = 0, L = arglist->length(); i < L; ++i) { + Expression_Obj exp = Cast(arglist->value_at_index(i)); + if (exp->concrete_type() == Expression::NULL_VAL) { + std::stringstream msg; + msg << "$selectors: null is not a valid selector: it must be a string,\n"; + msg << "a list of strings, or a list of lists of strings for 'selector-nest'"; + error(msg.str(), pstate, traces); + } + if (String_Constant_Obj str = Cast(exp)) { + str->quote_mark(0); + } + std::string exp_src = exp->to_string(ctx.c_options); + Selector_List_Obj sel = Parser::parse_selector(exp_src.c_str(), ctx, traces); + 
parsedSelectors.push_back(sel); + } + + // Nothing to do + if( parsedSelectors.empty() ) { + return SASS_MEMORY_NEW(Null, pstate); + } + + // Set the first element as the `result`, keep appending to as we go down the parsedSelector vector. + std::vector::iterator itr = parsedSelectors.begin(); + Selector_List_Obj result = *itr; + ++itr; + + for(;itr != parsedSelectors.end(); ++itr) { + Selector_List_Obj child = *itr; + std::vector exploded; + selector_stack.push_back(result); + Selector_List_Obj rv = child->resolve_parent_refs(selector_stack, traces); + selector_stack.pop_back(); + for (size_t m = 0, mLen = rv->length(); m < mLen; ++m) { + exploded.push_back((*rv)[m]); + } + result->elements(exploded); + } + + Listize listize; + return result->perform(&listize); + } + + Signature selector_append_sig = "selector-append($selectors...)"; + BUILT_IN(selector_append) + { + List_Ptr arglist = ARG("$selectors", List); + + // Not enough parameters + if( arglist->length() == 0 ) + error("$selectors: At least one selector must be passed for `selector-append'", pstate, traces); + + // Parse args into vector of selectors + std::vector parsedSelectors; + for (size_t i = 0, L = arglist->length(); i < L; ++i) { + Expression_Obj exp = Cast(arglist->value_at_index(i)); + if (exp->concrete_type() == Expression::NULL_VAL) { + std::stringstream msg; + msg << "$selectors: null is not a valid selector: it must be a string,\n"; + msg << "a list of strings, or a list of lists of strings for 'selector-append'"; + error(msg.str(), pstate, traces); + } + if (String_Constant_Ptr str = Cast(exp)) { + str->quote_mark(0); + } + std::string exp_src = exp->to_string(); + Selector_List_Obj sel = Parser::parse_selector(exp_src.c_str(), ctx, traces); + parsedSelectors.push_back(sel); + } + + // Nothing to do + if( parsedSelectors.empty() ) { + return SASS_MEMORY_NEW(Null, pstate); + } + + // Set the first element as the `result`, keep appending to as we go down the parsedSelector vector. 
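  // [editorial illustration, not libsass source] What the loop below produces:
  // for selector-append("a", ".disabled") the first parsed selector "a" becomes
  // the running result, and each later selector's compound parts are concatenated
  // onto the innermost compound of every element of that result, giving
  // "a.disabled". Selectors that carry a combinator or start with the universal
  // selector "*" cannot be appended and raise the "Can't append" error shown below.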
+ std::vector::iterator itr = parsedSelectors.begin(); + Selector_List_Obj result = *itr; + ++itr; + + for(;itr != parsedSelectors.end(); ++itr) { + Selector_List_Obj child = *itr; + std::vector newElements; + + // For every COMPLEX_SELECTOR in `result` + // For every COMPLEX_SELECTOR in `child` + // let parentSeqClone equal a copy of result->elements[i] + // let childSeq equal child->elements[j] + // Append all of childSeq head elements into parentSeqClone + // Set the innermost tail of parentSeqClone, to childSeq's tail + // Replace result->elements with newElements + for (size_t i = 0, resultLen = result->length(); i < resultLen; ++i) { + for (size_t j = 0, childLen = child->length(); j < childLen; ++j) { + Complex_Selector_Obj parentSeqClone = SASS_MEMORY_CLONE((*result)[i]); + Complex_Selector_Obj childSeq = (*child)[j]; + Complex_Selector_Obj base = childSeq->tail(); + + // Must be a simple sequence + if( childSeq->combinator() != Complex_Selector::Combinator::ANCESTOR_OF ) { + std::string msg("Can't append \""); + msg += childSeq->to_string(); + msg += "\" to \""; + msg += parentSeqClone->to_string(); + msg += "\" for `selector-append'"; + error(msg, pstate, traces); + } + + // Cannot be a Universal selector + Element_Selector_Obj pType = Cast(childSeq->head()->first()); + if(pType && pType->name() == "*") { + std::string msg("Can't append \""); + msg += childSeq->to_string(); + msg += "\" to \""; + msg += parentSeqClone->to_string(); + msg += "\" for `selector-append'"; + error(msg, pstate, traces); + } + + // TODO: Add check for namespace stuff + + // append any selectors in childSeq's head + parentSeqClone->innermost()->head()->concat(base->head()); + + // Set parentSeqClone new tail + parentSeqClone->innermost()->tail( base->tail() ); + + newElements.push_back(parentSeqClone); + } + } + + result->elements(newElements); + } + + Listize listize; + return result->perform(&listize); + } + + Signature selector_unify_sig = "selector-unify($selector1, $selector2)"; + BUILT_IN(selector_unify) + { + Selector_List_Obj selector1 = ARGSEL("$selector1", Selector_List_Obj, p_contextualize); + Selector_List_Obj selector2 = ARGSEL("$selector2", Selector_List_Obj, p_contextualize); + + Selector_List_Obj result = selector1->unify_with(selector2); + Listize listize; + return result->perform(&listize); + } + + Signature simple_selectors_sig = "simple-selectors($selector)"; + BUILT_IN(simple_selectors) + { + Compound_Selector_Obj sel = ARGSEL("$selector", Compound_Selector_Obj, p_contextualize); + + List_Ptr l = SASS_MEMORY_NEW(List, sel->pstate(), sel->length(), SASS_COMMA); + + for (size_t i = 0, L = sel->length(); i < L; ++i) { + Simple_Selector_Obj ss = (*sel)[i]; + std::string ss_string = ss->to_string() ; + + l->append(SASS_MEMORY_NEW(String_Quoted, ss->pstate(), ss_string)); + } + + return l; + } + + Signature selector_extend_sig = "selector-extend($selector, $extendee, $extender)"; + BUILT_IN(selector_extend) + { + Selector_List_Obj selector = ARGSEL("$selector", Selector_List_Obj, p_contextualize); + Selector_List_Obj extendee = ARGSEL("$extendee", Selector_List_Obj, p_contextualize); + Selector_List_Obj extender = ARGSEL("$extender", Selector_List_Obj, p_contextualize); + + Subset_Map subset_map; + extender->populate_extends(extendee, subset_map); + Extend extend(subset_map); + + Selector_List_Obj result = extend.extendSelectorList(selector, false); + + Listize listize; + return result->perform(&listize); + } + + Signature selector_replace_sig = "selector-replace($selector, $original, 
$replacement)"; + BUILT_IN(selector_replace) + { + Selector_List_Obj selector = ARGSEL("$selector", Selector_List_Obj, p_contextualize); + Selector_List_Obj original = ARGSEL("$original", Selector_List_Obj, p_contextualize); + Selector_List_Obj replacement = ARGSEL("$replacement", Selector_List_Obj, p_contextualize); + Subset_Map subset_map; + replacement->populate_extends(original, subset_map); + Extend extend(subset_map); + + Selector_List_Obj result = extend.extendSelectorList(selector, true); + + Listize listize; + return result->perform(&listize); + } + + Signature selector_parse_sig = "selector-parse($selector)"; + BUILT_IN(selector_parse) + { + Selector_List_Obj sel = ARGSEL("$selector", Selector_List_Obj, p_contextualize); + + Listize listize; + return sel->perform(&listize); + } + + Signature is_superselector_sig = "is-superselector($super, $sub)"; + BUILT_IN(is_superselector) + { + Selector_List_Obj sel_sup = ARGSEL("$super", Selector_List_Obj, p_contextualize); + Selector_List_Obj sel_sub = ARGSEL("$sub", Selector_List_Obj, p_contextualize); + bool result = sel_sup->is_superselector_of(sel_sub); + return SASS_MEMORY_NEW(Boolean, pstate, result); + } + + Signature unique_id_sig = "unique-id()"; + BUILT_IN(unique_id) + { + std::stringstream ss; + std::uniform_real_distribution<> distributor(0, 4294967296); // 16^8 + uint_fast32_t distributed = static_cast<uint_fast32_t>(distributor(rand)); + ss << "u" << std::setfill('0') << std::setw(8) << std::hex << distributed; + return SASS_MEMORY_NEW(String_Quoted, pstate, ss.str()); + } + + Signature is_bracketed_sig = "is-bracketed($list)"; + BUILT_IN(is_bracketed) + { + Value_Obj value = ARG("$list", Value); + List_Obj list = Cast<List>(value); + return SASS_MEMORY_NEW(Boolean, pstate, list && list->is_bracketed()); + } + + Signature content_exists_sig = "content-exists()"; + BUILT_IN(content_exists) + { + if (!d_env.has_global("is_in_mixin")) { + error("Cannot call content-exists() except within a mixin.", pstate, traces); + } + return SASS_MEMORY_NEW(Boolean, pstate, d_env.has_lexical("@content[m]")); + } + + Signature get_function_sig = "get-function($name, $css: false)"; + BUILT_IN(get_function) + { + String_Constant_Ptr ss = Cast<String_Constant>(env["$name"]); + if (!ss) { + error("$name: " + (env["$name"]->to_string()) + " is not a string for `get-function'", pstate, traces); + } + + std::string name = Util::normalize_underscores(unquote(ss->value())); + std::string full_name = name + "[f]"; + + Boolean_Obj css = ARG("$css", Boolean); + if (!css->is_false()) { + Definition_Ptr def = SASS_MEMORY_NEW(Definition, + pstate, + name, + SASS_MEMORY_NEW(Parameters, pstate), + SASS_MEMORY_NEW(Block, pstate, 0, false), + Definition::FUNCTION); + return SASS_MEMORY_NEW(Function, pstate, def, true); + } + + + if (!d_env.has_global(full_name)) { + error("Function not found: " + name, pstate, traces); + } + + Definition_Ptr def = Cast<Definition>(d_env[full_name]); + return SASS_MEMORY_NEW(Function, pstate, def, false); + } + } +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/functions.hpp b/mybulma/node_modules/node-sass/src/libsass/src/functions.hpp new file mode 100644 index 0000000..7019be9 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/functions.hpp @@ -0,0 +1,198 @@ +#ifndef SASS_FUNCTIONS_H +#define SASS_FUNCTIONS_H + +#include "listize.hpp" +#include "position.hpp" +#include "environment.hpp" +#include "ast_fwd_decl.hpp" +#include "sass/functions.h" + +#define BUILT_IN(name) Expression_Ptr \ +name(Env& env, Env& d_env, Context& ctx, Signature sig, 
ParserState pstate, Backtraces traces, std::vector selector_stack) + +namespace Sass { + struct Backtrace; + typedef const char* Signature; + typedef Expression_Ptr (*Native_Function)(Env&, Env&, Context&, Signature, ParserState, Backtraces, std::vector); + + Definition_Ptr make_native_function(Signature, Native_Function, Context& ctx); + Definition_Ptr make_c_function(Sass_Function_Entry c_func, Context& ctx); + + std::string function_name(Signature); + + namespace Functions { + + extern Signature rgb_sig; + extern Signature rgba_4_sig; + extern Signature rgba_2_sig; + extern Signature red_sig; + extern Signature green_sig; + extern Signature blue_sig; + extern Signature mix_sig; + extern Signature hsl_sig; + extern Signature hsla_sig; + extern Signature hue_sig; + extern Signature saturation_sig; + extern Signature lightness_sig; + extern Signature adjust_hue_sig; + extern Signature lighten_sig; + extern Signature darken_sig; + extern Signature saturate_sig; + extern Signature desaturate_sig; + extern Signature grayscale_sig; + extern Signature complement_sig; + extern Signature invert_sig; + extern Signature alpha_sig; + extern Signature opacity_sig; + extern Signature opacify_sig; + extern Signature fade_in_sig; + extern Signature transparentize_sig; + extern Signature fade_out_sig; + extern Signature adjust_color_sig; + extern Signature scale_color_sig; + extern Signature change_color_sig; + extern Signature ie_hex_str_sig; + extern Signature unquote_sig; + extern Signature quote_sig; + extern Signature str_length_sig; + extern Signature str_insert_sig; + extern Signature str_index_sig; + extern Signature str_slice_sig; + extern Signature to_upper_case_sig; + extern Signature to_lower_case_sig; + extern Signature percentage_sig; + extern Signature round_sig; + extern Signature ceil_sig; + extern Signature floor_sig; + extern Signature abs_sig; + extern Signature min_sig; + extern Signature max_sig; + extern Signature inspect_sig; + extern Signature random_sig; + extern Signature length_sig; + extern Signature nth_sig; + extern Signature index_sig; + extern Signature join_sig; + extern Signature append_sig; + extern Signature zip_sig; + extern Signature list_separator_sig; + extern Signature type_of_sig; + extern Signature unit_sig; + extern Signature unitless_sig; + extern Signature comparable_sig; + extern Signature variable_exists_sig; + extern Signature global_variable_exists_sig; + extern Signature function_exists_sig; + extern Signature mixin_exists_sig; + extern Signature feature_exists_sig; + extern Signature call_sig; + extern Signature not_sig; + extern Signature if_sig; + extern Signature map_get_sig; + extern Signature map_merge_sig; + extern Signature map_remove_sig; + extern Signature map_keys_sig; + extern Signature map_values_sig; + extern Signature map_has_key_sig; + extern Signature keywords_sig; + extern Signature set_nth_sig; + extern Signature unique_id_sig; + extern Signature selector_nest_sig; + extern Signature selector_append_sig; + extern Signature selector_extend_sig; + extern Signature selector_replace_sig; + extern Signature selector_unify_sig; + extern Signature is_superselector_sig; + extern Signature simple_selectors_sig; + extern Signature selector_parse_sig; + extern Signature is_bracketed_sig; + extern Signature content_exists_sig; + extern Signature get_function_sig; + + BUILT_IN(rgb); + BUILT_IN(rgba_4); + BUILT_IN(rgba_2); + BUILT_IN(red); + BUILT_IN(green); + BUILT_IN(blue); + BUILT_IN(mix); + BUILT_IN(hsl); + BUILT_IN(hsla); + BUILT_IN(hue); + 
BUILT_IN(saturation); + BUILT_IN(lightness); + BUILT_IN(adjust_hue); + BUILT_IN(lighten); + BUILT_IN(darken); + BUILT_IN(saturate); + BUILT_IN(desaturate); + BUILT_IN(grayscale); + BUILT_IN(complement); + BUILT_IN(invert); + BUILT_IN(alpha); + BUILT_IN(opacify); + BUILT_IN(transparentize); + BUILT_IN(adjust_color); + BUILT_IN(scale_color); + BUILT_IN(change_color); + BUILT_IN(ie_hex_str); + BUILT_IN(sass_unquote); + BUILT_IN(sass_quote); + BUILT_IN(str_length); + BUILT_IN(str_insert); + BUILT_IN(str_index); + BUILT_IN(str_slice); + BUILT_IN(to_upper_case); + BUILT_IN(to_lower_case); + BUILT_IN(percentage); + BUILT_IN(round); + BUILT_IN(ceil); + BUILT_IN(floor); + BUILT_IN(abs); + BUILT_IN(min); + BUILT_IN(max); + BUILT_IN(inspect); + BUILT_IN(random); + BUILT_IN(length); + BUILT_IN(nth); + BUILT_IN(index); + BUILT_IN(join); + BUILT_IN(append); + BUILT_IN(zip); + BUILT_IN(list_separator); + BUILT_IN(type_of); + BUILT_IN(unit); + BUILT_IN(unitless); + BUILT_IN(comparable); + BUILT_IN(variable_exists); + BUILT_IN(global_variable_exists); + BUILT_IN(function_exists); + BUILT_IN(mixin_exists); + BUILT_IN(feature_exists); + BUILT_IN(call); + BUILT_IN(sass_not); + BUILT_IN(sass_if); + BUILT_IN(map_get); + BUILT_IN(map_merge); + BUILT_IN(map_remove); + BUILT_IN(map_keys); + BUILT_IN(map_values); + BUILT_IN(map_has_key); + BUILT_IN(keywords); + BUILT_IN(set_nth); + BUILT_IN(unique_id); + BUILT_IN(selector_nest); + BUILT_IN(selector_append); + BUILT_IN(selector_extend); + BUILT_IN(selector_replace); + BUILT_IN(selector_unify); + BUILT_IN(is_superselector); + BUILT_IN(simple_selectors); + BUILT_IN(selector_parse); + BUILT_IN(is_bracketed); + BUILT_IN(content_exists); + BUILT_IN(get_function); + } +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/inspect.cpp b/mybulma/node_modules/node-sass/src/libsass/src/inspect.cpp new file mode 100644 index 0000000..5cd8cc0 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/inspect.cpp @@ -0,0 +1,1138 @@ +#include "sass.hpp" +#include +#include +#include +#include +#include +#include + +#include "ast.hpp" +#include "inspect.hpp" +#include "context.hpp" +#include "listize.hpp" +#include "color_maps.hpp" +#include "utf8/checked.h" + +namespace Sass { + + Inspect::Inspect(const Emitter& emi) + : Emitter(emi) + { } + Inspect::~Inspect() { } + + // statements + void Inspect::operator()(Block_Ptr block) + { + if (!block->is_root()) { + add_open_mapping(block); + append_scope_opener(); + } + if (output_style() == NESTED) indentation += block->tabs(); + for (size_t i = 0, L = block->length(); i < L; ++i) { + (*block)[i]->perform(this); + } + if (output_style() == NESTED) indentation -= block->tabs(); + if (!block->is_root()) { + append_scope_closer(); + add_close_mapping(block); + } + + } + + void Inspect::operator()(Ruleset_Ptr ruleset) + { + if (ruleset->selector()) { + opt.in_selector = true; + ruleset->selector()->perform(this); + opt.in_selector = false; + } + if (ruleset->block()) { + ruleset->block()->perform(this); + } + } + + void Inspect::operator()(Keyframe_Rule_Ptr rule) + { + if (rule->name()) rule->name()->perform(this); + if (rule->block()) rule->block()->perform(this); + } + + void Inspect::operator()(Bubble_Ptr bubble) + { + append_indentation(); + append_token("::BUBBLE", bubble); + append_scope_opener(); + bubble->node()->perform(this); + append_scope_closer(); + } + + void Inspect::operator()(Media_Block_Ptr media_block) + { + append_indentation(); + append_token("@media", media_block); + append_mandatory_space(); 
+ in_media_block = true; + media_block->media_queries()->perform(this); + in_media_block = false; + media_block->block()->perform(this); + } + + void Inspect::operator()(Supports_Block_Ptr feature_block) + { + append_indentation(); + append_token("@supports", feature_block); + append_mandatory_space(); + feature_block->condition()->perform(this); + feature_block->block()->perform(this); + } + + void Inspect::operator()(At_Root_Block_Ptr at_root_block) + { + append_indentation(); + append_token("@at-root ", at_root_block); + append_mandatory_space(); + if(at_root_block->expression()) at_root_block->expression()->perform(this); + if(at_root_block->block()) at_root_block->block()->perform(this); + } + + void Inspect::operator()(Directive_Ptr at_rule) + { + append_indentation(); + append_token(at_rule->keyword(), at_rule); + if (at_rule->selector()) { + append_mandatory_space(); + bool was_wrapped = in_wrapped; + in_wrapped = true; + at_rule->selector()->perform(this); + in_wrapped = was_wrapped; + } + if (at_rule->value()) { + append_mandatory_space(); + at_rule->value()->perform(this); + } + if (at_rule->block()) { + at_rule->block()->perform(this); + } + else { + append_delimiter(); + } + } + + void Inspect::operator()(Declaration_Ptr dec) + { + if (dec->value()->concrete_type() == Expression::NULL_VAL) return; + bool was_decl = in_declaration; + in_declaration = true; + LOCAL_FLAG(in_custom_property, dec->is_custom_property()); + + if (output_style() == NESTED) + indentation += dec->tabs(); + append_indentation(); + if (dec->property()) + dec->property()->perform(this); + append_colon_separator(); + + if (dec->value()->concrete_type() == Expression::SELECTOR) { + Listize listize; + Expression_Obj ls = dec->value()->perform(&listize); + ls->perform(this); + } else { + dec->value()->perform(this); + } + + if (dec->is_important()) { + append_optional_space(); + append_string("!important"); + } + append_delimiter(); + if (output_style() == NESTED) + indentation -= dec->tabs(); + in_declaration = was_decl; + } + + void Inspect::operator()(Assignment_Ptr assn) + { + append_token(assn->variable(), assn); + append_colon_separator(); + assn->value()->perform(this); + if (assn->is_default()) { + append_optional_space(); + append_string("!default"); + } + append_delimiter(); + } + + void Inspect::operator()(Import_Ptr import) + { + if (!import->urls().empty()) { + append_token("@import", import); + append_mandatory_space(); + + import->urls().front()->perform(this); + if (import->urls().size() == 1) { + if (import->import_queries()) { + append_mandatory_space(); + import->import_queries()->perform(this); + } + } + append_delimiter(); + for (size_t i = 1, S = import->urls().size(); i < S; ++i) { + append_mandatory_linefeed(); + append_token("@import", import); + append_mandatory_space(); + + import->urls()[i]->perform(this); + if (import->urls().size() - 1 == i) { + if (import->import_queries()) { + append_mandatory_space(); + import->import_queries()->perform(this); + } + } + append_delimiter(); + } + } + } + + void Inspect::operator()(Import_Stub_Ptr import) + { + append_indentation(); + append_token("@import", import); + append_mandatory_space(); + append_string(import->imp_path()); + append_delimiter(); + } + + void Inspect::operator()(Warning_Ptr warning) + { + append_indentation(); + append_token("@warn", warning); + append_mandatory_space(); + warning->message()->perform(this); + append_delimiter(); + } + + void Inspect::operator()(Error_Ptr error) + { + append_indentation(); + 
append_token("@error", error); + append_mandatory_space(); + error->message()->perform(this); + append_delimiter(); + } + + void Inspect::operator()(Debug_Ptr debug) + { + append_indentation(); + append_token("@debug", debug); + append_mandatory_space(); + debug->value()->perform(this); + append_delimiter(); + } + + void Inspect::operator()(Comment_Ptr comment) + { + in_comment = true; + comment->text()->perform(this); + in_comment = false; + } + + void Inspect::operator()(If_Ptr cond) + { + append_indentation(); + append_token("@if", cond); + append_mandatory_space(); + cond->predicate()->perform(this); + cond->block()->perform(this); + if (cond->alternative()) { + append_optional_linefeed(); + append_indentation(); + append_string("else"); + cond->alternative()->perform(this); + } + } + + void Inspect::operator()(For_Ptr loop) + { + append_indentation(); + append_token("@for", loop); + append_mandatory_space(); + append_string(loop->variable()); + append_string(" from "); + loop->lower_bound()->perform(this); + append_string(loop->is_inclusive() ? " through " : " to "); + loop->upper_bound()->perform(this); + loop->block()->perform(this); + } + + void Inspect::operator()(Each_Ptr loop) + { + append_indentation(); + append_token("@each", loop); + append_mandatory_space(); + append_string(loop->variables()[0]); + for (size_t i = 1, L = loop->variables().size(); i < L; ++i) { + append_comma_separator(); + append_string(loop->variables()[i]); + } + append_string(" in "); + loop->list()->perform(this); + loop->block()->perform(this); + } + + void Inspect::operator()(While_Ptr loop) + { + append_indentation(); + append_token("@while", loop); + append_mandatory_space(); + loop->predicate()->perform(this); + loop->block()->perform(this); + } + + void Inspect::operator()(Return_Ptr ret) + { + append_indentation(); + append_token("@return", ret); + append_mandatory_space(); + ret->value()->perform(this); + append_delimiter(); + } + + void Inspect::operator()(Extension_Ptr extend) + { + append_indentation(); + append_token("@extend", extend); + append_mandatory_space(); + extend->selector()->perform(this); + append_delimiter(); + } + + void Inspect::operator()(Definition_Ptr def) + { + append_indentation(); + if (def->type() == Definition::MIXIN) { + append_token("@mixin", def); + append_mandatory_space(); + } else { + append_token("@function", def); + append_mandatory_space(); + } + append_string(def->name()); + def->parameters()->perform(this); + def->block()->perform(this); + } + + void Inspect::operator()(Mixin_Call_Ptr call) + { + append_indentation(); + append_token("@include", call); + append_mandatory_space(); + append_string(call->name()); + if (call->arguments()) { + call->arguments()->perform(this); + } + if (call->block()) { + append_optional_space(); + call->block()->perform(this); + } + if (!call->block()) append_delimiter(); + } + + void Inspect::operator()(Content_Ptr content) + { + append_indentation(); + append_token("@content", content); + append_delimiter(); + } + + void Inspect::operator()(Map_Ptr map) + { + if (output_style() == TO_SASS && map->empty()) { + append_string("()"); + return; + } + if (map->empty()) return; + if (map->is_invisible()) return; + bool items_output = false; + append_string("("); + for (auto key : map->keys()) { + if (items_output) append_comma_separator(); + key->perform(this); + append_colon_separator(); + LOCAL_FLAG(in_space_array, true); + LOCAL_FLAG(in_comma_array, true); + map->at(key)->perform(this); + items_output = true; + } + 
append_string(")"); + } + + std::string Inspect::lbracket(List_Ptr list) { + return list->is_bracketed() ? "[" : "("; + } + + std::string Inspect::rbracket(List_Ptr list) { + return list->is_bracketed() ? "]" : ")"; + } + + void Inspect::operator()(List_Ptr list) + { + if (list->empty() && (output_style() == TO_SASS || list->is_bracketed())) { + append_string(lbracket(list)); + append_string(rbracket(list)); + return; + } + std::string sep(list->separator() == SASS_SPACE ? " " : ","); + if ((output_style() != COMPRESSED) && sep == ",") sep += " "; + else if (in_media_block && sep != " ") sep += " "; // verified + if (list->empty()) return; + bool items_output = false; + + bool was_space_array = in_space_array; + bool was_comma_array = in_comma_array; + // if the list is bracketed, always include the left bracket + if (list->is_bracketed()) { + append_string(lbracket(list)); + } + // probably ruby sass eqivalent of element_needs_parens + else if (output_style() == TO_SASS && + list->length() == 1 && + !list->from_selector() && + !Cast(list->at(0)) && + !Cast(list->at(0)) + ) { + append_string(lbracket(list)); + } + else if (!in_declaration && (list->separator() == SASS_HASH || + (list->separator() == SASS_SPACE && in_space_array) || + (list->separator() == SASS_COMMA && in_comma_array) + )) { + append_string(lbracket(list)); + } + + if (list->separator() == SASS_SPACE) in_space_array = true; + else if (list->separator() == SASS_COMMA) in_comma_array = true; + + for (size_t i = 0, L = list->size(); i < L; ++i) { + if (list->separator() == SASS_HASH) + { sep[0] = i % 2 ? ':' : ','; } + Expression_Obj list_item = list->at(i); + if (output_style() != TO_SASS) { + if (list_item->is_invisible()) { + // this fixes an issue with "" in a list + if (!Cast(list_item)) { + continue; + } + } + } + if (items_output) { + append_string(sep); + } + if (items_output && sep != " ") + append_optional_space(); + list_item->perform(this); + items_output = true; + } + + in_comma_array = was_comma_array; + in_space_array = was_space_array; + + // if the list is bracketed, always include the right bracket + if (list->is_bracketed()) { + if (list->separator() == SASS_COMMA && list->size() == 1) { + append_string(","); + } + append_string(rbracket(list)); + } + // probably ruby sass eqivalent of element_needs_parens + else if (output_style() == TO_SASS && + list->length() == 1 && + !list->from_selector() && + !Cast(list->at(0)) && + !Cast(list->at(0)) + ) { + append_string(","); + append_string(rbracket(list)); + } + else if (!in_declaration && (list->separator() == SASS_HASH || + (list->separator() == SASS_SPACE && in_space_array) || + (list->separator() == SASS_COMMA && in_comma_array) + )) { + append_string(rbracket(list)); + } + + } + + void Inspect::operator()(Binary_Expression_Ptr expr) + { + expr->left()->perform(this); + if ( in_media_block || + (output_style() == INSPECT) || ( + expr->op().ws_before + && (!expr->is_interpolant()) + && (expr->is_left_interpolant() || + expr->is_right_interpolant()) + + )) append_string(" "); + switch (expr->optype()) { + case Sass_OP::AND: append_string("&&"); break; + case Sass_OP::OR: append_string("||"); break; + case Sass_OP::EQ: append_string("=="); break; + case Sass_OP::NEQ: append_string("!="); break; + case Sass_OP::GT: append_string(">"); break; + case Sass_OP::GTE: append_string(">="); break; + case Sass_OP::LT: append_string("<"); break; + case Sass_OP::LTE: append_string("<="); break; + case Sass_OP::ADD: append_string("+"); break; + case Sass_OP::SUB: 
append_string("-"); break; + case Sass_OP::MUL: append_string("*"); break; + case Sass_OP::DIV: append_string("/"); break; + case Sass_OP::MOD: append_string("%"); break; + default: break; // shouldn't get here + } + if ( in_media_block || + (output_style() == INSPECT) || ( + expr->op().ws_after + && (!expr->is_interpolant()) + && (expr->is_left_interpolant() || + expr->is_right_interpolant()) + )) append_string(" "); + expr->right()->perform(this); + } + + void Inspect::operator()(Unary_Expression_Ptr expr) + { + if (expr->optype() == Unary_Expression::PLUS) append_string("+"); + else if (expr->optype() == Unary_Expression::SLASH) append_string("/"); + else append_string("-"); + expr->operand()->perform(this); + } + + void Inspect::operator()(Function_Call_Ptr call) + { + append_token(call->name(), call); + call->arguments()->perform(this); + } + + void Inspect::operator()(Function_Call_Schema_Ptr call) + { + call->name()->perform(this); + call->arguments()->perform(this); + } + + void Inspect::operator()(Variable_Ptr var) + { + append_token(var->name(), var); + } + + void Inspect::operator()(Number_Ptr n) + { + + std::string res; + + // reduce units + n->reduce(); + + // check if the fractional part of the value equals to zero + // neat trick from http://stackoverflow.com/a/1521682/1550314 + // double int_part; bool is_int = modf(value, &int_part) == 0.0; + + // this all cannot be done with one run only, since fixed + // output differs from normal output and regular output + // can contain scientific notation which we do not want! + + // first sample + std::stringstream ss; + ss.precision(12); + ss << n->value(); + + // check if we got scientific notation in result + if (ss.str().find_first_of("e") != std::string::npos) { + ss.clear(); ss.str(std::string()); + ss.precision(std::max(12, opt.precision)); + ss << std::fixed << n->value(); + } + + std::string tmp = ss.str(); + size_t pos_point = tmp.find_first_of(".,"); + size_t pos_fract = tmp.find_last_not_of("0"); + bool is_int = pos_point == pos_fract || + pos_point == std::string::npos; + + // reset stream for another run + ss.clear(); ss.str(std::string()); + + // take a shortcut for integers + if (is_int) + { + ss.precision(0); + ss << std::fixed << n->value(); + res = std::string(ss.str()); + } + // process floats + else + { + // do we have have too much precision? + if (pos_fract < opt.precision + pos_point) + { ss.precision((int)(pos_fract - pos_point)); } + else { ss.precision(opt.precision); } + // round value again + ss << std::fixed << n->value(); + res = std::string(ss.str()); + // maybe we truncated up to decimal point + size_t pos = res.find_last_not_of("0"); + // handle case where we have a "0" + if (pos == std::string::npos) { + res = "0.0"; + } else { + bool at_dec_point = res[pos] == '.' || + res[pos] == ','; + // don't leave a blank point + if (at_dec_point) ++ pos; + res.resize (pos + 1); + } + } + + // some final cosmetics + if (res == "0.0") res = "0"; + else if (res == "") res = "0"; + else if (res == "-0") res = "0"; + else if (res == "-0.0") res = "0"; + else if (opt.output_style == COMPRESSED) + { + // check if handling negative nr + size_t off = res[0] == '-' ? 
1 : 0; + // remove leading zero from floating point in compressed mode + if (n->zero() && res[off] == '0' && res[off+1] == '.') res.erase(off, 1); + } + + // add unit now + res += n->unit(); + + // output the final token + append_token(res, n); + } + + // helper function for serializing colors + template + static double cap_channel(double c) { + if (c > range) return range; + else if (c < 0) return 0; + else return c; + } + + void Inspect::operator()(Color_Ptr c) + { + // output the final token + std::stringstream ss; + + // original color name + // maybe an unknown token + std::string name = c->disp(); + + if (opt.in_selector && name != "") { + append_token(name, c); + return; + } + + // resolved color + std::string res_name = name; + + double r = Sass::round(cap_channel<0xff>(c->r()), opt.precision); + double g = Sass::round(cap_channel<0xff>(c->g()), opt.precision); + double b = Sass::round(cap_channel<0xff>(c->b()), opt.precision); + double a = cap_channel<1> (c->a()); + + // get color from given name (if one was given at all) + if (name != "" && name_to_color(name)) { + Color_Ptr_Const n = name_to_color(name); + r = Sass::round(cap_channel<0xff>(n->r()), opt.precision); + g = Sass::round(cap_channel<0xff>(n->g()), opt.precision); + b = Sass::round(cap_channel<0xff>(n->b()), opt.precision); + a = cap_channel<1> (n->a()); + } + // otherwise get the possible resolved color name + else { + double numval = r * 0x10000 + g * 0x100 + b; + if (color_to_name(numval)) + res_name = color_to_name(numval); + } + + std::stringstream hexlet; + // dart sass compressed all colors in regular css always + // ruby sass and libsass does it only when not delayed + // since color math is going to be removed, this can go too + bool compressed = opt.output_style == COMPRESSED; + hexlet << '#' << std::setw(1) << std::setfill('0'); + // create a short color hexlet if there is any need for it + if (compressed && is_color_doublet(r, g, b) && a == 1) { + hexlet << std::hex << std::setw(1) << (static_cast(r) >> 4); + hexlet << std::hex << std::setw(1) << (static_cast(g) >> 4); + hexlet << std::hex << std::setw(1) << (static_cast(b) >> 4); + } else { + hexlet << std::hex << std::setw(2) << static_cast(r); + hexlet << std::hex << std::setw(2) << static_cast(g); + hexlet << std::hex << std::setw(2) << static_cast(b); + } + + if (compressed && !c->is_delayed()) name = ""; + if (opt.output_style == INSPECT && a >= 1) { + append_token(hexlet.str(), c); + return; + } + + // retain the originally specified color definition if unchanged + if (name != "") { + ss << name; + } + else if (a >= 1) { + if (res_name != "") { + if (compressed && hexlet.str().size() < res_name.size()) { + ss << hexlet.str(); + } else { + ss << res_name; + } + } + else { + ss << hexlet.str(); + } + } + else { + ss << "rgba("; + ss << static_cast(r) << ","; + if (!compressed) ss << " "; + ss << static_cast(g) << ","; + if (!compressed) ss << " "; + ss << static_cast(b) << ","; + if (!compressed) ss << " "; + ss << a << ')'; + } + + append_token(ss.str(), c); + + } + + void Inspect::operator()(Boolean_Ptr b) + { + // output the final token + append_token(b->value() ? "true" : "false", b); + } + + void Inspect::operator()(String_Schema_Ptr ss) + { + // Evaluation should turn these into String_Constants, + // so this method is only for inspection purposes. 
+ for (size_t i = 0, L = ss->length(); i < L; ++i) { + if ((*ss)[i]->is_interpolant()) append_string("#{"); + (*ss)[i]->perform(this); + if ((*ss)[i]->is_interpolant()) append_string("}"); + } + } + + void Inspect::operator()(String_Constant_Ptr s) + { + append_token(s->value(), s); + } + + void Inspect::operator()(String_Quoted_Ptr s) + { + if (const char q = s->quote_mark()) { + append_token(quote(s->value(), q), s); + } else { + append_token(s->value(), s); + } + } + + void Inspect::operator()(Custom_Error_Ptr e) + { + append_token(e->message(), e); + } + + void Inspect::operator()(Custom_Warning_Ptr w) + { + append_token(w->message(), w); + } + + void Inspect::operator()(Supports_Operator_Ptr so) + { + + if (so->needs_parens(so->left())) append_string("("); + so->left()->perform(this); + if (so->needs_parens(so->left())) append_string(")"); + + if (so->operand() == Supports_Operator::AND) { + append_mandatory_space(); + append_token("and", so); + append_mandatory_space(); + } else if (so->operand() == Supports_Operator::OR) { + append_mandatory_space(); + append_token("or", so); + append_mandatory_space(); + } + + if (so->needs_parens(so->right())) append_string("("); + so->right()->perform(this); + if (so->needs_parens(so->right())) append_string(")"); + } + + void Inspect::operator()(Supports_Negation_Ptr sn) + { + append_token("not", sn); + append_mandatory_space(); + if (sn->needs_parens(sn->condition())) append_string("("); + sn->condition()->perform(this); + if (sn->needs_parens(sn->condition())) append_string(")"); + } + + void Inspect::operator()(Supports_Declaration_Ptr sd) + { + append_string("("); + sd->feature()->perform(this); + append_string(": "); + sd->value()->perform(this); + append_string(")"); + } + + void Inspect::operator()(Supports_Interpolation_Ptr sd) + { + sd->value()->perform(this); + } + + void Inspect::operator()(Media_Query_Ptr mq) + { + size_t i = 0; + if (mq->media_type()) { + if (mq->is_negated()) append_string("not "); + else if (mq->is_restricted()) append_string("only "); + mq->media_type()->perform(this); + } + else { + (*mq)[i++]->perform(this); + } + for (size_t L = mq->length(); i < L; ++i) { + append_string(" and "); + (*mq)[i]->perform(this); + } + } + + void Inspect::operator()(Media_Query_Expression_Ptr mqe) + { + if (mqe->is_interpolated()) { + mqe->feature()->perform(this); + } + else { + append_string("("); + mqe->feature()->perform(this); + if (mqe->value()) { + append_string(": "); // verified + mqe->value()->perform(this); + } + append_string(")"); + } + } + + void Inspect::operator()(At_Root_Query_Ptr ae) + { + if (ae->feature()) { + append_string("("); + ae->feature()->perform(this); + if (ae->value()) { + append_colon_separator(); + ae->value()->perform(this); + } + append_string(")"); + } + } + + void Inspect::operator()(Function_Ptr f) + { + append_token("get-function", f); + append_string("("); + append_string(quote(f->name())); + append_string(")"); + } + + void Inspect::operator()(Null_Ptr n) + { + // output the final token + append_token("null", n); + } + + // parameters and arguments + void Inspect::operator()(Parameter_Ptr p) + { + append_token(p->name(), p); + if (p->default_value()) { + append_colon_separator(); + p->default_value()->perform(this); + } + else if (p->is_rest_parameter()) { + append_string("..."); + } + } + + void Inspect::operator()(Parameters_Ptr p) + { + append_string("("); + if (!p->empty()) { + (*p)[0]->perform(this); + for (size_t i = 1, L = p->length(); i < L; ++i) { + append_comma_separator(); + 
(*p)[i]->perform(this); + } + } + append_string(")"); + } + + void Inspect::operator()(Argument_Ptr a) + { + if (!a->name().empty()) { + append_token(a->name(), a); + append_colon_separator(); + } + if (!a->value()) return; + // Special case: argument nulls can be ignored + if (a->value()->concrete_type() == Expression::NULL_VAL) { + return; + } + if (a->value()->concrete_type() == Expression::STRING) { + String_Constant_Ptr s = Cast(a->value()); + if (s) s->perform(this); + } else { + a->value()->perform(this); + } + if (a->is_rest_argument()) { + append_string("..."); + } + } + + void Inspect::operator()(Arguments_Ptr a) + { + append_string("("); + if (!a->empty()) { + (*a)[0]->perform(this); + for (size_t i = 1, L = a->length(); i < L; ++i) { + append_string(", "); // verified + // Sass Bug? append_comma_separator(); + (*a)[i]->perform(this); + } + } + append_string(")"); + } + + void Inspect::operator()(Selector_Schema_Ptr s) + { + opt.in_selector = true; + s->contents()->perform(this); + opt.in_selector = false; + } + + void Inspect::operator()(Parent_Selector_Ptr p) + { + if (p->is_real_parent_ref()) append_string("&"); + } + + void Inspect::operator()(Placeholder_Selector_Ptr s) + { + append_token(s->name(), s); + if (s->has_line_break()) append_optional_linefeed(); + if (s->has_line_break()) append_indentation(); + + } + + void Inspect::operator()(Element_Selector_Ptr s) + { + append_token(s->ns_name(), s); + } + + void Inspect::operator()(Class_Selector_Ptr s) + { + append_token(s->ns_name(), s); + if (s->has_line_break()) append_optional_linefeed(); + if (s->has_line_break()) append_indentation(); + } + + void Inspect::operator()(Id_Selector_Ptr s) + { + append_token(s->ns_name(), s); + if (s->has_line_break()) append_optional_linefeed(); + if (s->has_line_break()) append_indentation(); + } + + void Inspect::operator()(Attribute_Selector_Ptr s) + { + append_string("["); + add_open_mapping(s); + append_token(s->ns_name(), s); + if (!s->matcher().empty()) { + append_string(s->matcher()); + if (s->value() && *s->value()) { + s->value()->perform(this); + } + } + add_close_mapping(s); + if (s->modifier() != 0) { + append_mandatory_space(); + append_char(s->modifier()); + } + append_string("]"); + } + + void Inspect::operator()(Pseudo_Selector_Ptr s) + { + append_token(s->ns_name(), s); + if (s->expression()) { + append_string("("); + s->expression()->perform(this); + append_string(")"); + } + } + + void Inspect::operator()(Wrapped_Selector_Ptr s) + { + if (s->name() == " ") { + append_string(""); + } else { + bool was = in_wrapped; + in_wrapped = true; + append_token(s->name(), s); + append_string("("); + bool was_comma_array = in_comma_array; + in_comma_array = false; + s->selector()->perform(this); + in_comma_array = was_comma_array; + append_string(")"); + in_wrapped = was; + } + } + + void Inspect::operator()(Compound_Selector_Ptr s) + { + for (size_t i = 0, L = s->length(); i < L; ++i) { + (*s)[i]->perform(this); + } + if (s->has_line_break()) { + if (output_style() != COMPACT) { + append_optional_linefeed(); + } + } + } + + void Inspect::operator()(Complex_Selector_Ptr c) + { + Compound_Selector_Obj head = c->head(); + Complex_Selector_Obj tail = c->tail(); + Complex_Selector::Combinator comb = c->combinator(); + + if (comb == Complex_Selector::ANCESTOR_OF && (!head || head->empty())) { + if (tail) tail->perform(this); + return; + } + + if (c->has_line_feed()) { + if (!(c->has_parent_ref())) { + append_optional_linefeed(); + append_indentation(); + } + } + + if (head && 
head->length() != 0) head->perform(this); + bool is_empty = !head || head->length() == 0 || head->is_empty_reference(); + bool is_tail = head && !head->is_empty_reference() && tail; + if (output_style() == COMPRESSED && comb != Complex_Selector::ANCESTOR_OF) scheduled_space = 0; + + switch (comb) { + case Complex_Selector::ANCESTOR_OF: + if (is_tail) append_mandatory_space(); + break; + case Complex_Selector::PARENT_OF: + append_optional_space(); + append_string(">"); + append_optional_space(); + break; + case Complex_Selector::ADJACENT_TO: + append_optional_space(); + append_string("+"); + append_optional_space(); + break; + case Complex_Selector::REFERENCE: + append_mandatory_space(); + append_string("/"); + if (c->reference()) c->reference()->perform(this); + append_string("/"); + append_mandatory_space(); + break; + case Complex_Selector::PRECEDES: + if (is_empty) append_optional_space(); + else append_mandatory_space(); + append_string("~"); + if (tail) append_mandatory_space(); + else append_optional_space(); + break; + default: break; + } + if (tail && comb != Complex_Selector::ANCESTOR_OF) { + if (c->has_line_break()) append_optional_linefeed(); + } + if (tail) tail->perform(this); + if (!tail && c->has_line_break()) { + if (output_style() == COMPACT) { + append_mandatory_space(); + } + } + } + + void Inspect::operator()(Selector_List_Ptr g) + { + + if (g->empty()) { + if (output_style() == TO_SASS) { + append_token("()", g); + } + return; + } + + + bool was_comma_array = in_comma_array; + // probably ruby sass eqivalent of element_needs_parens + if (output_style() == TO_SASS && g->length() == 1 && + (!Cast((*g)[0]) && + !Cast((*g)[0]))) { + append_string("("); + } + else if (!in_declaration && in_comma_array) { + append_string("("); + } + + if (in_declaration) in_comma_array = true; + + for (size_t i = 0, L = g->length(); i < L; ++i) { + if (!in_wrapped && i == 0) append_indentation(); + if ((*g)[i] == 0) continue; + schedule_mapping(g->at(i)->last()); + // add_open_mapping((*g)[i]->last()); + (*g)[i]->perform(this); + // add_close_mapping((*g)[i]->last()); + if (i < L - 1) { + scheduled_space = 0; + append_comma_separator(); + } + } + + in_comma_array = was_comma_array; + // probably ruby sass eqivalent of element_needs_parens + if (output_style() == TO_SASS && g->length() == 1 && + (!Cast((*g)[0]) && + !Cast((*g)[0]))) { + append_string(",)"); + } + else if (!in_declaration && in_comma_array) { + append_string(")"); + } + + } + + void Inspect::fallback_impl(AST_Node_Ptr n) + { + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/inspect.hpp b/mybulma/node_modules/node-sass/src/libsass/src/inspect.hpp new file mode 100644 index 0000000..c36790b --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/inspect.hpp @@ -0,0 +1,103 @@ +#ifndef SASS_INSPECT_H +#define SASS_INSPECT_H + +#include "position.hpp" +#include "operation.hpp" +#include "emitter.hpp" + +namespace Sass { + class Context; + + class Inspect : public Operation_CRTP, public Emitter { + protected: + // import all the class-specific methods and override as desired + using Operation_CRTP::operator(); + + void fallback_impl(AST_Node_Ptr n); + + public: + + Inspect(const Emitter& emi); + virtual ~Inspect(); + + // statements + virtual void operator()(Block_Ptr); + virtual void operator()(Ruleset_Ptr); + virtual void operator()(Bubble_Ptr); + virtual void operator()(Supports_Block_Ptr); + virtual void operator()(Media_Block_Ptr); + virtual void operator()(At_Root_Block_Ptr); + virtual void 
operator()(Directive_Ptr); + virtual void operator()(Keyframe_Rule_Ptr); + virtual void operator()(Declaration_Ptr); + virtual void operator()(Assignment_Ptr); + virtual void operator()(Import_Ptr); + virtual void operator()(Import_Stub_Ptr); + virtual void operator()(Warning_Ptr); + virtual void operator()(Error_Ptr); + virtual void operator()(Debug_Ptr); + virtual void operator()(Comment_Ptr); + virtual void operator()(If_Ptr); + virtual void operator()(For_Ptr); + virtual void operator()(Each_Ptr); + virtual void operator()(While_Ptr); + virtual void operator()(Return_Ptr); + virtual void operator()(Extension_Ptr); + virtual void operator()(Definition_Ptr); + virtual void operator()(Mixin_Call_Ptr); + virtual void operator()(Content_Ptr); + // expressions + virtual void operator()(Map_Ptr); + virtual void operator()(Function_Ptr); + virtual void operator()(List_Ptr); + virtual void operator()(Binary_Expression_Ptr); + virtual void operator()(Unary_Expression_Ptr); + virtual void operator()(Function_Call_Ptr); + virtual void operator()(Function_Call_Schema_Ptr); + // virtual void operator()(Custom_Warning_Ptr); + // virtual void operator()(Custom_Error_Ptr); + virtual void operator()(Variable_Ptr); + virtual void operator()(Number_Ptr); + virtual void operator()(Color_Ptr); + virtual void operator()(Boolean_Ptr); + virtual void operator()(String_Schema_Ptr); + virtual void operator()(String_Constant_Ptr); + virtual void operator()(String_Quoted_Ptr); + virtual void operator()(Custom_Error_Ptr); + virtual void operator()(Custom_Warning_Ptr); + virtual void operator()(Supports_Operator_Ptr); + virtual void operator()(Supports_Negation_Ptr); + virtual void operator()(Supports_Declaration_Ptr); + virtual void operator()(Supports_Interpolation_Ptr); + virtual void operator()(Media_Query_Ptr); + virtual void operator()(Media_Query_Expression_Ptr); + virtual void operator()(At_Root_Query_Ptr); + virtual void operator()(Null_Ptr); + virtual void operator()(Parent_Selector_Ptr p); + // parameters and arguments + virtual void operator()(Parameter_Ptr); + virtual void operator()(Parameters_Ptr); + virtual void operator()(Argument_Ptr); + virtual void operator()(Arguments_Ptr); + // selectors + virtual void operator()(Selector_Schema_Ptr); + virtual void operator()(Placeholder_Selector_Ptr); + virtual void operator()(Element_Selector_Ptr); + virtual void operator()(Class_Selector_Ptr); + virtual void operator()(Id_Selector_Ptr); + virtual void operator()(Attribute_Selector_Ptr); + virtual void operator()(Pseudo_Selector_Ptr); + virtual void operator()(Wrapped_Selector_Ptr); + virtual void operator()(Compound_Selector_Ptr); + virtual void operator()(Complex_Selector_Ptr); + virtual void operator()(Selector_List_Ptr); + + virtual std::string lbracket(List_Ptr); + virtual std::string rbracket(List_Ptr); + + // template + // void fallback(U x) { fallback_impl(reinterpret_cast(x)); } + }; + +} +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/json.cpp b/mybulma/node_modules/node-sass/src/libsass/src/json.cpp new file mode 100644 index 0000000..8f433f5 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/json.cpp @@ -0,0 +1,1436 @@ +/* + Copyright (C) 2011 Joseph A. Adams (joeyadams3.14159@gmail.com) + All rights reserved. 
+ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +*/ + +#ifdef _MSC_VER +#define _CRT_SECURE_NO_WARNINGS +#define _CRT_NONSTDC_NO_DEPRECATE +#endif + +#include "json.hpp" + +// include utf8 library used by libsass +// ToDo: replace internal json utf8 code +#include "utf8.h" + +#include +#include +#include +#include +#include + +#if defined(_MSC_VER) && _MSC_VER < 1900 +#include +#ifdef snprintf +#undef snprintf +#endif +extern "C" int snprintf(char *, size_t, const char *, ...); +#endif + +#define out_of_memory() do { \ + fprintf(stderr, "Out of memory.\n"); \ + exit(EXIT_FAILURE); \ + } while (0) + +/* Sadly, strdup is not portable. */ +static char *json_strdup(const char *str) +{ + char *ret = (char*) malloc(strlen(str) + 1); + if (ret == NULL) + out_of_memory(); + strcpy(ret, str); + return ret; +} + +/* String buffer */ + +typedef struct +{ + char *cur; + char *end; + char *start; +} SB; + +static void sb_init(SB *sb) +{ + sb->start = (char*) malloc(17); + if (sb->start == NULL) + out_of_memory(); + sb->cur = sb->start; + sb->end = sb->start + 16; +} + +/* sb and need may be evaluated multiple times. */ +#define sb_need(sb, need) do { \ + if ((sb)->end - (sb)->cur < (need)) \ + sb_grow(sb, need); \ + } while (0) + +static void sb_grow(SB *sb, int need) +{ + size_t length = sb->cur - sb->start; + size_t alloc = sb->end - sb->start; + + do { + alloc *= 2; + } while (alloc < length + need); + + sb->start = (char*) realloc(sb->start, alloc + 1); + if (sb->start == NULL) + out_of_memory(); + sb->cur = sb->start + length; + sb->end = sb->start + alloc; +} + +static void sb_put(SB *sb, const char *bytes, int count) +{ + sb_need(sb, count); + memcpy(sb->cur, bytes, count); + sb->cur += count; +} + +#define sb_putc(sb, c) do { \ + if ((sb)->cur >= (sb)->end) \ + sb_grow(sb, 1); \ + *(sb)->cur++ = (c); \ + } while (0) + +static void sb_puts(SB *sb, const char *str) +{ + sb_put(sb, str, (int)strlen(str)); +} + +static char *sb_finish(SB *sb) +{ + *sb->cur = 0; + assert(sb->start <= sb->cur && strlen(sb->start) == (size_t)(sb->cur - sb->start)); + return sb->start; +} + +static void sb_free(SB *sb) +{ + free(sb->start); +} + +/* + * Unicode helper functions + * + * These are taken from the ccan/charset module and customized a bit. + * Putting them here means the compiler can (choose to) inline them, + * and it keeps ccan/json from having a dependency. + * + * We use uint32_t Type for Unicode codepoints. + * We need our own because wchar_t might be 16 bits. 
+ */ + +/* + * Validate a single UTF-8 character starting at @s. + * The string must be null-terminated. + * + * If it's valid, return its length (1 thru 4). + * If it's invalid or clipped, return 0. + * + * This function implements the syntax given in RFC3629, which is + * the same as that given in The Unicode Standard, Version 6.0. + * + * It has the following properties: + * + * * All codepoints U+0000..U+10FFFF may be encoded, + * except for U+D800..U+DFFF, which are reserved + * for UTF-16 surrogate pair encoding. + * * UTF-8 byte sequences longer than 4 bytes are not permitted, + * as they exceed the range of Unicode. + * * The sixty-six Unicode "non-characters" are permitted + * (namely, U+FDD0..U+FDEF, U+xxFFFE, and U+xxFFFF). + */ +static int utf8_validate_cz(const char *s) +{ + unsigned char c = *s++; + + if (c <= 0x7F) { /* 00..7F */ + return 1; + } else if (c <= 0xC1) { /* 80..C1 */ + /* Disallow overlong 2-byte sequence. */ + return 0; + } else if (c <= 0xDF) { /* C2..DF */ + /* Make sure subsequent byte is in the range 0x80..0xBF. */ + if (((unsigned char)*s++ & 0xC0) != 0x80) + return 0; + + return 2; + } else if (c <= 0xEF) { /* E0..EF */ + /* Disallow overlong 3-byte sequence. */ + if (c == 0xE0 && (unsigned char)*s < 0xA0) + return 0; + + /* Disallow U+D800..U+DFFF. */ + if (c == 0xED && (unsigned char)*s > 0x9F) + return 0; + + /* Make sure subsequent bytes are in the range 0x80..0xBF. */ + if (((unsigned char)*s++ & 0xC0) != 0x80) + return 0; + if (((unsigned char)*s++ & 0xC0) != 0x80) + return 0; + + return 3; + } else if (c <= 0xF4) { /* F0..F4 */ + /* Disallow overlong 4-byte sequence. */ + if (c == 0xF0 && (unsigned char)*s < 0x90) + return 0; + + /* Disallow codepoints beyond U+10FFFF. */ + if (c == 0xF4 && (unsigned char)*s > 0x8F) + return 0; + + /* Make sure subsequent bytes are in the range 0x80..0xBF. */ + if (((unsigned char)*s++ & 0xC0) != 0x80) + return 0; + if (((unsigned char)*s++ & 0xC0) != 0x80) + return 0; + if (((unsigned char)*s++ & 0xC0) != 0x80) + return 0; + + return 4; + } else { /* F5..FF */ + return 0; + } +} + +/* Validate a null-terminated UTF-8 string. */ +static bool utf8_validate(const char *s) +{ + int len; + + for (; *s != 0; s += len) { + len = utf8_validate_cz(s); + if (len == 0) + return false; + } + + return true; +} + +/* + * Read a single UTF-8 character starting at @s, + * returning the length, in bytes, of the character read. + * + * This function assumes input is valid UTF-8, + * and that there are enough characters in front of @s. + */ +static int utf8_read_char(const char *s, uint32_t *out) +{ + const unsigned char *c = (const unsigned char*) s; + + assert(utf8_validate_cz(s)); + + if (c[0] <= 0x7F) { + /* 00..7F */ + *out = c[0]; + return 1; + } else if (c[0] <= 0xDF) { + /* C2..DF (unless input is invalid) */ + *out = ((uint32_t)c[0] & 0x1F) << 6 | + ((uint32_t)c[1] & 0x3F); + return 2; + } else if (c[0] <= 0xEF) { + /* E0..EF */ + *out = ((uint32_t)c[0] & 0xF) << 12 | + ((uint32_t)c[1] & 0x3F) << 6 | + ((uint32_t)c[2] & 0x3F); + return 3; + } else { + /* F0..F4 (unless input is invalid) */ + *out = ((uint32_t)c[0] & 0x7) << 18 | + ((uint32_t)c[1] & 0x3F) << 12 | + ((uint32_t)c[2] & 0x3F) << 6 | + ((uint32_t)c[3] & 0x3F); + return 4; + } +} + +/* + * Write a single UTF-8 character to @s, + * returning the length, in bytes, of the character written. + * + * @unicode must be U+0000..U+10FFFF, but not U+D800..U+DFFF. + * + * This function will write up to 4 bytes to @out. 
+ */ +static int utf8_write_char(uint32_t unicode, char *out) +{ + unsigned char *o = (unsigned char*) out; + + assert(unicode <= 0x10FFFF && !(unicode >= 0xD800 && unicode <= 0xDFFF)); + + if (unicode <= 0x7F) { + /* U+0000..U+007F */ + *o++ = unicode; + return 1; + } else if (unicode <= 0x7FF) { + /* U+0080..U+07FF */ + *o++ = 0xC0 | unicode >> 6; + *o++ = 0x80 | (unicode & 0x3F); + return 2; + } else if (unicode <= 0xFFFF) { + /* U+0800..U+FFFF */ + *o++ = 0xE0 | unicode >> 12; + *o++ = 0x80 | (unicode >> 6 & 0x3F); + *o++ = 0x80 | (unicode & 0x3F); + return 3; + } else { + /* U+10000..U+10FFFF */ + *o++ = 0xF0 | unicode >> 18; + *o++ = 0x80 | (unicode >> 12 & 0x3F); + *o++ = 0x80 | (unicode >> 6 & 0x3F); + *o++ = 0x80 | (unicode & 0x3F); + return 4; + } +} + +/* + * Compute the Unicode codepoint of a UTF-16 surrogate pair. + * + * @uc should be 0xD800..0xDBFF, and @lc should be 0xDC00..0xDFFF. + * If they aren't, this function returns false. + */ +static bool from_surrogate_pair(uint16_t uc, uint16_t lc, uint32_t *unicode) +{ + if (uc >= 0xD800 && uc <= 0xDBFF && lc >= 0xDC00 && lc <= 0xDFFF) { + *unicode = 0x10000 + ((((uint32_t)uc & 0x3FF) << 10) | (lc & 0x3FF)); + return true; + } else { + return false; + } +} + +/* + * Construct a UTF-16 surrogate pair given a Unicode codepoint. + * + * @unicode must be U+10000..U+10FFFF. + */ +static void to_surrogate_pair(uint32_t unicode, uint16_t *uc, uint16_t *lc) +{ + uint32_t n; + + assert(unicode >= 0x10000 && unicode <= 0x10FFFF); + + n = unicode - 0x10000; + *uc = ((n >> 10) & 0x3FF) | 0xD800; + *lc = (n & 0x3FF) | 0xDC00; +} + +static bool is_space (const char *c); +static bool is_digit (const char *c); +static bool parse_value (const char **sp, JsonNode **out); +static bool parse_string (const char **sp, char **out); +static bool parse_number (const char **sp, double *out); +static bool parse_array (const char **sp, JsonNode **out); +static bool parse_object (const char **sp, JsonNode **out); +static bool parse_hex16 (const char **sp, uint16_t *out); + +static bool expect_literal (const char **sp, const char *str); +static void skip_space (const char **sp); + +static void emit_value (SB *out, const JsonNode *node); +static void emit_value_indented (SB *out, const JsonNode *node, const char *space, int indent_level); +static void emit_string (SB *out, const char *str); +static void emit_number (SB *out, double num); +static void emit_array (SB *out, const JsonNode *array); +static void emit_array_indented (SB *out, const JsonNode *array, const char *space, int indent_level); +static void emit_object (SB *out, const JsonNode *object); +static void emit_object_indented (SB *out, const JsonNode *object, const char *space, int indent_level); + +static int write_hex16(char *out, uint16_t val); + +static JsonNode *mknode(JsonTag tag); +static void append_node(JsonNode *parent, JsonNode *child); +static void prepend_node(JsonNode *parent, JsonNode *child); +static void append_member(JsonNode *object, char *key, JsonNode *value); + +/* Assertion-friendly validity checks */ +static bool tag_is_valid(unsigned int tag); +static bool number_is_valid(const char *num); + +JsonNode *json_decode(const char *json) +{ + const char *s = json; + JsonNode *ret; + + skip_space(&s); + if (!parse_value(&s, &ret)) + return NULL; + + skip_space(&s); + if (*s != 0) { + json_delete(ret); + return NULL; + } + + return ret; +} + +char *json_encode(const JsonNode *node) +{ + return json_stringify(node, NULL); +} + +char *json_encode_string(const char *str) +{ + SB sb; 
+ sb_init(&sb); + + try { + emit_string(&sb, str); + } + catch (std::exception) { + sb_free(&sb); + throw; + } + + return sb_finish(&sb); +} + +char *json_stringify(const JsonNode *node, const char *space) +{ + SB sb; + sb_init(&sb); + + try { + if (space != NULL) + emit_value_indented(&sb, node, space, 0); + else + emit_value(&sb, node); + } + catch (std::exception) { + sb_free(&sb); + throw; + } + + return sb_finish(&sb); +} + +void json_delete(JsonNode *node) +{ + if (node != NULL) { + json_remove_from_parent(node); + + switch (node->tag) { + case JSON_STRING: + free(node->string_); + break; + case JSON_ARRAY: + case JSON_OBJECT: + { + JsonNode *child, *next; + for (child = node->children.head; child != NULL; child = next) { + next = child->next; + json_delete(child); + } + break; + } + default:; + } + + free(node); + } +} + +bool json_validate(const char *json) +{ + const char *s = json; + + skip_space(&s); + if (!parse_value(&s, NULL)) + return false; + + skip_space(&s); + if (*s != 0) + return false; + + return true; +} + +JsonNode *json_find_element(JsonNode *array, int index) +{ + JsonNode *element; + int i = 0; + + if (array == NULL || array->tag != JSON_ARRAY) + return NULL; + + json_foreach(element, array) { + if (i == index) + return element; + i++; + } + + return NULL; +} + +JsonNode *json_find_member(JsonNode *object, const char *name) +{ + JsonNode *member; + + if (object == NULL || object->tag != JSON_OBJECT) + return NULL; + + json_foreach(member, object) + if (strcmp(member->key, name) == 0) + return member; + + return NULL; +} + +JsonNode *json_first_child(const JsonNode *node) +{ + if (node != NULL && (node->tag == JSON_ARRAY || node->tag == JSON_OBJECT)) + return node->children.head; + return NULL; +} + +static JsonNode *mknode(JsonTag tag) +{ + JsonNode *ret = (JsonNode*) calloc(1, sizeof(JsonNode)); + if (ret == NULL) + out_of_memory(); + ret->tag = tag; + return ret; +} + +JsonNode *json_mknull(void) +{ + return mknode(JSON_NULL); +} + +JsonNode *json_mkbool(bool b) +{ + JsonNode *ret = mknode(JSON_BOOL); + ret->bool_ = b; + return ret; +} + +static JsonNode *mkstring(char *s) +{ + JsonNode *ret = mknode(JSON_STRING); + ret->string_ = s; + return ret; +} + +JsonNode *json_mkstring(const char *s) +{ + return mkstring(json_strdup(s)); +} + +JsonNode *json_mknumber(double n) +{ + JsonNode *node = mknode(JSON_NUMBER); + node->number_ = n; + return node; +} + +JsonNode *json_mkarray(void) +{ + return mknode(JSON_ARRAY); +} + +JsonNode *json_mkobject(void) +{ + return mknode(JSON_OBJECT); +} + +static void append_node(JsonNode *parent, JsonNode *child) +{ + if (child != NULL && parent != NULL) { + child->parent = parent; + child->prev = parent->children.tail; + child->next = NULL; + + if (parent->children.tail != NULL) + parent->children.tail->next = child; + else + parent->children.head = child; + parent->children.tail = child; + } +} + +static void prepend_node(JsonNode *parent, JsonNode *child) +{ + if (child != NULL && parent != NULL) { + child->parent = parent; + child->prev = NULL; + child->next = parent->children.head; + + if (parent->children.head != NULL) + parent->children.head->prev = child; + else + parent->children.tail = child; + parent->children.head = child; + } +} + +static void append_member(JsonNode *object, char *key, JsonNode *value) +{ + if (value != NULL && object != NULL) { + value->key = key; + append_node(object, value); + } +} + +void json_append_element(JsonNode *array, JsonNode *element) +{ + if (array != NULL && element !=NULL) { + 
assert(array->tag == JSON_ARRAY); + assert(element->parent == NULL); + + append_node(array, element); + } +} + +void json_prepend_element(JsonNode *array, JsonNode *element) +{ + assert(array->tag == JSON_ARRAY); + assert(element->parent == NULL); + + prepend_node(array, element); +} + +void json_append_member(JsonNode *object, const char *key, JsonNode *value) +{ + if (object != NULL && key != NULL && value != NULL) { + assert(object->tag == JSON_OBJECT); + assert(value->parent == NULL); + + append_member(object, json_strdup(key), value); + } +} + +void json_prepend_member(JsonNode *object, const char *key, JsonNode *value) +{ + if (object != NULL && key != NULL && value != NULL) { + assert(object->tag == JSON_OBJECT); + assert(value->parent == NULL); + + value->key = json_strdup(key); + prepend_node(object, value); + } +} + +void json_remove_from_parent(JsonNode *node) +{ + if (node != NULL) { + JsonNode *parent = node->parent; + + if (parent != NULL) { + if (node->prev != NULL) + node->prev->next = node->next; + else + parent->children.head = node->next; + + if (node->next != NULL) + node->next->prev = node->prev; + else + parent->children.tail = node->prev; + + free(node->key); + + node->parent = NULL; + node->prev = node->next = NULL; + node->key = NULL; + } + } +} + +static bool parse_value(const char **sp, JsonNode **out) +{ + const char *s = *sp; + + switch (*s) { + case 'n': + if (expect_literal(&s, "null")) { + if (out) + *out = json_mknull(); + *sp = s; + return true; + } + return false; + + case 'f': + if (expect_literal(&s, "false")) { + if (out) + *out = json_mkbool(false); + *sp = s; + return true; + } + return false; + + case 't': + if (expect_literal(&s, "true")) { + if (out) + *out = json_mkbool(true); + *sp = s; + return true; + } + return false; + + case '"': { + char *str = NULL; + if (parse_string(&s, out ? &str : NULL)) { + if (out) + *out = mkstring(str); + *sp = s; + return true; + } + return false; + } + + case '[': + if (parse_array(&s, out)) { + *sp = s; + return true; + } + return false; + + case '{': + if (parse_object(&s, out)) { + *sp = s; + return true; + } + return false; + + default: { + double num; + if (parse_number(&s, out ? &num : NULL)) { + if (out) + *out = json_mknumber(num); + *sp = s; + return true; + } + return false; + } + } +} + +static bool parse_array(const char **sp, JsonNode **out) +{ + const char *s = *sp; + JsonNode *ret = out ? json_mkarray() : NULL; + JsonNode *element = NULL; + + if (*s++ != '[') + goto failure; + skip_space(&s); + + if (*s == ']') { + s++; + goto success; + } + + for (;;) { + if (!parse_value(&s, out ? &element : NULL)) + goto failure; + skip_space(&s); + + if (out) + json_append_element(ret, element); + + if (*s == ']') { + s++; + goto success; + } + + if (*s++ != ',') + goto failure; + skip_space(&s); + } + +success: + *sp = s; + if (out) + *out = ret; + return true; + +failure: + json_delete(ret); + return false; +} + +static bool parse_object(const char **sp, JsonNode **out) +{ + const char *s = *sp; + JsonNode *ret = out ? json_mkobject() : NULL; + char *key = NULL; + JsonNode *value = NULL; + + if (*s++ != '{') + goto failure; + skip_space(&s); + + if (*s == '}') { + s++; + goto success; + } + + for (;;) { + if (!parse_string(&s, out ? &key : NULL)) + goto failure; + skip_space(&s); + + if (*s++ != ':') + goto failure_free_key; + skip_space(&s); + + if (!parse_value(&s, out ? 
&value : NULL)) + goto failure_free_key; + skip_space(&s); + + if (out) + append_member(ret, key, value); + + if (*s == '}') { + s++; + goto success; + } + + if (*s++ != ',') + goto failure; + skip_space(&s); + } + +success: + *sp = s; + if (out) + *out = ret; + return true; + +failure_free_key: + if (out) + free(key); +failure: + json_delete(ret); + return false; +} + +bool parse_string(const char **sp, char **out) +{ + const char *s = *sp; + SB sb = { 0, 0, 0 }; + char throwaway_buffer[4]; + /* enough space for a UTF-8 character */ + char *b; + + if (*s++ != '"') + return false; + + if (out) { + sb_init(&sb); + sb_need(&sb, 4); + b = sb.cur; + } else { + b = throwaway_buffer; + } + + while (*s != '"') { + unsigned char c = *s++; + + /* Parse next character, and write it to b. */ + if (c == '\\') { + c = *s++; + switch (c) { + case '"': + case '\\': + case '/': + *b++ = c; + break; + case 'b': + *b++ = '\b'; + break; + case 'f': + *b++ = '\f'; + break; + case 'n': + *b++ = '\n'; + break; + case 'r': + *b++ = '\r'; + break; + case 't': + *b++ = '\t'; + break; + case 'u': + { + uint16_t uc, lc; + uint32_t unicode; + + if (!parse_hex16(&s, &uc)) + goto failed; + + if (uc >= 0xD800 && uc <= 0xDFFF) { + /* Handle UTF-16 surrogate pair. */ + if (*s++ != '\\' || *s++ != 'u' || !parse_hex16(&s, &lc)) + goto failed; /* Incomplete surrogate pair. */ + if (!from_surrogate_pair(uc, lc, &unicode)) + goto failed; /* Invalid surrogate pair. */ + } else if (uc == 0) { + /* Disallow "\u0000". */ + goto failed; + } else { + unicode = uc; + } + + b += utf8_write_char(unicode, b); + break; + } + default: + /* Invalid escape */ + goto failed; + } + } else if (c <= 0x1F) { + /* Control characters are not allowed in string literals. */ + goto failed; + } else { + /* Validate and echo a UTF-8 character. */ + int len; + + s--; + len = utf8_validate_cz(s); + if (len == 0) + goto failed; /* Invalid UTF-8 character. */ + + while (len--) + *b++ = *s++; + } + + /* + * Update sb to know about the new bytes, + * and set up b to write another character. + */ + if (out) { + sb.cur = b; + sb_need(&sb, 4); + b = sb.cur; + } else { + b = throwaway_buffer; + } + } + s++; + + if (out) + *out = sb_finish(&sb); + *sp = s; + return true; + +failed: + if (out) + sb_free(&sb); + return false; +} + +bool is_space(const char *c) { + return ((*c) == '\t' || (*c) == '\n' || (*c) == '\r' || (*c) == ' '); +} + +bool is_digit(const char *c){ + return ((*c) >= '0' && (*c) <= '9'); +} + +/* + * The JSON spec says that a number shall follow this precise pattern + * (spaces and quotes added for readability): + * '-'? (0 | [1-9][0-9]*) ('.' [0-9]+)? ([Ee] [+-]? [0-9]+)? + * + * However, some JSON parsers are more liberal. For instance, PHP accepts + * '.5' and '1.'. JSON.parse accepts '+3'. + * + * This function takes the strict approach. + */ +bool parse_number(const char **sp, double *out) +{ + const char *s = *sp; + + /* '-'? */ + if (*s == '-') + s++; + + /* (0 | [1-9][0-9]*) */ + if (*s == '0') { + s++; + } else { + if (!is_digit(s)) + return false; + do { + s++; + } while (is_digit(s)); + } + + /* ('.' [0-9]+)? */ + if (*s == '.') { + s++; + if (!is_digit(s)) + return false; + do { + s++; + } while (is_digit(s)); + } + + /* ([Ee] [+-]? [0-9]+)? 
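+	   e.g. accepts "1e10" and "2E+5"; an exponent marker with no digits after it is rejected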
*/ + if (*s == 'E' || *s == 'e') { + s++; + if (*s == '+' || *s == '-') + s++; + if (!is_digit(s)) + return false; + do { + s++; + } while (is_digit(s)); + } + + if (out) + *out = strtod(*sp, NULL); + + *sp = s; + return true; +} + +static void skip_space(const char **sp) +{ + const char *s = *sp; + while (is_space(s)) + s++; + *sp = s; +} + +static void emit_value(SB *out, const JsonNode *node) +{ + assert(tag_is_valid(node->tag)); + switch (node->tag) { + case JSON_NULL: + sb_puts(out, "null"); + break; + case JSON_BOOL: + sb_puts(out, node->bool_ ? "true" : "false"); + break; + case JSON_STRING: + emit_string(out, node->string_); + break; + case JSON_NUMBER: + emit_number(out, node->number_); + break; + case JSON_ARRAY: + emit_array(out, node); + break; + case JSON_OBJECT: + emit_object(out, node); + break; + default: + assert(false); + } +} + +void emit_value_indented(SB *out, const JsonNode *node, const char *space, int indent_level) +{ + assert(tag_is_valid(node->tag)); + switch (node->tag) { + case JSON_NULL: + sb_puts(out, "null"); + break; + case JSON_BOOL: + sb_puts(out, node->bool_ ? "true" : "false"); + break; + case JSON_STRING: + emit_string(out, node->string_); + break; + case JSON_NUMBER: + emit_number(out, node->number_); + break; + case JSON_ARRAY: + emit_array_indented(out, node, space, indent_level); + break; + case JSON_OBJECT: + emit_object_indented(out, node, space, indent_level); + break; + default: + assert(false); + } +} + +static void emit_array(SB *out, const JsonNode *array) +{ + const JsonNode *element; + + sb_putc(out, '['); + json_foreach(element, array) { + emit_value(out, element); + if (element->next != NULL) + sb_putc(out, ','); + } + sb_putc(out, ']'); +} + +static void emit_array_indented(SB *out, const JsonNode *array, const char *space, int indent_level) +{ + const JsonNode *element = array->children.head; + int i; + + if (element == NULL) { + sb_puts(out, "[]"); + return; + } + + sb_puts(out, "[\n"); + while (element != NULL) { + for (i = 0; i < indent_level + 1; i++) + sb_puts(out, space); + emit_value_indented(out, element, space, indent_level + 1); + + element = element->next; + sb_puts(out, element != NULL ? ",\n" : "\n"); + } + for (i = 0; i < indent_level; i++) + sb_puts(out, space); + sb_putc(out, ']'); +} + +static void emit_object(SB *out, const JsonNode *object) +{ + const JsonNode *member; + + sb_putc(out, '{'); + json_foreach(member, object) { + emit_string(out, member->key); + sb_putc(out, ':'); + emit_value(out, member); + if (member->next != NULL) + sb_putc(out, ','); + } + sb_putc(out, '}'); +} + +static void emit_object_indented(SB *out, const JsonNode *object, const char *space, int indent_level) +{ + const JsonNode *member = object->children.head; + int i; + + if (member == NULL) { + sb_puts(out, "{}"); + return; + } + + sb_puts(out, "{\n"); + while (member != NULL) { + for (i = 0; i < indent_level + 1; i++) + sb_puts(out, space); + emit_string(out, member->key); + sb_puts(out, ": "); + emit_value_indented(out, member, space, indent_level + 1); + + member = member->next; + sb_puts(out, member != NULL ? 
",\n" : "\n"); + } + for (i = 0; i < indent_level; i++) + sb_puts(out, space); + sb_putc(out, '}'); +} + +void emit_string(SB *out, const char *str) +{ + bool escape_unicode = false; + const char *s = str; + char *b; + +// make assertion catchable +#ifndef NDEBUG + if (!utf8_validate(str)) { + throw utf8::invalid_utf8(0); + } +#endif + + assert(utf8_validate(str)); + + /* + * 14 bytes is enough space to write up to two + * \uXXXX escapes and two quotation marks. + */ + sb_need(out, 14); + b = out->cur; + + *b++ = '"'; + while (*s != 0) { + unsigned char c = *s++; + + /* Encode the next character, and write it to b. */ + switch (c) { + case '"': + *b++ = '\\'; + *b++ = '"'; + break; + case '\\': + *b++ = '\\'; + *b++ = '\\'; + break; + case '\b': + *b++ = '\\'; + *b++ = 'b'; + break; + case '\f': + *b++ = '\\'; + *b++ = 'f'; + break; + case '\n': + *b++ = '\\'; + *b++ = 'n'; + break; + case '\r': + *b++ = '\\'; + *b++ = 'r'; + break; + case '\t': + *b++ = '\\'; + *b++ = 't'; + break; + default: { + int len; + + s--; + len = utf8_validate_cz(s); + + if (len == 0) { + /* + * Handle invalid UTF-8 character gracefully in production + * by writing a replacement character (U+FFFD) + * and skipping a single byte. + * + * This should never happen when assertions are enabled + * due to the assertion at the beginning of this function. + */ + assert(false); + if (escape_unicode) { + strcpy(b, "\\uFFFD"); + b += 6; + } else { + *b++ = 0xEFu; + *b++ = 0xBFu; + *b++ = 0xBDu; + } + s++; + } else if (c < 0x1F || (c >= 0x80 && escape_unicode)) { + /* Encode using \u.... */ + uint32_t unicode; + + s += utf8_read_char(s, &unicode); + + if (unicode <= 0xFFFF) { + *b++ = '\\'; + *b++ = 'u'; + b += write_hex16(b, unicode); + } else { + /* Produce a surrogate pair. */ + uint16_t uc, lc; + assert(unicode <= 0x10FFFF); + to_surrogate_pair(unicode, &uc, &lc); + *b++ = '\\'; + *b++ = 'u'; + b += write_hex16(b, uc); + *b++ = '\\'; + *b++ = 'u'; + b += write_hex16(b, lc); + } + } else { + /* Write the character directly. */ + while (len--) + *b++ = *s++; + } + + break; + } + } + + /* + * Update *out to know about the new bytes, + * and set up b to write another encoded character. + */ + out->cur = b; + sb_need(out, 14); + b = out->cur; + } + *b++ = '"'; + + out->cur = b; +} + +static void emit_number(SB *out, double num) +{ + /* + * This isn't exactly how JavaScript renders numbers, + * but it should produce valid JSON for reasonable numbers + * preserve precision well enough, and avoid some oddities + * like 0.3 -> 0.299999999999999988898 . + */ + char buf[64]; + sprintf(buf, "%.16g", num); + + if (number_is_valid(buf)) + sb_puts(out, buf); + else + sb_puts(out, "null"); +} + +static bool tag_is_valid(unsigned int tag) +{ + return (/* tag >= JSON_NULL && */ tag <= JSON_OBJECT); +} + +static bool number_is_valid(const char *num) +{ + return (parse_number(&num, NULL) && *num == '\0'); +} + +static bool expect_literal(const char **sp, const char *str) +{ + const char *s = *sp; + + while (*str != '\0') + if (*s++ != *str++) + return false; + + *sp = s; + return true; +} + +/* + * Parses exactly 4 hex characters (capital or lowercase). + * Fails if any input chars are not [0-9A-Fa-f]. 
+ */ +static bool parse_hex16(const char **sp, uint16_t *out) +{ + const char *s = *sp; + uint16_t ret = 0; + uint16_t i; + uint16_t tmp; + char c; + + for (i = 0; i < 4; i++) { + c = *s++; + if (c >= '0' && c <= '9') + tmp = c - '0'; + else if (c >= 'A' && c <= 'F') + tmp = c - 'A' + 10; + else if (c >= 'a' && c <= 'f') + tmp = c - 'a' + 10; + else + return false; + + ret <<= 4; + ret += tmp; + } + + if (out) + *out = ret; + *sp = s; + return true; +} + +/* + * Encodes a 16-bit number into hexadecimal, + * writing exactly 4 hex chars. + */ +static int write_hex16(char *out, uint16_t val) +{ + const char *hex = "0123456789ABCDEF"; + + *out++ = hex[(val >> 12) & 0xF]; + *out++ = hex[(val >> 8) & 0xF]; + *out++ = hex[(val >> 4) & 0xF]; + *out++ = hex[ val & 0xF]; + + return 4; +} + +bool json_check(const JsonNode *node, char errmsg[256]) +{ + #define problem(...) do { \ + if (errmsg != NULL) \ + snprintf(errmsg, 256, __VA_ARGS__); \ + return false; \ + } while (0) + + if (node->key != NULL && !utf8_validate(node->key)) + problem("key contains invalid UTF-8"); + + if (!tag_is_valid(node->tag)) + problem("tag is invalid (%u)", node->tag); + + if (node->tag == JSON_BOOL) { + if (node->bool_ != false && node->bool_ != true) + problem("bool_ is neither false (%d) nor true (%d)", (int)false, (int)true); + } else if (node->tag == JSON_STRING) { + if (node->string_ == NULL) + problem("string_ is NULL"); + if (!utf8_validate(node->string_)) + problem("string_ contains invalid UTF-8"); + } else if (node->tag == JSON_ARRAY || node->tag == JSON_OBJECT) { + JsonNode *head = node->children.head; + JsonNode *tail = node->children.tail; + + if (head == NULL || tail == NULL) { + if (head != NULL) + problem("tail is NULL, but head is not"); + if (tail != NULL) + problem("head is NULL, but tail is not"); + } else { + JsonNode *child; + JsonNode *last = NULL; + + if (head->prev != NULL) + problem("First child's prev pointer is not NULL"); + + for (child = head; child != NULL; last = child, child = child->next) { + if (child == node) + problem("node is its own child"); + if (child->next == child) + problem("child->next == child (cycle)"); + if (child->next == head) + problem("child->next == head (cycle)"); + + if (child->parent != node) + problem("child does not point back to parent"); + if (child->next != NULL && child->next->prev != child) + problem("child->next does not point back to child"); + + if (node->tag == JSON_ARRAY && child->key != NULL) + problem("Array element's key is not NULL"); + if (node->tag == JSON_OBJECT && child->key == NULL) + problem("Object member's key is NULL"); + + if (!json_check(child, errmsg)) + return false; + } + + if (last != tail) + problem("tail does not match pointer found by starting at head and following next links"); + } + } + + return true; + + #undef problem +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/json.hpp b/mybulma/node_modules/node-sass/src/libsass/src/json.hpp new file mode 100644 index 0000000..05b35cd --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/json.hpp @@ -0,0 +1,117 @@ +/* + Copyright (C) 2011 Joseph A. Adams (joeyadams3.14159@gmail.com) + All rights reserved. 
+ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. +*/ + +#ifndef CCAN_JSON_H +#define CCAN_JSON_H + +#include +#include + +typedef enum { + JSON_NULL, + JSON_BOOL, + JSON_STRING, + JSON_NUMBER, + JSON_ARRAY, + JSON_OBJECT, +} JsonTag; + +typedef struct JsonNode JsonNode; + +struct JsonNode +{ + /* only if parent is an object or array (NULL otherwise) */ + JsonNode *parent; + JsonNode *prev, *next; + + /* only if parent is an object (NULL otherwise) */ + char *key; /* Must be valid UTF-8. */ + + JsonTag tag; + union { + /* JSON_BOOL */ + bool bool_; + + /* JSON_STRING */ + char *string_; /* Must be valid UTF-8. */ + + /* JSON_NUMBER */ + double number_; + + /* JSON_ARRAY */ + /* JSON_OBJECT */ + struct { + JsonNode *head, *tail; + } children; + }; +}; + +/*** Encoding, decoding, and validation ***/ + +JsonNode *json_decode (const char *json); +char *json_encode (const JsonNode *node); +char *json_encode_string (const char *str); +char *json_stringify (const JsonNode *node, const char *space); +void json_delete (JsonNode *node); + +bool json_validate (const char *json); + +/*** Lookup and traversal ***/ + +JsonNode *json_find_element (JsonNode *array, int index); +JsonNode *json_find_member (JsonNode *object, const char *key); + +JsonNode *json_first_child (const JsonNode *node); + +#define json_foreach(i, object_or_array) \ + for ((i) = json_first_child(object_or_array); \ + (i) != NULL; \ + (i) = (i)->next) + +/*** Construction and manipulation ***/ + +JsonNode *json_mknull(void); +JsonNode *json_mkbool(bool b); +JsonNode *json_mkstring(const char *s); +JsonNode *json_mknumber(double n); +JsonNode *json_mkarray(void); +JsonNode *json_mkobject(void); + +void json_append_element(JsonNode *array, JsonNode *element); +void json_prepend_element(JsonNode *array, JsonNode *element); +void json_append_member(JsonNode *object, const char *key, JsonNode *value); +void json_prepend_member(JsonNode *object, const char *key, JsonNode *value); + +void json_remove_from_parent(JsonNode *node); + +/*** Debugging ***/ + +/* + * Look for structure and encoding problems in a JsonNode or its descendents. + * + * If a problem is detected, return false, writing a description of the problem + * to errmsg (unless errmsg is NULL). 
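+ * For example: char errmsg[256]; if (!json_check(node, errmsg)) puts(errmsg);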
+ */ +bool json_check(const JsonNode *node, char errmsg[256]); + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/kwd_arg_macros.hpp b/mybulma/node_modules/node-sass/src/libsass/src/kwd_arg_macros.hpp new file mode 100644 index 0000000..e135da7 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/kwd_arg_macros.hpp @@ -0,0 +1,28 @@ +#ifndef SASS_KWD_ARG_MACROS_H +#define SASS_KWD_ARG_MACROS_H + +// Example usage: +// KWD_ARG_SET(Args) { +// KWD_ARG(Args, string, foo); +// KWD_ARG(Args, int, bar); +// ... +// }; +// +// ... and later ... +// +// something(Args().foo("hey").bar(3)); + +#define KWD_ARG_SET(set_name) class set_name + +#define KWD_ARG(set_name, type, name) \ +private: \ + type name##_; \ +public: \ + set_name& name(type name##__) { \ + name##_ = name##__; \ + return *this; \ + } \ + type name() { return name##_; } \ +private: + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/lexer.cpp b/mybulma/node_modules/node-sass/src/libsass/src/lexer.cpp new file mode 100644 index 0000000..be7f677 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/lexer.cpp @@ -0,0 +1,181 @@ +#include "sass.hpp" +#include +#include +#include +#include "lexer.hpp" +#include "constants.hpp" + + +namespace Sass { + using namespace Constants; + + namespace Prelexer { + + //#################################### + // BASIC CHARACTER MATCHERS + //#################################### + + // Match standard control chars + const char* kwd_at(const char* src) { return exactly<'@'>(src); } + const char* kwd_dot(const char* src) { return exactly<'.'>(src); } + const char* kwd_comma(const char* src) { return exactly<','>(src); }; + const char* kwd_colon(const char* src) { return exactly<':'>(src); }; + const char* kwd_star(const char* src) { return exactly<'*'>(src); }; + const char* kwd_plus(const char* src) { return exactly<'+'>(src); }; + const char* kwd_minus(const char* src) { return exactly<'-'>(src); }; + const char* kwd_slash(const char* src) { return exactly<'/'>(src); }; + + //#################################### + // implement some function that do exist in the standard + // but those are locale aware which brought some trouble + // this even seems to improve performance by quite a bit + //#################################### + + bool is_alpha(const char& chr) + { + return unsigned(chr - 'A') <= 'Z' - 'A' || + unsigned(chr - 'a') <= 'z' - 'a'; + } + + bool is_space(const char& chr) + { + // adapted the technique from is_alpha + return chr == ' ' || unsigned(chr - '\t') <= '\r' - '\t'; + } + + bool is_digit(const char& chr) + { + // adapted the technique from is_alpha + return unsigned(chr - '0') <= '9' - '0'; + } + + bool is_number(const char& chr) + { + // adapted the technique from is_alpha + return is_digit(chr) || chr == '-' || chr == '+'; + } + + bool is_xdigit(const char& chr) + { + // adapted the technique from is_alpha + return unsigned(chr - '0') <= '9' - '0' || + unsigned(chr - 'a') <= 'f' - 'a' || + unsigned(chr - 'A') <= 'F' - 'A'; + } + + bool is_punct(const char& chr) + { + // locale independent + return chr == '.'; + } + + bool is_alnum(const char& chr) + { + return is_alpha(chr) || is_digit(chr); + } + + // check if char is outside ascii range + bool is_unicode(const char& chr) + { + // check for unicode range + return unsigned(chr) > 127; + } + + // check if char is outside ascii range + // but with specific ranges (copied from Ruby Sass) + bool is_nonascii(const char& chr) + { + unsigned int cmp = unsigned(chr); + return ( + 
(cmp >= 128 && cmp <= 15572911) || + (cmp >= 15630464 && cmp <= 15712189) || + (cmp >= 4036001920) + ); + } + + // check if char is within a reduced ascii range + // valid in a uri (copied from Ruby Sass) + bool is_uri_character(const char& chr) + { + unsigned int cmp = unsigned(chr); + return (cmp > 41 && cmp < 127) || + cmp == ':' || cmp == '/'; + } + + // check if char is within a reduced ascii range + // valid for escaping (copied from Ruby Sass) + bool is_escapable_character(const char& chr) + { + unsigned int cmp = unsigned(chr); + return cmp > 31 && cmp < 127; + } + + // Match word character (look ahead) + bool is_character(const char& chr) + { + // valid alpha, numeric or unicode char (plus hyphen) + return is_alnum(chr) || is_unicode(chr) || chr == '-'; + } + + //#################################### + // BASIC CLASS MATCHERS + //#################################### + + // create matchers that advance the position + const char* space(const char* src) { return is_space(*src) ? src + 1 : 0; } + const char* alpha(const char* src) { return is_alpha(*src) ? src + 1 : 0; } + const char* unicode(const char* src) { return is_unicode(*src) ? src + 1 : 0; } + const char* nonascii(const char* src) { return is_nonascii(*src) ? src + 1 : 0; } + const char* digit(const char* src) { return is_digit(*src) ? src + 1 : 0; } + const char* xdigit(const char* src) { return is_xdigit(*src) ? src + 1 : 0; } + const char* alnum(const char* src) { return is_alnum(*src) ? src + 1 : 0; } + const char* punct(const char* src) { return is_punct(*src) ? src + 1 : 0; } + const char* hyphen(const char* src) { return *src && *src == '-' ? src + 1 : 0; } + const char* character(const char* src) { return is_character(*src) ? src + 1 : 0; } + const char* uri_character(const char* src) { return is_uri_character(*src) ? src + 1 : 0; } + const char* escapable_character(const char* src) { return is_escapable_character(*src) ? src + 1 : 0; } + + // Match multiple ctype characters. + const char* spaces(const char* src) { return one_plus(src); } + const char* digits(const char* src) { return one_plus(src); } + const char* hyphens(const char* src) { return one_plus(src); } + + // Whitespace handling. + const char* no_spaces(const char* src) { return negate< space >(src); } + const char* optional_spaces(const char* src) { return zero_plus< space >(src); } + + // Match any single character. + const char* any_char(const char* src) { return *src ? src + 1 : src; } + + // Match word boundary (zero-width lookahead). + const char* word_boundary(const char* src) { return is_character(*src) || *src == '#' ? 0 : src; } + + // Match linefeed /(?:\n|\r\n?)/ + const char* re_linebreak(const char* src) + { + // end of file or unix linefeed return here + if (*src == 0 || *src == '\n') return src + 1; + // a carriage return may optionally be followed by a linefeed + if (*src == '\r') return *(src + 1) == '\n' ? src + 2 : src + 1; + // no linefeed + return 0; + } + + // Assert string boundaries (/\Z|\z|\A/) + // This is a zero-width positive lookahead + const char* end_of_line(const char* src) + { + // end of file or unix linefeed return here + return *src == 0 || *src == '\n' || *src == '\r' ? src : 0; + } + + // Assert end_of_file boundary (/\z/) + // This is a zero-width positive lookahead + const char* end_of_file(const char* src) + { + // end of file or unix linefeed return here + return *src == 0 ? 
src : 0; + } + + } +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/lexer.hpp b/mybulma/node_modules/node-sass/src/libsass/src/lexer.hpp new file mode 100644 index 0000000..5838c29 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/lexer.hpp @@ -0,0 +1,315 @@ +#ifndef SASS_LEXER_H +#define SASS_LEXER_H + +#include + +namespace Sass { + namespace Prelexer { + + //#################################### + // BASIC CHARACTER MATCHERS + //#################################### + + // Match standard control chars + const char* kwd_at(const char* src); + const char* kwd_dot(const char* src); + const char* kwd_comma(const char* src); + const char* kwd_colon(const char* src); + const char* kwd_star(const char* src); + const char* kwd_plus(const char* src); + const char* kwd_minus(const char* src); + const char* kwd_slash(const char* src); + + //#################################### + // BASIC CLASS MATCHERS + //#################################### + + // These are locale independant + bool is_space(const char& src); + bool is_alpha(const char& src); + bool is_punct(const char& src); + bool is_digit(const char& src); + bool is_number(const char& src); + bool is_alnum(const char& src); + bool is_xdigit(const char& src); + bool is_unicode(const char& src); + bool is_nonascii(const char& src); + bool is_character(const char& src); + bool is_uri_character(const char& src); + bool escapable_character(const char& src); + + // Match a single ctype predicate. + const char* space(const char* src); + const char* alpha(const char* src); + const char* digit(const char* src); + const char* xdigit(const char* src); + const char* alnum(const char* src); + const char* punct(const char* src); + const char* hyphen(const char* src); + const char* unicode(const char* src); + const char* nonascii(const char* src); + const char* character(const char* src); + const char* uri_character(const char* src); + const char* escapable_character(const char* src); + + // Match multiple ctype characters. + const char* spaces(const char* src); + const char* digits(const char* src); + const char* hyphens(const char* src); + + // Whitespace handling. + const char* no_spaces(const char* src); + const char* optional_spaces(const char* src); + + // Match any single character (/./). + const char* any_char(const char* src); + + // Assert word boundary (/\b/) + // Is a zero-width positive lookaheads + const char* word_boundary(const char* src); + + // Match a single linebreak (/(?:\n|\r\n?)/). + const char* re_linebreak(const char* src); + + // Assert string boundaries (/\Z|\z|\A/) + // There are zero-width positive lookaheads + const char* end_of_line(const char* src); + + // Assert end_of_file boundary (/\z/) + const char* end_of_file(const char* src); + // const char* start_of_string(const char* src); + + // Type definition for prelexer functions + typedef const char* (*prelexer)(const char*); + + //#################################### + // BASIC "REGEX" CONSTRUCTORS + //#################################### + + // Match a single character literal. + // Regex equivalent: /(?:x)/ + template + const char* exactly(const char* src) { + return *src == chr ? src + 1 : 0; + } + + // Match the full string literal. 
+ // Regex equivalent: /(?:literal)/ + template + const char* exactly(const char* src) { + if (str == NULL) return 0; + const char* pre = str; + if (src == NULL) return 0; + // there is a small chance that the search string + // is longer than the rest of the string to look at + while (*pre && *src == *pre) { + ++src, ++pre; + } + // did the matcher finish? + return *pre == 0 ? src : 0; + } + + + // Match a single character literal. + // Regex equivalent: /(?:x)/i + // only define lower case alpha chars + template + const char* insensitive(const char* src) { + return *src == chr || *src+32 == chr ? src + 1 : 0; + } + + // Match the full string literal. + // Regex equivalent: /(?:literal)/i + // only define lower case alpha chars + template + const char* insensitive(const char* src) { + if (str == NULL) return 0; + const char* pre = str; + if (src == NULL) return 0; + // there is a small chance that the search string + // is longer than the rest of the string to look at + while (*pre && (*src == *pre || *src+32 == *pre)) { + ++src, ++pre; + } + // did the matcher finish? + return *pre == 0 ? src : 0; + } + + // Match for members of char class. + // Regex equivalent: /[axy]/ + template + const char* class_char(const char* src) { + const char* cc = char_class; + while (*cc && *src != *cc) ++cc; + return *cc ? src + 1 : 0; + } + + // Match for members of char class. + // Regex equivalent: /[axy]+/ + template + const char* class_chars(const char* src) { + const char* p = src; + while (class_char(p)) ++p; + return p == src ? 0 : p; + } + + // Match for members of char class. + // Regex equivalent: /[^axy]/ + template + const char* neg_class_char(const char* src) { + if (*src == 0) return 0; + const char* cc = neg_char_class; + while (*cc && *src != *cc) ++cc; + return *cc ? 0 : src + 1; + } + + // Match for members of char class. + // Regex equivalent: /[^axy]+/ + template + const char* neg_class_chars(const char* src) { + const char* p = src; + while (neg_class_char(p)) ++p; + return p == src ? 0 : p; + } + + // Match all except the supplied one. + // Regex equivalent: /[^x]/ + template + const char* any_char_but(const char* src) { + return (*src && *src != chr) ? src + 1 : 0; + } + + // Succeeds if the matcher fails. + // Aka. zero-width negative lookahead. + // Regex equivalent: /(?!literal)/ + template + const char* negate(const char* src) { + return mx(src) ? 0 : src; + } + + // Succeeds if the matcher succeeds. + // Aka. zero-width positive lookahead. + // Regex equivalent: /(?=literal)/ + // just hangs around until we need it + template + const char* lookahead(const char* src) { + return mx(src) ? src : 0; + } + + // Tries supplied matchers in order. + // Succeeds if one of them succeeds. + // Regex equivalent: /(?:FOO|BAR)/ + template + const char* alternatives(const char* src) { + const char* rslt; + if ((rslt = mx(src))) return rslt; + return 0; + } + template + const char* alternatives(const char* src) { + const char* rslt; + if ((rslt = mx1(src))) return rslt; + return alternatives(src); + } + + // Tries supplied matchers in order. + // Succeeds if all of them succeeds. + // Regex equivalent: /(?:FOO)(?:BAR)/ + template + const char* sequence(const char* src) { + const char* rslt = src; + if (!(rslt = mx1(rslt))) return 0; + return rslt; + } + template + const char* sequence(const char* src) { + const char* rslt = src; + if (!(rslt = mx1(rslt))) return 0; + return sequence(rslt); + } + + + // Match a pattern or not. Always succeeds. 
+ // Regex equivalent: /(?:literal)?/ + template + const char* optional(const char* src) { + const char* p = mx(src); + return p ? p : src; + } + + // Match zero or more of the patterns. + // Regex equivalent: /(?:literal)*/ + template + const char* zero_plus(const char* src) { + const char* p = mx(src); + while (p) src = p, p = mx(src); + return src; + } + + // Match one or more of the patterns. + // Regex equivalent: /(?:literal)+/ + template + const char* one_plus(const char* src) { + const char* p = mx(src); + if (!p) return 0; + while (p) src = p, p = mx(src); + return src; + } + + // Match mx non-greedy until delimiter. + // Other prelexers are greedy by default. + // Regex equivalent: /(?:$mx)*?(?=$delim)\b/ + template + const char* non_greedy(const char* src) { + while (!delim(src)) { + const char* p = mx(src); + if (p == src) return 0; + if (p == 0) return 0; + src = p; + } + return src; + } + + //#################################### + // ADVANCED "REGEX" CONSTRUCTORS + //#################################### + + // Match with word boundary rule. + // Regex equivalent: /(?:$mx)\b/i + template + const char* keyword(const char* src) { + return sequence < + insensitive < str >, + word_boundary + >(src); + } + + // Match with word boundary rule. + // Regex equivalent: /(?:$mx)\b/ + template + const char* word(const char* src) { + return sequence < + exactly < str >, + word_boundary + >(src); + } + + template + const char* loosely(const char* src) { + return sequence < + optional_spaces, + exactly < chr > + >(src); + } + template + const char* loosely(const char* src) { + return sequence < + optional_spaces, + exactly < str > + >(src); + } + + } +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/listize.cpp b/mybulma/node_modules/node-sass/src/libsass/src/listize.cpp new file mode 100644 index 0000000..cb921ae --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/listize.cpp @@ -0,0 +1,86 @@ +#include "sass.hpp" +#include +#include +#include + +#include "listize.hpp" +#include "context.hpp" +#include "backtrace.hpp" +#include "error_handling.hpp" + +namespace Sass { + + Listize::Listize() + { } + + Expression_Ptr Listize::operator()(Selector_List_Ptr sel) + { + List_Obj l = SASS_MEMORY_NEW(List, sel->pstate(), sel->length(), SASS_COMMA); + l->from_selector(true); + for (size_t i = 0, L = sel->length(); i < L; ++i) { + if (!sel->at(i)) continue; + l->append(sel->at(i)->perform(this)); + } + if (l->length()) return l.detach(); + return SASS_MEMORY_NEW(Null, l->pstate()); + } + + Expression_Ptr Listize::operator()(Compound_Selector_Ptr sel) + { + std::string str; + for (size_t i = 0, L = sel->length(); i < L; ++i) { + Expression_Ptr e = (*sel)[i]->perform(this); + if (e) str += e->to_string(); + } + return SASS_MEMORY_NEW(String_Quoted, sel->pstate(), str); + } + + Expression_Ptr Listize::operator()(Complex_Selector_Ptr sel) + { + List_Obj l = SASS_MEMORY_NEW(List, sel->pstate(), 2); + l->from_selector(true); + Compound_Selector_Obj head = sel->head(); + if (head && !head->is_empty_reference()) + { + Expression_Ptr hh = head->perform(this); + if (hh) l->append(hh); + } + + std::string reference = ! sel->reference() ? 
"" + : sel->reference()->to_string(); + switch(sel->combinator()) + { + case Complex_Selector::PARENT_OF: + l->append(SASS_MEMORY_NEW(String_Quoted, sel->pstate(), ">")); + break; + case Complex_Selector::ADJACENT_TO: + l->append(SASS_MEMORY_NEW(String_Quoted, sel->pstate(), "+")); + break; + case Complex_Selector::REFERENCE: + l->append(SASS_MEMORY_NEW(String_Quoted, sel->pstate(), "/" + reference + "/")); + break; + case Complex_Selector::PRECEDES: + l->append(SASS_MEMORY_NEW(String_Quoted, sel->pstate(), "~")); + break; + case Complex_Selector::ANCESTOR_OF: + break; + default: break; + } + + Complex_Selector_Obj tail = sel->tail(); + if (tail) + { + Expression_Obj tt = tail->perform(this); + if (List_Ptr ls = Cast(tt)) + { l->concat(ls); } + } + if (l->length() == 0) return 0; + return l.detach(); + } + + Expression_Ptr Listize::fallback_impl(AST_Node_Ptr n) + { + return Cast(n); + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/listize.hpp b/mybulma/node_modules/node-sass/src/libsass/src/listize.hpp new file mode 100644 index 0000000..9716ebe --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/listize.hpp @@ -0,0 +1,34 @@ +#ifndef SASS_LISTIZE_H +#define SASS_LISTIZE_H + +#include +#include + +#include "ast.hpp" +#include "context.hpp" +#include "operation.hpp" +#include "environment.hpp" + +namespace Sass { + + struct Backtrace; + + class Listize : public Operation_CRTP { + + Expression_Ptr fallback_impl(AST_Node_Ptr n); + + public: + Listize(); + ~Listize() { } + + Expression_Ptr operator()(Selector_List_Ptr); + Expression_Ptr operator()(Complex_Selector_Ptr); + Expression_Ptr operator()(Compound_Selector_Ptr); + + template + Expression_Ptr fallback(U x) { return fallback_impl(x); } + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/mapping.hpp b/mybulma/node_modules/node-sass/src/libsass/src/mapping.hpp new file mode 100644 index 0000000..54fb4a0 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/mapping.hpp @@ -0,0 +1,18 @@ +#ifndef SASS_MAPPING_H +#define SASS_MAPPING_H + +#include "position.hpp" + +namespace Sass { + + struct Mapping { + Position original_position; + Position generated_position; + + Mapping(const Position& original_position, const Position& generated_position) + : original_position(original_position), generated_position(generated_position) { } + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/memory/SharedPtr.cpp b/mybulma/node_modules/node-sass/src/libsass/src/memory/SharedPtr.cpp new file mode 100644 index 0000000..2530360 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/memory/SharedPtr.cpp @@ -0,0 +1,114 @@ +#include "../sass.hpp" +#include +#include + +#include "SharedPtr.hpp" +#include "../ast_fwd_decl.hpp" + +#ifdef DEBUG_SHARED_PTR +#include "../debugger.hpp" +#endif + +namespace Sass { + + #ifdef DEBUG_SHARED_PTR + void SharedObj::dumpMemLeaks() { + if (!all.empty()) { + std::cerr << "###################################\n"; + std::cerr << "# REPORTING MISSING DEALLOCATIONS #\n"; + std::cerr << "###################################\n"; + for (SharedObj* var : all) { + if (AST_Node_Ptr ast = dynamic_cast(var)) { + debug_ast(ast); + } else { + std::cerr << "LEAKED " << var << "\n"; + } + } + } + } + std::vector SharedObj::all; + #endif + + bool SharedObj::taint = false; + + SharedObj::SharedObj() + : detached(false) + #ifdef DEBUG_SHARED_PTR + , dbg(false) + #endif + { + refcounter = 0; + #ifdef DEBUG_SHARED_PTR + if (taint) 
all.push_back(this); + #endif + }; + + SharedObj::~SharedObj() { + #ifdef DEBUG_SHARED_PTR + if (dbg) std::cerr << "Destruct " << this << "\n"; + if(!all.empty()) { // check needed for MSVC (no clue why?) + all.erase(std::remove(all.begin(), all.end(), this), all.end()); + } + #endif + }; + + void SharedPtr::decRefCount() { + if (node) { + -- node->refcounter; + #ifdef DEBUG_SHARED_PTR + if (node->dbg) std::cerr << "- " << node << " X " << node->refcounter << " (" << this << ") " << "\n"; + #endif + if (node->refcounter == 0) { + #ifdef DEBUG_SHARED_PTR + // AST_Node_Ptr ast = dynamic_cast(node); + if (node->dbg) std::cerr << "DELETE NODE " << node << "\n"; + #endif + if (!node->detached) { + delete(node); + } + } + } + } + + void SharedPtr::incRefCount() { + if (node) { + ++ node->refcounter; + node->detached = false; + #ifdef DEBUG_SHARED_PTR + if (node->dbg) { + std::cerr << "+ " << node << " X " << node->refcounter << " (" << this << ") " << "\n"; + } + #endif + } + } + + SharedPtr::~SharedPtr() { + decRefCount(); + } + + + // the create constructor + SharedPtr::SharedPtr(SharedObj* ptr) + : node(ptr) { + incRefCount(); + } + // copy assignment operator + SharedPtr& SharedPtr::operator=(const SharedPtr& rhs) { + void* cur_ptr = (void*) node; + void* rhs_ptr = (void*) rhs.node; + if (cur_ptr == rhs_ptr) { + return *this; + } + decRefCount(); + node = rhs.node; + incRefCount(); + return *this; + } + + // the copy constructor + SharedPtr::SharedPtr(const SharedPtr& obj) + : node(obj.node) { + incRefCount(); + } + +} \ No newline at end of file diff --git a/mybulma/node_modules/node-sass/src/libsass/src/memory/SharedPtr.hpp b/mybulma/node_modules/node-sass/src/libsass/src/memory/SharedPtr.hpp new file mode 100644 index 0000000..f20dfa3 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/memory/SharedPtr.hpp @@ -0,0 +1,206 @@ +#ifndef SASS_MEMORY_SHARED_PTR_H +#define SASS_MEMORY_SHARED_PTR_H + +#include "sass/base.h" + +#include + +namespace Sass { + + class SharedPtr; + + /////////////////////////////////////////////////////////////////////////////// + // Use macros for the allocation task, since overloading operator `new` + // has been proven to be flaky under certain compilers (see comment below). + /////////////////////////////////////////////////////////////////////////////// + + #ifdef DEBUG_SHARED_PTR + + #define SASS_MEMORY_NEW(Class, ...) \ + ((Class*)(new Class(__VA_ARGS__))->trace(__FILE__, __LINE__)) \ + + #define SASS_MEMORY_COPY(obj) \ + ((obj)->copy(__FILE__, __LINE__)) \ + + #define SASS_MEMORY_CLONE(obj) \ + ((obj)->clone(__FILE__, __LINE__)) \ + + #else + + #define SASS_MEMORY_NEW(Class, ...) 
\ + new Class(__VA_ARGS__) \ + + #define SASS_MEMORY_COPY(obj) \ + ((obj)->copy()) \ + + #define SASS_MEMORY_CLONE(obj) \ + ((obj)->clone()) \ + + #endif + + class SharedObj { + protected: + friend class SharedPtr; + friend class Memory_Manager; + #ifdef DEBUG_SHARED_PTR + static std::vector all; + std::string file; + size_t line; + #endif + static bool taint; + long refcounter; + // long refcount; + bool detached; + #ifdef DEBUG_SHARED_PTR + bool dbg; + #endif + public: + #ifdef DEBUG_SHARED_PTR + static void dumpMemLeaks(); + SharedObj* trace(std::string file, size_t line) { + this->file = file; + this->line = line; + return this; + } + #endif + SharedObj(); + #ifdef DEBUG_SHARED_PTR + std::string getDbgFile() { + return file; + } + size_t getDbgLine() { + return line; + } + void setDbg(bool dbg) { + this->dbg = dbg; + } + #endif + static void setTaint(bool val) { + taint = val; + } + virtual ~SharedObj(); + long getRefCount() { + return refcounter; + } + }; + + + class SharedPtr { + protected: + SharedObj* node; + protected: + void decRefCount(); + void incRefCount(); + public: + // the empty constructor + SharedPtr() + : node(NULL) {}; + // the create constructor + SharedPtr(SharedObj* ptr); + // the copy constructor + SharedPtr(const SharedPtr& obj); + // the move constructor + SharedPtr(SharedPtr&& obj); + // copy assignment operator + SharedPtr& operator=(const SharedPtr& obj); + // move assignment operator + SharedPtr& operator=(SharedPtr&& obj); + // pure virtual destructor + virtual ~SharedPtr() = 0; + public: + SharedObj* obj () const { + return node; + }; + SharedObj* operator-> () const { + return node; + }; + bool isNull () { + return node == NULL; + }; + bool isNull () const { + return node == NULL; + }; + SharedObj* detach() const { + if (node) { + node->detached = true; + } + return node; + }; + operator bool() const { + return node != NULL; + }; + + }; + + template < class T > + class SharedImpl : private SharedPtr { + public: + SharedImpl() + : SharedPtr(NULL) {}; + SharedImpl(T* node) + : SharedPtr(node) {}; + template < class U > + SharedImpl(SharedImpl obj) + : SharedPtr(static_cast(obj.ptr())) {} + SharedImpl(T&& node) + : SharedPtr(node) {}; + SharedImpl(const T& node) + : SharedPtr(node) {}; + // the copy constructor + SharedImpl(const SharedImpl& impl) + : SharedPtr(impl.node) {}; + // the move constructor + SharedImpl(SharedImpl&& impl) + : SharedPtr(impl.node) {}; + // copy assignment operator + SharedImpl& operator=(const SharedImpl& rhs) { + if (node) decRefCount(); + node = rhs.node; + incRefCount(); + return *this; + } + // move assignment operator + SharedImpl& operator=(SharedImpl&& rhs) { + // don't move our self + if (this != &rhs) { + if (node) decRefCount(); + node = std::move(rhs.node); + rhs.node = NULL; + } + return *this; + } + ~SharedImpl() {}; + public: + operator T*() const { + return static_cast(this->obj()); + } + operator T&() const { + return *static_cast(this->obj()); + } + T& operator* () const { + return *static_cast(this->obj()); + }; + T* operator-> () const { + return static_cast(this->obj()); + }; + T* ptr () const { + return static_cast(this->obj()); + }; + T* detach() const { + if (this->obj() == NULL) return NULL; + return static_cast(SharedPtr::detach()); + } + bool isNull() const { + return this->obj() == NULL; + } + bool operator<(const T& rhs) const { + return *this->ptr() < rhs; + }; + operator bool() const { + return this->obj() != NULL; + }; + }; + +} + +#endif \ No newline at end of file diff --git 
a/mybulma/node_modules/node-sass/src/libsass/src/node.cpp b/mybulma/node_modules/node-sass/src/libsass/src/node.cpp new file mode 100644 index 0000000..08eada7 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/node.cpp @@ -0,0 +1,319 @@ +#include "sass.hpp" +#include + +#include "node.hpp" +#include "context.hpp" +#include "parser.hpp" + +namespace Sass { + + + Node Node::createCombinator(const Complex_Selector::Combinator& combinator) { + NodeDequePtr null; + return Node(COMBINATOR, combinator, NULL /*pSelector*/, null /*pCollection*/); + } + + + Node Node::createSelector(const Complex_Selector& pSelector) { + NodeDequePtr null; + + Complex_Selector_Ptr pStripped = SASS_MEMORY_COPY(&pSelector); + pStripped->tail(NULL); + pStripped->combinator(Complex_Selector::ANCESTOR_OF); + + Node n(SELECTOR, Complex_Selector::ANCESTOR_OF, pStripped, null /*pCollection*/); + n.got_line_feed = pSelector.has_line_feed(); + return n; + } + + + Node Node::createCollection() { + NodeDequePtr pEmptyCollection = std::make_shared(); + return Node(COLLECTION, Complex_Selector::ANCESTOR_OF, NULL /*pSelector*/, pEmptyCollection); + } + + + Node Node::createCollection(const NodeDeque& values) { + NodeDequePtr pShallowCopiedCollection = std::make_shared(values); + return Node(COLLECTION, Complex_Selector::ANCESTOR_OF, NULL /*pSelector*/, pShallowCopiedCollection); + } + + + Node Node::createNil() { + NodeDequePtr null; + return Node(NIL, Complex_Selector::ANCESTOR_OF, NULL /*pSelector*/, null /*pCollection*/); + } + + + Node::Node(const TYPE& type, Complex_Selector::Combinator combinator, Complex_Selector_Ptr pSelector, NodeDequePtr& pCollection) + : got_line_feed(false), mType(type), mCombinator(combinator), mpSelector(pSelector), mpCollection(pCollection) + { if (pSelector) got_line_feed = pSelector->has_line_feed(); } + + + Node Node::klone() const { + NodeDequePtr pNewCollection = std::make_shared(); + if (mpCollection) { + for (NodeDeque::iterator iter = mpCollection->begin(), iterEnd = mpCollection->end(); iter != iterEnd; iter++) { + Node& toClone = *iter; + pNewCollection->push_back(toClone.klone()); + } + } + + Node n(mType, mCombinator, mpSelector ? SASS_MEMORY_COPY(mpSelector) : NULL, pNewCollection); + n.got_line_feed = got_line_feed; + return n; + } + + + bool Node::contains(const Node& potentialChild) const { + bool found = false; + + for (NodeDeque::iterator iter = mpCollection->begin(), iterEnd = mpCollection->end(); iter != iterEnd; iter++) { + Node& toTest = *iter; + + if (toTest == potentialChild) { + found = true; + break; + } + } + + return found; + } + + + bool Node::operator==(const Node& rhs) const { + if (this->type() != rhs.type()) { + return false; + } + + if (this->isCombinator()) { + + return this->combinator() == rhs.combinator(); + + } else if (this->isNil()) { + + return true; // no state to check + + } else if (this->isSelector()){ + + return *this->selector() == *rhs.selector(); + + } else if (this->isCollection()) { + + if (this->collection()->size() != rhs.collection()->size()) { + return false; + } + + for (NodeDeque::iterator lhsIter = this->collection()->begin(), lhsIterEnd = this->collection()->end(), + rhsIter = rhs.collection()->begin(); lhsIter != lhsIterEnd; lhsIter++, rhsIter++) { + + if (*lhsIter != *rhsIter) { + return false; + } + + } + + return true; + + } + + // We shouldn't get here. + throw "Comparing unknown node types. 
A new type was probably added and this method wasn't implemented for it."; + } + + + void Node::plus(Node& rhs) { + if (!this->isCollection() || !rhs.isCollection()) { + throw "Both the current node and rhs must be collections."; + } + this->collection()->insert(this->collection()->end(), rhs.collection()->begin(), rhs.collection()->end()); + } + +#ifdef DEBUG + std::ostream& operator<<(std::ostream& os, const Node& node) { + + if (node.isCombinator()) { + + switch (node.combinator()) { + case Complex_Selector::ANCESTOR_OF: os << "\" \""; break; + case Complex_Selector::PARENT_OF: os << "\">\""; break; + case Complex_Selector::PRECEDES: os << "\"~\""; break; + case Complex_Selector::ADJACENT_TO: os << "\"+\""; break; + case Complex_Selector::REFERENCE: os << "\"/\""; break; + } + + } else if (node.isNil()) { + + os << "nil"; + + } else if (node.isSelector()){ + + os << node.selector()->head()->to_string(); + + } else if (node.isCollection()) { + + os << "["; + + for (NodeDeque::iterator iter = node.collection()->begin(), iterBegin = node.collection()->begin(), iterEnd = node.collection()->end(); iter != iterEnd; iter++) { + if (iter != iterBegin) { + os << ", "; + } + + os << (*iter); + } + + os << "]"; + + } + + return os; + + } +#endif + + + Node complexSelectorToNode(Complex_Selector_Ptr pToConvert) { + if (pToConvert == NULL) { + return Node::createNil(); + } + Node node = Node::createCollection(); + node.got_line_feed = pToConvert->has_line_feed(); + bool has_lf = pToConvert->has_line_feed(); + + // unwrap the selector from parent ref + if (pToConvert->head() && pToConvert->head()->has_parent_ref()) { + Complex_Selector_Obj tail = pToConvert->tail(); + if (tail) tail->has_line_feed(pToConvert->has_line_feed()); + pToConvert = tail; + } + + while (pToConvert) { + + bool empty_parent_ref = pToConvert->head() && pToConvert->head()->is_empty_reference(); + + // the first Complex_Selector may contain a dummy head pointer, skip it. 
+ if (pToConvert->head() && !empty_parent_ref) { + node.collection()->push_back(Node::createSelector(*pToConvert)); + if (has_lf) node.collection()->back().got_line_feed = has_lf; + if (pToConvert->head() || empty_parent_ref) { + if (pToConvert->tail()) { + pToConvert->tail()->has_line_feed(pToConvert->has_line_feed()); + } + } + has_lf = false; + } + + if (pToConvert->combinator() != Complex_Selector::ANCESTOR_OF) { + node.collection()->push_back(Node::createCombinator(pToConvert->combinator())); + if (has_lf) node.collection()->back().got_line_feed = has_lf; + has_lf = false; + } + + if (pToConvert && empty_parent_ref && pToConvert->tail()) { + // pToConvert->tail()->has_line_feed(pToConvert->has_line_feed()); + } + + pToConvert = pToConvert->tail(); + } + + return node; + } + + + Complex_Selector_Ptr nodeToComplexSelector(const Node& toConvert) { + if (toConvert.isNil()) { + return NULL; + } + + + if (!toConvert.isCollection()) { + throw "The node to convert to a Complex_Selector_Ptr must be a collection type or nil."; + } + + + NodeDeque& childNodes = *toConvert.collection(); + + std::string noPath(""); + Complex_Selector_Obj pFirst = SASS_MEMORY_NEW(Complex_Selector, ParserState("[NODE]"), Complex_Selector::ANCESTOR_OF, NULL, NULL); + + Complex_Selector_Obj pCurrent = pFirst; + + if (toConvert.isSelector()) pFirst->has_line_feed(toConvert.got_line_feed); + if (toConvert.isCombinator()) pFirst->has_line_feed(toConvert.got_line_feed); + + for (NodeDeque::iterator childIter = childNodes.begin(), childIterEnd = childNodes.end(); childIter != childIterEnd; childIter++) { + + Node& child = *childIter; + + if (child.isSelector()) { + // JMA - need to clone the selector, because they can end up getting shared across Node + // collections, and can result in an infinite loop during the call to parentSuperselector() + pCurrent->tail(SASS_MEMORY_COPY(child.selector())); + // if (child.got_line_feed) pCurrent->has_line_feed(child.got_line_feed); + pCurrent = pCurrent->tail(); + } else if (child.isCombinator()) { + pCurrent->combinator(child.combinator()); + if (child.got_line_feed) pCurrent->has_line_feed(child.got_line_feed); + + // if the next node is also a combinator, create another Complex_Selector to hold it so it doesn't replace the current combinator + if (childIter+1 != childIterEnd) { + Node& nextNode = *(childIter+1); + if (nextNode.isCombinator()) { + pCurrent->tail(SASS_MEMORY_NEW(Complex_Selector, ParserState("[NODE]"), Complex_Selector::ANCESTOR_OF, NULL, NULL)); + if (nextNode.got_line_feed) pCurrent->tail()->has_line_feed(nextNode.got_line_feed); + pCurrent = pCurrent->tail(); + } + } + } else { + throw "The node to convert's children must be only combinators or selectors."; + } + } + + // Put the dummy Compound_Selector in the first position, for consistency with the rest of libsass + Compound_Selector_Ptr fakeHead = SASS_MEMORY_NEW(Compound_Selector, ParserState("[NODE]"), 1); + Parent_Selector_Ptr selectorRef = SASS_MEMORY_NEW(Parent_Selector, ParserState("[NODE]")); + fakeHead->elements().push_back(selectorRef); + if (toConvert.got_line_feed) pFirst->has_line_feed(toConvert.got_line_feed); + // pFirst->has_line_feed(pFirst->has_line_feed() || pFirst->tail()->has_line_feed() || toConvert.got_line_feed); + pFirst->head(fakeHead); + return SASS_MEMORY_COPY(pFirst); + } + + // A very naive trim function, which removes duplicates in a node + // This is only used in Complex_Selector::unify_with for now, may need modifications to fit other needs + Node Node::naiveTrim(Node& seqses) { 
+ + std::vector res; + std::vector known; + + NodeDeque::reverse_iterator seqsesIter = seqses.collection()->rbegin(), + seqsesIterEnd = seqses.collection()->rend(); + + for (; seqsesIter != seqsesIterEnd; ++seqsesIter) + { + Node& seqs1 = *seqsesIter; + if( seqs1.isSelector() ) { + Complex_Selector_Obj sel = seqs1.selector(); + std::vector::iterator it; + bool found = false; + for (it = known.begin(); it != known.end(); ++it) { + if (**it == *sel) { found = true; break; } + } + if( !found ) { + known.push_back(seqs1.selector()); + res.push_back(&seqs1); + } + } else { + res.push_back(&seqs1); + } + } + + Node result = Node::createCollection(); + + for (size_t i = res.size() - 1; i != std::string::npos; --i) { + result.collection()->push_back(*res[i]); + } + + return result; + } +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/node.hpp b/mybulma/node_modules/node-sass/src/libsass/src/node.hpp new file mode 100644 index 0000000..23ba360 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/node.hpp @@ -0,0 +1,118 @@ +#ifndef SASS_NODE_H +#define SASS_NODE_H + +#include +#include + +#include "ast.hpp" + + +namespace Sass { + + + + + class Context; + + /* + There are a lot of stumbling blocks when trying to port the ruby extend code to C++. The biggest is the choice of + data type. The ruby code will pretty seamlessly switch types between an Array (libsass' + equivalent is the Complex_Selector) to a Sequence, which contains more metadata about the sequence than just the + selector info. They also have the ability to have arbitrary nestings of arrays like [1, [2]], which is hard to + implement using Array equivalents in C++ (like the deque or vector). They also have the ability to include nil + in the arrays, like [1, nil, 3], which has potential semantic differences than an empty array [1, [], 3]. To be + able to represent all of these as unique cases, we need to create a tree of variant objects. The tree nature allows + the inconsistent nesting levels. The variant nature (while making some of the C++ code uglier) allows the code to + more closely match the ruby code, which is a huge benefit when attempting to implement an complex algorithm like + the Extend operator. + + Note that the current libsass data model also pairs the combinator with the Complex_Selector that follows it, but + ruby sass has no such restriction, so we attempt to create a data structure that can handle them split apart. 
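+    As a rough example, the complex selector "a > b" becomes (via complexSelectorToNode) a COLLECTION node holding
+    [SELECTOR(a), COMBINATOR('>'), SELECTOR(b)], and nodeToComplexSelector converts such a collection back again.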
+ */ + + class Node; + typedef std::deque NodeDeque; + typedef std::shared_ptr NodeDequePtr; + + class Node { + public: + enum TYPE { + SELECTOR, + COMBINATOR, + COLLECTION, + NIL + }; + + TYPE type() const { return mType; } + bool isCombinator() const { return mType == COMBINATOR; } + bool isSelector() const { return mType == SELECTOR; } + bool isCollection() const { return mType == COLLECTION; } + bool isNil() const { return mType == NIL; } + bool got_line_feed; + + Complex_Selector::Combinator combinator() const { return mCombinator; } + + Complex_Selector_Obj selector() { return mpSelector; } + Complex_Selector_Obj selector() const { return mpSelector; } + + NodeDequePtr collection() { return mpCollection; } + const NodeDequePtr collection() const { return mpCollection; } + + static Node createCombinator(const Complex_Selector::Combinator& combinator); + + // This method will klone the selector, stripping off the tail and combinator + static Node createSelector(const Complex_Selector& pSelector); + + static Node createCollection(); + static Node createCollection(const NodeDeque& values); + + static Node createNil(); + static Node naiveTrim(Node& seqses); + + Node klone() const; + + bool operator==(const Node& rhs) const; + inline bool operator!=(const Node& rhs) const { return !(*this == rhs); } + + + /* + COLLECTION FUNCTIONS + + Most types don't need any helper methods (nil and combinator due to their simplicity and + selector due to the fact that we leverage the non-node selector code on the Complex_Selector + whereever possible). The following methods are intended to be called on Node objects whose + type is COLLECTION only. + */ + + // rhs and this must be node collections. Shallow copy the nodes from rhs to the end of this. + // This function DOES NOT remove the nodes from rhs. + void plus(Node& rhs); + + // potentialChild must be a node collection of selectors/combinators. this must be a collection + // of collections of nodes/combinators. This method checks if potentialChild is a child of this + // Node. + bool contains(const Node& potentialChild) const; + + private: + // Private constructor; Use the static methods (like createCombinator and createSelector) + // to instantiate this object. This is more expressive, and it allows us to break apart each + // case into separate functions. + Node(const TYPE& type, Complex_Selector::Combinator combinator, Complex_Selector_Ptr pSelector, NodeDequePtr& pCollection); + + TYPE mType; + + // TODO: can we union these to save on memory? 
+ Complex_Selector::Combinator mCombinator; + Complex_Selector_Obj mpSelector; + NodeDequePtr mpCollection; + }; + +#ifdef DEBUG + std::ostream& operator<<(std::ostream& os, const Node& node); +#endif + Node complexSelectorToNode(Complex_Selector_Ptr pToConvert); + Complex_Selector_Ptr nodeToComplexSelector(const Node& toConvert); + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/operation.hpp b/mybulma/node_modules/node-sass/src/libsass/src/operation.hpp new file mode 100644 index 0000000..2d4fbec --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/operation.hpp @@ -0,0 +1,173 @@ +#ifndef SASS_OPERATION_H +#define SASS_OPERATION_H + +#include "ast_fwd_decl.hpp" + +namespace Sass { + + template + class Operation { + public: + virtual T operator()(AST_Node_Ptr x) = 0; + virtual ~Operation() { } + // statements + virtual T operator()(Block_Ptr x) = 0; + virtual T operator()(Ruleset_Ptr x) = 0; + virtual T operator()(Bubble_Ptr x) = 0; + virtual T operator()(Trace_Ptr x) = 0; + virtual T operator()(Supports_Block_Ptr x) = 0; + virtual T operator()(Media_Block_Ptr x) = 0; + virtual T operator()(At_Root_Block_Ptr x) = 0; + virtual T operator()(Directive_Ptr x) = 0; + virtual T operator()(Keyframe_Rule_Ptr x) = 0; + virtual T operator()(Declaration_Ptr x) = 0; + virtual T operator()(Assignment_Ptr x) = 0; + virtual T operator()(Import_Ptr x) = 0; + virtual T operator()(Import_Stub_Ptr x) = 0; + virtual T operator()(Warning_Ptr x) = 0; + virtual T operator()(Error_Ptr x) = 0; + virtual T operator()(Debug_Ptr x) = 0; + virtual T operator()(Comment_Ptr x) = 0; + virtual T operator()(If_Ptr x) = 0; + virtual T operator()(For_Ptr x) = 0; + virtual T operator()(Each_Ptr x) = 0; + virtual T operator()(While_Ptr x) = 0; + virtual T operator()(Return_Ptr x) = 0; + virtual T operator()(Content_Ptr x) = 0; + virtual T operator()(Extension_Ptr x) = 0; + virtual T operator()(Definition_Ptr x) = 0; + virtual T operator()(Mixin_Call_Ptr x) = 0; + // expressions + virtual T operator()(List_Ptr x) = 0; + virtual T operator()(Map_Ptr x) = 0; + virtual T operator()(Function_Ptr x) = 0; + virtual T operator()(Binary_Expression_Ptr x) = 0; + virtual T operator()(Unary_Expression_Ptr x) = 0; + virtual T operator()(Function_Call_Ptr x) = 0; + virtual T operator()(Function_Call_Schema_Ptr x) = 0; + virtual T operator()(Custom_Warning_Ptr x) = 0; + virtual T operator()(Custom_Error_Ptr x) = 0; + virtual T operator()(Variable_Ptr x) = 0; + virtual T operator()(Number_Ptr x) = 0; + virtual T operator()(Color_Ptr x) = 0; + virtual T operator()(Boolean_Ptr x) = 0; + virtual T operator()(String_Schema_Ptr x) = 0; + virtual T operator()(String_Quoted_Ptr x) = 0; + virtual T operator()(String_Constant_Ptr x) = 0; + virtual T operator()(Supports_Condition_Ptr x) = 0; + virtual T operator()(Supports_Operator_Ptr x) = 0; + virtual T operator()(Supports_Negation_Ptr x) = 0; + virtual T operator()(Supports_Declaration_Ptr x) = 0; + virtual T operator()(Supports_Interpolation_Ptr x) = 0; + virtual T operator()(Media_Query_Ptr x) = 0; + virtual T operator()(Media_Query_Expression_Ptr x) = 0; + virtual T operator()(At_Root_Query_Ptr x) = 0; + virtual T operator()(Null_Ptr x) = 0; + virtual T operator()(Parent_Selector_Ptr x) = 0; + // parameters and arguments + virtual T operator()(Parameter_Ptr x) = 0; + virtual T operator()(Parameters_Ptr x) = 0; + virtual T operator()(Argument_Ptr x) = 0; + virtual T operator()(Arguments_Ptr x) = 0; + // selectors + virtual T operator()(Selector_Schema_Ptr x) 
= 0; + virtual T operator()(Placeholder_Selector_Ptr x) = 0; + virtual T operator()(Element_Selector_Ptr x) = 0; + virtual T operator()(Class_Selector_Ptr x) = 0; + virtual T operator()(Id_Selector_Ptr x) = 0; + virtual T operator()(Attribute_Selector_Ptr x) = 0; + virtual T operator()(Pseudo_Selector_Ptr x) = 0; + virtual T operator()(Wrapped_Selector_Ptr x) = 0; + virtual T operator()(Compound_Selector_Ptr x)= 0; + virtual T operator()(Complex_Selector_Ptr x) = 0; + virtual T operator()(Selector_List_Ptr x) = 0; + + template + T fallback(U x) { return T(); } + }; + + template + class Operation_CRTP : public Operation { + public: + D& impl() { return static_cast(*this); } + public: + T operator()(AST_Node_Ptr x) { return static_cast(this)->fallback(x); } + // statements + T operator()(Block_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Ruleset_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Bubble_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Trace_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Supports_Block_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Media_Block_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(At_Root_Block_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Directive_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Keyframe_Rule_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Declaration_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Assignment_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Import_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Import_Stub_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Warning_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Error_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Debug_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Comment_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(If_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(For_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Each_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(While_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Return_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Content_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Extension_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Definition_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Mixin_Call_Ptr x) { return static_cast(this)->fallback(x); } + // expressions + T operator()(List_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Map_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Function_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Binary_Expression_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Unary_Expression_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Function_Call_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Function_Call_Schema_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Custom_Warning_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Custom_Error_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Variable_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Number_Ptr x) { return 
static_cast(this)->fallback(x); } + T operator()(Color_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Boolean_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(String_Schema_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(String_Constant_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(String_Quoted_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Supports_Condition_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Supports_Operator_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Supports_Negation_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Supports_Declaration_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Supports_Interpolation_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Media_Query_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Media_Query_Expression_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(At_Root_Query_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Null_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Parent_Selector_Ptr x) { return static_cast(this)->fallback(x); } + // parameters and arguments + T operator()(Parameter_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Parameters_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Argument_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Arguments_Ptr x) { return static_cast(this)->fallback(x); } + // selectors + T operator()(Selector_Schema_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Placeholder_Selector_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Element_Selector_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Class_Selector_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Id_Selector_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Attribute_Selector_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Pseudo_Selector_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Wrapped_Selector_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Compound_Selector_Ptr x){ return static_cast(this)->fallback(x); } + T operator()(Complex_Selector_Ptr x) { return static_cast(this)->fallback(x); } + T operator()(Selector_List_Ptr x) { return static_cast(this)->fallback(x); } + + template + T fallback(U x) { return T(); } + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/operators.cpp b/mybulma/node_modules/node-sass/src/libsass/src/operators.cpp new file mode 100644 index 0000000..a1fd562 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/operators.cpp @@ -0,0 +1,267 @@ +#include "sass.hpp" +#include "operators.hpp" + +namespace Sass { + + namespace Operators { + + inline double add(double x, double y) { return x + y; } + inline double sub(double x, double y) { return x - y; } + inline double mul(double x, double y) { return x * y; } + inline double div(double x, double y) { return x / y; } // x/0 checked by caller + + inline double mod(double x, double y) { // x/0 checked by caller + if ((x > 0 && y < 0) || (x < 0 && y > 0)) { + double ret = std::fmod(x, y); + return ret ? 
ret + y : ret; + } else { + return std::fmod(x, y); + } + } + + typedef double (*bop)(double, double); + bop ops[Sass_OP::NUM_OPS] = { + 0, 0, // and, or + 0, 0, 0, 0, 0, 0, // eq, neq, gt, gte, lt, lte + add, sub, mul, div, mod + }; + + /* static function, has no pstate or traces */ + bool eq(Expression_Obj lhs, Expression_Obj rhs) + { + // operation is undefined if one is not a number + if (!lhs || !rhs) throw Exception::UndefinedOperation(lhs, rhs, Sass_OP::EQ); + // use compare operator from ast node + return *lhs == *rhs; + } + + /* static function, throws OperationError, has no pstate or traces */ + bool cmp(Expression_Obj lhs, Expression_Obj rhs, const Sass_OP op) + { + // can only compare numbers!? + Number_Obj l = Cast(lhs); + Number_Obj r = Cast(rhs); + // operation is undefined if one is not a number + if (!l || !r) throw Exception::UndefinedOperation(lhs, rhs, op); + // use compare operator from ast node + return *l < *r; + } + + /* static functions, throws OperationError, has no pstate or traces */ + bool lt(Expression_Obj lhs, Expression_Obj rhs) { return cmp(lhs, rhs, Sass_OP::LT); } + bool neq(Expression_Obj lhs, Expression_Obj rhs) { return eq(lhs, rhs) == false; } + bool gt(Expression_Obj lhs, Expression_Obj rhs) { return !cmp(lhs, rhs, Sass_OP::GT) && neq(lhs, rhs); } + bool lte(Expression_Obj lhs, Expression_Obj rhs) { return cmp(lhs, rhs, Sass_OP::LTE) || eq(lhs, rhs); } + bool gte(Expression_Obj lhs, Expression_Obj rhs) { return !cmp(lhs, rhs, Sass_OP::GTE) || eq(lhs, rhs); } + + /* colour math deprecation warning */ + void op_color_deprecation(enum Sass_OP op, std::string lsh, std::string rhs, const ParserState& pstate) + { + std::string op_str( + op == Sass_OP::ADD ? "plus" : + op == Sass_OP::DIV ? "div" : + op == Sass_OP::SUB ? "minus" : + op == Sass_OP::MUL ? "times" : "" + ); + + std::string msg("The operation `" + lsh + " " + op_str + " " + rhs + "` is deprecated and will be an error in future versions."); + std::string tail("Consider using Sass's color functions instead.\nhttp://sass-lang.com/documentation/Sass/Script/Functions.html#other_color_functions"); + + deprecated(msg, tail, false, pstate); + } + + /* static function, throws OperationError, has no traces but optional pstate for returned value */ + Value_Ptr op_strings(Sass::Operand operand, Value& lhs, Value& rhs, struct Sass_Inspect_Options opt, const ParserState& pstate, bool delayed) + { + enum Sass_OP op = operand.operand; + + String_Quoted_Ptr lqstr = Cast(&lhs); + String_Quoted_Ptr rqstr = Cast(&rhs); + + std::string lstr(lqstr ? lqstr->value() : lhs.to_string(opt)); + std::string rstr(rqstr ? 
rqstr->value() : rhs.to_string(opt)); + + if (Cast(&lhs)) throw Exception::InvalidNullOperation(&lhs, &rhs, op); + if (Cast(&rhs)) throw Exception::InvalidNullOperation(&lhs, &rhs, op); + + std::string sep; + switch (op) { + case Sass_OP::ADD: sep = ""; break; + case Sass_OP::SUB: sep = "-"; break; + case Sass_OP::DIV: sep = "/"; break; + case Sass_OP::EQ: sep = "=="; break; + case Sass_OP::NEQ: sep = "!="; break; + case Sass_OP::LT: sep = "<"; break; + case Sass_OP::GT: sep = ">"; break; + case Sass_OP::LTE: sep = "<="; break; + case Sass_OP::GTE: sep = ">="; break; + default: + throw Exception::UndefinedOperation(&lhs, &rhs, op); + break; + } + + if (op == Sass_OP::ADD) { + // create string that might be quoted on output (but do not unquote what we pass) + return SASS_MEMORY_NEW(String_Quoted, pstate, lstr + rstr, 0, false, true); + } + + // add whitespace around operator + // but only if result is not delayed + if (sep != "" && delayed == false) { + if (operand.ws_before) sep = " " + sep; + if (operand.ws_after) sep = sep + " "; + } + + if (op == Sass_OP::SUB || op == Sass_OP::DIV) { + if (lqstr && lqstr->quote_mark()) lstr = quote(lstr); + if (rqstr && rqstr->quote_mark()) rstr = quote(rstr); + } + + return SASS_MEMORY_NEW(String_Constant, pstate, lstr + sep + rstr); + } + + /* static function, throws OperationError, has no traces but optional pstate for returned value */ + Value_Ptr op_colors(enum Sass_OP op, const Color& lhs, const Color& rhs, struct Sass_Inspect_Options opt, const ParserState& pstate, bool delayed) + { + + if (lhs.a() != rhs.a()) { + throw Exception::AlphaChannelsNotEqual(&lhs, &rhs, op); + } + if ((op == Sass_OP::DIV || op == Sass_OP::MOD) && (!rhs.r() || !rhs.g() || !rhs.b())) { + throw Exception::ZeroDivisionError(lhs, rhs); + } + + op_color_deprecation(op, lhs.to_string(), rhs.to_string(), pstate); + + return SASS_MEMORY_NEW(Color, + pstate, + ops[op](lhs.r(), rhs.r()), + ops[op](lhs.g(), rhs.g()), + ops[op](lhs.b(), rhs.b()), + lhs.a()); + } + + /* static function, throws OperationError, has no traces but optional pstate for returned value */ + Value_Ptr op_numbers(enum Sass_OP op, const Number& lhs, const Number& rhs, struct Sass_Inspect_Options opt, const ParserState& pstate, bool delayed) + { + double lval = lhs.value(); + double rval = rhs.value(); + + if (op == Sass_OP::MOD && rval == 0) { + return SASS_MEMORY_NEW(String_Quoted, pstate, "NaN"); + } + + if (op == Sass_OP::DIV && rval == 0) { + std::string result(lval ? 
"Infinity" : "NaN"); + return SASS_MEMORY_NEW(String_Quoted, pstate, result); + } + + size_t l_n_units = lhs.numerators.size(); + size_t l_d_units = lhs.numerators.size(); + size_t r_n_units = rhs.denominators.size(); + size_t r_d_units = rhs.denominators.size(); + // optimize out the most common and simplest case + if (l_n_units == r_n_units && l_d_units == r_d_units) { + if (l_n_units + l_d_units <= 1 && r_n_units + r_d_units <= 1) { + if (lhs.numerators == rhs.numerators) { + if (lhs.denominators == rhs.denominators) { + Number_Ptr v = SASS_MEMORY_COPY(&lhs); + v->value(ops[op](lval, rval)); + return v; + } + } + } + } + + Number_Obj v = SASS_MEMORY_COPY(&lhs); + + if (lhs.is_unitless() && (op == Sass_OP::ADD || op == Sass_OP::SUB || op == Sass_OP::MOD)) { + v->numerators = rhs.numerators; + v->denominators = rhs.denominators; + } + + if (op == Sass_OP::MUL) { + v->value(ops[op](lval, rval)); + v->numerators.insert(v->numerators.end(), + rhs.numerators.begin(), rhs.numerators.end() + ); + v->denominators.insert(v->denominators.end(), + rhs.denominators.begin(), rhs.denominators.end() + ); + v->reduce(); + } + else if (op == Sass_OP::DIV) { + v->value(ops[op](lval, rval)); + v->numerators.insert(v->numerators.end(), + rhs.denominators.begin(), rhs.denominators.end() + ); + v->denominators.insert(v->denominators.end(), + rhs.numerators.begin(), rhs.numerators.end() + ); + v->reduce(); + } + else { + Number ln(lhs), rn(rhs); + ln.reduce(); rn.reduce(); + double f(rn.convert_factor(ln)); + v->value(ops[op](lval, rn.value() * f)); + } + + v->pstate(pstate); + return v.detach(); + } + + /* static function, throws OperationError, has no traces but optional pstate for returned value */ + Value_Ptr op_number_color(enum Sass_OP op, const Number& lhs, const Color& rhs, struct Sass_Inspect_Options opt, const ParserState& pstate, bool delayed) + { + double lval = lhs.value(); + + switch (op) { + case Sass_OP::ADD: + case Sass_OP::MUL: { + op_color_deprecation(op, lhs.to_string(), rhs.to_string(opt), pstate); + return SASS_MEMORY_NEW(Color, + pstate, + ops[op](lval, rhs.r()), + ops[op](lval, rhs.g()), + ops[op](lval, rhs.b()), + rhs.a()); + } + case Sass_OP::SUB: + case Sass_OP::DIV: { + std::string color(rhs.to_string(opt)); + op_color_deprecation(op, lhs.to_string(), color, pstate); + return SASS_MEMORY_NEW(String_Quoted, + pstate, + lhs.to_string(opt) + + sass_op_separator(op) + + color); + } + default: break; + } + throw Exception::UndefinedOperation(&lhs, &rhs, op); + } + + /* static function, throws OperationError, has no traces but optional pstate for returned value */ + Value_Ptr op_color_number(enum Sass_OP op, const Color& lhs, const Number& rhs, struct Sass_Inspect_Options opt, const ParserState& pstate, bool delayed) + { + double rval = rhs.value(); + + if ((op == Sass_OP::DIV || op == Sass_OP::DIV) && rval == 0) { + // comparison of Fixnum with Float failed? 
+ throw Exception::ZeroDivisionError(lhs, rhs); + } + + op_color_deprecation(op, lhs.to_string(), rhs.to_string(), pstate); + + return SASS_MEMORY_NEW(Color, + pstate, + ops[op](lhs.r(), rval), + ops[op](lhs.g(), rval), + ops[op](lhs.b(), rval), + lhs.a()); + } + + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/operators.hpp b/mybulma/node_modules/node-sass/src/libsass/src/operators.hpp new file mode 100644 index 0000000..f89eb4e --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/operators.hpp @@ -0,0 +1,30 @@ +#ifndef SASS_OPERATORS_H +#define SASS_OPERATORS_H + +#include "values.hpp" +#include "sass/values.h" + +namespace Sass { + + namespace Operators { + + // equality operator using AST Node operator== + bool eq(Expression_Obj, Expression_Obj); + bool neq(Expression_Obj, Expression_Obj); + // specific operators based on cmp and eq + bool lt(Expression_Obj, Expression_Obj); + bool gt(Expression_Obj, Expression_Obj); + bool lte(Expression_Obj, Expression_Obj); + bool gte(Expression_Obj, Expression_Obj); + // arithmetic for all the combinations that matter + Value_Ptr op_strings(Sass::Operand, Value&, Value&, struct Sass_Inspect_Options opt, const ParserState& pstate, bool delayed = false); + Value_Ptr op_colors(enum Sass_OP, const Color&, const Color&, struct Sass_Inspect_Options opt, const ParserState& pstate, bool delayed = false); + Value_Ptr op_numbers(enum Sass_OP, const Number&, const Number&, struct Sass_Inspect_Options opt, const ParserState& pstate, bool delayed = false); + Value_Ptr op_number_color(enum Sass_OP, const Number&, const Color&, struct Sass_Inspect_Options opt, const ParserState& pstate, bool delayed = false); + Value_Ptr op_color_number(enum Sass_OP, const Color&, const Number&, struct Sass_Inspect_Options opt, const ParserState& pstate, bool delayed = false); + + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/output.cpp b/mybulma/node_modules/node-sass/src/libsass/src/output.cpp new file mode 100644 index 0000000..b2ca65e --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/output.cpp @@ -0,0 +1,336 @@ +#include "sass.hpp" +#include "ast.hpp" +#include "output.hpp" + +namespace Sass { + + Output::Output(Sass_Output_Options& opt) + : Inspect(Emitter(opt)), + charset(""), + top_nodes(0) + {} + + Output::~Output() { } + + void Output::fallback_impl(AST_Node_Ptr n) + { + return n->perform(this); + } + + void Output::operator()(Number_Ptr n) + { + // check for a valid unit here + // includes result for reporting + if (!n->is_valid_css_unit()) { + // should be handle in check_expression + throw Exception::InvalidValue({}, *n); + } + // use values to_string facility + std::string res = n->to_string(opt); + // output the final token + append_token(res, n); + } + + void Output::operator()(Import_Ptr imp) + { + top_nodes.push_back(imp); + } + + void Output::operator()(Map_Ptr m) + { + // should be handle in check_expression + throw Exception::InvalidValue({}, *m); + } + + OutputBuffer Output::get_buffer(void) + { + + Emitter emitter(opt); + Inspect inspect(emitter); + + size_t size_nodes = top_nodes.size(); + for (size_t i = 0; i < size_nodes; i++) { + top_nodes[i]->perform(&inspect); + inspect.append_mandatory_linefeed(); + } + + // flush scheduled outputs + // maybe omit semicolon if possible + inspect.finalize(wbuf.buffer.size() == 0); + // prepend buffer on top + prepend_output(inspect.output()); + // make sure we end with a linefeed + if (!ends_with(wbuf.buffer, opt.linefeed)) { + // if 
the output is not completely empty + if (!wbuf.buffer.empty()) append_string(opt.linefeed); + } + + // search for unicode char + for(const char& chr : wbuf.buffer) { + // skip all ascii chars + // static cast to unsigned to handle `char` being signed / unsigned + if (static_cast(chr) < 128) continue; + // declare the charset + if (output_style() != COMPRESSED) + charset = "@charset \"UTF-8\";" + + std::string(opt.linefeed); + else charset = "\xEF\xBB\xBF"; + // abort search + break; + } + + // add charset as first line, before comments and imports + if (!charset.empty()) prepend_string(charset); + + return wbuf; + + } + + void Output::operator()(Comment_Ptr c) + { + std::string txt = c->text()->to_string(opt); + // if (indentation && txt == "/**/") return; + bool important = c->is_important(); + if (output_style() != COMPRESSED || important) { + if (buffer().size() == 0) { + top_nodes.push_back(c); + } else { + in_comment = true; + append_indentation(); + c->text()->perform(this); + in_comment = false; + if (indentation == 0) { + append_mandatory_linefeed(); + } else { + append_optional_linefeed(); + } + } + } + } + + void Output::operator()(Ruleset_Ptr r) + { + Selector_Obj s = r->selector(); + Block_Obj b = r->block(); + + // Filter out rulesets that aren't printable (process its children though) + if (!Util::isPrintable(r, output_style())) { + for (size_t i = 0, L = b->length(); i < L; ++i) { + const Statement_Obj& stm = b->at(i); + if (Cast(stm)) { + if (!Cast(stm)) { + stm->perform(this); + } + } + } + return; + } + + if (output_style() == NESTED) indentation += r->tabs(); + if (opt.source_comments) { + std::stringstream ss; + append_indentation(); + std::string path(File::abs2rel(r->pstate().path)); + ss << "/* line " << r->pstate().line + 1 << ", " << path << " */"; + append_string(ss.str()); + append_optional_linefeed(); + } + scheduled_crutch = s; + if (s) s->perform(this); + append_scope_opener(b); + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Obj stm = b->at(i); + bool bPrintExpression = true; + // Check print conditions + if (Declaration_Ptr dec = Cast(stm)) { + if (String_Constant_Ptr valConst = Cast(dec->value())) { + std::string val(valConst->value()); + if (String_Quoted_Ptr qstr = Cast(valConst)) { + if (!qstr->quote_mark() && val.empty()) { + bPrintExpression = false; + } + } + } + else if (List_Ptr list = Cast(dec->value())) { + bool all_invisible = true; + for (size_t list_i = 0, list_L = list->length(); list_i < list_L; ++list_i) { + Expression_Ptr item = list->at(list_i); + if (!item->is_invisible()) all_invisible = false; + } + if (all_invisible && !list->is_bracketed()) bPrintExpression = false; + } + } + // Print if OK + if (bPrintExpression) { + stm->perform(this); + } + } + if (output_style() == NESTED) indentation -= r->tabs(); + append_scope_closer(b); + + } + void Output::operator()(Keyframe_Rule_Ptr r) + { + Block_Obj b = r->block(); + Selector_Obj v = r->name(); + + if (!v.isNull()) { + v->perform(this); + } + + if (!b) { + append_colon_separator(); + return; + } + + append_scope_opener(); + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Obj stm = b->at(i); + stm->perform(this); + if (i < L - 1) append_special_linefeed(); + } + append_scope_closer(); + } + + void Output::operator()(Supports_Block_Ptr f) + { + if (f->is_invisible()) return; + + Supports_Condition_Obj c = f->condition(); + Block_Obj b = f->block(); + + // Filter out feature blocks that aren't printable (process its children though) + if (!Util::isPrintable(f, 
output_style())) { + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Obj stm = b->at(i); + if (Cast(stm)) { + stm->perform(this); + } + } + return; + } + + if (output_style() == NESTED) indentation += f->tabs(); + append_indentation(); + append_token("@supports", f); + append_mandatory_space(); + c->perform(this); + append_scope_opener(); + + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Obj stm = b->at(i); + stm->perform(this); + if (i < L - 1) append_special_linefeed(); + } + + if (output_style() == NESTED) indentation -= f->tabs(); + + append_scope_closer(); + + } + + void Output::operator()(Media_Block_Ptr m) + { + if (m->is_invisible()) return; + + Block_Obj b = m->block(); + + // Filter out media blocks that aren't printable (process its children though) + if (!Util::isPrintable(m, output_style())) { + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Obj stm = b->at(i); + if (Cast(stm)) { + stm->perform(this); + } + } + return; + } + if (output_style() == NESTED) indentation += m->tabs(); + append_indentation(); + append_token("@media", m); + append_mandatory_space(); + in_media_block = true; + m->media_queries()->perform(this); + in_media_block = false; + append_scope_opener(); + + for (size_t i = 0, L = b->length(); i < L; ++i) { + if (b->at(i)) { + Statement_Obj stm = b->at(i); + stm->perform(this); + } + if (i < L - 1) append_special_linefeed(); + } + + if (output_style() == NESTED) indentation -= m->tabs(); + append_scope_closer(); + } + + void Output::operator()(Directive_Ptr a) + { + std::string kwd = a->keyword(); + Selector_Obj s = a->selector(); + Expression_Obj v = a->value(); + Block_Obj b = a->block(); + + append_indentation(); + append_token(kwd, a); + if (s) { + append_mandatory_space(); + in_wrapped = true; + s->perform(this); + in_wrapped = false; + } + if (v) { + append_mandatory_space(); + // ruby sass bug? should use options? 
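+ // the at-rule's value expression is rendered with default inspect options rather than the current output options (hence the question in the comment above)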
+ append_token(v->to_string(/* opt */), v); + } + if (!b) { + append_delimiter(); + return; + } + + if (b->is_invisible() || b->length() == 0) { + append_optional_space(); + return append_string("{}"); + } + + append_scope_opener(); + + bool format = kwd != "@font-face";; + + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Obj stm = b->at(i); + stm->perform(this); + if (i < L - 1 && format) append_special_linefeed(); + } + + append_scope_closer(); + } + + void Output::operator()(String_Quoted_Ptr s) + { + if (s->quote_mark()) { + append_token(quote(s->value(), s->quote_mark()), s); + } else if (!in_comment) { + append_token(string_to_output(s->value()), s); + } else { + append_token(s->value(), s); + } + } + + void Output::operator()(String_Constant_Ptr s) + { + std::string value(s->value()); + if (s->can_compress_whitespace() && output_style() == COMPRESSED) { + value.erase(std::remove_if(value.begin(), value.end(), ::isspace), value.end()); + } + if (!in_comment && !in_custom_property) { + append_token(string_to_output(value), s); + } else { + append_token(value, s); + } + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/output.hpp b/mybulma/node_modules/node-sass/src/libsass/src/output.hpp new file mode 100644 index 0000000..c460b13 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/output.hpp @@ -0,0 +1,54 @@ +#ifndef SASS_OUTPUT_H +#define SASS_OUTPUT_H + +#include +#include + +#include "util.hpp" +#include "inspect.hpp" +#include "operation.hpp" + +namespace Sass { + class Context; + + // Refactor to make it generic to find linefeed (look behind) + inline bool ends_with(std::string const & value, std::string const & ending) + { + if (ending.size() > value.size()) return false; + return std::equal(ending.rbegin(), ending.rend(), value.rbegin()); + } + + class Output : public Inspect { + protected: + using Inspect::operator(); + + public: + Output(Sass_Output_Options& opt); + virtual ~Output(); + + protected: + std::string charset; + std::vector top_nodes; + + public: + OutputBuffer get_buffer(void); + + virtual void operator()(Map_Ptr); + virtual void operator()(Ruleset_Ptr); + virtual void operator()(Supports_Block_Ptr); + virtual void operator()(Media_Block_Ptr); + virtual void operator()(Directive_Ptr); + virtual void operator()(Keyframe_Rule_Ptr); + virtual void operator()(Import_Ptr); + virtual void operator()(Comment_Ptr); + virtual void operator()(Number_Ptr); + virtual void operator()(String_Quoted_Ptr); + virtual void operator()(String_Constant_Ptr); + + void fallback_impl(AST_Node_Ptr n); + + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/parser.cpp b/mybulma/node_modules/node-sass/src/libsass/src/parser.cpp new file mode 100644 index 0000000..28fe022 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/parser.cpp @@ -0,0 +1,3137 @@ +#include "sass.hpp" +#include "parser.hpp" +#include "file.hpp" +#include "inspect.hpp" +#include "constants.hpp" +#include "util.hpp" +#include "prelexer.hpp" +#include "color_maps.hpp" +#include "sass/functions.h" +#include "error_handling.hpp" + +// Notes about delayed: some ast nodes can have delayed evaluation so +// they can preserve their original semantics if needed. This is most +// prominently exhibited by the division operation, since it is not +// only a valid operation, but also a valid css statement (i.e. for +// fonts, as in `16px/24px`). When parsing lists and expression we +// unwrap single items from lists and other operations. 
A nested list +// must not be delayed, only the items of the first level sometimes +// are delayed (as with argument lists). To achieve this we need to +// pass status to the list parser, so this can be set correctly. +// Another case with delayed values are colors. In compressed mode +// only processed values get compressed (other are left as written). + +#include +#include +#include +#include + +namespace Sass { + using namespace Constants; + using namespace Prelexer; + + Parser Parser::from_c_str(const char* beg, Context& ctx, Backtraces traces, ParserState pstate, const char* source) + { + pstate.offset.column = 0; + pstate.offset.line = 0; + Parser p(ctx, pstate, traces); + p.source = source ? source : beg; + p.position = beg ? beg : p.source; + p.end = p.position + strlen(p.position); + Block_Obj root = SASS_MEMORY_NEW(Block, pstate); + p.block_stack.push_back(root); + root->is_root(true); + return p; + } + + Parser Parser::from_c_str(const char* beg, const char* end, Context& ctx, Backtraces traces, ParserState pstate, const char* source) + { + pstate.offset.column = 0; + pstate.offset.line = 0; + Parser p(ctx, pstate, traces); + p.source = source ? source : beg; + p.position = beg ? beg : p.source; + p.end = end ? end : p.position + strlen(p.position); + Block_Obj root = SASS_MEMORY_NEW(Block, pstate); + p.block_stack.push_back(root); + root->is_root(true); + return p; + } + + void Parser::advanceToNextToken() { + lex < css_comments >(false); + // advance to position + pstate += pstate.offset; + pstate.offset.column = 0; + pstate.offset.line = 0; + } + + Selector_List_Obj Parser::parse_selector(const char* beg, Context& ctx, Backtraces traces, ParserState pstate, const char* source) + { + Parser p = Parser::from_c_str(beg, ctx, traces, pstate, source); + // ToDo: ruby sass errors on parent references + // ToDo: remap the source-map entries somehow + return p.parse_selector_list(false); + } + + bool Parser::peek_newline(const char* start) + { + return peek_linefeed(start ? start : position) + && ! peek_css>(start); + } + + Parser Parser::from_token(Token t, Context& ctx, Backtraces traces, ParserState pstate, const char* source) + { + Parser p(ctx, pstate, traces); + p.source = source ? source : t.begin; + p.position = t.begin ? t.begin : p.source; + p.end = t.end ? 
t.end : p.position + strlen(p.position); + Block_Obj root = SASS_MEMORY_NEW(Block, pstate); + p.block_stack.push_back(root); + root->is_root(true); + return p; + } + + /* main entry point to parse root block */ + Block_Obj Parser::parse() + { + + // consume unicode BOM + read_bom(); + + // scan the input to find invalid utf8 sequences + const char* it = utf8::find_invalid(position, end); + + // report invalid utf8 + if (it != end) { + pstate += Offset::init(position, it); + traces.push_back(Backtrace(pstate)); + throw Exception::InvalidSass(pstate, traces, "Invalid UTF-8 sequence"); + } + + // create a block AST node to hold children + Block_Obj root = SASS_MEMORY_NEW(Block, pstate, 0, true); + + // check seems a bit esoteric but works + if (ctx.resources.size() == 1) { + // apply headers only on very first include + ctx.apply_custom_headers(root, path, pstate); + } + + // parse children nodes + block_stack.push_back(root); + parse_block_nodes(true); + block_stack.pop_back(); + + // update final position + root->update_pstate(pstate); + + if (position != end) { + css_error("Invalid CSS", " after ", ": expected selector or at-rule, was "); + } + + return root; + } + + + // convenience function for block parsing + // will create a new block ad-hoc for you + // this is the base block parsing function + Block_Obj Parser::parse_css_block(bool is_root) + { + + // parse comments before block + // lex < optional_css_comments >(); + + // lex mandatory opener or error out + if (!lex_css < exactly<'{'> >()) { + css_error("Invalid CSS", " after ", ": expected \"{\", was "); + } + // create new block and push to the selector stack + Block_Obj block = SASS_MEMORY_NEW(Block, pstate, 0, is_root); + block_stack.push_back(block); + + if (!parse_block_nodes(is_root)) css_error("Invalid CSS", " after ", ": expected \"}\", was "); + + if (!lex_css < exactly<'}'> >()) { + css_error("Invalid CSS", " after ", ": expected \"}\", was "); + } + + // update for end position + // this seems to be done somewhere else + // but that fixed selector schema issue + // block->update_pstate(pstate); + + // parse comments after block + // lex < optional_css_comments >(); + + block_stack.pop_back(); + + return block; + } + + // convenience function for block parsing + // will create a new block ad-hoc for you + // also updates the `in_at_root` flag + Block_Obj Parser::parse_block(bool is_root) + { + return parse_css_block(is_root); + } + + // the main block parsing function + // parses stuff between `{` and `}` + bool Parser::parse_block_nodes(bool is_root) + { + + // loop until end of string + while (position < end) { + + // we should be able to refactor this + parse_block_comments(); + lex < css_whitespace >(); + + if (lex < exactly<';'> >()) continue; + if (peek < end_of_file >()) return true; + if (peek < exactly<'}'> >()) return true; + + if (parse_block_node(is_root)) continue; + + parse_block_comments(); + + if (lex_css < exactly<';'> >()) continue; + if (peek_css < end_of_file >()) return true; + if (peek_css < exactly<'}'> >()) return true; + + // illegal sass + return false; + } + // return success + return true; + } + + // parser for a single node in a block + // semicolons must be lexed beforehand + bool Parser::parse_block_node(bool is_root) { + + Block_Obj block = block_stack.back(); + + parse_block_comments(); + + // throw away white-space + // includes line comments + lex < css_whitespace >(); + + Lookahead lookahead_result; + + // also parse block comments + + // first parse everything that is allowed in 
functions + if (lex < variable >(true)) { block->append(parse_assignment()); } + else if (lex < kwd_err >(true)) { block->append(parse_error()); } + else if (lex < kwd_dbg >(true)) { block->append(parse_debug()); } + else if (lex < kwd_warn >(true)) { block->append(parse_warning()); } + else if (lex < kwd_if_directive >(true)) { block->append(parse_if_directive()); } + else if (lex < kwd_for_directive >(true)) { block->append(parse_for_directive()); } + else if (lex < kwd_each_directive >(true)) { block->append(parse_each_directive()); } + else if (lex < kwd_while_directive >(true)) { block->append(parse_while_directive()); } + else if (lex < kwd_return_directive >(true)) { block->append(parse_return_directive()); } + + // parse imports to process later + else if (lex < kwd_import >(true)) { + Scope parent = stack.empty() ? Scope::Rules : stack.back(); + if (parent != Scope::Function && parent != Scope::Root && parent != Scope::Rules && parent != Scope::Media) { + if (! peek_css< uri_prefix >(position)) { // this seems to go in ruby sass 3.4.20 + error("Import directives may not be used within control directives or mixins."); + } + } + // this puts the parsed doc into sheets + // import stub will fetch this in expand + Import_Obj imp = parse_import(); + // if it is a url, we only add the statement + if (!imp->urls().empty()) block->append(imp); + // process all resources now (add Import_Stub nodes) + for (size_t i = 0, S = imp->incs().size(); i < S; ++i) { + block->append(SASS_MEMORY_NEW(Import_Stub, pstate, imp->incs()[i])); + } + } + + else if (lex < kwd_extend >(true)) { + Lookahead lookahead = lookahead_for_include(position); + if (!lookahead.found) css_error("Invalid CSS", " after ", ": expected selector, was "); + Selector_List_Obj target; + if (!lookahead.has_interpolants) { + target = parse_selector_list(true); + } + else { + target = SASS_MEMORY_NEW(Selector_List, pstate); + target->schema(parse_selector_schema(lookahead.found, true)); + } + + block->append(SASS_MEMORY_NEW(Extension, pstate, target)); + } + + // selector may contain interpolations which need delayed evaluation + else if ( + !(lookahead_result = lookahead_for_selector(position)).error && + !lookahead_result.is_custom_property + ) + { + block->append(parse_ruleset(lookahead_result)); + } + + // parse multiple specific keyword directives + else if (lex < kwd_media >(true)) { block->append(parse_media_block()); } + else if (lex < kwd_at_root >(true)) { block->append(parse_at_root_block()); } + else if (lex < kwd_include_directive >(true)) { block->append(parse_include_directive()); } + else if (lex < kwd_content_directive >(true)) { block->append(parse_content_directive()); } + else if (lex < kwd_supports_directive >(true)) { block->append(parse_supports_directive()); } + else if (lex < kwd_mixin >(true)) { block->append(parse_definition(Definition::MIXIN)); } + else if (lex < kwd_function >(true)) { block->append(parse_definition(Definition::FUNCTION)); } + + // ignore the @charset directive for now + else if (lex< kwd_charset_directive >(true)) { parse_charset_directive(); } + + // generic at keyword (keep last) + else if (lex< re_special_directive >(true)) { block->append(parse_special_directive()); } + else if (lex< re_prefixed_directive >(true)) { block->append(parse_prefixed_directive()); } + else if (lex< at_keyword >(true)) { block->append(parse_directive()); } + + else if (is_root && stack.back() != Scope::AtRoot /* && block->is_root() */) { + lex< css_whitespace >(); + if (position >= end) return true; + 
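+ // at the root level anything not matched above must start a selector or an at-rule, so report it as invalid CSS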
css_error("Invalid CSS", " after ", ": expected 1 selector or at-rule, was "); + } + // parse a declaration + else + { + // ToDo: how does it handle parse errors? + // maybe we are expected to parse something? + Declaration_Obj decl = parse_declaration(); + decl->tabs(indentation); + block->append(decl); + // maybe we have a "sub-block" + if (peek< exactly<'{'> >()) { + if (decl->is_indented()) ++ indentation; + // parse a propset that rides on the declaration's property + stack.push_back(Scope::Properties); + decl->block(parse_block()); + stack.pop_back(); + if (decl->is_indented()) -- indentation; + } + } + // something matched + return true; + } + // EO parse_block_nodes + + // parse imports inside the + Import_Obj Parser::parse_import() + { + Import_Obj imp = SASS_MEMORY_NEW(Import, pstate); + std::vector> to_import; + bool first = true; + do { + while (lex< block_comment >()); + if (lex< quoted_string >()) { + to_import.push_back(std::pair(std::string(lexed), 0)); + } + else if (lex< uri_prefix >()) { + Arguments_Obj args = SASS_MEMORY_NEW(Arguments, pstate); + Function_Call_Obj result = SASS_MEMORY_NEW(Function_Call, pstate, "url", args); + + if (lex< quoted_string >()) { + Expression_Obj quoted_url = parse_string(); + args->append(SASS_MEMORY_NEW(Argument, quoted_url->pstate(), quoted_url)); + } + else if (String_Obj string_url = parse_url_function_argument()) { + args->append(SASS_MEMORY_NEW(Argument, string_url->pstate(), string_url)); + } + else if (peek < skip_over_scopes < exactly < '(' >, exactly < ')' > > >(position)) { + Expression_Obj braced_url = parse_list(); // parse_interpolated_chunk(lexed); + args->append(SASS_MEMORY_NEW(Argument, braced_url->pstate(), braced_url)); + } + else { + error("malformed URL"); + } + if (!lex< exactly<')'> >()) error("URI is missing ')'"); + to_import.push_back(std::pair("", result)); + } + else { + if (first) error("@import directive requires a url or quoted path"); + else error("expecting another url or quoted path in @import list"); + } + first = false; + } while (lex_css< exactly<','> >()); + + if (!peek_css< alternatives< exactly<';'>, exactly<'}'>, end_of_file > >()) { + List_Obj import_queries = parse_media_queries(); + imp->import_queries(import_queries); + } + + for(auto location : to_import) { + if (location.second) { + imp->urls().push_back(location.second); + } + // check if custom importers want to take over the handling + else if (!ctx.call_importers(unquote(location.first), path, pstate, imp)) { + // nobody wants it, so we do our import + ctx.import_url(imp, location.first, path); + } + } + + return imp; + } + + Definition_Obj Parser::parse_definition(Definition::Type which_type) + { + std::string which_str(lexed); + if (!lex< identifier >()) error("invalid name in " + which_str + " definition"); + std::string name(Util::normalize_underscores(lexed)); + if (which_type == Definition::FUNCTION && (name == "and" || name == "or" || name == "not")) + { error("Invalid function name \"" + name + "\"."); } + ParserState source_position_of_def = pstate; + Parameters_Obj params = parse_parameters(); + if (which_type == Definition::MIXIN) stack.push_back(Scope::Mixin); + else stack.push_back(Scope::Function); + Block_Obj body = parse_block(); + stack.pop_back(); + return SASS_MEMORY_NEW(Definition, source_position_of_def, name, params, body, which_type); + } + + Parameters_Obj Parser::parse_parameters() + { + Parameters_Obj params = SASS_MEMORY_NEW(Parameters, pstate); + if (lex_css< exactly<'('> >()) { + // if there's anything there at 
all + if (!peek_css< exactly<')'> >()) { + do { + if (peek< exactly<')'> >()) break; + params->append(parse_parameter()); + } while (lex_css< exactly<','> >()); + } + if (!lex_css< exactly<')'> >()) { + css_error("Invalid CSS", " after ", ": expected \")\", was "); + } + } + return params; + } + + Parameter_Obj Parser::parse_parameter() + { + if (peek< alternatives< exactly<','>, exactly< '{' >, exactly<';'> > >()) { + css_error("Invalid CSS", " after ", ": expected variable (e.g. $foo), was "); + } + while (lex< alternatives < spaces, block_comment > >()); + lex < variable >(); + std::string name(Util::normalize_underscores(lexed)); + ParserState pos = pstate; + Expression_Obj val; + bool is_rest = false; + while (lex< alternatives < spaces, block_comment > >()); + if (lex< exactly<':'> >()) { // there's a default value + while (lex< block_comment >()); + val = parse_space_list(); + } + else if (lex< exactly< ellipsis > >()) { + is_rest = true; + } + return SASS_MEMORY_NEW(Parameter, pos, name, val, is_rest); + } + + Arguments_Obj Parser::parse_arguments() + { + Arguments_Obj args = SASS_MEMORY_NEW(Arguments, pstate); + if (lex_css< exactly<'('> >()) { + // if there's anything there at all + if (!peek_css< exactly<')'> >()) { + do { + if (peek< exactly<')'> >()) break; + args->append(parse_argument()); + } while (lex_css< exactly<','> >()); + } + if (!lex_css< exactly<')'> >()) { + css_error("Invalid CSS", " after ", ": expected expression (e.g. 1px, bold), was "); + } + } + return args; + } + + Argument_Obj Parser::parse_argument() + { + if (peek< alternatives< exactly<','>, exactly< '{' >, exactly<';'> > >()) { + css_error("Invalid CSS", " after ", ": expected \")\", was "); + } + if (peek_css< sequence < exactly< hash_lbrace >, exactly< rbrace > > >()) { + position += 2; + css_error("Invalid CSS", " after ", ": expected expression (e.g. 1px, bold), was "); + } + + Argument_Obj arg; + if (peek_css< sequence < variable, optional_css_comments, exactly<':'> > >()) { + lex_css< variable >(); + std::string name(Util::normalize_underscores(lexed)); + ParserState p = pstate; + lex_css< exactly<':'> >(); + Expression_Obj val = parse_space_list(); + arg = SASS_MEMORY_NEW(Argument, p, val, name); + } + else { + bool is_arglist = false; + bool is_keyword = false; + Expression_Obj val = parse_space_list(); + List_Ptr l = Cast(val); + if (lex_css< exactly< ellipsis > >()) { + if (val->concrete_type() == Expression::MAP || ( + (l != NULL && l->separator() == SASS_HASH) + )) is_keyword = true; + else is_arglist = true; + } + arg = SASS_MEMORY_NEW(Argument, pstate, val, "", is_arglist, is_keyword); + } + return arg; + } + + Assignment_Obj Parser::parse_assignment() + { + std::string name(Util::normalize_underscores(lexed)); + ParserState var_source_position = pstate; + if (!lex< exactly<':'> >()) error("expected ':' after " + name + " in assignment statement"); + if (peek_css< alternatives < exactly<';'>, end_of_file > >()) { + css_error("Invalid CSS", " after ", ": expected expression (e.g. 
1px, bold), was "); + } + Expression_Obj val; + Lookahead lookahead = lookahead_for_value(position); + if (lookahead.has_interpolants && lookahead.found) { + val = parse_value_schema(lookahead.found); + } else { + val = parse_list(); + } + bool is_default = false; + bool is_global = false; + while (peek< alternatives < default_flag, global_flag > >()) { + if (lex< default_flag >()) is_default = true; + else if (lex< global_flag >()) is_global = true; + } + return SASS_MEMORY_NEW(Assignment, var_source_position, name, val, is_default, is_global); + } + + // a ruleset connects a selector and a block + Ruleset_Obj Parser::parse_ruleset(Lookahead lookahead) + { + NESTING_GUARD(nestings); + // inherit is_root from parent block + Block_Obj parent = block_stack.back(); + bool is_root = parent && parent->is_root(); + // make sure to move up the the last position + lex < optional_css_whitespace >(false, true); + // create the connector object (add parts later) + Ruleset_Obj ruleset = SASS_MEMORY_NEW(Ruleset, pstate); + // parse selector static or as schema to be evaluated later + if (lookahead.parsable) ruleset->selector(parse_selector_list(false)); + else { + Selector_List_Obj list = SASS_MEMORY_NEW(Selector_List, pstate); + list->schema(parse_selector_schema(lookahead.position, false)); + ruleset->selector(list); + } + // then parse the inner block + stack.push_back(Scope::Rules); + ruleset->block(parse_block()); + stack.pop_back(); + // update for end position + ruleset->update_pstate(pstate); + ruleset->block()->update_pstate(pstate); + // need this info for sanity checks + ruleset->is_root(is_root); + // return AST Node + return ruleset; + } + + // parse a selector schema that will be evaluated in the eval stage + // uses a string schema internally to do the actual schema handling + // in the eval stage we will be re-parse it into an actual selector + Selector_Schema_Obj Parser::parse_selector_schema(const char* end_of_selector, bool chroot) + { + NESTING_GUARD(nestings); + // move up to the start + lex< optional_spaces >(); + const char* i = position; + // selector schema re-uses string schema implementation + String_Schema_Ptr schema = SASS_MEMORY_NEW(String_Schema, pstate); + // the selector schema is pretty much just a wrapper for the string schema + Selector_Schema_Obj selector_schema = SASS_MEMORY_NEW(Selector_Schema, pstate, schema); + selector_schema->connect_parent(chroot == false); + selector_schema->media_block(last_media_block); + + // process until end + while (i < end_of_selector) { + // try to parse mutliple interpolants + if (const char* p = find_first_in_interval< exactly, block_comment >(i, end_of_selector)) { + // accumulate the preceding segment if the position has advanced + if (i < p) { + std::string parsed(i, p); + String_Constant_Obj str = SASS_MEMORY_NEW(String_Constant, pstate, parsed); + pstate += Offset(parsed); + str->update_pstate(pstate); + schema->append(str); + } + + // skip over all nested inner interpolations up to our own delimiter + const char* j = skip_over_scopes< exactly, exactly >(p + 2, end_of_selector); + // check if the interpolation never ends of only contains white-space (error out) + if (!j || peek < sequence < optional_spaces, exactly > >(p+2)) { + position = p+2; + css_error("Invalid CSS", " after ", ": expected expression (e.g. 
1px, bold), was "); + } + // pass inner expression to the parser to resolve nested interpolations + pstate.add(p, p+2); + Expression_Obj interpolant = Parser::from_c_str(p+2, j, ctx, traces, pstate).parse_list(); + // set status on the list expression + interpolant->is_interpolant(true); + // schema->has_interpolants(true); + // add to the string schema + schema->append(interpolant); + // advance parser state + pstate.add(p+2, j); + // advance position + i = j; + } + // no more interpolants have been found + // add the last segment if there is one + else { + // make sure to add the last bits of the string up to the end (if any) + if (i < end_of_selector) { + std::string parsed(i, end_of_selector); + String_Constant_Obj str = SASS_MEMORY_NEW(String_Constant, pstate, parsed); + pstate += Offset(parsed); + str->update_pstate(pstate); + i = end_of_selector; + schema->append(str); + } + // exit loop + } + } + // EO until eos + + // update position + position = i; + + // update for end position + selector_schema->update_pstate(pstate); + schema->update_pstate(pstate); + + after_token = before_token = pstate; + + // return parsed result + return selector_schema.detach(); + } + // EO parse_selector_schema + + void Parser::parse_charset_directive() + { + lex < + sequence < + quoted_string, + optional_spaces, + exactly <';'> + > + >(); + } + + // called after parsing `kwd_include_directive` + Mixin_Call_Obj Parser::parse_include_directive() + { + // lex identifier into `lexed` var + lex_identifier(); // may error out + // normalize underscores to hyphens + std::string name(Util::normalize_underscores(lexed)); + // create the initial mixin call object + Mixin_Call_Obj call = SASS_MEMORY_NEW(Mixin_Call, pstate, name, 0, 0); + // parse mandatory arguments + call->arguments(parse_arguments()); + // parse optional block + if (peek < exactly <'{'> >()) { + call->block(parse_block()); + } + // return ast node + return call.detach(); + } + // EO parse_include_directive + + // parse a list of complex selectors + // this is the main entry point for most + Selector_List_Obj Parser::parse_selector_list(bool chroot) + { + bool reloop; + bool had_linefeed = false; + NESTING_GUARD(nestings); + Complex_Selector_Obj sel; + Selector_List_Obj group = SASS_MEMORY_NEW(Selector_List, pstate); + group->media_block(last_media_block); + + if (peek_css< alternatives < end_of_file, exactly <'{'>, exactly <','> > >()) { + css_error("Invalid CSS", " after ", ": expected selector, was "); + } + + do { + reloop = false; + + had_linefeed = had_linefeed || peek_newline(); + + if (peek_css< alternatives < class_char < selector_list_delims > > >()) + break; // in case there are superfluous commas at the end + + // now parse the complex selector + sel = parse_complex_selector(chroot); + + if (!sel) return group.detach(); + + sel->has_line_feed(had_linefeed); + + had_linefeed = false; + + while (peek_css< exactly<','> >()) + { + lex< css_comments >(false); + // consume everything up and including the comma separator + reloop = lex< exactly<','> >() != 0; + // remember line break (also between some commas) + had_linefeed = had_linefeed || peek_newline(); + // remember line break (also between some commas) + } + group->append(sel); + } + while (reloop); + while (lex_css< kwd_optional >()) { + group->is_optional(true); + } + // update for end position + group->update_pstate(pstate); + if (sel) sel->last()->has_line_break(false); + return group.detach(); + } + // EO parse_selector_list + + // a complex selector combines a compound selector 
with another + // complex selector, with one of four combinator operations. + // the compound selector (head) is optional, since the combinator + // can come first in the whole selector sequence (like `> DIV'). + Complex_Selector_Obj Parser::parse_complex_selector(bool chroot) + { + + NESTING_GUARD(nestings); + String_Obj reference = 0; + lex < block_comment >(); + advanceToNextToken(); + Complex_Selector_Obj sel = SASS_MEMORY_NEW(Complex_Selector, pstate); + + if (peek < end_of_file >()) return 0; + + // parse the left hand side + Compound_Selector_Obj lhs; + // special case if it starts with combinator ([+~>]) + if (!peek_css< class_char < selector_combinator_ops > >()) { + // parse the left hand side + lhs = parse_compound_selector(); + } + + + // parse combinator between lhs and rhs + Complex_Selector::Combinator combinator = Complex_Selector::ANCESTOR_OF; + if (lex< exactly<'+'> >()) combinator = Complex_Selector::ADJACENT_TO; + else if (lex< exactly<'~'> >()) combinator = Complex_Selector::PRECEDES; + else if (lex< exactly<'>'> >()) combinator = Complex_Selector::PARENT_OF; + else if (lex< sequence < exactly<'/'>, negate < exactly < '*' > > > >()) { + // comments are allowed, but not spaces? + combinator = Complex_Selector::REFERENCE; + if (!lex < re_reference_combinator >()) return 0; + reference = SASS_MEMORY_NEW(String_Constant, pstate, lexed); + if (!lex < exactly < '/' > >()) return 0; // ToDo: error msg? + } + + if (!lhs && combinator == Complex_Selector::ANCESTOR_OF) return 0; + + // lex < block_comment >(); + sel->head(lhs); + sel->combinator(combinator); + sel->media_block(last_media_block); + + if (combinator == Complex_Selector::REFERENCE) sel->reference(reference); + // has linfeed after combinator? + sel->has_line_break(peek_newline()); + // sel->has_line_feed(has_line_feed); + + // check if we got the abort condition (ToDo: optimize) + if (!peek_css< class_char < complex_selector_delims > >()) { + // parse next selector in sequence + sel->tail(parse_complex_selector(true)); + } + + // add a parent selector if we are not in a root + // also skip adding parent ref if we only have refs + if (!sel->has_parent_ref() && !chroot) { + // create the objects to wrap parent selector reference + Compound_Selector_Obj head = SASS_MEMORY_NEW(Compound_Selector, pstate); + Parent_Selector_Ptr parent = SASS_MEMORY_NEW(Parent_Selector, pstate, false); + parent->media_block(last_media_block); + head->media_block(last_media_block); + // add simple selector + head->append(parent); + // selector may not have any head yet + if (!sel->head()) { sel->head(head); } + // otherwise we need to create a new complex selector and set the old one as its tail + else { + sel = SASS_MEMORY_NEW(Complex_Selector, pstate, Complex_Selector::ANCESTOR_OF, head, sel); + sel->media_block(last_media_block); + } + // peek for linefeed and remember result on head + // if (peek_newline()) head->has_line_break(true); + } + + sel->update_pstate(pstate); + // complex selector + return sel; + } + // EO parse_complex_selector + + // parse one compound selector, which is basically + // a list of simple selectors (directly adjacent) + // lex them exactly (without skipping white-space) + Compound_Selector_Obj Parser::parse_compound_selector() + { + // init an empty compound selector wrapper + Compound_Selector_Obj seq = SASS_MEMORY_NEW(Compound_Selector, pstate); + seq->media_block(last_media_block); + + // skip initial white-space + lex< css_whitespace >(); + + // parse list + while (true) + { + // remove all block comments 
(don't skip white-space) + lex< delimited_by< slash_star, star_slash, false > >(false); + // parse functional + if (match < re_pseudo_selector >()) + { + seq->append(parse_simple_selector()); + } + // parse parent selector + else if (lex< exactly<'&'> >(false)) + { + // this produces a linefeed!? + seq->has_parent_reference(true); + seq->append(SASS_MEMORY_NEW(Parent_Selector, pstate)); + // parent selector only allowed at start + // upcoming Sass may allow also trailing + if (seq->length() > 1) { + ParserState state(pstate); + Simple_Selector_Obj cur = (*seq)[seq->length()-1]; + Simple_Selector_Obj prev = (*seq)[seq->length()-2]; + std::string sel(prev->to_string({ NESTED, 5 })); + std::string found(cur->to_string({ NESTED, 5 })); + if (lex < identifier >()) { found += std::string(lexed); } + error("Invalid CSS after \"" + sel + "\": expected \"{\", was \"" + found + "\"\n\n" + "\"" + found + "\" may only be used at the beginning of a compound selector.", state); + } + } + // parse type selector + else if (lex< re_type_selector >(false)) + { + seq->append(SASS_MEMORY_NEW(Element_Selector, pstate, lexed)); + } + // peek for abort conditions + else if (peek< spaces >()) break; + else if (peek< end_of_file >()) { break; } + else if (peek_css < class_char < selector_combinator_ops > >()) break; + else if (peek_css < class_char < complex_selector_delims > >()) break; + // otherwise parse another simple selector + else { + Simple_Selector_Obj sel = parse_simple_selector(); + if (!sel) return 0; + seq->append(sel); + } + } + + if (seq && !peek_css>>()) { + seq->has_line_break(peek_newline()); + } + + // EO while true + return seq; + + } + // EO parse_compound_selector + + Simple_Selector_Obj Parser::parse_simple_selector() + { + lex < css_comments >(false); + if (lex< class_name >()) { + return SASS_MEMORY_NEW(Class_Selector, pstate, lexed); + } + else if (lex< id_name >()) { + return SASS_MEMORY_NEW(Id_Selector, pstate, lexed); + } + else if (lex< alternatives < variable, number, static_reference_combinator > >()) { + return SASS_MEMORY_NEW(Element_Selector, pstate, lexed); + } + else if (peek< pseudo_not >()) { + return parse_negated_selector(); + } + else if (peek< re_pseudo_selector >()) { + return parse_pseudo_selector(); + } + else if (peek< exactly<':'> >()) { + return parse_pseudo_selector(); + } + else if (lex < exactly<'['> >()) { + return parse_attribute_selector(); + } + else if (lex< placeholder >()) { + Placeholder_Selector_Ptr sel = SASS_MEMORY_NEW(Placeholder_Selector, pstate, lexed); + sel->media_block(last_media_block); + return sel; + } + else { + css_error("Invalid CSS", " after ", ": expected selector, was "); + } + // failed + return 0; + } + + Wrapped_Selector_Obj Parser::parse_negated_selector() + { + lex< pseudo_not >(); + std::string name(lexed); + ParserState nsource_position = pstate; + Selector_List_Obj negated = parse_selector_list(true); + if (!lex< exactly<')'> >()) { + error("negated selector is missing ')'"); + } + name.erase(name.size() - 1); + return SASS_MEMORY_NEW(Wrapped_Selector, nsource_position, name, negated); + } + + // a pseudo selector often starts with one or two colons + // it can contain more selectors inside parentheses + Simple_Selector_Obj Parser::parse_pseudo_selector() { + if (lex< sequence< + optional < pseudo_prefix >, + // we keep the space within the name, strange enough + // ToDo: refactor output to schedule the space for it + // or do we really want to keep the real white-space? 
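+ // i.e. lex the pseudo selector name up to and including its opening parenthesis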
+ sequence< identifier, optional < block_comment >, exactly<'('> > + > >()) + { + + std::string name(lexed); + name.erase(name.size() - 1); + ParserState p = pstate; + + // specially parse static stuff + // ToDo: really everything static? + if (peek_css < + sequence < + alternatives < + static_value, + binomial + >, + optional_css_whitespace, + exactly<')'> + > + >() + ) { + lex_css< alternatives < static_value, binomial > >(); + String_Constant_Obj expr = SASS_MEMORY_NEW(String_Constant, pstate, lexed); + if (lex_css< exactly<')'> >()) { + expr->can_compress_whitespace(true); + return SASS_MEMORY_NEW(Pseudo_Selector, p, name, expr); + } + } + else if (Selector_List_Obj wrapped = parse_selector_list(true)) { + if (wrapped && lex_css< exactly<')'> >()) { + return SASS_MEMORY_NEW(Wrapped_Selector, p, name, wrapped); + } + } + + } + // EO if pseudo selector + + else if (lex < sequence< optional < pseudo_prefix >, identifier > >()) { + return SASS_MEMORY_NEW(Pseudo_Selector, pstate, lexed); + } + else if(lex < pseudo_prefix >()) { + css_error("Invalid CSS", " after ", ": expected pseudoclass or pseudoelement, was "); + } + + css_error("Invalid CSS", " after ", ": expected \")\", was "); + + // unreachable statement + return 0; + } + + const char* Parser::re_attr_sensitive_close(const char* src) + { + return alternatives < exactly<']'>, exactly<'/'> >(src); + } + + const char* Parser::re_attr_insensitive_close(const char* src) + { + return sequence < insensitive<'i'>, re_attr_sensitive_close >(src); + } + + Attribute_Selector_Obj Parser::parse_attribute_selector() + { + ParserState p = pstate; + if (!lex_css< attribute_name >()) error("invalid attribute name in attribute selector"); + std::string name(lexed); + if (lex_css< re_attr_sensitive_close >()) { + return SASS_MEMORY_NEW(Attribute_Selector, p, name, "", 0, 0); + } + else if (lex_css< re_attr_insensitive_close >()) { + char modifier = lexed.begin[0]; + return SASS_MEMORY_NEW(Attribute_Selector, p, name, "", 0, modifier); + } + if (!lex_css< alternatives< exact_match, class_match, dash_match, + prefix_match, suffix_match, substring_match > >()) { + error("invalid operator in attribute selector for " + name); + } + std::string matcher(lexed); + + String_Obj value = 0; + if (lex_css< identifier >()) { + value = SASS_MEMORY_NEW(String_Constant, p, lexed); + } + else if (lex_css< quoted_string >()) { + value = parse_interpolated_chunk(lexed, true); // needed! 
+ } + else { + error("expected a string constant or identifier in attribute selector for " + name); + } + + if (lex_css< re_attr_sensitive_close >()) { + return SASS_MEMORY_NEW(Attribute_Selector, p, name, matcher, value, 0); + } + else if (lex_css< re_attr_insensitive_close >()) { + char modifier = lexed.begin[0]; + return SASS_MEMORY_NEW(Attribute_Selector, p, name, matcher, value, modifier); + } + error("unterminated attribute selector for " + name); + return NULL; // to satisfy compilers (error must not return) + } + + /* parse block comment and add to block */ + void Parser::parse_block_comments() + { + Block_Obj block = block_stack.back(); + + while (lex< block_comment >()) { + bool is_important = lexed.begin[2] == '!'; + // flag on second param is to skip loosely over comments + String_Obj contents = parse_interpolated_chunk(lexed, true, false); + block->append(SASS_MEMORY_NEW(Comment, pstate, contents, is_important)); + } + } + + Declaration_Obj Parser::parse_declaration() { + String_Obj prop; + bool is_custom_property = false; + if (lex< sequence< optional< exactly<'*'> >, identifier_schema > >()) { + const std::string property(lexed); + is_custom_property = property.compare(0, 2, "--") == 0; + prop = parse_identifier_schema(); + } + else if (lex< sequence< optional< exactly<'*'> >, identifier, zero_plus< block_comment > > >()) { + const std::string property(lexed); + is_custom_property = property.compare(0, 2, "--") == 0; + prop = SASS_MEMORY_NEW(String_Constant, pstate, lexed); + } + else { + css_error("Invalid CSS", " after ", ": expected \"}\", was "); + } + bool is_indented = true; + const std::string property(lexed); + if (!lex_css< one_plus< exactly<':'> > >()) error("property \"" + escape_string(property) + "\" must be followed by a ':'"); + if (!is_custom_property && match< sequence< optional_css_comments, exactly<';'> > >()) error("style declaration must contain a value"); + if (match< sequence< optional_css_comments, exactly<'{'> > >()) is_indented = false; // don't indent if value is empty + if (is_custom_property) { + return SASS_MEMORY_NEW(Declaration, prop->pstate(), prop, parse_css_variable_value(), false, true); + } + lex < css_comments >(false); + if (peek_css< static_value >()) { + return SASS_MEMORY_NEW(Declaration, prop->pstate(), prop, parse_static_value()/*, lex()*/); + } + else { + Expression_Obj value; + Lookahead lookahead = lookahead_for_value(position); + if (lookahead.found) { + if (lookahead.has_interpolants) { + value = parse_value_schema(lookahead.found); + } else { + value = parse_list(DELAYED); + } + } + else { + value = parse_list(DELAYED); + if (List_Ptr list = Cast(value)) { + if (!list->is_bracketed() && list->length() == 0 && !peek< exactly <'{'> >()) { + css_error("Invalid CSS", " after ", ": expected expression (e.g. 
1px, bold), was "); + } + } + } + lex < css_comments >(false); + Declaration_Obj decl = SASS_MEMORY_NEW(Declaration, prop->pstate(), prop, value/*, lex()*/); + decl->is_indented(is_indented); + decl->update_pstate(pstate); + return decl; + } + } + + // parse +/- and return false if negative + // this is never hit via spec tests + bool Parser::parse_number_prefix() + { + bool positive = true; + while(true) { + if (lex < block_comment >()) continue; + if (lex < number_prefix >()) continue; + if (lex < exactly < '-' > >()) { + positive = !positive; + continue; + } + break; + } + return positive; + } + + Expression_Obj Parser::parse_map() + { + NESTING_GUARD(nestings); + Expression_Obj key = parse_list(); + List_Obj map = SASS_MEMORY_NEW(List, pstate, 0, SASS_HASH); + + // it's not a map so return the lexed value as a list value + if (!lex_css< exactly<':'> >()) + { return key; } + + List_Obj l = Cast(key); + if (l && l->separator() == SASS_COMMA) { + css_error("Invalid CSS", " after ", ": expected \")\", was "); + } + + Expression_Obj value = parse_space_list(); + + map->append(key); + map->append(value); + + while (lex_css< exactly<','> >()) + { + // allow trailing commas - #495 + if (peek_css< exactly<')'> >(position)) + { break; } + + key = parse_space_list(); + + if (!(lex< exactly<':'> >())) + { css_error("Invalid CSS", " after ", ": expected \":\", was "); } + + value = parse_space_list(); + + map->append(key); + map->append(value); + } + + ParserState ps = map->pstate(); + ps.offset = pstate - ps + pstate.offset; + map->pstate(ps); + + return map; + } + + Expression_Obj Parser::parse_bracket_list() + { + NESTING_GUARD(nestings); + // check if we have an empty list + // return the empty list as such + if (peek_css< list_terminator >(position)) + { + // return an empty list (nothing to delay) + return SASS_MEMORY_NEW(List, pstate, 0, SASS_SPACE, false, true); + } + + bool has_paren = peek_css< exactly<'('> >() != NULL; + + // now try to parse a space list + Expression_Obj list = parse_space_list(); + // if it's a singleton, return it (don't wrap it) + if (!peek_css< exactly<','> >(position)) { + List_Obj l = Cast(list); + if (!l || l->is_bracketed() || has_paren) { + List_Obj bracketed_list = SASS_MEMORY_NEW(List, pstate, 1, SASS_SPACE, false, true); + bracketed_list->append(list); + return bracketed_list; + } + l->is_bracketed(true); + return l; + } + + // if we got so far, we actually do have a comma list + List_Obj bracketed_list = SASS_MEMORY_NEW(List, pstate, 2, SASS_COMMA, false, true); + // wrap the first expression + bracketed_list->append(list); + + while (lex_css< exactly<','> >()) + { + // check for abort condition + if (peek_css< list_terminator >(position) + ) { break; } + // otherwise add another expression + bracketed_list->append(parse_space_list()); + } + // return the list + return bracketed_list; + } + + // parse list returns either a space separated list, + // a comma separated list or any bare expression found. + // so to speak: we unwrap items from lists if possible here! 
+ Expression_Obj Parser::parse_list(bool delayed) + { + NESTING_GUARD(nestings); + return parse_comma_list(delayed); + } + + // will return singletons unwrapped + Expression_Obj Parser::parse_comma_list(bool delayed) + { + NESTING_GUARD(nestings); + // check if we have an empty list + // return the empty list as such + if (peek_css< list_terminator >(position)) + { + // return an empty list (nothing to delay) + return SASS_MEMORY_NEW(List, pstate, 0); + } + + // now try to parse a space list + Expression_Obj list = parse_space_list(); + // if it's a singleton, return it (don't wrap it) + if (!peek_css< exactly<','> >(position)) { + // set_delay doesn't apply to list children + // so this will only undelay single values + if (!delayed) list->set_delayed(false); + return list; + } + + // if we got so far, we actually do have a comma list + List_Obj comma_list = SASS_MEMORY_NEW(List, pstate, 2, SASS_COMMA); + // wrap the first expression + comma_list->append(list); + + while (lex_css< exactly<','> >()) + { + // check for abort condition + if (peek_css< list_terminator >(position) + ) { break; } + // otherwise add another expression + comma_list->append(parse_space_list()); + } + // return the list + return comma_list; + } + // EO parse_comma_list + + // will return singletons unwrapped + Expression_Obj Parser::parse_space_list() + { + NESTING_GUARD(nestings); + Expression_Obj disj1 = parse_disjunction(); + // if it's a singleton, return it (don't wrap it) + if (peek_css< space_list_terminator >(position) + ) { + return disj1; } + + List_Obj space_list = SASS_MEMORY_NEW(List, pstate, 2, SASS_SPACE); + space_list->append(disj1); + + while ( + !(peek_css< space_list_terminator >(position)) && + peek_css< optional_css_whitespace >() != end + ) { + // the space is parsed implicitly? 
+ space_list->append(parse_disjunction());
+ }
+ // return the list
+ return space_list;
+ }
+ // EO parse_space_list
+
+ // parse logical OR operation
+ Expression_Obj Parser::parse_disjunction()
+ {
+ NESTING_GUARD(nestings);
+ advanceToNextToken();
+ ParserState state(pstate);
+ // parse the left hand side conjunction
+ Expression_Obj conj = parse_conjunction();
+ // parse multiple right hand sides
+ std::vector<Expression_Obj> operands;
+ while (lex_css< kwd_or >())
+ operands.push_back(parse_conjunction());
+ // if it's a singleton, return it directly
+ if (operands.size() == 0) return conj;
+ // fold all operands into one binary expression
+ Expression_Obj ex = fold_operands(conj, operands, { Sass_OP::OR });
+ state.offset = pstate - state + pstate.offset;
+ ex->pstate(state);
+ return ex;
+ }
+ // EO parse_disjunction
+
+ // parse logical AND operation
+ Expression_Obj Parser::parse_conjunction()
+ {
+ NESTING_GUARD(nestings);
+ advanceToNextToken();
+ ParserState state(pstate);
+ // parse the left hand side relation
+ Expression_Obj rel = parse_relation();
+ // parse multiple right hand sides
+ std::vector<Expression_Obj> operands;
+ while (lex_css< kwd_and >()) {
+ operands.push_back(parse_relation());
+ }
+ // if it's a singleton, return it directly
+ if (operands.size() == 0) return rel;
+ // fold all operands into one binary expression
+ Expression_Obj ex = fold_operands(rel, operands, { Sass_OP::AND });
+ state.offset = pstate - state + pstate.offset;
+ ex->pstate(state);
+ return ex;
+ }
+ // EO parse_conjunction
+
+ // parse comparison operations
+ Expression_Obj Parser::parse_relation()
+ {
+ NESTING_GUARD(nestings);
+ advanceToNextToken();
+ ParserState state(pstate);
+ // parse the left hand side expression
+ Expression_Obj lhs = parse_expression();
+ std::vector<Expression_Obj> operands;
+ std::vector<Operand> operators;
+ // if it's a singleton, return it (don't wrap it)
+ while (peek< alternatives <
+ kwd_eq,
+ kwd_neq,
+ kwd_gte,
+ kwd_gt,
+ kwd_lte,
+ kwd_lt
+ > >(position))
+ {
+ // is directly adjacent to expression?
+ bool left_ws = peek < css_comments >() != NULL;
+ // parse the operator
+ enum Sass_OP op
+ = lex< kwd_eq >() ? Sass_OP::EQ
+ : lex< kwd_neq >() ? Sass_OP::NEQ
+ : lex< kwd_gte >() ? Sass_OP::GTE
+ : lex< kwd_lte >() ? Sass_OP::LTE
+ : lex< kwd_gt >() ? Sass_OP::GT
+ : lex< kwd_lt >() ? Sass_OP::LT
+ // we checked the possibilities on top of fn
+ : Sass_OP::EQ;
+ // is directly adjacent to expression?
+ bool right_ws = peek < css_comments >() != NULL;
+ operators.push_back({ op, left_ws, right_ws });
+ operands.push_back(parse_expression());
+ }
+ // we are called recursively for list, so we first
+ // fold inner binary expression which has delayed
+ // correctly set to zero. After folding we also unwrap
+ // single nested items. So we cannot set delay on the
+ // returned result here, as we have lost nestings ...
+ Expression_Obj ex = fold_operands(lhs, operands, operators);
+ state.offset = pstate - state + pstate.offset;
+ ex->pstate(state);
+ return ex;
+ }
+ // parse_relation
+
+ // parse expression valid for operations
+ // called from parse_relation
+ // called from parse_for_directive
+ // called from parse_media_expression
+ // parse addition and subtraction operations
+ Expression_Obj Parser::parse_expression()
+ {
+ NESTING_GUARD(nestings);
+ advanceToNextToken();
+ ParserState state(pstate);
+ // parses multiple add and subtract operations
+ // NOTE: make sure that identifiers starting with
+ // NOTE: dashes do NOT count as subtract operation
+ Expression_Obj lhs = parse_operators();
+ // if it's a singleton, return it (don't wrap it)
+ if (!(peek_css< exactly<'+'> >(position) ||
+ // condition is a bit mysterious, but some combinations should not be counted as operations
+ (peek< no_spaces >(position) && peek< sequence< negate< unsigned_number >, exactly<'-'>, negate< space > > >(position)) ||
+ (peek< sequence< negate< unsigned_number >, exactly<'-'>, negate< unsigned_number > > >(position))) ||
+ peek< sequence < zero_plus < exactly <'-' > >, identifier > >(position))
+ { return lhs; }
+
+ std::vector<Expression_Obj> operands;
+ std::vector<Operand> operators;
+ bool left_ws = peek < css_comments >() != NULL;
+ while (
+ lex_css< exactly<'+'> >() ||
+
+ (
+ ! peek_css< sequence < zero_plus < exactly <'-' > >, identifier > >(position)
+ && lex_css< sequence< negate< digit >, exactly<'-'> > >()
+ )
+
+ ) {
+
+ bool right_ws = peek < css_comments >() != NULL;
+ operators.push_back({ lexed.to_string() == "+" ? Sass_OP::ADD : Sass_OP::SUB, left_ws, right_ws });
+ operands.push_back(parse_operators());
+ left_ws = peek < css_comments >() != NULL;
+ }
+
+ if (operands.size() == 0) return lhs;
+ Expression_Obj ex = fold_operands(lhs, operands, operators);
+ state.offset = pstate - state + pstate.offset;
+ ex->pstate(state);
+ return ex;
+ }
+
+ // parse multiplication, division and modulo operations
+ Expression_Obj Parser::parse_operators()
+ {
+ NESTING_GUARD(nestings);
+ advanceToNextToken();
+ ParserState state(pstate);
+ Expression_Obj factor = parse_factor();
+ // if it's a singleton, return it (don't wrap it)
+ std::vector<Expression_Obj> operands; // factors
+ std::vector<Operand> operators; // ops
+ // lex operations to apply to lhs
+ const char* left_ws = peek < css_comments >();
+ while (lex_css< class_char< static_ops > >()) {
+ const char* right_ws = peek < css_comments >();
+ switch(*lexed.begin) {
+ case '*': operators.push_back({ Sass_OP::MUL, left_ws != 0, right_ws != 0 }); break;
+ case '/': operators.push_back({ Sass_OP::DIV, left_ws != 0, right_ws != 0 }); break;
+ case '%': operators.push_back({ Sass_OP::MOD, left_ws != 0, right_ws != 0 }); break;
+ default: throw std::runtime_error("unknown static op parsed");
+ }
+ operands.push_back(parse_factor());
+ left_ws = peek < css_comments >();
+ }
+ // operands and operators to binary expression
+ Expression_Obj ex = fold_operands(factor, operands, operators);
+ state.offset = pstate - state + pstate.offset;
+ ex->pstate(state);
+ return ex;
+ }
+ // EO parse_operators
+
+
+ // called from parse_operators
+ // called from parse_value_schema
+ Expression_Obj Parser::parse_factor()
+ {
+ NESTING_GUARD(nestings);
+ lex < css_comments >(false);
+ if (lex_css< exactly<'('> >()) {
+ // parse_map may return a list
+ Expression_Obj value = parse_map();
+ // lex the expected closing parenthesis
+ if (!lex_css< exactly<')'> >()) error("unclosed parenthesis");
+ // expression can be evaluated
+
return value; + } + else if (lex_css< exactly<'['> >()) { + // explicit bracketed + Expression_Obj value = parse_bracket_list(); + // lex the expected closing square bracket + if (!lex_css< exactly<']'> >()) error("unclosed squared bracket"); + return value; + } + // string may be interpolated + // if (lex< quoted_string >()) { + // return &parse_string(); + // } + else if (peek< ie_property >()) { + return parse_ie_property(); + } + else if (peek< ie_keyword_arg >()) { + return parse_ie_keyword_arg(); + } + else if (peek< sequence < calc_fn_call, exactly <'('> > >()) { + return parse_calc_function(); + } + else if (lex < functional_schema >()) { + return parse_function_call_schema(); + } + else if (lex< identifier_schema >()) { + String_Obj string = parse_identifier_schema(); + if (String_Schema_Ptr schema = Cast(string)) { + if (lex < exactly < '(' > >()) { + schema->append(parse_list()); + lex < exactly < ')' > >(); + } + } + return string; + } + else if (peek< sequence< uri_prefix, W, real_uri_value > >()) { + return parse_url_function_string(); + } + else if (peek< re_functional >()) { + return parse_function_call(); + } + else if (lex< exactly<'+'> >()) { + Unary_Expression_Ptr ex = SASS_MEMORY_NEW(Unary_Expression, pstate, Unary_Expression::PLUS, parse_factor()); + if (ex && ex->operand()) ex->is_delayed(ex->operand()->is_delayed()); + return ex; + } + else if (lex< exactly<'-'> >()) { + Unary_Expression_Ptr ex = SASS_MEMORY_NEW(Unary_Expression, pstate, Unary_Expression::MINUS, parse_factor()); + if (ex && ex->operand()) ex->is_delayed(ex->operand()->is_delayed()); + return ex; + } + else if (lex< exactly<'/'> >()) { + Unary_Expression_Ptr ex = SASS_MEMORY_NEW(Unary_Expression, pstate, Unary_Expression::SLASH, parse_factor()); + if (ex && ex->operand()) ex->is_delayed(ex->operand()->is_delayed()); + return ex; + } + else if (lex< sequence< kwd_not > >()) { + Unary_Expression_Ptr ex = SASS_MEMORY_NEW(Unary_Expression, pstate, Unary_Expression::NOT, parse_factor()); + if (ex && ex->operand()) ex->is_delayed(ex->operand()->is_delayed()); + return ex; + } + // this whole branch is never hit via spec tests + else if (peek < sequence < one_plus < alternatives < css_whitespace, exactly<'-'>, exactly<'+'> > >, number > >()) { + if (parse_number_prefix()) return parse_value(); // prefix is positive + Unary_Expression_Ptr ex = SASS_MEMORY_NEW(Unary_Expression, pstate, Unary_Expression::MINUS, parse_value()); + if (ex->operand()) ex->is_delayed(ex->operand()->is_delayed()); + return ex; + } + else { + return parse_value(); + } + } + + bool number_has_zero(const std::string& parsed) + { + size_t L = parsed.length(); + return !( (L > 0 && parsed.substr(0, 1) == ".") || + (L > 1 && parsed.substr(0, 2) == "0.") || + (L > 1 && parsed.substr(0, 2) == "-.") || + (L > 2 && parsed.substr(0, 3) == "-0.") ); + } + + Number_Ptr Parser::lexed_number(const ParserState& pstate, const std::string& parsed) + { + Number_Ptr nr = SASS_MEMORY_NEW(Number, + pstate, + sass_strtod(parsed.c_str()), + "", + number_has_zero(parsed)); + nr->is_interpolant(false); + nr->is_delayed(true); + return nr; + } + + Number_Ptr Parser::lexed_percentage(const ParserState& pstate, const std::string& parsed) + { + Number_Ptr nr = SASS_MEMORY_NEW(Number, + pstate, + sass_strtod(parsed.c_str()), + "%", + true); + nr->is_interpolant(false); + nr->is_delayed(true); + return nr; + } + + Number_Ptr Parser::lexed_dimension(const ParserState& pstate, const std::string& parsed) + { + size_t L = parsed.length(); + size_t num_pos = 
parsed.find_first_not_of(" \n\r\t");
+ if (num_pos == std::string::npos) num_pos = L;
+ size_t unit_pos = parsed.find_first_not_of("-+0123456789.", num_pos);
+ if (parsed[unit_pos] == 'e' && is_number(parsed[unit_pos+1]) ) {
+ unit_pos = parsed.find_first_not_of("-+0123456789.", ++ unit_pos);
+ }
+ if (unit_pos == std::string::npos) unit_pos = L;
+ const std::string& num = parsed.substr(num_pos, unit_pos - num_pos);
+ Number_Ptr nr = SASS_MEMORY_NEW(Number,
+ pstate,
+ sass_strtod(num.c_str()),
+ Token(number(parsed.c_str())),
+ number_has_zero(parsed));
+ nr->is_interpolant(false);
+ nr->is_delayed(true);
+ return nr;
+ }
+
+ Value_Ptr Parser::lexed_hex_color(const ParserState& pstate, const std::string& parsed)
+ {
+ Color_Ptr color = NULL;
+ if (parsed[0] != '#') {
+ return SASS_MEMORY_NEW(String_Quoted, pstate, parsed);
+ }
+ // chop off the '#'
+ std::string hext(parsed.substr(1));
+ if (parsed.length() == 4) {
+ std::string r(2, parsed[1]);
+ std::string g(2, parsed[2]);
+ std::string b(2, parsed[3]);
+ color = SASS_MEMORY_NEW(Color,
+ pstate,
+ static_cast<double>(strtol(r.c_str(), NULL, 16)),
+ static_cast<double>(strtol(g.c_str(), NULL, 16)),
+ static_cast<double>(strtol(b.c_str(), NULL, 16)),
+ 1, // alpha channel
+ parsed);
+ }
+ else if (parsed.length() == 5) {
+ std::string r(2, parsed[1]);
+ std::string g(2, parsed[2]);
+ std::string b(2, parsed[3]);
+ std::string a(2, parsed[4]);
+ color = SASS_MEMORY_NEW(Color,
+ pstate,
+ static_cast<double>(strtol(r.c_str(), NULL, 16)),
+ static_cast<double>(strtol(g.c_str(), NULL, 16)),
+ static_cast<double>(strtol(b.c_str(), NULL, 16)),
+ static_cast<double>(strtol(a.c_str(), NULL, 16)) / 255,
+ parsed);
+ }
+ else if (parsed.length() == 7) {
+ std::string r(parsed.substr(1,2));
+ std::string g(parsed.substr(3,2));
+ std::string b(parsed.substr(5,2));
+ color = SASS_MEMORY_NEW(Color,
+ pstate,
+ static_cast<double>(strtol(r.c_str(), NULL, 16)),
+ static_cast<double>(strtol(g.c_str(), NULL, 16)),
+ static_cast<double>(strtol(b.c_str(), NULL, 16)),
+ 1, // alpha channel
+ parsed);
+ }
+ else if (parsed.length() == 9) {
+ std::string r(parsed.substr(1,2));
+ std::string g(parsed.substr(3,2));
+ std::string b(parsed.substr(5,2));
+ std::string a(parsed.substr(7,2));
+ color = SASS_MEMORY_NEW(Color,
+ pstate,
+ static_cast<double>(strtol(r.c_str(), NULL, 16)),
+ static_cast<double>(strtol(g.c_str(), NULL, 16)),
+ static_cast<double>(strtol(b.c_str(), NULL, 16)),
+ static_cast<double>(strtol(a.c_str(), NULL, 16)) / 255,
+ parsed);
+ }
+ color->is_interpolant(false);
+ color->is_delayed(false);
+ return color;
+ }
+
+ Value_Ptr Parser::color_or_string(const std::string& lexed) const
+ {
+ if (auto color = name_to_color(lexed)) {
+ auto c = SASS_MEMORY_NEW(Color, color);
+ c->is_delayed(true);
+ c->pstate(pstate);
+ c->disp(lexed);
+ return c;
+ } else {
+ return SASS_MEMORY_NEW(String_Constant, pstate, lexed);
+ }
+ }
+
+ // parse one value for a list
+ Expression_Obj Parser::parse_value()
+ {
+ lex< css_comments >(false);
+ if (lex< ampersand >())
+ {
+ if (match< ampersand >()) {
+ warning("In Sass, \"&&\" means two copies of the parent selector.
You probably want to use \"and\" instead.", pstate); + } + return SASS_MEMORY_NEW(Parent_Selector, pstate); } + + if (lex< kwd_important >()) + { return SASS_MEMORY_NEW(String_Constant, pstate, "!important"); } + + // parse `10%4px` into separated items and not a schema + if (lex< sequence < percentage, lookahead < number > > >()) + { return lexed_percentage(lexed); } + + if (lex< sequence < number, lookahead< sequence < op, number > > > >()) + { return lexed_number(lexed); } + + // string may be interpolated + if (lex< sequence < quoted_string, lookahead < exactly <'-'> > > >()) + { return parse_string(); } + + if (const char* stop = peek< value_schema >()) + { return parse_value_schema(stop); } + + // string may be interpolated + if (lex< quoted_string >()) + { return parse_string(); } + + if (lex< kwd_true >()) + { return SASS_MEMORY_NEW(Boolean, pstate, true); } + + if (lex< kwd_false >()) + { return SASS_MEMORY_NEW(Boolean, pstate, false); } + + if (lex< kwd_null >()) + { return SASS_MEMORY_NEW(Null, pstate); } + + if (lex< identifier >()) { + return color_or_string(lexed); + } + + if (lex< percentage >()) + { return lexed_percentage(lexed); } + + // match hex number first because 0x000 looks like a number followed by an identifier + if (lex< sequence < alternatives< hex, hex0 >, negate < exactly<'-'> > > >()) + { return lexed_hex_color(lexed); } + + if (lex< hexa >()) + { return lexed_hex_color(lexed); } + + if (lex< sequence < exactly <'#'>, identifier > >()) + { return SASS_MEMORY_NEW(String_Quoted, pstate, lexed); } + + // also handle the 10em- foo special case + // alternatives < exactly < '.' >, .. > -- `1.5em-.75em` is split into a list, not a binary expression + if (lex< sequence< dimension, optional< sequence< exactly<'-'>, lookahead< alternatives < space > > > > > >()) + { return lexed_dimension(lexed); } + + if (lex< sequence< static_component, one_plus< strict_identifier > > >()) + { return SASS_MEMORY_NEW(String_Constant, pstate, lexed); } + + if (lex< number >()) + { return lexed_number(lexed); } + + if (lex< variable >()) + { return SASS_MEMORY_NEW(Variable, pstate, Util::normalize_underscores(lexed)); } + + css_error("Invalid CSS", " after ", ": expected expression (e.g. 1px, bold), was "); + + // unreachable statement + return 0; + } + + // this parses interpolation inside other strings + // means the result should later be quoted again + String_Obj Parser::parse_interpolated_chunk(Token chunk, bool constant, bool css) + { + const char* i = chunk.begin; + // see if there any interpolants + const char* p = constant ? find_first_in_interval< exactly >(i, chunk.end) : + find_first_in_interval< exactly, block_comment >(i, chunk.end); + + if (!p) { + String_Quoted_Ptr str_quoted = SASS_MEMORY_NEW(String_Quoted, pstate, std::string(i, chunk.end), 0, false, false, true, css); + if (!constant && str_quoted->quote_mark()) str_quoted->quote_mark('*'); + return str_quoted; + } + + String_Schema_Obj schema = SASS_MEMORY_NEW(String_Schema, pstate, 0, css); + schema->is_interpolant(true); + while (i < chunk.end) { + p = constant ? 
find_first_in_interval< exactly >(i, chunk.end) : + find_first_in_interval< exactly, block_comment >(i, chunk.end); + if (p) { + if (i < p) { + // accumulate the preceding segment if it's nonempty + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, std::string(i, p), css)); + } + // we need to skip anything inside strings + // create a new target in parser/prelexer + if (peek < sequence < optional_spaces, exactly > >(p+2)) { position = p+2; + css_error("Invalid CSS", " after ", ": expected expression (e.g. 1px, bold), was "); + } + const char* j = skip_over_scopes< exactly, exactly >(p + 2, chunk.end); // find the closing brace + if (j) { --j; + // parse the interpolant and accumulate it + Expression_Obj interp_node = Parser::from_token(Token(p+2, j), ctx, traces, pstate, source).parse_list(); + interp_node->is_interpolant(true); + schema->append(interp_node); + i = j; + } + else { + // throw an error if the interpolant is unterminated + error("unterminated interpolant inside string constant " + chunk.to_string()); + } + } + else { // no interpolants left; add the last segment if nonempty + // check if we need quotes here (was not sure after merge) + if (i < chunk.end) schema->append(SASS_MEMORY_NEW(String_Constant, pstate, std::string(i, chunk.end), css)); + break; + } + ++ i; + } + + return schema.detach(); + } + + String_Schema_Obj Parser::parse_css_variable_value(bool top_level) + { + String_Schema_Obj schema = SASS_MEMORY_NEW(String_Schema, pstate); + String_Schema_Obj tok; + if (!(tok = parse_css_variable_value_token(top_level))) { + return NULL; + } + + schema->concat(tok); + while ((tok = parse_css_variable_value_token(top_level))) { + schema->concat(tok); + } + + return schema.detach(); + } + + String_Schema_Obj Parser::parse_css_variable_value_token(bool top_level) + { + String_Schema_Obj schema = SASS_MEMORY_NEW(String_Schema, pstate); + if ( + (top_level && lex< css_variable_top_level_value >(false)) || + (!top_level && lex< css_variable_value >(false)) + ) { + Token str(lexed); + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, str)); + } + else if (Expression_Obj tok = lex_interpolation()) { + if (String_Schema_Ptr s = Cast(tok)) { + schema->concat(s); + } else { + schema->append(tok); + } + } + else if (lex< quoted_string >()) { + Expression_Obj tok = parse_string(); + if (String_Schema_Ptr s = Cast(tok)) { + schema->concat(s); + } else { + schema->append(tok); + } + } + else { + if (peek< alternatives< exactly<'('>, exactly<'['>, exactly<'{'> > >()) { + if (lex< exactly<'('> >()) { + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, std::string("("))); + if (String_Schema_Obj tok = parse_css_variable_value(false)) schema->concat(tok); + if (!lex< exactly<')'> >()) css_error("Invalid CSS", " after ", ": expected \")\", was "); + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, std::string(")"))); + } + else if (lex< exactly<'['> >()) { + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, std::string("["))); + if (String_Schema_Obj tok = parse_css_variable_value(false)) schema->concat(tok); + if (!lex< exactly<']'> >()) css_error("Invalid CSS", " after ", ": expected \"]\", was "); + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, std::string("]"))); + } + else if (lex< exactly<'{'> >()) { + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, std::string("{"))); + if (String_Schema_Obj tok = parse_css_variable_value(false)) schema->concat(tok); + if (!lex< exactly<'}'> >()) css_error("Invalid CSS", " after ", ": expected \"}\", was 
"); + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, std::string("}"))); + } + } + } + + return schema->length() > 0 ? schema.detach() : NULL; + } + + Value_Obj Parser::parse_static_value() + { + lex< static_value >(); + Token str(lexed); + // static values always have trailing white- + // space and end delimiter (\s*[;]$) included + --pstate.offset.column; + --after_token.column; + --str.end; + --position; + + return color_or_string(str.time_wspace());; + } + + String_Obj Parser::parse_string() + { + return parse_interpolated_chunk(Token(lexed)); + } + + String_Obj Parser::parse_ie_property() + { + lex< ie_property >(); + Token str(lexed); + const char* i = str.begin; + // see if there any interpolants + const char* p = find_first_in_interval< exactly, block_comment >(str.begin, str.end); + if (!p) { + return SASS_MEMORY_NEW(String_Quoted, pstate, std::string(str.begin, str.end)); + } + + String_Schema_Ptr schema = SASS_MEMORY_NEW(String_Schema, pstate); + while (i < str.end) { + p = find_first_in_interval< exactly, block_comment >(i, str.end); + if (p) { + if (i < p) { + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, std::string(i, p))); // accumulate the preceding segment if it's nonempty + } + if (peek < sequence < optional_spaces, exactly > >(p+2)) { position = p+2; + css_error("Invalid CSS", " after ", ": expected expression (e.g. 1px, bold), was "); + } + const char* j = skip_over_scopes< exactly, exactly >(p+2, str.end); // find the closing brace + if (j) { + // parse the interpolant and accumulate it + Expression_Obj interp_node = Parser::from_token(Token(p+2, j), ctx, traces, pstate, source).parse_list(); + interp_node->is_interpolant(true); + schema->append(interp_node); + i = j; + } + else { + // throw an error if the interpolant is unterminated + error("unterminated interpolant inside IE function " + str.to_string()); + } + } + else { // no interpolants left; add the last segment if nonempty + if (i < str.end) { + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, std::string(i, str.end))); + } + break; + } + } + return schema; + } + + String_Obj Parser::parse_ie_keyword_arg() + { + String_Schema_Ptr kwd_arg = SASS_MEMORY_NEW(String_Schema, pstate, 3); + if (lex< variable >()) { + kwd_arg->append(SASS_MEMORY_NEW(Variable, pstate, Util::normalize_underscores(lexed))); + } else { + lex< alternatives< identifier_schema, identifier > >(); + kwd_arg->append(SASS_MEMORY_NEW(String_Constant, pstate, lexed)); + } + lex< exactly<'='> >(); + kwd_arg->append(SASS_MEMORY_NEW(String_Constant, pstate, lexed)); + if (peek< variable >()) kwd_arg->append(parse_list()); + else if (lex< number >()) { + std::string parsed(lexed); + Util::normalize_decimals(parsed); + kwd_arg->append(lexed_number(parsed)); + } + else if (peek < ie_keyword_arg_value >()) { kwd_arg->append(parse_list()); } + return kwd_arg; + } + + String_Schema_Obj Parser::parse_value_schema(const char* stop) + { + // initialize the string schema object to add tokens + String_Schema_Obj schema = SASS_MEMORY_NEW(String_Schema, pstate); + + if (peek>()) { + css_error("Invalid CSS", " after ", ": expected expression (e.g. 
1px, bold), was "); + } + + const char* e; + const char* ee = end; + end = stop; + size_t num_items = 0; + bool need_space = false; + while (position < stop) { + // parse space between tokens + if (lex< spaces >() && num_items) { + need_space = true; + } + if (need_space) { + need_space = false; + // schema->append(SASS_MEMORY_NEW(String_Constant, pstate, " ")); + } + if ((e = peek< re_functional >()) && e < stop) { + schema->append(parse_function_call()); + } + // lex an interpolant /#{...}/ + else if (lex< exactly < hash_lbrace > >()) { + // Try to lex static expression first + if (peek< exactly< rbrace > >()) { + css_error("Invalid CSS", " after ", ": expected expression (e.g. 1px, bold), was "); + } + Expression_Obj ex; + if (lex< re_static_expression >()) { + ex = SASS_MEMORY_NEW(String_Constant, pstate, lexed); + } else { + ex = parse_list(true); + } + ex->is_interpolant(true); + schema->append(ex); + if (!lex < exactly < rbrace > >()) { + css_error("Invalid CSS", " after ", ": expected \"}\", was "); + } + } + // lex some string constants or other valid token + // Note: [-+] chars are left over from i.e. `#{3}+3` + else if (lex< alternatives < exactly<'%'>, exactly < '-' >, exactly < '+' > > >()) { + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, lexed)); + } + // lex a quoted string + else if (lex< quoted_string >()) { + // need_space = true; + // if (schema->length()) schema->append(SASS_MEMORY_NEW(String_Constant, pstate, " ")); + // else need_space = true; + schema->append(parse_string()); + if ((*position == '"' || *position == '\'') || peek < alternatives < alpha > >()) { + // need_space = true; + } + if (peek < exactly < '-' > >()) break; + } + else if (lex< identifier >()) { + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, lexed)); + if ((*position == '"' || *position == '\'') || peek < alternatives < alpha > >()) { + // need_space = true; + } + } + // lex (normalized) variable + else if (lex< variable >()) { + std::string name(Util::normalize_underscores(lexed)); + schema->append(SASS_MEMORY_NEW(Variable, pstate, name)); + } + // lex percentage value + else if (lex< percentage >()) { + schema->append(lexed_percentage(lexed)); + } + // lex dimension value + else if (lex< dimension >()) { + schema->append(lexed_dimension(lexed)); + } + // lex number value + else if (lex< number >()) { + schema->append(lexed_number(lexed)); + } + // lex hex color value + else if (lex< sequence < hex, negate < exactly < '-' > > > >()) { + schema->append(lexed_hex_color(lexed)); + } + else if (lex< sequence < exactly <'#'>, identifier > >()) { + schema->append(SASS_MEMORY_NEW(String_Quoted, pstate, lexed)); + } + // lex a value in parentheses + else if (peek< parenthese_scope >()) { + schema->append(parse_factor()); + } + else { + break; + } + ++num_items; + } + if (position != stop) { + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, std::string(position, stop))); + position = stop; + } + end = ee; + return schema; + } + + // this parses interpolation outside other strings + // means the result must not be quoted again later + String_Obj Parser::parse_identifier_schema() + { + Token id(lexed); + const char* i = id.begin; + // see if there any interpolants + const char* p = find_first_in_interval< exactly, block_comment >(id.begin, id.end); + if (!p) { + return SASS_MEMORY_NEW(String_Constant, pstate, std::string(id.begin, id.end)); + } + + String_Schema_Obj schema = SASS_MEMORY_NEW(String_Schema, pstate); + while (i < id.end) { + p = find_first_in_interval< exactly, 
block_comment >(i, id.end); + if (p) { + if (i < p) { + // accumulate the preceding segment if it's nonempty + const char* o = position; position = i; + schema->append(parse_value_schema(p)); + position = o; + } + // we need to skip anything inside strings + // create a new target in parser/prelexer + if (peek < sequence < optional_spaces, exactly > >(p+2)) { position = p; + css_error("Invalid CSS", " after ", ": expected expression (e.g. 1px, bold), was "); + } + const char* j = skip_over_scopes< exactly, exactly >(p+2, id.end); // find the closing brace + if (j) { + // parse the interpolant and accumulate it + Expression_Obj interp_node = Parser::from_token(Token(p+2, j), ctx, traces, pstate, source).parse_list(DELAYED); + interp_node->is_interpolant(true); + schema->append(interp_node); + // schema->has_interpolants(true); + i = j; + } + else { + // throw an error if the interpolant is unterminated + error("unterminated interpolant inside interpolated identifier " + id.to_string()); + } + } + else { // no interpolants left; add the last segment if nonempty + if (i < end) { + const char* o = position; position = i; + schema->append(parse_value_schema(id.end)); + position = o; + } + break; + } + } + return schema ? schema.detach() : 0; + } + + // calc functions should preserve arguments + Function_Call_Obj Parser::parse_calc_function() + { + lex< identifier >(); + std::string name(lexed); + ParserState call_pos = pstate; + lex< exactly<'('> >(); + ParserState arg_pos = pstate; + const char* arg_beg = position; + parse_list(); + const char* arg_end = position; + lex< skip_over_scopes < + exactly < '(' >, + exactly < ')' > + > >(); + + Argument_Obj arg = SASS_MEMORY_NEW(Argument, arg_pos, parse_interpolated_chunk(Token(arg_beg, arg_end))); + Arguments_Obj args = SASS_MEMORY_NEW(Arguments, arg_pos); + args->append(arg); + return SASS_MEMORY_NEW(Function_Call, call_pos, name, args); + } + + String_Obj Parser::parse_url_function_string() + { + std::string prefix(""); + if (lex< uri_prefix >()) { + prefix = std::string(lexed); + } + + lex < optional_spaces >(); + String_Obj url_string = parse_url_function_argument(); + + std::string suffix(""); + if (lex< real_uri_suffix >()) { + suffix = std::string(lexed); + } + + std::string uri(""); + if (url_string) { + uri = url_string->to_string({ NESTED, 5 }); + } + + if (String_Schema_Ptr schema = Cast(url_string)) { + String_Schema_Obj res = SASS_MEMORY_NEW(String_Schema, pstate); + res->append(SASS_MEMORY_NEW(String_Constant, pstate, prefix)); + res->append(schema); + res->append(SASS_MEMORY_NEW(String_Constant, pstate, suffix)); + return res; + } else { + std::string res = prefix + uri + suffix; + return SASS_MEMORY_NEW(String_Constant, pstate, res); + } + } + + String_Obj Parser::parse_url_function_argument() + { + const char* p = position; + + std::string uri(""); + if (lex< real_uri_value >(false)) { + uri = lexed.to_string(); + } + + if (peek< exactly< hash_lbrace > >()) { + const char* pp = position; + // TODO: error checking for unclosed interpolants + while (pp && peek< exactly< hash_lbrace > >(pp)) { + pp = sequence< interpolant, real_uri_value >(pp); + } + if (!pp) return 0; + position = pp; + return parse_interpolated_chunk(Token(p, position)); + } + else if (uri != "") { + std::string res = Util::rtrim(uri); + return SASS_MEMORY_NEW(String_Constant, pstate, res); + } + + return 0; + } + + Function_Call_Obj Parser::parse_function_call() + { + lex< identifier >(); + std::string name(lexed); + + if (Util::normalize_underscores(name) == 
"content-exists" && stack.back() != Scope::Mixin) + { error("Cannot call content-exists() except within a mixin."); } + + ParserState call_pos = pstate; + Arguments_Obj args = parse_arguments(); + return SASS_MEMORY_NEW(Function_Call, call_pos, name, args); + } + + Function_Call_Schema_Obj Parser::parse_function_call_schema() + { + String_Obj name = parse_identifier_schema(); + ParserState source_position_of_call = pstate; + Arguments_Obj args = parse_arguments(); + + return SASS_MEMORY_NEW(Function_Call_Schema, source_position_of_call, name, args); + } + + Content_Obj Parser::parse_content_directive() + { + return SASS_MEMORY_NEW(Content, pstate); + } + + If_Obj Parser::parse_if_directive(bool else_if) + { + stack.push_back(Scope::Control); + ParserState if_source_position = pstate; + bool root = block_stack.back()->is_root(); + Expression_Obj predicate = parse_list(); + Block_Obj block = parse_block(root); + Block_Obj alternative = NULL; + + // only throw away comment if we parse a case + // we want all other comments to be parsed + if (lex_css< elseif_directive >()) { + alternative = SASS_MEMORY_NEW(Block, pstate); + alternative->append(parse_if_directive(true)); + } + else if (lex_css< kwd_else_directive >()) { + alternative = parse_block(root); + } + stack.pop_back(); + return SASS_MEMORY_NEW(If, if_source_position, predicate, block, alternative); + } + + For_Obj Parser::parse_for_directive() + { + stack.push_back(Scope::Control); + ParserState for_source_position = pstate; + bool root = block_stack.back()->is_root(); + lex_variable(); + std::string var(Util::normalize_underscores(lexed)); + if (!lex< kwd_from >()) error("expected 'from' keyword in @for directive"); + Expression_Obj lower_bound = parse_expression(); + bool inclusive = false; + if (lex< kwd_through >()) inclusive = true; + else if (lex< kwd_to >()) inclusive = false; + else error("expected 'through' or 'to' keyword in @for directive"); + Expression_Obj upper_bound = parse_expression(); + Block_Obj body = parse_block(root); + stack.pop_back(); + return SASS_MEMORY_NEW(For, for_source_position, var, lower_bound, upper_bound, body, inclusive); + } + + // helper to parse a var token + Token Parser::lex_variable() + { + // peek for dollar sign first + if (!peek< exactly <'$'> >()) { + css_error("Invalid CSS", " after ", ": expected \"$\", was "); + } + // we expect a simple identifier as the call name + if (!lex< sequence < exactly <'$'>, identifier > >()) { + lex< exactly <'$'> >(); // move pstate and position up + css_error("Invalid CSS", " after ", ": expected identifier, was "); + } + // return object + return token; + } + // helper to parse identifier + Token Parser::lex_identifier() + { + // we expect a simple identifier as the call name + if (!lex< identifier >()) { // ToDo: pstate wrong? 
+ css_error("Invalid CSS", " after ", ": expected identifier, was "); + } + // return object + return token; + } + + Each_Obj Parser::parse_each_directive() + { + stack.push_back(Scope::Control); + ParserState each_source_position = pstate; + bool root = block_stack.back()->is_root(); + std::vector vars; + lex_variable(); + vars.push_back(Util::normalize_underscores(lexed)); + while (lex< exactly<','> >()) { + if (!lex< variable >()) error("@each directive requires an iteration variable"); + vars.push_back(Util::normalize_underscores(lexed)); + } + if (!lex< kwd_in >()) error("expected 'in' keyword in @each directive"); + Expression_Obj list = parse_list(); + Block_Obj body = parse_block(root); + stack.pop_back(); + return SASS_MEMORY_NEW(Each, each_source_position, vars, list, body); + } + + // called after parsing `kwd_while_directive` + While_Obj Parser::parse_while_directive() + { + stack.push_back(Scope::Control); + bool root = block_stack.back()->is_root(); + // create the initial while call object + While_Obj call = SASS_MEMORY_NEW(While, pstate, 0, 0); + // parse mandatory predicate + Expression_Obj predicate = parse_list(); + List_Obj l = Cast(predicate); + if (!predicate || (l && !l->length())) { + css_error("Invalid CSS", " after ", ": expected expression (e.g. 1px, bold), was ", false); + } + call->predicate(predicate); + // parse mandatory block + call->block(parse_block(root)); + // return ast node + stack.pop_back(); + // return ast node + return call.detach(); + } + + // EO parse_while_directive + Media_Block_Obj Parser::parse_media_block() + { + stack.push_back(Scope::Media); + Media_Block_Obj media_block = SASS_MEMORY_NEW(Media_Block, pstate, 0, 0); + + media_block->media_queries(parse_media_queries()); + + Media_Block_Obj prev_media_block = last_media_block; + last_media_block = media_block; + media_block->block(parse_css_block()); + last_media_block = prev_media_block; + stack.pop_back(); + return media_block.detach(); + } + + List_Obj Parser::parse_media_queries() + { + advanceToNextToken(); + List_Obj queries = SASS_MEMORY_NEW(List, pstate, 0, SASS_COMMA); + if (!peek_css < exactly <'{'> >()) queries->append(parse_media_query()); + while (lex_css < exactly <','> >()) queries->append(parse_media_query()); + queries->update_pstate(pstate); + return queries.detach(); + } + + // Expression_Ptr Parser::parse_media_query() + Media_Query_Obj Parser::parse_media_query() + { + advanceToNextToken(); + Media_Query_Obj media_query = SASS_MEMORY_NEW(Media_Query, pstate); + if (lex < kwd_not >()) { media_query->is_negated(true); lex < css_comments >(false); } + else if (lex < kwd_only >()) { media_query->is_restricted(true); lex < css_comments >(false); } + + if (lex < identifier_schema >()) media_query->media_type(parse_identifier_schema()); + else if (lex < identifier >()) media_query->media_type(parse_interpolated_chunk(lexed)); + else media_query->append(parse_media_expression()); + + while (lex_css < kwd_and >()) media_query->append(parse_media_expression()); + if (lex < identifier_schema >()) { + String_Schema_Ptr schema = SASS_MEMORY_NEW(String_Schema, pstate); + schema->append(media_query->media_type()); + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, " ")); + schema->append(parse_identifier_schema()); + media_query->media_type(schema); + } + while (lex_css < kwd_and >()) media_query->append(parse_media_expression()); + + media_query->update_pstate(pstate); + + return media_query; + } + + Media_Query_Expression_Obj Parser::parse_media_expression() + { + if (lex < 
identifier_schema >()) { + String_Obj ss = parse_identifier_schema(); + return SASS_MEMORY_NEW(Media_Query_Expression, pstate, ss, 0, true); + } + if (!lex_css< exactly<'('> >()) { + error("media query expression must begin with '('"); + } + Expression_Obj feature; + if (peek_css< exactly<')'> >()) { + error("media feature required in media query expression"); + } + feature = parse_expression(); + Expression_Obj expression = 0; + if (lex_css< exactly<':'> >()) { + expression = parse_list(DELAYED); + } + if (!lex_css< exactly<')'> >()) { + error("unclosed parenthesis in media query expression"); + } + return SASS_MEMORY_NEW(Media_Query_Expression, feature->pstate(), feature, expression); + } + + // lexed after `kwd_supports_directive` + // these are very similar to media blocks + Supports_Block_Obj Parser::parse_supports_directive() + { + Supports_Condition_Obj cond = parse_supports_condition(); + if (!cond) { + css_error("Invalid CSS", " after ", ": expected @supports condition (e.g. (display: flexbox)), was ", false); + } + // create the ast node object for the support queries + Supports_Block_Obj query = SASS_MEMORY_NEW(Supports_Block, pstate, cond); + // additional block is mandatory + // parse inner block + query->block(parse_block()); + // return ast node + return query; + } + + // parse one query operation + // may encounter nested queries + Supports_Condition_Obj Parser::parse_supports_condition() + { + lex < css_whitespace >(); + Supports_Condition_Obj cond; + if ((cond = parse_supports_negation())) return cond; + if ((cond = parse_supports_operator())) return cond; + if ((cond = parse_supports_interpolation())) return cond; + return cond; + } + + Supports_Condition_Obj Parser::parse_supports_negation() + { + if (!lex < kwd_not >()) return 0; + Supports_Condition_Obj cond = parse_supports_condition_in_parens(); + return SASS_MEMORY_NEW(Supports_Negation, pstate, cond); + } + + Supports_Condition_Obj Parser::parse_supports_operator() + { + Supports_Condition_Obj cond = parse_supports_condition_in_parens(); + if (cond.isNull()) return 0; + + while (true) { + Supports_Operator::Operand op = Supports_Operator::OR; + if (lex < kwd_and >()) { op = Supports_Operator::AND; } + else if(!lex < kwd_or >()) { break; } + + lex < css_whitespace >(); + Supports_Condition_Obj right = parse_supports_condition_in_parens(); + + // Supports_Condition_Ptr cc = SASS_MEMORY_NEW(Supports_Condition, *static_cast(cond)); + cond = SASS_MEMORY_NEW(Supports_Operator, pstate, cond, right, op); + } + return cond; + } + + Supports_Condition_Obj Parser::parse_supports_interpolation() + { + if (!lex < interpolant >()) return 0; + + String_Obj interp = parse_interpolated_chunk(lexed); + if (!interp) return 0; + + return SASS_MEMORY_NEW(Supports_Interpolation, pstate, interp); + } + + // TODO: This needs some major work. Although feature conditions + // look like declarations their semantics differ significantly + Supports_Condition_Obj Parser::parse_supports_declaration() + { + Supports_Condition_Ptr cond; + // parse something declaration like + Expression_Obj feature = parse_expression(); + Expression_Obj expression = 0; + if (lex_css< exactly<':'> >()) { + expression = parse_list(DELAYED); + } + if (!feature || !expression) error("@supports condition expected declaration"); + cond = SASS_MEMORY_NEW(Supports_Declaration, + feature->pstate(), + feature, + expression); + // ToDo: maybe we need an additional error condition? 
+ return cond; + } + + Supports_Condition_Obj Parser::parse_supports_condition_in_parens() + { + Supports_Condition_Obj interp = parse_supports_interpolation(); + if (interp != 0) return interp; + + if (!lex < exactly <'('> >()) return 0; + lex < css_whitespace >(); + + Supports_Condition_Obj cond = parse_supports_condition(); + if (cond != 0) { + if (!lex < exactly <')'> >()) error("unclosed parenthesis in @supports declaration"); + } else { + cond = parse_supports_declaration(); + if (!lex < exactly <')'> >()) error("unclosed parenthesis in @supports declaration"); + } + lex < css_whitespace >(); + return cond; + } + + At_Root_Block_Obj Parser::parse_at_root_block() + { + stack.push_back(Scope::AtRoot); + ParserState at_source_position = pstate; + Block_Obj body = 0; + At_Root_Query_Obj expr; + Lookahead lookahead_result; + if (lex_css< exactly<'('> >()) { + expr = parse_at_root_query(); + } + if (peek_css < exactly<'{'> >()) { + lex (); + body = parse_block(true); + } + else if ((lookahead_result = lookahead_for_selector(position)).found) { + Ruleset_Obj r = parse_ruleset(lookahead_result); + body = SASS_MEMORY_NEW(Block, r->pstate(), 1, true); + body->append(r); + } + At_Root_Block_Obj at_root = SASS_MEMORY_NEW(At_Root_Block, at_source_position, body); + if (!expr.isNull()) at_root->expression(expr); + stack.pop_back(); + return at_root; + } + + At_Root_Query_Obj Parser::parse_at_root_query() + { + if (peek< exactly<')'> >()) error("at-root feature required in at-root expression"); + + if (!peek< alternatives< kwd_with_directive, kwd_without_directive > >()) { + css_error("Invalid CSS", " after ", ": expected \"with\" or \"without\", was "); + } + + Expression_Obj feature = parse_list(); + if (!lex_css< exactly<':'> >()) error("style declaration must contain a value"); + Expression_Obj expression = parse_list(); + List_Obj value = SASS_MEMORY_NEW(List, feature->pstate(), 1); + + if (expression->concrete_type() == Expression::LIST) { + value = Cast(expression); + } + else value->append(expression); + + At_Root_Query_Obj cond = SASS_MEMORY_NEW(At_Root_Query, + value->pstate(), + feature, + value); + if (!lex_css< exactly<')'> >()) error("unclosed parenthesis in @at-root expression"); + return cond; + } + + Directive_Obj Parser::parse_special_directive() + { + std::string kwd(lexed); + + if (lexed == "@else") error("Invalid CSS: @else must come after @if"); + + // this whole branch is never hit via spec tests + + Directive_Ptr at_rule = SASS_MEMORY_NEW(Directive, pstate, kwd); + Lookahead lookahead = lookahead_for_include(position); + if (lookahead.found && !lookahead.has_interpolants) { + at_rule->selector(parse_selector_list(false)); + } + + lex < css_comments >(false); + + if (lex < static_property >()) { + at_rule->value(parse_interpolated_chunk(Token(lexed))); + } else if (!(peek < alternatives < exactly<'{'>, exactly<'}'>, exactly<';'> > >())) { + at_rule->value(parse_list()); + } + + lex < css_comments >(false); + + if (peek< exactly<'{'> >()) { + at_rule->block(parse_block()); + } + + return at_rule; + } + + // this whole branch is never hit via spec tests + Directive_Obj Parser::parse_prefixed_directive() + { + std::string kwd(lexed); + + if (lexed == "@else") error("Invalid CSS: @else must come after @if"); + + Directive_Obj at_rule = SASS_MEMORY_NEW(Directive, pstate, kwd); + Lookahead lookahead = lookahead_for_include(position); + if (lookahead.found && !lookahead.has_interpolants) { + at_rule->selector(parse_selector_list(false)); + } + + lex < css_comments >(false); + + if 
(lex < static_property >()) { + at_rule->value(parse_interpolated_chunk(Token(lexed))); + } else if (!(peek < alternatives < exactly<'{'>, exactly<'}'>, exactly<';'> > >())) { + at_rule->value(parse_list()); + } + + lex < css_comments >(false); + + if (peek< exactly<'{'> >()) { + at_rule->block(parse_block()); + } + + return at_rule; + } + + + Directive_Obj Parser::parse_directive() + { + Directive_Obj directive = SASS_MEMORY_NEW(Directive, pstate, lexed); + String_Schema_Obj val = parse_almost_any_value(); + // strip left and right if they are of type string + directive->value(val); + if (peek< exactly<'{'> >()) { + directive->block(parse_block()); + } + return directive; + } + + Expression_Obj Parser::lex_interpolation() + { + if (lex < interpolant >(true) != NULL) { + return parse_interpolated_chunk(lexed, true); + } + return 0; + } + + Expression_Obj Parser::lex_interp_uri() + { + // create a string schema by lexing optional interpolations + return lex_interp< re_string_uri_open, re_string_uri_close >(); + } + + Expression_Obj Parser::lex_interp_string() + { + Expression_Obj rv; + if ((rv = lex_interp< re_string_double_open, re_string_double_close >())) return rv; + if ((rv = lex_interp< re_string_single_open, re_string_single_close >())) return rv; + return rv; + } + + Expression_Obj Parser::lex_almost_any_value_chars() + { + const char* match = + lex < + one_plus < + alternatives < + sequence < + exactly <'\\'>, + any_char + >, + sequence < + negate < + sequence < + exactly < url_kwd >, + exactly <'('> + > + >, + neg_class_char < + almost_any_value_class + > + >, + sequence < + exactly <'/'>, + negate < + alternatives < + exactly <'/'>, + exactly <'*'> + > + > + >, + sequence < + exactly <'\\'>, + exactly <'#'>, + negate < + exactly <'{'> + > + >, + sequence < + exactly <'!'>, + negate < + alpha + > + > + > + > + >(false); + if (match) { + return SASS_MEMORY_NEW(String_Constant, pstate, lexed); + } + return NULL; + } + + Expression_Obj Parser::lex_almost_any_value_token() + { + Expression_Obj rv; + if (*position == 0) return 0; + if ((rv = lex_almost_any_value_chars())) return rv; + // if ((rv = lex_block_comment())) return rv; + // if ((rv = lex_single_line_comment())) return rv; + if ((rv = lex_interp_string())) return rv; + if ((rv = lex_interp_uri())) return rv; + if ((rv = lex_interpolation())) return rv; + if (lex< alternatives< hex, hex0 > >()) + { return lexed_hex_color(lexed); } + return rv; + } + + String_Schema_Obj Parser::parse_almost_any_value() + { + + String_Schema_Obj schema = SASS_MEMORY_NEW(String_Schema, pstate); + if (*position == 0) return 0; + lex < spaces >(false); + Expression_Obj token = lex_almost_any_value_token(); + if (!token) return 0; + schema->append(token); + if (*position == 0) { + schema->rtrim(); + return schema.detach(); + } + + while ((token = lex_almost_any_value_token())) { + schema->append(token); + } + + lex < css_whitespace >(); + + schema->rtrim(); + + return schema.detach(); + } + + Warning_Obj Parser::parse_warning() + { + if (stack.back() != Scope::Root && + stack.back() != Scope::Function && + stack.back() != Scope::Mixin && + stack.back() != Scope::Control && + stack.back() != Scope::Rules) { + error("Illegal nesting: Only properties may be nested beneath properties."); + } + return SASS_MEMORY_NEW(Warning, pstate, parse_list(DELAYED)); + } + + Error_Obj Parser::parse_error() + { + if (stack.back() != Scope::Root && + stack.back() != Scope::Function && + stack.back() != Scope::Mixin && + stack.back() != Scope::Control && + stack.back() 
!= Scope::Rules) { + error("Illegal nesting: Only properties may be nested beneath properties."); + } + return SASS_MEMORY_NEW(Error, pstate, parse_list(DELAYED)); + } + + Debug_Obj Parser::parse_debug() + { + if (stack.back() != Scope::Root && + stack.back() != Scope::Function && + stack.back() != Scope::Mixin && + stack.back() != Scope::Control && + stack.back() != Scope::Rules) { + error("Illegal nesting: Only properties may be nested beneath properties."); + } + return SASS_MEMORY_NEW(Debug, pstate, parse_list(DELAYED)); + } + + Return_Obj Parser::parse_return_directive() + { + // check that we do not have an empty list (ToDo: check if we got all cases) + if (peek_css < alternatives < exactly < ';' >, exactly < '}' >, end_of_file > >()) + { css_error("Invalid CSS", " after ", ": expected expression (e.g. 1px, bold), was "); } + return SASS_MEMORY_NEW(Return, pstate, parse_list()); + } + + Lookahead Parser::lookahead_for_selector(const char* start) + { + // init result struct + Lookahead rv = Lookahead(); + // get start position + const char* p = start ? start : position; + // match in one big "regex" + rv.error = p; + if (const char* q = + peek < + re_selector_list + >(p) + ) { + bool could_be_property = peek< sequence< exactly<'-'>, exactly<'-'> > >(p) != 0; + bool could_be_escaped = false; + while (p < q) { + // did we have interpolations? + if (*p == '#' && *(p+1) == '{') { + rv.has_interpolants = true; + p = q; break; + } + // A property that's ambiguous with a nested selector is interpreted as a + // custom property. + if (*p == ':' && !could_be_escaped) { + rv.is_custom_property = could_be_property || p+1 == q || peek< space >(p+1); + } + could_be_escaped = *p == '\\'; + ++ p; + } + // store anyway } + + + // ToDo: remove + rv.error = q; + rv.position = q; + // check expected opening bracket + // only after successfull matching + if (peek < exactly<'{'> >(q)) rv.found = q; + // else if (peek < end_of_file >(q)) rv.found = q; + else if (peek < exactly<'('> >(q)) rv.found = q; + // else if (peek < exactly<';'> >(q)) rv.found = q; + // else if (peek < exactly<'}'> >(q)) rv.found = q; + if (rv.found || *p == 0) rv.error = 0; + } + + rv.parsable = ! rv.has_interpolants; + + // return result + return rv; + + } + // EO lookahead_for_selector + + // used in parse_block_nodes and parse_special_directive + // ToDo: actual usage is still not really clear to me? + Lookahead Parser::lookahead_for_include(const char* start) + { + // we actually just lookahead for a selector + Lookahead rv = lookahead_for_selector(start); + // but the "found" rules are different + if (const char* p = rv.position) { + // check for additional abort condition + if (peek < exactly<';'> >(p)) rv.found = p; + else if (peek < exactly<'}'> >(p)) rv.found = p; + } + // return result + return rv; + } + // EO lookahead_for_include + + // look ahead for a token with interpolation in it + // we mostly use the result if there is an interpolation + // everything that passes here gets parsed as one schema + // meaning it will not be parsed as a space separated list + Lookahead Parser::lookahead_for_value(const char* start) + { + // init result struct + Lookahead rv = Lookahead(); + // get start position + const char* p = start ? 
start : position; + // match in one big "regex" + if (const char* q = + peek < + non_greedy < + alternatives < + // consume whitespace + block_comment, // spaces, + // main tokens + sequence < + interpolant, + optional < + quoted_string + > + >, + identifier, + variable, + // issue #442 + sequence < + parenthese_scope, + interpolant, + optional < + quoted_string + > + > + >, + sequence < + // optional_spaces, + alternatives < + // end_of_file, + exactly<'{'>, + exactly<'}'>, + exactly<';'> + > + > + > + >(p) + ) { + if (p == q) return rv; + while (p < q) { + // did we have interpolations? + if (*p == '#' && *(p+1) == '{') { + rv.has_interpolants = true; + p = q; break; + } + ++ p; + } + // store anyway + // ToDo: remove + rv.position = q; + // check expected opening bracket + // only after successful matching + if (peek < exactly<'{'> >(q)) rv.found = q; + else if (peek < exactly<';'> >(q)) rv.found = q; + else if (peek < exactly<'}'> >(q)) rv.found = q; + } + + // return result + return rv; + } + // EO lookahead_for_value + + void Parser::read_bom() + { + size_t skip = 0; + std::string encoding; + bool utf_8 = false; + switch ((unsigned char) source[0]) { + case 0xEF: + skip = check_bom_chars(source, end, utf_8_bom, 3); + encoding = "UTF-8"; + utf_8 = true; + break; + case 0xFE: + skip = check_bom_chars(source, end, utf_16_bom_be, 2); + encoding = "UTF-16 (big endian)"; + break; + case 0xFF: + skip = check_bom_chars(source, end, utf_16_bom_le, 2); + skip += (skip ? check_bom_chars(source, end, utf_32_bom_le, 4) : 0); + encoding = (skip == 2 ? "UTF-16 (little endian)" : "UTF-32 (little endian)"); + break; + case 0x00: + skip = check_bom_chars(source, end, utf_32_bom_be, 4); + encoding = "UTF-32 (big endian)"; + break; + case 0x2B: + skip = check_bom_chars(source, end, utf_7_bom_1, 4) + | check_bom_chars(source, end, utf_7_bom_2, 4) + | check_bom_chars(source, end, utf_7_bom_3, 4) + | check_bom_chars(source, end, utf_7_bom_4, 4) + | check_bom_chars(source, end, utf_7_bom_5, 5); + encoding = "UTF-7"; + break; + case 0xF7: + skip = check_bom_chars(source, end, utf_1_bom, 3); + encoding = "UTF-1"; + break; + case 0xDD: + skip = check_bom_chars(source, end, utf_ebcdic_bom, 4); + encoding = "UTF-EBCDIC"; + break; + case 0x0E: + skip = check_bom_chars(source, end, scsu_bom, 3); + encoding = "SCSU"; + break; + case 0xFB: + skip = check_bom_chars(source, end, bocu_1_bom, 3); + encoding = "BOCU-1"; + break; + case 0x84: + skip = check_bom_chars(source, end, gb_18030_bom, 4); + encoding = "GB-18030"; + break; + default: break; + } + if (skip > 0 && !utf_8) error("only UTF-8 documents are currently supported; your document appears to be " + encoding); + position += skip; + } + + size_t check_bom_chars(const char* src, const char *end, const unsigned char* bom, size_t len) + { + size_t skip = 0; + if (src + len > end) return 0; + for (size_t i = 0; i < len; ++i, ++skip) { + if ((unsigned char) src[i] != bom[i]) return 0; + } + return skip; + } + + + Expression_Obj Parser::fold_operands(Expression_Obj base, std::vector& operands, Operand op) + { + for (size_t i = 0, S = operands.size(); i < S; ++i) { + base = SASS_MEMORY_NEW(Binary_Expression, base->pstate(), op, base, operands[i]); + } + return base; + } + + Expression_Obj Parser::fold_operands(Expression_Obj base, std::vector& operands, std::vector& ops, size_t i) + { + if (String_Schema_Ptr schema = Cast(base)) { + // return schema; + if (schema->has_interpolants()) { + if (i + 1 < operands.size() && ( + (ops[0].operand == Sass_OP::EQ) + || 
(ops[0].operand == Sass_OP::ADD) + || (ops[0].operand == Sass_OP::DIV) + || (ops[0].operand == Sass_OP::MUL) + || (ops[0].operand == Sass_OP::NEQ) + || (ops[0].operand == Sass_OP::LT) + || (ops[0].operand == Sass_OP::GT) + || (ops[0].operand == Sass_OP::LTE) + || (ops[0].operand == Sass_OP::GTE) + )) { + Expression_Obj rhs = fold_operands(operands[i], operands, ops, i + 1); + rhs = SASS_MEMORY_NEW(Binary_Expression, base->pstate(), ops[0], schema, rhs); + return rhs; + } + // return schema; + } + } + + for (size_t S = operands.size(); i < S; ++i) { + if (String_Schema_Ptr schema = Cast(operands[i])) { + if (schema->has_interpolants()) { + if (i + 1 < S) { + // this whole branch is never hit via spec tests + Expression_Obj rhs = fold_operands(operands[i+1], operands, ops, i + 2); + rhs = SASS_MEMORY_NEW(Binary_Expression, base->pstate(), ops[i], schema, rhs); + base = SASS_MEMORY_NEW(Binary_Expression, base->pstate(), ops[i], base, rhs); + return base; + } + base = SASS_MEMORY_NEW(Binary_Expression, base->pstate(), ops[i], base, operands[i]); + return base; + } else { + base = SASS_MEMORY_NEW(Binary_Expression, base->pstate(), ops[i], base, operands[i]); + } + } else { + base = SASS_MEMORY_NEW(Binary_Expression, base->pstate(), ops[i], base, operands[i]); + } + Binary_Expression_Ptr b = Cast(base.ptr()); + if (b && ops[i].operand == Sass_OP::DIV && b->left()->is_delayed() && b->right()->is_delayed()) { + base->is_delayed(true); + } + } + // nested binary expression are never to be delayed + if (Binary_Expression_Ptr b = Cast(base)) { + if (Cast(b->left())) base->set_delayed(false); + if (Cast(b->right())) base->set_delayed(false); + } + return base; + } + + void Parser::error(std::string msg, Position pos) + { + Position p(pos.line ? pos : before_token); + ParserState pstate(path, source, p, Offset(0, 0)); + traces.push_back(Backtrace(pstate)); + throw Exception::InvalidSass(pstate, traces, msg); + } + + void Parser::error(std::string msg) + { + error(msg, pstate); + } + + // print a css parsing error with actual context information from parsed source + void Parser::css_error(const std::string& msg, const std::string& prefix, const std::string& middle, const bool trim) + { + int max_len = 18; + const char* end = this->end; + while (*end != 0) ++ end; + const char* pos = peek < optional_spaces >(); + if (!pos) pos = position; + + const char* last_pos(pos); + if (last_pos > source) { + utf8::prior(last_pos, source); + } + // backup position to last significant char + while (trim && last_pos > source && last_pos < end) { + if (!Prelexer::is_space(*last_pos)) break; + utf8::prior(last_pos, source); + } + + bool ellipsis_left = false; + const char* pos_left(last_pos); + const char* end_left(last_pos); + + if (*pos_left) utf8::next(pos_left, end); + if (*end_left) utf8::next(end_left, end); + while (pos_left > source) { + if (utf8::distance(pos_left, end_left) >= max_len) { + utf8::prior(pos_left, source); + ellipsis_left = *(pos_left) != '\n' && + *(pos_left) != '\r'; + utf8::next(pos_left, end); + break; + } + + const char* prev = pos_left; + utf8::prior(prev, source); + if (*prev == '\r') break; + if (*prev == '\n') break; + pos_left = prev; + } + if (pos_left < source) { + pos_left = source; + } + + bool ellipsis_right = false; + const char* end_right(pos); + const char* pos_right(pos); + while (end_right < end) { + if (utf8::distance(pos_right, end_right) > max_len) { + ellipsis_left = *(pos_right) != '\n' && + *(pos_right) != '\r'; + break; + } + if (*end_right == '\r') break; + if 
(*end_right == '\n') break; + utf8::next(end_right, end); + } + // if (*end_right == 0) end_right ++; + + std::string left(pos_left, end_left); + std::string right(pos_right, end_right); + size_t left_subpos = left.size() > 15 ? left.size() - 15 : 0; + size_t right_subpos = right.size() > 15 ? right.size() - 15 : 0; + if (left_subpos && ellipsis_left) left = ellipsis + left.substr(left_subpos); + if (right_subpos && ellipsis_right) right = right.substr(right_subpos) + ellipsis; + // Hotfix when source is null, probably due to interpolation parsing!? + if (source == NULL || *source == 0) source = pstate.src; + // now pass new message to the more generic error function + error(msg + prefix + quote(left) + middle + quote(right)); + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/parser.hpp b/mybulma/node_modules/node-sass/src/libsass/src/parser.hpp new file mode 100644 index 0000000..d2a6ddc --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/parser.hpp @@ -0,0 +1,400 @@ +#ifndef SASS_PARSER_H +#define SASS_PARSER_H + +#include +#include + +#include "ast.hpp" +#include "position.hpp" +#include "context.hpp" +#include "position.hpp" +#include "prelexer.hpp" + +#ifndef MAX_NESTING +// Note that this limit is not an exact science +// it depends on various factors, which some are +// not under our control (compile time or even OS +// dependent settings on the available stack size) +// It should fix most common segfault cases though. +#define MAX_NESTING 512 +#endif + +struct Lookahead { + const char* found; + const char* error; + const char* position; + bool parsable; + bool has_interpolants; + bool is_custom_property; +}; + +namespace Sass { + + class Parser : public ParserState { + public: + + enum Scope { Root, Mixin, Function, Media, Control, Properties, Rules, AtRoot }; + + Context& ctx; + std::vector block_stack; + std::vector stack; + Media_Block_Ptr last_media_block; + const char* source; + const char* position; + const char* end; + Position before_token; + Position after_token; + ParserState pstate; + Backtraces traces; + size_t indentation; + size_t nestings; + + Token lexed; + + Parser(Context& ctx, const ParserState& pstate, Backtraces traces) + : ParserState(pstate), ctx(ctx), block_stack(), stack(0), last_media_block(), + source(0), position(0), end(0), before_token(pstate), after_token(pstate), + pstate(pstate), traces(traces), indentation(0), nestings(0) + { + stack.push_back(Scope::Root); + } + + // static Parser from_string(const std::string& src, Context& ctx, ParserState pstate = ParserState("[STRING]")); + static Parser from_c_str(const char* src, Context& ctx, Backtraces, ParserState pstate = ParserState("[CSTRING]"), const char* source = 0); + static Parser from_c_str(const char* beg, const char* end, Context& ctx, Backtraces, ParserState pstate = ParserState("[CSTRING]"), const char* source = 0); + static Parser from_token(Token t, Context& ctx, Backtraces, ParserState pstate = ParserState("[TOKEN]"), const char* source = 0); + // special static parsers to convert strings into certain selectors + static Selector_List_Obj parse_selector(const char* src, Context& ctx, Backtraces, ParserState pstate = ParserState("[SELECTOR]"), const char* source = 0); + +#ifdef __clang__ + + // lex and peak uses the template parameter to branch on the action, which + // triggers clangs tautological comparison on the single-comparison + // branches. 
This is not a bug, just a merging of behaviour into + // one function + +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wtautological-compare" + +#endif + + + // skip current token and next whitespace + // moves ParserState right before next token + void advanceToNextToken(); + + bool peek_newline(const char* start = 0); + + // skip over spaces, tabs and line comments + template + const char* sneak(const char* start = 0) + { + using namespace Prelexer; + + // maybe use optional start position from arguments? + const char* it_position = start ? start : position; + + // skip white-space? + if (mx == spaces || + mx == no_spaces || + mx == css_comments || + mx == css_whitespace || + mx == optional_spaces || + mx == optional_css_comments || + mx == optional_css_whitespace + ) { + return it_position; + } + + // skip over spaces, tabs and sass line comments + const char* pos = optional_css_whitespace(it_position); + // always return a valid position + return pos ? pos : it_position; + + } + + // match will not skip over space, tabs and line comment + // return the position where the lexer match will occur + template + const char* match(const char* start = 0) + { + // match the given prelexer + return mx(position); + } + + // peek will only skip over space, tabs and line comment + // return the position where the lexer match will occur + template + const char* peek(const char* start = 0) + { + + // sneak up to the actual token we want to lex + // this should skip over white-space if desired + const char* it_before_token = sneak < mx >(start); + + // match the given prelexer + const char* match = mx(it_before_token); + + // check if match is in valid range + return match <= end ? match : 0; + + } + + // white-space handling is built into the lexer + // this way you do not need to parse it yourself + // some matchers don't accept certain white-space + // we do not support start arg, since we manipulate + // sourcemap offset and we modify the position pointer! + // lex will only skip over space, tabs and line comment + template + const char* lex(bool lazy = true, bool force = false) + { + + if (*position == 0) return 0; + + // position considered before lexed token + // we can skip whitespace or comments for + // lazy developers (but we need control) + const char* it_before_token = position; + + // sneak up to the actual token we want to lex + // this should skip over white-space if desired + if (lazy) it_before_token = sneak < mx >(position); + + // now call matcher to get position after token + const char* it_after_token = mx(it_before_token); + + // check if match is in valid range + if (it_after_token > end) return 0; + + // maybe we want to update the parser state anyway? + if (force == false) { + // assertion that we got a valid match + if (it_after_token == 0) return 0; + // assertion that we actually lexed something + if (it_after_token == it_before_token) return 0; + } + + // create new lexed token object (holds the parse results) + lexed = Token(position, it_before_token, it_after_token); + + // advance position (add whitespace before current token) + before_token = after_token.add(position, it_before_token); + + // update after_token position for current token + after_token.add(it_before_token, it_after_token); + + // ToDo: could probably do this incremetal on original object (API wants offset?) 
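+      // rebuild the parser state for the freshly lexed token: before_token marks
+      // its position, and after_token - before_token gives its source-map extent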
+ pstate = ParserState(path, source, lexed, before_token, after_token - before_token); + + // advance internal char iterator + return position = it_after_token; + + } + + // lex_css skips over space, tabs, line and block comment + // all block comments will be consumed and thrown away + // source-map position will point to token after the comment + template + const char* lex_css() + { + // copy old token + Token prev = lexed; + // store previous pointer + const char* oldpos = position; + Position bt = before_token; + Position at = after_token; + ParserState op = pstate; + // throw away comments + // update srcmap position + lex < Prelexer::css_comments >(); + // now lex a new token + const char* pos = lex< mx >(); + // maybe restore prev state + if (pos == 0) { + pstate = op; + lexed = prev; + position = oldpos; + after_token = at; + before_token = bt; + } + // return match + return pos; + } + + // all block comments will be skipped and thrown away + template + const char* peek_css(const char* start = 0) + { + // now peek a token (skip comments first) + return peek< mx >(peek < Prelexer::css_comments >(start)); + } + +#ifdef __clang__ + +#pragma clang diagnostic pop + +#endif + + void error(std::string msg); + void error(std::string msg, Position pos); + // generate message with given and expected sample + // text before and in the middle are configurable + void css_error(const std::string& msg, + const std::string& prefix = " after ", + const std::string& middle = ", was: ", + const bool trim = true); + void read_bom(); + + Block_Obj parse(); + Import_Obj parse_import(); + Definition_Obj parse_definition(Definition::Type which_type); + Parameters_Obj parse_parameters(); + Parameter_Obj parse_parameter(); + Mixin_Call_Obj parse_include_directive(); + Arguments_Obj parse_arguments(); + Argument_Obj parse_argument(); + Assignment_Obj parse_assignment(); + Ruleset_Obj parse_ruleset(Lookahead lookahead); + Selector_List_Obj parse_selector_list(bool chroot); + Complex_Selector_Obj parse_complex_selector(bool chroot); + Selector_Schema_Obj parse_selector_schema(const char* end_of_selector, bool chroot); + Compound_Selector_Obj parse_compound_selector(); + Simple_Selector_Obj parse_simple_selector(); + Wrapped_Selector_Obj parse_negated_selector(); + Simple_Selector_Obj parse_pseudo_selector(); + Attribute_Selector_Obj parse_attribute_selector(); + Block_Obj parse_block(bool is_root = false); + Block_Obj parse_css_block(bool is_root = false); + bool parse_block_nodes(bool is_root = false); + bool parse_block_node(bool is_root = false); + + bool parse_number_prefix(); + Declaration_Obj parse_declaration(); + Expression_Obj parse_map(); + Expression_Obj parse_bracket_list(); + Expression_Obj parse_list(bool delayed = false); + Expression_Obj parse_comma_list(bool delayed = false); + Expression_Obj parse_space_list(); + Expression_Obj parse_disjunction(); + Expression_Obj parse_conjunction(); + Expression_Obj parse_relation(); + Expression_Obj parse_expression(); + Expression_Obj parse_operators(); + Expression_Obj parse_factor(); + Expression_Obj parse_value(); + Function_Call_Obj parse_calc_function(); + Function_Call_Obj parse_function_call(); + Function_Call_Schema_Obj parse_function_call_schema(); + String_Obj parse_url_function_string(); + String_Obj parse_url_function_argument(); + String_Obj parse_interpolated_chunk(Token, bool constant = false, bool css = true); + String_Obj parse_string(); + Value_Obj parse_static_value(); + String_Schema_Obj parse_css_variable_value(bool top_level = 
true); + String_Schema_Obj parse_css_variable_value_token(bool top_level = true); + String_Obj parse_ie_property(); + String_Obj parse_ie_keyword_arg(); + String_Schema_Obj parse_value_schema(const char* stop); + String_Obj parse_identifier_schema(); + If_Obj parse_if_directive(bool else_if = false); + For_Obj parse_for_directive(); + Each_Obj parse_each_directive(); + While_Obj parse_while_directive(); + Return_Obj parse_return_directive(); + Content_Obj parse_content_directive(); + void parse_charset_directive(); + Media_Block_Obj parse_media_block(); + List_Obj parse_media_queries(); + Media_Query_Obj parse_media_query(); + Media_Query_Expression_Obj parse_media_expression(); + Supports_Block_Obj parse_supports_directive(); + Supports_Condition_Obj parse_supports_condition(); + Supports_Condition_Obj parse_supports_negation(); + Supports_Condition_Obj parse_supports_operator(); + Supports_Condition_Obj parse_supports_interpolation(); + Supports_Condition_Obj parse_supports_declaration(); + Supports_Condition_Obj parse_supports_condition_in_parens(); + At_Root_Block_Obj parse_at_root_block(); + At_Root_Query_Obj parse_at_root_query(); + String_Schema_Obj parse_almost_any_value(); + Directive_Obj parse_special_directive(); + Directive_Obj parse_prefixed_directive(); + Directive_Obj parse_directive(); + Warning_Obj parse_warning(); + Error_Obj parse_error(); + Debug_Obj parse_debug(); + + Value_Ptr color_or_string(const std::string& lexed) const; + + // be more like ruby sass + Expression_Obj lex_almost_any_value_token(); + Expression_Obj lex_almost_any_value_chars(); + Expression_Obj lex_interp_string(); + Expression_Obj lex_interp_uri(); + Expression_Obj lex_interpolation(); + + // these will throw errors + Token lex_variable(); + Token lex_identifier(); + + void parse_block_comments(); + + Lookahead lookahead_for_value(const char* start = 0); + Lookahead lookahead_for_selector(const char* start = 0); + Lookahead lookahead_for_include(const char* start = 0); + + Expression_Obj fold_operands(Expression_Obj base, std::vector& operands, Operand op); + Expression_Obj fold_operands(Expression_Obj base, std::vector& operands, std::vector& ops, size_t i = 0); + + void throw_syntax_error(std::string message, size_t ln = 0); + void throw_read_error(std::string message, size_t ln = 0); + + + template + Expression_Obj lex_interp() + { + if (lex < open >(false)) { + String_Schema_Obj schema = SASS_MEMORY_NEW(String_Schema, pstate); + // std::cerr << "LEX [[" << std::string(lexed) << "]]\n"; + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, lexed)); + if (position[0] == '#' && position[1] == '{') { + Expression_Obj itpl = lex_interpolation(); + if (!itpl.isNull()) schema->append(itpl); + while (lex < close >(false)) { + // std::cerr << "LEX [[" << std::string(lexed) << "]]\n"; + schema->append(SASS_MEMORY_NEW(String_Constant, pstate, lexed)); + if (position[0] == '#' && position[1] == '{') { + Expression_Obj itpl = lex_interpolation(); + if (!itpl.isNull()) schema->append(itpl); + } else { + return schema; + } + } + } else { + return SASS_MEMORY_NEW(String_Constant, pstate, lexed); + } + } + return 0; + } + + public: + static Number_Ptr lexed_number(const ParserState& pstate, const std::string& parsed); + static Number_Ptr lexed_dimension(const ParserState& pstate, const std::string& parsed); + static Number_Ptr lexed_percentage(const ParserState& pstate, const std::string& parsed); + static Value_Ptr lexed_hex_color(const ParserState& pstate, const std::string& parsed); + private: + 
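+    // private convenience overloads: forward to the static helpers above,
+    // supplying this parser's own pstate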
Number_Ptr lexed_number(const std::string& parsed) { return lexed_number(pstate, parsed); }; + Number_Ptr lexed_dimension(const std::string& parsed) { return lexed_dimension(pstate, parsed); }; + Number_Ptr lexed_percentage(const std::string& parsed) { return lexed_percentage(pstate, parsed); }; + Value_Ptr lexed_hex_color(const std::string& parsed) { return lexed_hex_color(pstate, parsed); }; + + static const char* re_attr_sensitive_close(const char* src); + static const char* re_attr_insensitive_close(const char* src); + + }; + + size_t check_bom_chars(const char* src, const char *end, const unsigned char* bom, size_t len); +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/paths.hpp b/mybulma/node_modules/node-sass/src/libsass/src/paths.hpp new file mode 100644 index 0000000..aabab94 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/paths.hpp @@ -0,0 +1,71 @@ +#ifndef SASS_PATHS_H +#define SASS_PATHS_H + +#include +#include +#include + + +template +std::string vector_to_string(std::vector v) +{ + std::stringstream buffer; + buffer << "["; + + if (!v.empty()) + { buffer << v[0]; } + else + { buffer << "]"; } + + if (v.size() == 1) + { buffer << "]"; } + else + { + for (size_t i = 1, S = v.size(); i < S; ++i) buffer << ", " << v[i]; + buffer << "]"; + } + + return buffer.str(); +} + +namespace Sass { + + + template + std::vector > paths(std::vector > strata, size_t from_end = 0) + { + if (strata.empty()) { + return std::vector >(); + } + + size_t end = strata.size() - from_end; + if (end <= 1) { + std::vector > starting_points; + starting_points.reserve(strata[0].size()); + for (size_t i = 0, S = strata[0].size(); i < S; ++i) { + std::vector starting_point; + starting_point.push_back(strata[0][i]); + starting_points.push_back(starting_point); + } + return starting_points; + } + + std::vector > up_to_here = paths(strata, from_end + 1); + std::vector here = strata[end-1]; + + std::vector > branches; + branches.reserve(up_to_here.size() * here.size()); + for (size_t i = 0, S1 = up_to_here.size(); i < S1; ++i) { + for (size_t j = 0, S2 = here.size(); j < S2; ++j) { + std::vector branch = up_to_here[i]; + branch.push_back(here[j]); + branches.push_back(branch); + } + } + + return branches; + } + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/plugins.cpp b/mybulma/node_modules/node-sass/src/libsass/src/plugins.cpp new file mode 100644 index 0000000..eecba78 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/plugins.cpp @@ -0,0 +1,184 @@ +#include "sass.hpp" +#include +#include "output.hpp" +#include "plugins.hpp" + +#ifdef _WIN32 +#include +#else +#include +#include +#include +#include +#endif + +namespace Sass { + + Plugins::Plugins(void) { } + Plugins::~Plugins(void) + { + for (auto function : functions) { + sass_delete_function(function); + } + for (auto importer : importers) { + sass_delete_importer(importer); + } + for (auto header : headers) { + sass_delete_importer(header); + } + } + + // check if plugin is compatible with this version + // plugins may be linked static against libsass + // we try to be compatible between major versions + inline bool compatibility(const char* their_version) + { +// const char* their_version = "3.1.2"; + // first check if anyone has an unknown version + const char* our_version = libsass_version(); + if (!strcmp(their_version, "[na]")) return false; + if (!strcmp(our_version, "[na]")) return false; + + // find the position of the second dot + size_t pos = 
std::string(our_version).find('.', 0); + if (pos != std::string::npos) pos = std::string(our_version).find('.', pos + 1); + + // if we do not have two dots we fallback to compare complete string + if (pos == std::string::npos) { return strcmp(their_version, our_version) ? 0 : 1; } + // otherwise only compare up to the second dot (major versions) + else { return strncmp(their_version, our_version, pos) ? 0 : 1; } + + } + + // load one specific plugin + bool Plugins::load_plugin (const std::string& path) + { + + typedef const char* (*__plugin_version__)(void); + typedef Sass_Function_List (*__plugin_load_fns__)(void); + typedef Sass_Importer_List (*__plugin_load_imps__)(void); + + if (LOAD_LIB(plugin, path)) + { + // try to load initial function to query libsass version suppor + if (LOAD_LIB_FN(__plugin_version__, plugin_version, "libsass_get_version")) + { + // get the libsass version of the plugin + if (!compatibility(plugin_version())) return false; + // try to get import address for "libsass_load_functions" + if (LOAD_LIB_FN(__plugin_load_fns__, plugin_load_functions, "libsass_load_functions")) + { + Sass_Function_List fns = plugin_load_functions(), _p = fns; + while (fns && *fns) { functions.push_back(*fns); ++ fns; } + sass_free_memory(_p); // only delete the container, items not yet + } + // try to get import address for "libsass_load_importers" + if (LOAD_LIB_FN(__plugin_load_imps__, plugin_load_importers, "libsass_load_importers")) + { + Sass_Importer_List imps = plugin_load_importers(), _p = imps; + while (imps && *imps) { importers.push_back(*imps); ++ imps; } + sass_free_memory(_p); // only delete the container, items not yet + } + // try to get import address for "libsass_load_headers" + if (LOAD_LIB_FN(__plugin_load_imps__, plugin_load_headers, "libsass_load_headers")) + { + Sass_Importer_List imps = plugin_load_headers(), _p = imps; + while (imps && *imps) { headers.push_back(*imps); ++ imps; } + sass_free_memory(_p); // only delete the container, items not yet + } + // success + return true; + } + else + { + // print debug message to stderr (should not happen) + std::cerr << "failed loading 'libsass_support' in <" << path << ">" << std::endl; + if (const char* dlsym_error = dlerror()) std::cerr << dlsym_error << std::endl; + CLOSE_LIB(plugin); + } + } + else + { + // print debug message to stderr (should not happen) + std::cerr << "failed loading plugin <" << path << ">" << std::endl; + if (const char* dlopen_error = dlerror()) std::cerr << dlopen_error << std::endl; + } + + return false; + + } + + size_t Plugins::load_plugins(const std::string& path) + { + + // count plugins + size_t loaded = 0; + + #ifdef _WIN32 + + try + { + + // use wchar (utf16) + WIN32_FIND_DATAW data; + // trailing slash is guaranteed + std::string globsrch(path + "*.dll"); + // convert to wide chars (utf16) for system call + std::wstring wglobsrch(UTF_8::convert_to_utf16(globsrch)); + HANDLE hFile = FindFirstFileW(wglobsrch.c_str(), &data); + // check if system called returned a result + // ToDo: maybe we should print a debug message + if (hFile == INVALID_HANDLE_VALUE) return -1; + + // read directory + while (true) + { + try + { + // the system will report the filenames with wide chars (utf16) + std::string entry = UTF_8::convert_from_utf16(data.cFileName); + // check if file ending matches exactly + if (!ends_with(entry, ".dll")) continue; + // load the plugin and increase counter + if (load_plugin(path + entry)) ++ loaded; + // check if there should be more entries + if (GetLastError() == 
ERROR_NO_MORE_FILES) break; + // load next entry (check for return type) + if (!FindNextFileW(hFile, &data)) break; + } + catch (...) + { + // report the error to the console (should not happen) + // seems like we got strange data from the system call? + std::cerr << "filename in plugin path has invalid utf8?" << std::endl; + } + } + } + catch (utf8::invalid_utf8) + { + // report the error to the console (should not happen) + // implementors should make sure to provide valid utf8 + std::cerr << "plugin path contains invalid utf8" << std::endl; + } + + #else + + DIR *dp; + struct dirent *dirp; + if((dp = opendir(path.c_str())) == NULL) return -1; + while ((dirp = readdir(dp)) != NULL) { + #if __APPLE__ + if (!ends_with(dirp->d_name, ".dylib")) continue; + #else + if (!ends_with(dirp->d_name, ".so")) continue; + #endif + if (load_plugin(path + dirp->d_name)) ++ loaded; + } + closedir(dp); + + #endif + return loaded; + + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/plugins.hpp b/mybulma/node_modules/node-sass/src/libsass/src/plugins.hpp new file mode 100644 index 0000000..fe4eed0 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/plugins.hpp @@ -0,0 +1,57 @@ +#ifndef SASS_PLUGINS_H +#define SASS_PLUGINS_H + +#include +#include +#include "utf8_string.hpp" +#include "sass/functions.h" + +#ifdef _WIN32 + + #define LOAD_LIB(var, path) HMODULE var = LoadLibraryW(UTF_8::convert_to_utf16(path).c_str()) + #define LOAD_LIB_WCHR(var, path_wide_str) HMODULE var = LoadLibraryW(path_wide_str.c_str()) + #define LOAD_LIB_FN(type, var, name) type var = (type) GetProcAddress(plugin, name) + #define CLOSE_LIB(var) FreeLibrary(var) + + #ifndef dlerror + #define dlerror() 0 + #endif + +#else + + #define LOAD_LIB(var, path) void* var = dlopen(path.c_str(), RTLD_LAZY) + #define LOAD_LIB_FN(type, var, name) type var = (type) dlsym(plugin, name) + #define CLOSE_LIB(var) dlclose(var) + +#endif + +namespace Sass { + + + class Plugins { + + public: // c-tor + Plugins(void); + ~Plugins(void); + + public: // methods + // load one specific plugin + bool load_plugin(const std::string& path); + // load all plugins from a directory + size_t load_plugins(const std::string& path); + + public: // public accessors + const std::vector get_headers(void) { return headers; } + const std::vector get_importers(void) { return importers; } + const std::vector get_functions(void) { return functions; } + + private: // private vars + std::vector headers; + std::vector importers; + std::vector functions; + + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/position.cpp b/mybulma/node_modules/node-sass/src/libsass/src/position.cpp new file mode 100644 index 0000000..312e04c --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/position.cpp @@ -0,0 +1,181 @@ +#include "sass.hpp" +#include "position.hpp" + +namespace Sass { + + + Offset::Offset(const char chr) + : line(chr == '\n' ? 1 : 0), + column(chr == '\n' ? 
0 : 1) + {} + + Offset::Offset(const char* string) + : line(0), column(0) + { + *this = inc(string, string + strlen(string)); + } + + Offset::Offset(const std::string& text) + : line(0), column(0) + { + *this = inc(text.c_str(), text.c_str() + text.size()); + } + + Offset::Offset(const size_t line, const size_t column) + : line(line), column(column) { } + + // init/create instance from const char substring + Offset Offset::init(const char* beg, const char* end) + { + Offset offset(0, 0); + if (end == 0) { + end += strlen(beg); + } + offset.add(beg, end); + return offset; + } + + // increase offset by given string (mostly called by lexer) + // increase line counter and count columns on the last line + Offset Offset::add(const char* begin, const char* end) + { + if (end == 0) return *this; + while (begin < end && *begin) { + if (*begin == '\n') { + ++ line; + // start new line + column = 0; + } else { + // do not count any utf8 continuation bytes + // https://stackoverflow.com/a/9356203/1550314 + // https://en.wikipedia.org/wiki/UTF-8#Description + unsigned char chr = *begin; + // skip over 10xxxxxx + // is 1st bit not set + if ((chr & 128) == 0) { + // regular ascii char + column += 1; + } + // is 2nd bit not set + else if ((chr & 64) == 0) { + // first utf8 byte + column += 1; + } + } + ++ begin; + } + return *this; + } + + // increase offset by given string (mostly called by lexer) + // increase line counter and count columns on the last line + Offset Offset::inc(const char* begin, const char* end) const + { + Offset offset(line, column); + offset.add(begin, end); + return offset; + } + + bool Offset::operator== (const Offset &pos) const + { + return line == pos.line && column == pos.column; + } + + bool Offset::operator!= (const Offset &pos) const + { + return line != pos.line || column != pos.column; + } + + void Offset::operator+= (const Offset &off) + { + *this = Offset(line + off.line, off.line > 0 ? off.column : column + off.column); + } + + Offset Offset::operator+ (const Offset &off) const + { + return Offset(line + off.line, off.line > 0 ? off.column : column + off.column); + } + + Offset Offset::operator- (const Offset &off) const + { + return Offset(line - off.line, off.line == line ? 
column - off.column : column); + } + + Position::Position(const size_t file) + : Offset(0, 0), file(file) { } + + Position::Position(const size_t file, const Offset& offset) + : Offset(offset), file(file) { } + + Position::Position(const size_t line, const size_t column) + : Offset(line, column), file(-1) { } + + Position::Position(const size_t file, const size_t line, const size_t column) + : Offset(line, column), file(file) { } + + + ParserState::ParserState(const char* path, const char* src, const size_t file) + : Position(file, 0, 0), path(path), src(src), offset(0, 0), token() { } + + ParserState::ParserState(const char* path, const char* src, const Position& position, Offset offset) + : Position(position), path(path), src(src), offset(offset), token() { } + + ParserState::ParserState(const char* path, const char* src, const Token& token, const Position& position, Offset offset) + : Position(position), path(path), src(src), offset(offset), token(token) { } + + Position Position::add(const char* begin, const char* end) + { + Offset::add(begin, end); + return *this; + } + + Position Position::inc(const char* begin, const char* end) const + { + Offset offset(line, column); + offset = offset.inc(begin, end); + return Position(file, offset); + } + + bool Position::operator== (const Position &pos) const + { + return file == pos.file && line == pos.line && column == pos.column; + } + + bool Position::operator!= (const Position &pos) const + { + return file == pos.file || line != pos.line || column != pos.column; + } + + void Position::operator+= (const Offset &off) + { + *this = Position(file, line + off.line, off.line > 0 ? off.column : column + off.column); + } + + const Position Position::operator+ (const Offset &off) const + { + return Position(file, line + off.line, off.line > 0 ? off.column : column + off.column); + } + + const Offset Position::operator- (const Offset &off) const + { + return Offset(line - off.line, off.line == line ? column - off.column : column); + } + + /* not used anymore - remove? + std::ostream& operator<<(std::ostream& strm, const Offset& off) + { + if (off.line == string::npos) strm << "-1:"; else strm << off.line << ":"; + if (off.column == string::npos) strm << "-1"; else strm << off.column; + return strm; + } */ + + /* not used anymore - remove? 
+ std::ostream& operator<<(std::ostream& strm, const Position& pos) + { + if (pos.file != string::npos) strm << pos.file << ":"; + if (pos.line == string::npos) strm << "-1:"; else strm << pos.line << ":"; + if (pos.column == string::npos) strm << "-1"; else strm << pos.column; + return strm; + } */ + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/position.hpp b/mybulma/node_modules/node-sass/src/libsass/src/position.hpp new file mode 100644 index 0000000..923be3c --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/position.hpp @@ -0,0 +1,124 @@ +#ifndef SASS_POSITION_H +#define SASS_POSITION_H + +#include +#include +// #include + +namespace Sass { + + + class Offset { + + public: // c-tor + Offset(const char chr); + Offset(const char* string); + Offset(const std::string& text); + Offset(const size_t line, const size_t column); + + // return new position, incremented by the given string + Offset add(const char* begin, const char* end); + Offset inc(const char* begin, const char* end) const; + + // init/create instance from const char substring + static Offset init(const char* beg, const char* end); + + public: // overload operators for position + void operator+= (const Offset &pos); + bool operator== (const Offset &pos) const; + bool operator!= (const Offset &pos) const; + Offset operator+ (const Offset &off) const; + Offset operator- (const Offset &off) const; + + public: // overload output stream operator + // friend std::ostream& operator<<(std::ostream& strm, const Offset& off); + + public: + Offset off() { return *this; } + + public: + size_t line; + size_t column; + + }; + + class Position : public Offset { + + public: // c-tor + Position(const size_t file); // line(0), column(0) + Position(const size_t file, const Offset& offset); + Position(const size_t line, const size_t column); // file(-1) + Position(const size_t file, const size_t line, const size_t column); + + public: // overload operators for position + void operator+= (const Offset &off); + bool operator== (const Position &pos) const; + bool operator!= (const Position &pos) const; + const Position operator+ (const Offset &off) const; + const Offset operator- (const Offset &off) const; + // return new position, incremented by the given string + Position add(const char* begin, const char* end); + Position inc(const char* begin, const char* end) const; + + public: // overload output stream operator + // friend std::ostream& operator<<(std::ostream& strm, const Position& pos); + + public: + size_t file; + + }; + + // Token type for representing lexed chunks of text + class Token { + public: + const char* prefix; + const char* begin; + const char* end; + + Token() + : prefix(0), begin(0), end(0) { } + Token(const char* b, const char* e) + : prefix(b), begin(b), end(e) { } + Token(const char* str) + : prefix(str), begin(str), end(str + strlen(str)) { } + Token(const char* p, const char* b, const char* e) + : prefix(p), begin(b), end(e) { } + + size_t length() const { return end - begin; } + std::string ws_before() const { return std::string(prefix, begin); } + const std::string to_string() const { return std::string(begin, end); } + std::string time_wspace() const { + std::string str(to_string()); + std::string whitespaces(" \t\f\v\n\r"); + return str.erase(str.find_last_not_of(whitespaces)+1); + } + + operator bool() { return begin && end && begin >= end; } + operator std::string() { return to_string(); } + + bool operator==(Token t) { return to_string() == t.to_string(); } + }; + + class 
ParserState : public Position { + + public: // c-tor + ParserState(const char* path, const char* src = 0, const size_t file = std::string::npos); + ParserState(const char* path, const char* src, const Position& position, Offset offset = Offset(0, 0)); + ParserState(const char* path, const char* src, const Token& token, const Position& position, Offset offset = Offset(0, 0)); + + public: // down casts + Offset off() { return *this; } + Position pos() { return *this; } + ParserState pstate() { return *this; } + + public: + const char* path; + const char* src; + Offset offset; + Token token; + + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/prelexer.cpp b/mybulma/node_modules/node-sass/src/libsass/src/prelexer.cpp new file mode 100644 index 0000000..a43b1ee --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/prelexer.cpp @@ -0,0 +1,1774 @@ +#include "sass.hpp" +#include +#include +#include +#include "util.hpp" +#include "position.hpp" +#include "prelexer.hpp" +#include "constants.hpp" + + +namespace Sass { + // using namespace Lexer; + using namespace Constants; + + namespace Prelexer { + + + /* + + def string_re(open, close) + /#{open}((?:\\.|\#(?!\{)|[^#{close}\\#])*)(#{close}|#\{)/m + end + end + + # A hash of regular expressions that are used for tokenizing strings. + # + # The key is a `[Symbol, Boolean]` pair. + # The symbol represents which style of quotation to use, + # while the boolean represents whether or not the string + # is following an interpolated segment. + STRING_REGULAR_EXPRESSIONS = { + :double => { + /#{open}((?:\\.|\#(?!\{)|[^#{close}\\#])*)(#{close}|#\{)/m + false => string_re('"', '"'), + true => string_re('', '"') + }, + :single => { + false => string_re("'", "'"), + true => string_re('', "'") + }, + :uri => { + false => /url\(#{W}(#{URLCHAR}*?)(#{W}\)|#\{)/, + true => /(#{URLCHAR}*?)(#{W}\)|#\{)/ + }, + # Defined in https://developer.mozilla.org/en/CSS/@-moz-document as a + # non-standard version of http://www.w3.org/TR/css3-conditional/ + :url_prefix => { + false => /url-prefix\(#{W}(#{URLCHAR}*?)(#{W}\)|#\{)/, + true => /(#{URLCHAR}*?)(#{W}\)|#\{)/ + }, + :domain => { + false => /domain\(#{W}(#{URLCHAR}*?)(#{W}\)|#\{)/, + true => /(#{URLCHAR}*?)(#{W}\)|#\{)/ + } + } + */ + + /* + /#{open} + ( + \\. 
+ | + \# (?!\{) + | + [^#{close}\\#] + )* + (#{close}|#\{) + /m + false => string_re('"', '"'), + true => string_re('', '"') + */ + extern const char string_double_negates[] = "\"\\#"; + const char* re_string_double_close(const char* src) + { + return sequence < + // valid chars + zero_plus < + alternatives < + // escaped char + sequence < + exactly <'\\'>, + any_char + >, + // non interpolate hash + sequence < + exactly <'#'>, + negate < + exactly <'{'> + > + >, + // other valid chars + neg_class_char < + string_double_negates + > + > + >, + // quoted string closer + // or interpolate opening + alternatives < + exactly <'"'>, + lookahead < exactly< hash_lbrace > > + > + >(src); + } + + const char* re_string_double_open(const char* src) + { + return sequence < + // quoted string opener + exactly <'"'>, + // valid chars + zero_plus < + alternatives < + // escaped char + sequence < + exactly <'\\'>, + any_char + >, + // non interpolate hash + sequence < + exactly <'#'>, + negate < + exactly <'{'> + > + >, + // other valid chars + neg_class_char < + string_double_negates + > + > + >, + // quoted string closer + // or interpolate opening + alternatives < + exactly <'"'>, + lookahead < exactly< hash_lbrace > > + > + >(src); + } + + extern const char string_single_negates[] = "'\\#"; + const char* re_string_single_close(const char* src) + { + return sequence < + // valid chars + zero_plus < + alternatives < + // escaped char + sequence < + exactly <'\\'>, + any_char + >, + // non interpolate hash + sequence < + exactly <'#'>, + negate < + exactly <'{'> + > + >, + // other valid chars + neg_class_char < + string_single_negates + > + > + >, + // quoted string closer + // or interpolate opening + alternatives < + exactly <'\''>, + lookahead < exactly< hash_lbrace > > + > + >(src); + } + + const char* re_string_single_open(const char* src) + { + return sequence < + // quoted string opener + exactly <'\''>, + // valid chars + zero_plus < + alternatives < + // escaped char + sequence < + exactly <'\\'>, + any_char + >, + // non interpolate hash + sequence < + exactly <'#'>, + negate < + exactly <'{'> + > + >, + // other valid chars + neg_class_char < + string_single_negates + > + > + >, + // quoted string closer + // or interpolate opening + alternatives < + exactly <'\''>, + lookahead < exactly< hash_lbrace > > + > + >(src); + } + + /* + :uri => { + false => /url\(#{W}(#{URLCHAR}*?)(#{W}\)|#\{)/, + true => /(#{URLCHAR}*?)(#{W}\)|#\{)/ + }, + */ + const char* re_string_uri_close(const char* src) + { + return sequence < + non_greedy< + alternatives< + class_char< real_uri_chars >, + uri_character, + NONASCII, + ESCAPE + >, + alternatives< + sequence < optional < W >, exactly <')'> >, + lookahead < exactly< hash_lbrace > > + > + >, + optional < + sequence < optional < W >, exactly <')'> > + > + >(src); + } + + const char* re_string_uri_open(const char* src) + { + return sequence < + exactly <'u'>, + exactly <'r'>, + exactly <'l'>, + exactly <'('>, + W, + alternatives< + quoted_string, + non_greedy< + alternatives< + class_char< real_uri_chars >, + uri_character, + NONASCII, + ESCAPE + >, + alternatives< + sequence < W, exactly <')'> >, + exactly< hash_lbrace > + > + > + > + >(src); + } + + // Match a line comment (/.*?(?=\n|\r\n?|\Z)/. + const char* line_comment(const char* src) + { + return sequence< + exactly < + slash_slash + >, + non_greedy< + any_char, + end_of_line + > + >(src); + } + + // Match a block comment. 
+ const char* block_comment(const char* src) + { + return sequence< + delimited_by< + slash_star, + star_slash, + false + > + >(src); + } + /* not use anymore - remove? + const char* block_comment_prefix(const char* src) { + return exactly(src); + } + // Match either comment. + const char* comment(const char* src) { + return line_comment(src); + } + */ + + // Match zero plus white-space or line_comments + const char* optional_css_whitespace(const char* src) { + return zero_plus< alternatives >(src); + } + const char* css_whitespace(const char* src) { + return one_plus< alternatives >(src); + } + // Match optional_css_whitepace plus block_comments + const char* optional_css_comments(const char* src) { + return zero_plus< alternatives >(src); + } + const char* css_comments(const char* src) { + return one_plus< alternatives >(src); + } + + // Match one backslash escaped char /\\./ + const char* escape_seq(const char* src) + { + return sequence< + exactly<'\\'>, + alternatives < + minmax_range< + 1, 3, xdigit + >, + any_char + >, + optional < + exactly <' '> + > + >(src); + } + + // Match identifier start + const char* identifier_alpha(const char* src) + { + return alternatives< + unicode_seq, + alpha, + unicode, + exactly<'-'>, + exactly<'_'>, + NONASCII, + ESCAPE, + escape_seq + >(src); + } + + // Match identifier after start + const char* identifier_alnum(const char* src) + { + return alternatives< + unicode_seq, + alnum, + unicode, + exactly<'-'>, + exactly<'_'>, + NONASCII, + ESCAPE, + escape_seq + >(src); + } + + // Match CSS identifiers. + const char* strict_identifier(const char* src) + { + return sequence< + one_plus < strict_identifier_alpha >, + zero_plus < strict_identifier_alnum > + // word_boundary not needed + >(src); + } + + // Match CSS identifiers. 
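+    // (optionally hyphen-prefixed: zero or more '-', one identifier_alpha,
+    //  then any number of identifier_alnum characters)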
+ const char* identifier(const char* src) + { + return sequence< + zero_plus< exactly<'-'> >, + one_plus < identifier_alpha >, + zero_plus < identifier_alnum > + // word_boundary not needed + >(src); + } + + const char* strict_identifier_alpha(const char* src) + { + return alternatives < + alpha, + unicode, + escape_seq, + exactly<'_'> + >(src); + } + + const char* strict_identifier_alnum(const char* src) + { + return alternatives < + alnum, + unicode, + escape_seq, + exactly<'_'> + >(src); + } + + // Match a single CSS unit + const char* one_unit(const char* src) + { + return sequence < + optional < exactly <'-'> >, + strict_identifier_alpha, + zero_plus < alternatives< + strict_identifier_alnum, + sequence < + one_plus < exactly<'-'> >, + strict_identifier_alpha + > + > > + >(src); + } + + // Match numerator/denominator CSS units + const char* multiple_units(const char* src) + { + return + sequence < + one_unit, + zero_plus < + sequence < + exactly <'*'>, + one_unit + > + > + >(src); + } + + // Match complex CSS unit identifiers + const char* unit_identifier(const char* src) + { + return sequence < + multiple_units, + optional < + sequence < + exactly <'/'>, + negate < sequence < + exactly < calc_fn_kwd >, + exactly < '(' > + > >, + multiple_units + > > + >(src); + } + + const char* identifier_alnums(const char* src) + { + return one_plus< identifier_alnum >(src); + } + + // Match number prefix ([\+\-]+) + const char* number_prefix(const char* src) { + return alternatives < + exactly < '+' >, + sequence < + exactly < '-' >, + optional_css_whitespace, + exactly< '-' > + > + >(src); + } + + // Match interpolant schemas + const char* identifier_schema(const char* src) { + + return sequence < + one_plus < + sequence < + zero_plus < + alternatives < + sequence < + optional < + exactly <'$'> + >, + identifier + >, + exactly <'-'> + > + >, + interpolant, + zero_plus < + alternatives < + digits, + sequence < + optional < + exactly <'$'> + >, + identifier + >, + quoted_string, + exactly<'-'> + > + > + > + >, + negate < + exactly<'%'> + > + > (src); + } + + // interpolants can be recursive/nested + const char* interpolant(const char* src) { + return recursive_scopes< exactly, exactly >(src); + } + + // $re_squote = /'(?:$re_itplnt|\\.|[^'])*'/ + const char* single_quoted_string(const char* src) { + // match a single quoted string, while skipping interpolants + return sequence < + exactly <'\''>, + zero_plus < + alternatives < + // skip escapes + sequence < + exactly < '\\' >, + re_linebreak + >, + escape_seq, + unicode_seq, + // skip interpolants + interpolant, + // skip non delimiters + any_char_but < '\'' > + > + >, + exactly <'\''> + >(src); + } + + // $re_dquote = /"(?:$re_itp|\\.|[^"])*"/ + const char* double_quoted_string(const char* src) { + // match a single quoted string, while skipping interpolants + return sequence < + exactly <'"'>, + zero_plus < + alternatives < + // skip escapes + sequence < + exactly < '\\' >, + re_linebreak + >, + escape_seq, + unicode_seq, + // skip interpolants + interpolant, + // skip non delimiters + any_char_but < '"' > + > + >, + exactly <'"'> + >(src); + } + + // $re_quoted = /(?:$re_squote|$re_dquote)/ + const char* quoted_string(const char* src) { + // match a quoted string, while skipping interpolants + return alternatives< + single_quoted_string, + double_quoted_string + >(src); + } + + const char* sass_value(const char* src) { + return alternatives < + quoted_string, + identifier, + percentage, + hex, + dimension, + number + >(src); + } + + // this is 
basically `one_plus < sass_value >` + // takes care to not parse invalid combinations + const char* value_combinations(const char* src) { + // `2px-2px` is invalid combo + bool was_number = false; + const char* pos; + while (src) { + if ((pos = alternatives < quoted_string, identifier, percentage, hex >(src))) { + was_number = false; + src = pos; + } else if (!was_number && !exactly<'+'>(src) && (pos = alternatives < dimension, number >(src))) { + was_number = true; + src = pos; + } else { + break; + } + } + return src; + } + + // must be at least one interpolant + // can be surrounded by sass values + // make sure to never parse (dim)(dim) + // since this wrongly consumes `2px-1px` + // `2px1px` is valid number (unit `px1px`) + const char* value_schema(const char* src) + { + return sequence < + one_plus < + sequence < + optional < value_combinations >, + interpolant, + optional < value_combinations > + > + > + >(src); + } + + // Match CSS '@' keywords. + const char* at_keyword(const char* src) { + return sequence, identifier>(src); + } + + /* + tok(%r{ + ( + \\. + | + (?!url\() + [^"'/\#!;\{\}] # " + | + /(?![\*\/]) + | + \#(?!\{) + | + !(?![a-z]) # TODO: never consume "!" when issue 1126 is fixed. + )+ + }xi) || tok(COMMENT) || tok(SINGLE_LINE_COMMENT) || interp_string || interp_uri || + interpolation(:warn_for_color) + */ + const char* re_almost_any_value_token(const char* src) { + + return alternatives < + one_plus < + alternatives < + sequence < + exactly <'\\'>, + any_char + >, + sequence < + negate < + uri_prefix + >, + neg_class_char < + almost_any_value_class + > + >, + sequence < + exactly <'/'>, + negate < + alternatives < + exactly <'/'>, + exactly <'*'> + > + > + >, + sequence < + exactly <'\\'>, + exactly <'#'>, + negate < + exactly <'{'> + > + >, + sequence < + exactly <'!'>, + negate < + alpha + > + > + > + >, + block_comment, + line_comment, + interpolant, + space, + sequence < + exactly<'u'>, + exactly<'r'>, + exactly<'l'>, + exactly<'('>, + zero_plus < + alternatives < + class_char< real_uri_chars >, + uri_character, + NONASCII, + ESCAPE + > + >, + // false => /url\(#{W}(#{URLCHAR}*?)(#{W}\)|#\{)/, + // true => /(#{URLCHAR}*?)(#{W}\)|#\{)/ + exactly<')'> + > + >(src); + } + + /* + DIRECTIVES = Set[:mixin, :include, :function, :return, :debug, :warn, :for, + :each, :while, :if, :else, :extend, :import, :media, :charset, :content, + :_moz_document, :at_root, :error] + */ + const char* re_special_directive(const char* src) { + return alternatives < + word < mixin_kwd >, + word < include_kwd >, + word < function_kwd >, + word < return_kwd >, + word < debug_kwd >, + word < warn_kwd >, + word < for_kwd >, + word < each_kwd >, + word < while_kwd >, + word < if_kwd >, + word < else_kwd >, + word < extend_kwd >, + word < import_kwd >, + word < media_kwd >, + word < charset_kwd >, + word < content_kwd >, + // exactly < moz_document_kwd >, + word < at_root_kwd >, + word < error_kwd > + >(src); + } + + const char* re_prefixed_directive(const char* src) { + return sequence < + optional < + sequence < + exactly <'-'>, + one_plus < alnum >, + exactly <'-'> + > + >, + exactly < supports_kwd > + >(src); + } + + const char* re_reference_combinator(const char* src) { + return sequence < + optional < + sequence < + zero_plus < + exactly <'-'> + >, + identifier, + exactly <'|'> + > + >, + zero_plus < + exactly <'-'> + >, + identifier + >(src); + } + + const char* static_reference_combinator(const char* src) { + return sequence < + exactly <'/'>, + re_reference_combinator, + exactly <'/'> + 
>(src); + } + + const char* schema_reference_combinator(const char* src) { + return sequence < + exactly <'/'>, + optional < + sequence < + css_ip_identifier, + exactly <'|'> + > + >, + css_ip_identifier, + exactly <'/'> + > (src); + } + + const char* kwd_import(const char* src) { + return word(src); + } + + const char* kwd_at_root(const char* src) { + return word(src); + } + + const char* kwd_with_directive(const char* src) { + return word(src); + } + + const char* kwd_without_directive(const char* src) { + return word(src); + } + + const char* kwd_media(const char* src) { + return word(src); + } + + const char* kwd_supports_directive(const char* src) { + return word(src); + } + + const char* kwd_mixin(const char* src) { + return word(src); + } + + const char* kwd_function(const char* src) { + return word(src); + } + + const char* kwd_return_directive(const char* src) { + return word(src); + } + + const char* kwd_include_directive(const char* src) { + return word(src); + } + + const char* kwd_content_directive(const char* src) { + return word(src); + } + + const char* kwd_charset_directive(const char* src) { + return word(src); + } + + const char* kwd_extend(const char* src) { + return word(src); + } + + + const char* kwd_if_directive(const char* src) { + return word(src); + } + + const char* kwd_else_directive(const char* src) { + return word(src); + } + const char* elseif_directive(const char* src) { + return sequence< exactly< else_kwd >, + optional_css_comments, + word< if_after_else_kwd > >(src); + } + + const char* kwd_for_directive(const char* src) { + return word(src); + } + + const char* kwd_from(const char* src) { + return word(src); + } + + const char* kwd_to(const char* src) { + return word(src); + } + + const char* kwd_through(const char* src) { + return word(src); + } + + const char* kwd_each_directive(const char* src) { + return word(src); + } + + const char* kwd_in(const char* src) { + return word(src); + } + + const char* kwd_while_directive(const char* src) { + return word(src); + } + + const char* name(const char* src) { + return one_plus< alternatives< alnum, + exactly<'-'>, + exactly<'_'>, + escape_seq > >(src); + } + + const char* kwd_warn(const char* src) { + return word(src); + } + + const char* kwd_err(const char* src) { + return word(src); + } + + const char* kwd_dbg(const char* src) { + return word(src); + } + + /* not used anymore - remove? 
+ const char* directive(const char* src) { + return sequence< exactly<'@'>, identifier >(src); + } */ + + const char* kwd_null(const char* src) { + return word(src); + } + + const char* css_identifier(const char* src) { + return sequence < + zero_plus < + exactly <'-'> + >, + identifier + >(src); + } + + const char* css_ip_identifier(const char* src) { + return sequence < + zero_plus < + exactly <'-'> + >, + alternatives < + identifier, + interpolant + > + >(src); + } + + // Match CSS type selectors + const char* namespace_prefix(const char* src) { + return sequence < + optional < + alternatives < + exactly <'*'>, + css_identifier + > + >, + exactly <'|'>, + negate < + exactly <'='> + > + >(src); + } + + // Match CSS type selectors + const char* namespace_schema(const char* src) { + return sequence < + optional < + alternatives < + exactly <'*'>, + css_ip_identifier + > + >, + exactly<'|'>, + negate < + exactly <'='> + > + >(src); + } + + const char* hyphens_and_identifier(const char* src) { + return sequence< zero_plus< exactly< '-' > >, identifier_alnums >(src); + } + const char* hyphens_and_name(const char* src) { + return sequence< zero_plus< exactly< '-' > >, name >(src); + } + const char* universal(const char* src) { + return sequence< optional, exactly<'*'> >(src); + } + // Match CSS id names. + const char* id_name(const char* src) { + return sequence, identifier_alnums >(src); + } + // Match CSS class names. + const char* class_name(const char* src) { + return sequence, identifier >(src); + } + // Attribute name in an attribute selector. + const char* attribute_name(const char* src) { + return alternatives< sequence< optional, identifier>, + identifier >(src); + } + // match placeholder selectors + const char* placeholder(const char* src) { + return sequence, identifier_alnums >(src); + } + // Match CSS numeric constants. + + const char* op(const char* src) { + return class_char(src); + } + const char* sign(const char* src) { + return class_char(src); + } + const char* unsigned_number(const char* src) { + return alternatives, + exactly<'.'>, + one_plus >, + digits>(src); + } + const char* number(const char* src) { + return sequence< + optional, + unsigned_number, + optional< + sequence< + exactly<'e'>, + optional, + unsigned_number + > + > + >(src); + } + const char* coefficient(const char* src) { + return alternatives< sequence< optional, digits >, + sign >(src); + } + const char* binomial(const char* src) { + return sequence < + optional < sign >, + optional < digits >, + exactly <'n'>, + zero_plus < sequence < + optional_css_whitespace, sign, + optional_css_whitespace, digits + > > + >(src); + } + const char* percentage(const char* src) { + return sequence< number, exactly<'%'> >(src); + } + const char* ampersand(const char* src) { + return exactly<'&'>(src); + } + + /* not used anymore - remove? + const char* em(const char* src) { + return sequence< number, exactly >(src); + } */ + const char* dimension(const char* src) { + return sequence(src); + } + const char* hex(const char* src) { + const char* p = sequence< exactly<'#'>, one_plus >(src); + ptrdiff_t len = p - src; + return (len != 4 && len != 7) ? 0 : p; + } + const char* hexa(const char* src) { + const char* p = sequence< exactly<'#'>, one_plus >(src); + ptrdiff_t len = p - src; + return (len != 5 && len != 9) ? 0 : p; + } + const char* hex0(const char* src) { + const char* p = sequence< exactly<'0'>, exactly<'x'>, one_plus >(src); + ptrdiff_t len = p - src; + return (len != 5 && len != 8) ? 
0 : p; + } + + /* no longer used - remove? + const char* rgb_prefix(const char* src) { + return word(src); + }*/ + // Match CSS uri specifiers. + + const char* uri_prefix(const char* src) { + return sequence < + exactly < + url_kwd + >, + zero_plus < + sequence < + exactly <'-'>, + one_plus < + alpha + > + > + >, + exactly <'('> + >(src); + } + + // TODO: rename the following two functions + /* no longer used - remove? + const char* uri(const char* src) { + return sequence< exactly, + optional, + quoted_string, + optional, + exactly<')'> >(src); + }*/ + /* no longer used - remove? + const char* url_value(const char* src) { + return sequence< optional< sequence< identifier, exactly<':'> > >, // optional protocol + one_plus< sequence< zero_plus< exactly<'/'> >, filename > >, // one or more folders and/or trailing filename + optional< exactly<'/'> > >(src); + }*/ + /* no longer used - remove? + const char* url_schema(const char* src) { + return sequence< optional< sequence< identifier, exactly<':'> > >, // optional protocol + filename_schema >(src); // optional trailing slash + }*/ + // Match CSS "!important" keyword. + const char* kwd_important(const char* src) { + return sequence< exactly<'!'>, + optional_css_whitespace, + word >(src); + } + // Match CSS "!optional" keyword. + const char* kwd_optional(const char* src) { + return sequence< exactly<'!'>, + optional_css_whitespace, + word >(src); + } + // Match Sass "!default" keyword. + const char* default_flag(const char* src) { + return sequence< exactly<'!'>, + optional_css_whitespace, + word >(src); + } + // Match Sass "!global" keyword. + const char* global_flag(const char* src) { + return sequence< exactly<'!'>, + optional_css_whitespace, + word >(src); + } + // Match CSS pseudo-class/element prefixes. + const char* pseudo_prefix(const char* src) { + return sequence< exactly<':'>, optional< exactly<':'> > >(src); + } + // Match CSS function call openers. + const char* functional_schema(const char* src) { + return sequence < + one_plus < + sequence < + zero_plus < + alternatives < + identifier, + exactly <'-'> + > + >, + one_plus < + sequence < + interpolant, + alternatives < + digits, + identifier, + exactly<'+'>, + exactly<'-'> + > + > + > + > + >, + negate < + exactly <'%'> + >, + lookahead < + exactly <'('> + > + > (src); + } + + const char* re_nothing(const char* src) { + return src; + } + + const char* re_functional(const char* src) { + return sequence< identifier, optional < block_comment >, exactly<'('> >(src); + } + const char* re_pseudo_selector(const char* src) { + return sequence< identifier, optional < block_comment >, exactly<'('> >(src); + } + // Match the CSS negation pseudo-class. + const char* pseudo_not(const char* src) { + return word< pseudo_not_fn_kwd >(src); + } + // Match CSS 'odd' and 'even' keywords for functional pseudo-classes. + const char* even(const char* src) { + return word(src); + } + const char* odd(const char* src) { + return word(src); + } + // Match CSS attribute-matching operators. + const char* exact_match(const char* src) { return exactly<'='>(src); } + const char* class_match(const char* src) { return exactly(src); } + const char* dash_match(const char* src) { return exactly(src); } + const char* prefix_match(const char* src) { return exactly(src); } + const char* suffix_match(const char* src) { return exactly(src); } + const char* substring_match(const char* src) { return exactly(src); } + // Match CSS combinators. + /* not used anymore - remove? 
+ const char* adjacent_to(const char* src) { + return sequence< optional_spaces, exactly<'+'> >(src); + } + const char* precedes(const char* src) { + return sequence< optional_spaces, exactly<'~'> >(src); + } + const char* parent_of(const char* src) { + return sequence< optional_spaces, exactly<'>'> >(src); + } + const char* ancestor_of(const char* src) { + return sequence< spaces, negate< exactly<'{'> > >(src); + }*/ + + // Match SCSS variable names. + const char* variable(const char* src) { + return sequence, identifier>(src); + } + + // parse `calc`, `-a-calc` and `--b-c-calc` + // but do not parse `foocalc` or `foo-calc` + const char* calc_fn_call(const char* src) { + return sequence < + optional < sequence < + hyphens, + one_plus < sequence < + strict_identifier, + hyphens + > > + > >, + exactly < calc_fn_kwd >, + word_boundary + >(src); + } + + // Match Sass boolean keywords. + const char* kwd_true(const char* src) { + return word(src); + } + const char* kwd_false(const char* src) { + return word(src); + } + const char* kwd_only(const char* src) { + return keyword < only_kwd >(src); + } + const char* kwd_and(const char* src) { + return keyword < and_kwd >(src); + } + const char* kwd_or(const char* src) { + return keyword < or_kwd >(src); + } + const char* kwd_not(const char* src) { + return keyword < not_kwd >(src); + } + const char* kwd_eq(const char* src) { + return exactly(src); + } + const char* kwd_neq(const char* src) { + return exactly(src); + } + const char* kwd_gt(const char* src) { + return exactly(src); + } + const char* kwd_gte(const char* src) { + return exactly(src); + } + const char* kwd_lt(const char* src) { + return exactly(src); + } + const char* kwd_lte(const char* src) { + return exactly(src); + } + + // match specific IE syntax + const char* ie_progid(const char* src) { + return sequence < + word, + exactly<':'>, + alternatives< identifier_schema, identifier >, + zero_plus< sequence< + exactly<'.'>, + alternatives< identifier_schema, identifier > + > >, + zero_plus < sequence< + exactly<'('>, + optional_css_whitespace, + optional < sequence< + alternatives< variable, identifier_schema, identifier >, + optional_css_whitespace, + exactly<'='>, + optional_css_whitespace, + alternatives< variable, identifier_schema, identifier, quoted_string, number, hex, hexa >, + zero_plus< sequence< + optional_css_whitespace, + exactly<','>, + optional_css_whitespace, + sequence< + alternatives< variable, identifier_schema, identifier >, + optional_css_whitespace, + exactly<'='>, + optional_css_whitespace, + alternatives< variable, identifier_schema, identifier, quoted_string, number, hex, hexa > + > + > > + > >, + optional_css_whitespace, + exactly<')'> + > > + >(src); + } + const char* ie_expression(const char* src) { + return sequence < word, exactly<'('>, skip_over_scopes< exactly<'('>, exactly<')'> > >(src); + } + const char* ie_property(const char* src) { + return alternatives < ie_expression, ie_progid >(src); + } + + // const char* ie_args(const char* src) { + // return sequence< alternatives< ie_keyword_arg, value_schema, quoted_string, interpolant, number, identifier, delimited_by< '(', ')', true> >, + // zero_plus< sequence< optional_css_whitespace, exactly<','>, optional_css_whitespace, alternatives< ie_keyword_arg, value_schema, quoted_string, interpolant, number, identifier, delimited_by<'(', ')', true> > > > >(src); + // } + + const char* ie_keyword_arg_property(const char* src) { + return alternatives < + variable, + identifier_schema, + identifier + >(src); + } + 
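  // --- Illustrative aside (sketch, not part of the original libsass file) ---
  // Every matcher in this file follows the same prelexer convention: it takes
  // a `const char*` and returns a pointer just past the matched text, or 0 on
  // failure. That uniform signature is what lets matchers nest freely inside
  // the `sequence`, `alternatives`, `optional` and `zero_plus` templates used
  // above. The matcher below is a made-up example (the name
  // `example_px_dimension` does not exist in libsass) showing that style of
  // composition with the real combinators and the `number` matcher defined
  // earlier in this file:
  const char* example_px_dimension(const char* src) {
    // matches e.g. "12px" or "-3.5px": a CSS number immediately followed by "px"
    return sequence <
      number,
      exactly <'p'>,
      exactly <'x'>
    >(src);
  }
  // usage: a non-null result means `src` started with a px length, e.g.
  //   if (const char* end = example_px_dimension(src)) { /* match ends at `end` */ }
  // ---------------------------------------------------------------------------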
const char* ie_keyword_arg_value(const char* src) { + return alternatives < + variable, + identifier_schema, + identifier, + quoted_string, + number, + hex, + hexa, + sequence < + exactly < '(' >, + skip_over_scopes < + exactly < '(' >, + exactly < ')' > + > + > + >(src); + } + + const char* ie_keyword_arg(const char* src) { + return sequence < + ie_keyword_arg_property, + optional_css_whitespace, + exactly<'='>, + optional_css_whitespace, + ie_keyword_arg_value + >(src); + } + + // Path matching functions. + /* not used anymore - remove? + const char* folder(const char* src) { + return sequence< zero_plus< any_char_except<'/'> >, + exactly<'/'> >(src); + } + const char* folders(const char* src) { + return zero_plus< folder >(src); + }*/ + /* not used anymore - remove? + const char* chunk(const char* src) { + char inside_str = 0; + const char* p = src; + size_t depth = 0; + while (true) { + if (!*p) { + return 0; + } + else if (!inside_str && (*p == '"' || *p == '\'')) { + inside_str = *p; + } + else if (*p == inside_str && *(p-1) != '\\') { + inside_str = 0; + } + else if (*p == '(' && !inside_str) { + ++depth; + } + else if (*p == ')' && !inside_str) { + if (depth == 0) return p; + else --depth; + } + ++p; + } + // unreachable + return 0; + } + */ + + // follow the CSS spec more closely and see if this helps us scan URLs correctly + /* not used anymore - remove? + const char* NL(const char* src) { + return alternatives< exactly<'\n'>, + sequence< exactly<'\r'>, exactly<'\n'> >, + exactly<'\r'>, + exactly<'\f'> >(src); + }*/ + + const char* H(const char* src) { + return std::isxdigit(*src) ? src+1 : 0; + } + + const char* W(const char* src) { + return zero_plus< alternatives< + space, + exactly< '\t' >, + exactly< '\r' >, + exactly< '\n' >, + exactly< '\f' > + > >(src); + } + + const char* UUNICODE(const char* src) { + return sequence< exactly<'\\'>, + between, + optional< W > + >(src); + } + + const char* NONASCII(const char* src) { + return nonascii(src); + } + + const char* ESCAPE(const char* src) { + return alternatives< + UUNICODE, + sequence< + exactly<'\\'>, + alternatives< + NONASCII, + escapable_character + > + > + >(src); + } + + const char* list_terminator(const char* src) { + return alternatives < + exactly<';'>, + exactly<'}'>, + exactly<'{'>, + exactly<')'>, + exactly<']'>, + exactly<':'>, + end_of_file, + exactly, + default_flag, + global_flag + >(src); + }; + + const char* space_list_terminator(const char* src) { + return alternatives < + exactly<','>, + list_terminator + >(src); + }; + + + // const char* real_uri_prefix(const char* src) { + // return alternatives< + // exactly< url_kwd >, + // exactly< url_prefix_kwd > + // >(src); + // } + + const char* real_uri(const char* src) { + return sequence< + exactly< url_kwd >, + exactly< '(' >, + W, + real_uri_value, + exactly< ')' > + >(src); + } + + const char* real_uri_suffix(const char* src) { + return sequence< W, exactly< ')' > >(src); + } + + const char* real_uri_value(const char* src) { + return + sequence< + non_greedy< + alternatives< + class_char< real_uri_chars >, + uri_character, + NONASCII, + ESCAPE + >, + alternatives< + real_uri_suffix, + exactly< hash_lbrace > + > + > + > + (src); + } + + const char* static_string(const char* src) { + const char* pos = src; + const char * s = quoted_string(pos); + Token t(pos, s); + const unsigned int p = count_interval< interpolant >(t.begin, t.end); + return (p == 0) ? 
t.end : 0; + } + + const char* unicode_seq(const char* src) { + return sequence < + alternatives < + exactly< 'U' >, + exactly< 'u' > + >, + exactly< '+' >, + padded_token < + 6, xdigit, + exactly < '?' > + > + >(src); + } + + const char* static_component(const char* src) { + return alternatives< identifier, + static_string, + percentage, + hex, + hexa, + exactly<'|'>, + // exactly<'+'>, + sequence < number, unit_identifier >, + number, + sequence< exactly<'!'>, word > + >(src); + } + + const char* static_property(const char* src) { + return + sequence < + zero_plus< + sequence < + optional_css_comments, + alternatives < + exactly<','>, + exactly<'('>, + exactly<')'>, + kwd_optional, + quoted_string, + interpolant, + identifier, + percentage, + dimension, + variable, + alnum, + sequence < + exactly <'\\'>, + any_char + > + > + > + >, + lookahead < + sequence < + optional_css_comments, + alternatives < + exactly <';'>, + exactly <'}'>, + end_of_file + > + > + > + >(src); + } + + const char* static_value(const char* src) { + return sequence< sequence< + static_component, + zero_plus< identifier > + >, + zero_plus < sequence< + alternatives< + sequence< optional_spaces, alternatives< + exactly < '/' >, + exactly < ',' >, + exactly < ' ' > + >, optional_spaces >, + spaces + >, + static_component + > >, + zero_plus < spaces >, + alternatives< exactly<';'>, exactly<'}'> > + >(src); + } + + extern const char css_variable_url_negates[] = "()[]{}\"'#/"; + const char* css_variable_value(const char* src) { + return sequence< + alternatives< + sequence< + negate< exactly< url_fn_kwd > >, + one_plus< neg_class_char< css_variable_url_negates > > + >, + sequence< exactly<'#'>, negate< exactly<'{'> > >, + sequence< exactly<'/'>, negate< exactly<'*'> > >, + static_string, + real_uri, + block_comment + > + >(src); + } + + extern const char css_variable_url_top_level_negates[] = "()[]{}\"'#/;"; + const char* css_variable_top_level_value(const char* src) { + return sequence< + alternatives< + sequence< + negate< exactly< url_fn_kwd > >, + one_plus< neg_class_char< css_variable_url_top_level_negates > > + >, + sequence< exactly<'#'>, negate< exactly<'{'> > >, + sequence< exactly<'/'>, negate< exactly<'*'> > >, + static_string, + real_uri, + block_comment + > + >(src); + } + + const char* parenthese_scope(const char* src) { + return sequence < + exactly < '(' >, + skip_over_scopes < + exactly < '(' >, + exactly < ')' > + > + >(src); + } + + const char* re_selector_list(const char* src) { + return alternatives < + // partial bem selector + sequence < + ampersand, + one_plus < + exactly < '-' > + >, + word_boundary, + optional_spaces + >, + // main selector matching + one_plus < + alternatives < + // consume whitespace and comments + spaces, block_comment, line_comment, + // match `/deep/` selector (pass-trough) + // there is no functionality for it yet + schema_reference_combinator, + // match selector ops /[*&%,\[\]]/ + class_char < selector_lookahead_ops >, + // match selector combinators /[>+~]/ + class_char < selector_combinator_ops >, + // match pseudo selectors + sequence < + exactly <'('>, + optional_spaces, + optional , + optional_spaces, + exactly <')'> + >, + // match attribute compare operators + alternatives < + exact_match, class_match, dash_match, + prefix_match, suffix_match, substring_match + >, + // main selector match + sequence < + // allow namespace prefix + optional < namespace_schema >, + // modifiers prefixes + alternatives < + sequence < + exactly <'#'>, + // not for interpolation + 
negate < exactly <'{'> > + >, + // class match + exactly <'.'>, + // single or double colon + sequence < + optional < pseudo_prefix >, + // fix libsass issue 2376 + negate < uri_prefix > + > + >, + // accept hypens in token + one_plus < sequence < + // can start with hyphens + zero_plus < + sequence < + exactly <'-'>, + optional_spaces + > + >, + // now the main token + alternatives < + kwd_optional, + exactly <'*'>, + quoted_string, + interpolant, + identifier, + variable, + percentage, + binomial, + dimension, + alnum + > + > >, + // can also end with hyphens + zero_plus < exactly<'-'> > + > + > + > + >(src); + } + + const char* type_selector(const char* src) { + return sequence< optional, identifier>(src); + } + const char* re_type_selector(const char* src) { + return alternatives< type_selector, universal, dimension, percentage, number, identifier_alnums >(src); + } + const char* re_static_expression(const char* src) { + return sequence< number, optional_spaces, exactly<'/'>, optional_spaces, number >(src); + } + + // lexer special_fn: these functions cannot be overloaded + // (/((-[\w-]+-)?(calc|element)|expression|progid:[a-z\.]*)\(/i) + const char* re_special_fun(const char* src) { + + // match this first as we test prefix hyphens + if (const char* calc = calc_fn_call(src)) { + return calc; + } + + return sequence < + optional < + sequence < + exactly <'-'>, + one_plus < + alternatives < + alpha, + exactly <'+'>, + exactly <'-'> + > + > + > + >, + alternatives < + word < expression_kwd >, + sequence < + sequence < + exactly < progid_kwd >, + exactly <':'> + >, + zero_plus < + alternatives < + char_range <'a', 'z'>, + exactly <'.'> + > + > + > + > + >(src); + } + + } +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/prelexer.hpp b/mybulma/node_modules/node-sass/src/libsass/src/prelexer.hpp new file mode 100644 index 0000000..2d8f831 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/prelexer.hpp @@ -0,0 +1,484 @@ +#ifndef SASS_PRELEXER_H +#define SASS_PRELEXER_H + +#include +#include "lexer.hpp" + +namespace Sass { + // using namespace Lexer; + namespace Prelexer { + + //#################################### + // KEYWORD "REGEX" MATCHERS + //#################################### + + // Match Sass boolean keywords. + const char* kwd_true(const char* src); + const char* kwd_false(const char* src); + const char* kwd_only(const char* src); + const char* kwd_and(const char* src); + const char* kwd_or(const char* src); + const char* kwd_not(const char* src); + const char* kwd_eq(const char* src); + const char* kwd_neq(const char* src); + const char* kwd_gt(const char* src); + const char* kwd_gte(const char* src); + const char* kwd_lt(const char* src); + const char* kwd_lte(const char* src); + + // Match standard control chars + const char* kwd_at(const char* src); + const char* kwd_dot(const char* src); + const char* kwd_comma(const char* src); + const char* kwd_colon(const char* src); + const char* kwd_slash(const char* src); + const char* kwd_star(const char* src); + const char* kwd_plus(const char* src); + const char* kwd_minus(const char* src); + + //#################################### + // SPECIAL "REGEX" CONSTRUCTS + //#################################### + + // Match a sequence of characters delimited by the supplied chars. 
+ template + const char* delimited_by(const char* src) { + src = exactly(src); + if (!src) return 0; + const char* stop; + while (true) { + if (!*src) return 0; + stop = exactly(src); + if (stop && (!esc || *(src - 1) != '\\')) return stop; + src = stop ? stop : src + 1; + } + } + + // skip to delimiter (mx) inside given range + // this will savely skip over all quoted strings + // recursive skip stuff delimited by start/stop + // first start/opener must be consumed already! + template + const char* skip_over_scopes(const char* src, const char* end) { + + size_t level = 0; + bool in_squote = false; + bool in_dquote = false; + // bool in_braces = false; + + while (*src) { + + // check for abort condition + if (end && src >= end) break; + + // has escaped sequence? + if (*src == '\\') { + ++ src; // skip this (and next) + } + else if (*src == '"') { + in_dquote = ! in_dquote; + } + else if (*src == '\'') { + in_squote = ! in_squote; + } + else if (in_dquote || in_squote) { + // take everything literally + } + + // find another opener inside? + else if (const char* pos = start(src)) { + ++ level; // increase counter + src = pos - 1; // advance position + } + + // look for the closer (maybe final, maybe not) + else if (const char* final = stop(src)) { + // only close one level? + if (level > 0) -- level; + // return position at end of stop + // delimiter may be multiple chars + else return final; + // advance position + src = final - 1; + } + + // next + ++ src; + } + + return 0; + } + + // skip to a skip delimited by parentheses + // uses smart `skip_over_scopes` internally + const char* parenthese_scope(const char* src); + + // skip to delimiter (mx) inside given range + // this will savely skip over all quoted strings + // recursive skip stuff delimited by start/stop + // first start/opener must be consumed already! + template + const char* skip_over_scopes(const char* src) { + return skip_over_scopes(src, 0); + } + + // Match a sequence of characters delimited by the supplied chars. + template + const char* recursive_scopes(const char* src) { + // parse opener + src = start(src); + // abort if not found + if (!src) return 0; + // parse the rest until final closer + return skip_over_scopes(src); + } + + // Match a sequence of characters delimited by the supplied strings. + template + const char* delimited_by(const char* src) { + src = exactly(src); + if (!src) return 0; + const char* stop; + while (true) { + if (!*src) return 0; + stop = exactly(src); + if (stop && (!esc || *(src - 1) != '\\')) return stop; + src = stop ? stop : src + 1; + } + } + + // Tries to match a certain number of times (between the supplied interval). + template + const char* between(const char* src) { + for (size_t i = 0; i < lo; ++i) { + src = mx(src); + if (!src) return 0; + } + for (size_t i = lo; i <= hi; ++i) { + const char* new_src = mx(src); + if (!new_src) return src; + src = new_src; + } + return src; + } + + // equivalent of STRING_REGULAR_EXPRESSIONS + const char* re_string_double_open(const char* src); + const char* re_string_double_close(const char* src); + const char* re_string_single_open(const char* src); + const char* re_string_single_close(const char* src); + const char* re_string_uri_open(const char* src); + const char* re_string_uri_close(const char* src); + + // Match a line comment. + const char* line_comment(const char* src); + + // Match a block comment. + const char* block_comment(const char* src); + // Match either. 
+ const char* comment(const char* src); + // Match double- and single-quoted strings. + const char* double_quoted_string(const char* src); + const char* single_quoted_string(const char* src); + const char* quoted_string(const char* src); + // Match interpolants. + const char* interpolant(const char* src); + // Match number prefix ([\+\-]+) + const char* number_prefix(const char* src); + + // Match zero plus white-space or line_comments + const char* optional_css_whitespace(const char* src); + const char* css_whitespace(const char* src); + // Match optional_css_whitepace plus block_comments + const char* optional_css_comments(const char* src); + const char* css_comments(const char* src); + + // Match one backslash escaped char + const char* escape_seq(const char* src); + + // Match CSS css variables. + const char* custom_property_name(const char* src); + // Match a CSS identifier. + const char* identifier(const char* src); + const char* identifier_alpha(const char* src); + const char* identifier_alnum(const char* src); + const char* strict_identifier(const char* src); + const char* strict_identifier_alpha(const char* src); + const char* strict_identifier_alnum(const char* src); + // Match a CSS unit identifier. + const char* one_unit(const char* src); + const char* multiple_units(const char* src); + const char* unit_identifier(const char* src); + // const char* strict_identifier_alnums(const char* src); + // Match reference selector. + const char* re_reference_combinator(const char* src); + const char* static_reference_combinator(const char* src); + const char* schema_reference_combinator(const char* src); + + // Match interpolant schemas + const char* identifier_schema(const char* src); + const char* value_schema(const char* src); + const char* sass_value(const char* src); + // const char* filename(const char* src); + // const char* filename_schema(const char* src); + // const char* url_schema(const char* src); + // const char* url_value(const char* src); + const char* vendor_prefix(const char* src); + + const char* re_special_directive(const char* src); + const char* re_prefixed_directive(const char* src); + const char* re_almost_any_value_token(const char* src); + + // Match CSS '@' keywords. 
+ const char* at_keyword(const char* src); + const char* kwd_import(const char* src); + const char* kwd_at_root(const char* src); + const char* kwd_with_directive(const char* src); + const char* kwd_without_directive(const char* src); + const char* kwd_media(const char* src); + const char* kwd_supports_directive(const char* src); + // const char* keyframes(const char* src); + // const char* keyf(const char* src); + const char* kwd_mixin(const char* src); + const char* kwd_function(const char* src); + const char* kwd_return_directive(const char* src); + const char* kwd_include_directive(const char* src); + const char* kwd_content_directive(const char* src); + const char* kwd_charset_directive(const char* src); + const char* kwd_extend(const char* src); + + const char* unicode_seq(const char* src); + + const char* kwd_if_directive(const char* src); + const char* kwd_else_directive(const char* src); + const char* elseif_directive(const char* src); + + const char* kwd_for_directive(const char* src); + const char* kwd_from(const char* src); + const char* kwd_to(const char* src); + const char* kwd_through(const char* src); + + const char* kwd_each_directive(const char* src); + const char* kwd_in(const char* src); + + const char* kwd_while_directive(const char* src); + + const char* re_nothing(const char* src); + + const char* re_special_fun(const char* src); + + const char* kwd_warn(const char* src); + const char* kwd_err(const char* src); + const char* kwd_dbg(const char* src); + + const char* kwd_null(const char* src); + + const char* re_selector_list(const char* src); + const char* re_type_selector(const char* src); + const char* re_static_expression(const char* src); + + // identifier that can start with hyphens + const char* css_identifier(const char* src); + const char* css_ip_identifier(const char* src); + + // Match CSS type selectors + const char* namespace_schema(const char* src); + const char* namespace_prefix(const char* src); + const char* type_selector(const char* src); + const char* hyphens_and_identifier(const char* src); + const char* hyphens_and_name(const char* src); + const char* universal(const char* src); + // Match CSS id names. + const char* id_name(const char* src); + // Match CSS class names. + const char* class_name(const char* src); + // Attribute name in an attribute selector + const char* attribute_name(const char* src); + // Match placeholder selectors. + const char* placeholder(const char* src); + // Match CSS numeric constants. + const char* op(const char* src); + const char* sign(const char* src); + const char* unsigned_number(const char* src); + const char* number(const char* src); + const char* coefficient(const char* src); + const char* binomial(const char* src); + const char* percentage(const char* src); + const char* ampersand(const char* src); + const char* dimension(const char* src); + const char* hex(const char* src); + const char* hexa(const char* src); + const char* hex0(const char* src); + // const char* rgb_prefix(const char* src); + // Match CSS uri specifiers. + const char* uri_prefix(const char* src); + // Match CSS "!important" keyword. + const char* kwd_important(const char* src); + // Match CSS "!optional" keyword. + const char* kwd_optional(const char* src); + // Match Sass "!default" keyword. + const char* default_flag(const char* src); + const char* global_flag(const char* src); + // Match CSS pseudo-class/element prefixes + const char* pseudo_prefix(const char* src); + // Match CSS function call openers. 
+ const char* re_functional(const char* src); + const char* re_pseudo_selector(const char* src); + const char* functional_schema(const char* src); + const char* pseudo_not(const char* src); + // Match CSS 'odd' and 'even' keywords for functional pseudo-classes. + const char* even(const char* src); + const char* odd(const char* src); + // Match CSS attribute-matching operators. + const char* exact_match(const char* src); + const char* class_match(const char* src); + const char* dash_match(const char* src); + const char* prefix_match(const char* src); + const char* suffix_match(const char* src); + const char* substring_match(const char* src); + // Match CSS combinators. + // const char* adjacent_to(const char* src); + // const char* precedes(const char* src); + // const char* parent_of(const char* src); + // const char* ancestor_of(const char* src); + + // Match SCSS variable names. + const char* variable(const char* src); + const char* calc_fn_call(const char* src); + + // IE stuff + const char* ie_progid(const char* src); + const char* ie_expression(const char* src); + const char* ie_property(const char* src); + const char* ie_keyword_arg(const char* src); + const char* ie_keyword_arg_value(const char* src); + const char* ie_keyword_arg_property(const char* src); + + // characters that terminate parsing of a list + const char* list_terminator(const char* src); + const char* space_list_terminator(const char* src); + + // match url() + const char* H(const char* src); + const char* W(const char* src); + // `UNICODE` makes VS sad + const char* UUNICODE(const char* src); + const char* NONASCII(const char* src); + const char* ESCAPE(const char* src); + const char* real_uri(const char* src); + const char* real_uri_suffix(const char* src); + // const char* real_uri_prefix(const char* src); + const char* real_uri_value(const char* src); + + // Path matching functions. + // const char* folder(const char* src); + // const char* folders(const char* src); + + + const char* static_string(const char* src); + const char* static_component(const char* src); + const char* static_property(const char* src); + const char* static_value(const char* src); + + const char* css_variable_value(const char* src); + const char* css_variable_top_level_value(const char* src); + + // Utility functions for finding and counting characters in a string. + template + const char* find_first(const char* src) { + while (*src && *src != c) ++src; + return *src ? src : 0; + } + template + const char* find_first(const char* src) { + while (*src && !mx(src)) ++src; + return *src ? 
src : 0; + } + template + const char* find_first_in_interval(const char* beg, const char* end) { + bool esc = false; + while ((beg < end) && *beg) { + if (esc) esc = false; + else if (*beg == '\\') esc = true; + else if (mx(beg)) return beg; + ++beg; + } + return 0; + } + template + const char* find_first_in_interval(const char* beg, const char* end) { + bool esc = false; + while ((beg < end) && *beg) { + if (esc) esc = false; + else if (*beg == '\\') esc = true; + else if (const char* pos = skip(beg)) beg = pos; + else if (mx(beg)) return beg; + ++beg; + } + return 0; + } + template + unsigned int count_interval(const char* beg, const char* end) { + unsigned int counter = 0; + bool esc = false; + while (beg < end && *beg) { + const char* p; + if (esc) { + esc = false; + ++beg; + } else if (*beg == '\\') { + esc = true; + ++beg; + } else if ((p = mx(beg))) { + ++counter; + beg = p; + } + else { + ++beg; + } + } + return counter; + } + + template + const char* padded_token(const char* src) + { + size_t got = 0; + const char* pos = src; + while (got < size) { + if (!mx(pos)) break; + ++ pos; ++ got; + } + while (got < size) { + if (!pad(pos)) break; + ++ pos; ++ got; + } + return got ? pos : 0; + } + + template + const char* minmax_range(const char* src) + { + size_t got = 0; + const char* pos = src; + while (got < max) { + if (!mx(pos)) break; + ++ pos; ++ got; + } + if (got < min) return 0; + if (got > max) return 0; + return pos; + } + + template + const char* char_range(const char* src) + { + if (*src < min) return 0; + if (*src > max) return 0; + return src + 1; + } + + } +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/remove_placeholders.cpp b/mybulma/node_modules/node-sass/src/libsass/src/remove_placeholders.cpp new file mode 100644 index 0000000..15cddac --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/remove_placeholders.cpp @@ -0,0 +1,84 @@ +#include "sass.hpp" +#include "remove_placeholders.hpp" +#include "context.hpp" +#include "inspect.hpp" +#include + +namespace Sass { + + Remove_Placeholders::Remove_Placeholders() + { } + + void Remove_Placeholders::operator()(Block_Ptr b) { + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Ptr st = b->at(i); + st->perform(this); + } + } + + Selector_List_Ptr Remove_Placeholders::remove_placeholders(Selector_List_Ptr sl) + { + Selector_List_Ptr new_sl = SASS_MEMORY_NEW(Selector_List, sl->pstate()); + + for (size_t i = 0, L = sl->length(); i < L; ++i) { + if (!sl->at(i)->contains_placeholder()) { + new_sl->append(sl->at(i)); + } + } + + return new_sl; + + } + + + void Remove_Placeholders::operator()(Ruleset_Ptr r) { + // Create a new selector group without placeholders + Selector_List_Obj sl = Cast(r->selector()); + + if (sl) { + // Set the new placeholder selector list + r->selector(remove_placeholders(sl)); + // Remove placeholders in wrapped selectors + for (Complex_Selector_Obj cs : sl->elements()) { + while (cs) { + if (cs->head()) { + for (Simple_Selector_Obj& ss : cs->head()->elements()) { + if (Wrapped_Selector_Ptr ws = Cast(ss)) { + if (Selector_List_Ptr wsl = Cast(ws->selector())) { + Selector_List_Ptr clean = remove_placeholders(wsl); + // also clean superflous parent selectors + // probably not really the correct place + clean->remove_parent_selectors(); + ws->selector(clean); + } + } + } + } + cs = cs->tail(); + } + } + } + + // Iterate into child blocks + Block_Obj b = r->block(); + + for (size_t i = 0, L = b->length(); i < L; ++i) { + if (b->at(i)) { + Statement_Obj st = 
b->at(i); + st->perform(this); + } + } + } + + void Remove_Placeholders::operator()(Media_Block_Ptr m) { + operator()(m->block()); + } + void Remove_Placeholders::operator()(Supports_Block_Ptr m) { + operator()(m->block()); + } + + void Remove_Placeholders::operator()(Directive_Ptr a) { + if (a->block()) a->block()->perform(this); + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/remove_placeholders.hpp b/mybulma/node_modules/node-sass/src/libsass/src/remove_placeholders.hpp new file mode 100644 index 0000000..c13b631 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/remove_placeholders.hpp @@ -0,0 +1,35 @@ +#ifndef SASS_REMOVE_PLACEHOLDERS_H +#define SASS_REMOVE_PLACEHOLDERS_H + +#pragma once + +#include "ast.hpp" +#include "operation.hpp" + +namespace Sass { + + + class Remove_Placeholders : public Operation_CRTP { + + void fallback_impl(AST_Node_Ptr n) {} + + public: + Selector_List_Ptr remove_placeholders(Selector_List_Ptr); + + public: + Remove_Placeholders(); + ~Remove_Placeholders() { } + + void operator()(Block_Ptr); + void operator()(Ruleset_Ptr); + void operator()(Media_Block_Ptr); + void operator()(Supports_Block_Ptr); + void operator()(Directive_Ptr); + + template + void fallback(U x) { return fallback_impl(x); } + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/sass.cpp b/mybulma/node_modules/node-sass/src/libsass/src/sass.cpp new file mode 100644 index 0000000..72edd7c --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/sass.cpp @@ -0,0 +1,151 @@ +#include "sass.hpp" +#include +#include +#include +#include + +#include "sass.h" +#include "file.hpp" +#include "util.hpp" +#include "sass_context.hpp" +#include "sass_functions.hpp" + +namespace Sass { + + // helper to convert string list to vector + std::vector list2vec(struct string_list* cur) + { + std::vector list; + while (cur) { + list.push_back(cur->string); + cur = cur->next; + } + return list; + } + +} + +extern "C" { + using namespace Sass; + + // Allocate libsass heap memory + // Don't forget string termination! 
+ void* ADDCALL sass_alloc_memory(size_t size) + { + void* ptr = malloc(size); + if (ptr == NULL) { + std::cerr << "Out of memory.\n"; + exit(EXIT_FAILURE); + } + return ptr; + } + + char* ADDCALL sass_copy_c_string(const char* str) + { + size_t len = strlen(str) + 1; + char* cpy = (char*) sass_alloc_memory(len); + std::memcpy(cpy, str, len); + return cpy; + } + + // Deallocate libsass heap memory + void ADDCALL sass_free_memory(void* ptr) + { + if (ptr) free (ptr); + } + + // caller must free the returned memory + char* ADDCALL sass_string_quote (const char *str, const char quote_mark) + { + std::string quoted = quote(str, quote_mark); + return sass_copy_c_string(quoted.c_str()); + } + + // caller must free the returned memory + char* ADDCALL sass_string_unquote (const char *str) + { + std::string unquoted = unquote(str); + return sass_copy_c_string(unquoted.c_str()); + } + + char* ADDCALL sass_compiler_find_include (const char* file, struct Sass_Compiler* compiler) + { + // get the last import entry to get current base directory + Sass_Import_Entry import = sass_compiler_get_last_import(compiler); + const std::vector& incs = compiler->cpp_ctx->include_paths; + // create the vector with paths to lookup + std::vector paths(1 + incs.size()); + paths.push_back(File::dir_name(import->abs_path)); + paths.insert( paths.end(), incs.begin(), incs.end() ); + // now resolve the file path relative to lookup paths + std::string resolved(File::find_include(file, paths)); + return sass_copy_c_string(resolved.c_str()); + } + + char* ADDCALL sass_compiler_find_file (const char* file, struct Sass_Compiler* compiler) + { + // get the last import entry to get current base directory + Sass_Import_Entry import = sass_compiler_get_last_import(compiler); + const std::vector& incs = compiler->cpp_ctx->include_paths; + // create the vector with paths to lookup + std::vector paths(1 + incs.size()); + paths.push_back(File::dir_name(import->abs_path)); + paths.insert( paths.end(), incs.begin(), incs.end() ); + // now resolve the file path relative to lookup paths + std::string resolved(File::find_file(file, paths)); + return sass_copy_c_string(resolved.c_str()); + } + + // Make sure to free the returned value! + // Incs array has to be null terminated! + // this has the original resolve logic for sass include + char* ADDCALL sass_find_include (const char* file, struct Sass_Options* opt) + { + std::vector vec(list2vec(opt->include_paths)); + std::string resolved(File::find_include(file, vec)); + return sass_copy_c_string(resolved.c_str()); + } + + // Make sure to free the returned value! + // Incs array has to be null terminated! + char* ADDCALL sass_find_file (const char* file, struct Sass_Options* opt) + { + std::vector vec(list2vec(opt->include_paths)); + std::string resolved(File::find_file(file, vec)); + return sass_copy_c_string(resolved.c_str()); + } + + // Get compiled libsass version + const char* ADDCALL libsass_version(void) + { + return LIBSASS_VERSION; + } + + // Get compiled libsass version + const char* ADDCALL libsass_language_version(void) + { + return LIBSASS_LANGUAGE_VERSION; + } + +} + +namespace Sass { + + // helper to aid dreaded MSVC debug mode + char* sass_copy_string(std::string str) + { + // In MSVC the following can lead to segfault: + // sass_copy_c_string(stream.str().c_str()); + // Reason is that the string returned by str() is disposed before + // sass_copy_c_string is invoked. The string is actually a stack + // object, so indeed nobody is holding on to it. 
So it seems + // perfectly fair to release it right away. So the const char* + // by c_str will point to invalid memory. I'm not sure if this is + // the behavior for all compiler, but I'm pretty sure we would + // have gotten more issues reported if that would be the case. + // Wrapping it in a functions seems the cleanest approach as the + // function must hold on to the stack variable until it's done. + return sass_copy_c_string(str.c_str()); + } + +} \ No newline at end of file diff --git a/mybulma/node_modules/node-sass/src/libsass/src/sass.hpp b/mybulma/node_modules/node-sass/src/libsass/src/sass.hpp new file mode 100644 index 0000000..f055049 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/sass.hpp @@ -0,0 +1,139 @@ +// must be the first include in all compile units +#ifndef SASS_SASS_H +#define SASS_SASS_H + +// undefine extensions macro to tell sys includes +// that we do not want any macros to be exported +// mainly fixes an issue on SmartOS (SEC macro) +#undef __EXTENSIONS__ + +#ifdef _MSC_VER +#pragma warning(disable : 4005) +#endif + +// aplies to MSVC and MinGW +#ifdef _WIN32 +// we do not want the ERROR macro +# define NOGDI +// we do not want the min/max macro +# define NOMINMAX +// we do not want the IN/OUT macro +# define _NO_W32_PSEUDO_MODIFIERS +#endif + + +// should we be case insensitive +// when dealing with files or paths +#ifndef FS_CASE_SENSITIVE +# ifdef _WIN32 +# define FS_CASE_SENSITIVE 0 +# else +# define FS_CASE_SENSITIVE 1 +# endif +#endif + +// path separation char +#ifndef PATH_SEP +# ifdef _WIN32 +# define PATH_SEP ';' +# else +# define PATH_SEP ':' +# endif +#endif + + +// include C-API header +#include "sass/base.h" + +// For C++ helper +#include + +// output behaviours +namespace Sass { + + // create some C++ aliases for the most used options + const static Sass_Output_Style NESTED = SASS_STYLE_NESTED; + const static Sass_Output_Style COMPACT = SASS_STYLE_COMPACT; + const static Sass_Output_Style EXPANDED = SASS_STYLE_EXPANDED; + const static Sass_Output_Style COMPRESSED = SASS_STYLE_COMPRESSED; + // only used internal to trigger ruby inspect behavior + const static Sass_Output_Style INSPECT = SASS_STYLE_INSPECT; + const static Sass_Output_Style TO_SASS = SASS_STYLE_TO_SASS; + + // helper to aid dreaded MSVC debug mode + // see implementation for more details + char* sass_copy_string(std::string str); + +} + +// input behaviours +enum Sass_Input_Style { + SASS_CONTEXT_NULL, + SASS_CONTEXT_FILE, + SASS_CONTEXT_DATA, + SASS_CONTEXT_FOLDER +}; + +// simple linked list +struct string_list { + string_list* next; + char* string; +}; + +// sass config options structure +struct Sass_Inspect_Options { + + // Output style for the generated css code + // A value from above SASS_STYLE_* constants + enum Sass_Output_Style output_style; + + // Precision for fractional numbers + int precision; + + // Do not compress colors in selectors + bool in_selector; + + // initialization list (constructor with defaults) + Sass_Inspect_Options(Sass_Output_Style style = Sass::NESTED, + int precision = 5, bool in_selector = false) + : output_style(style), precision(precision), in_selector(in_selector) + { } + +}; + +// sass config options structure +struct Sass_Output_Options : Sass_Inspect_Options { + + // String to be used for indentation + const char* indent; + // String to be used to for line feeds + const char* linefeed; + + // Emit comments in the generated CSS indicating + // the corresponding source line. 
+ bool source_comments; + + // initialization list (constructor with defaults) + Sass_Output_Options(struct Sass_Inspect_Options opt, + const char* indent = " ", + const char* linefeed = "\n", + bool source_comments = false) + : Sass_Inspect_Options(opt), + indent(indent), linefeed(linefeed), + source_comments(source_comments) + { } + + // initialization list (constructor with defaults) + Sass_Output_Options(Sass_Output_Style style = Sass::NESTED, + int precision = 5, + const char* indent = " ", + const char* linefeed = "\n", + bool source_comments = false) + : Sass_Inspect_Options(style, precision), + indent(indent), linefeed(linefeed), + source_comments(source_comments) + { } + +}; + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/sass2scss.cpp b/mybulma/node_modules/node-sass/src/libsass/src/sass2scss.cpp new file mode 100644 index 0000000..56333b3 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/sass2scss.cpp @@ -0,0 +1,864 @@ +/** + * sass2scss + * Licensed under the MIT License + * Copyright (c) Marcel Greter + */ + +#ifdef _MSC_VER +#define _CRT_SECURE_NO_WARNINGS +#define _CRT_NONSTDC_NO_DEPRECATE +#endif + +// include library +#include +#include +#include +#include +#include +#include +#include + +///* +// +// src comments: comments in sass syntax (staring with //) +// css comments: multiline comments in css syntax (starting with /*) +// +// KEEP_COMMENT: keep src comments in the resulting css code +// STRIP_COMMENT: strip out all comments (either src or css) +// CONVERT_COMMENT: convert all src comments to css comments +// +//*/ + +// our own header +#include "sass2scss.h" + +// add namespace for c++ +namespace Sass +{ + + // return the actual prettify value from options + #define PRETTIFY(converter) (converter.options - (converter.options & 248)) + // query the options integer to check if the option is enables + #define KEEP_COMMENT(converter) ((converter.options & SASS2SCSS_KEEP_COMMENT) == SASS2SCSS_KEEP_COMMENT) + #define STRIP_COMMENT(converter) ((converter.options & SASS2SCSS_STRIP_COMMENT) == SASS2SCSS_STRIP_COMMENT) + #define CONVERT_COMMENT(converter) ((converter.options & SASS2SCSS_CONVERT_COMMENT) == SASS2SCSS_CONVERT_COMMENT) + + // some makros to access the indentation stack + #define INDENT(converter) (converter.indents.top()) + + // some makros to query comment parser status + #define IS_PARSING(converter) (converter.comment == "") + #define IS_COMMENT(converter) (converter.comment != "") + #define IS_SRC_COMMENT(converter) (converter.comment == "//" && ! CONVERT_COMMENT(converter)) + #define IS_CSS_COMMENT(converter) (converter.comment == "/*" || (converter.comment == "//" && CONVERT_COMMENT(converter))) + + // pretty printer helper function + static std::string closer (const converter& converter) + { + return PRETTIFY(converter) == 0 ? " }" : + PRETTIFY(converter) <= 1 ? " }" : + "\n" + INDENT(converter) + "}"; + } + + // pretty printer helper function + static std::string opener (const converter& converter) + { + return PRETTIFY(converter) == 0 ? " { " : + PRETTIFY(converter) <= 2 ? 
" {" : + "\n" + INDENT(converter) + "{"; + } + + // check if the given string is a pseudo selector + // needed to differentiate from sass property syntax + static bool isPseudoSelector (std::string& sel) + { + + size_t len = sel.length(); + if (len < 1) return false; + size_t pos = sel.find_first_not_of("abcdefghijklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPQRSTUVWXYZ", 1); + if (pos != std::string::npos) sel.erase(pos, std::string::npos); + size_t i = sel.length(); + while (i -- > 0) { sel.at(i) = tolower(sel.at(i)); } + + // CSS Level 1 - Recommendation + if (sel == ":link") return true; + if (sel == ":visited") return true; + if (sel == ":active") return true; + + // CSS Level 2 (Revision 1) - Recommendation + if (sel == ":lang") return true; + if (sel == ":first-child") return true; + if (sel == ":hover") return true; + if (sel == ":focus") return true; + // disabled - also valid properties + // if (sel == ":left") return true; + // if (sel == ":right") return true; + if (sel == ":first") return true; + + // Selectors Level 3 - Recommendation + if (sel == ":target") return true; + if (sel == ":root") return true; + if (sel == ":nth-child") return true; + if (sel == ":nth-last-of-child") return true; + if (sel == ":nth-of-type") return true; + if (sel == ":nth-last-of-type") return true; + if (sel == ":last-child") return true; + if (sel == ":first-of-type") return true; + if (sel == ":last-of-type") return true; + if (sel == ":only-child") return true; + if (sel == ":only-of-type") return true; + if (sel == ":empty") return true; + if (sel == ":not") return true; + + // CSS Basic User Interface Module Level 3 - Working Draft + if (sel == ":default") return true; + if (sel == ":valid") return true; + if (sel == ":invalid") return true; + if (sel == ":in-range") return true; + if (sel == ":out-of-range") return true; + if (sel == ":required") return true; + if (sel == ":optional") return true; + if (sel == ":read-only") return true; + if (sel == ":read-write") return true; + if (sel == ":dir") return true; + if (sel == ":enabled") return true; + if (sel == ":disabled") return true; + if (sel == ":checked") return true; + if (sel == ":indeterminate") return true; + if (sel == ":nth-last-child") return true; + + // Selectors Level 4 - Working Draft + if (sel == ":any-link") return true; + if (sel == ":local-link") return true; + if (sel == ":scope") return true; + if (sel == ":active-drop-target") return true; + if (sel == ":valid-drop-target") return true; + if (sel == ":invalid-drop-target") return true; + if (sel == ":current") return true; + if (sel == ":past") return true; + if (sel == ":future") return true; + if (sel == ":placeholder-shown") return true; + if (sel == ":user-error") return true; + if (sel == ":blank") return true; + if (sel == ":nth-match") return true; + if (sel == ":nth-last-match") return true; + if (sel == ":nth-column") return true; + if (sel == ":nth-last-column") return true; + if (sel == ":matches") return true; + + // Fullscreen API - Living Standard + if (sel == ":fullscreen") return true; + + // not a pseudo selector + return false; + + } + + // check if there is some char data + // will ignore everything in comments + static bool hasCharData (std::string& sass) + { + + size_t col_pos = 0; + + while (true) + { + + // try to find some meaningfull char + col_pos = sass.find_first_not_of(" \t\n\v\f\r", col_pos); + + // there was no meaningfull char found + if (col_pos == std::string::npos) return false; + + // found a multiline comment opener + if (sass.substr(col_pos, 
2) == "/*") + { + // find the multiline comment closer + col_pos = sass.find("*/", col_pos); + // maybe we did not find the closer here + if (col_pos == std::string::npos) return false; + // skip closer + col_pos += 2; + } + else + { + return true; + } + + } + + } + // EO hasCharData + + // find src comment opener + // correctly skips quoted strings + static size_t findCommentOpener (std::string& sass) + { + + size_t col_pos = 0; + bool apoed = false; + bool quoted = false; + bool comment = false; + size_t brackets = 0; + + while (col_pos != std::string::npos) + { + + // process all interesting chars + col_pos = sass.find_first_of("\"\'/\\*()", col_pos); + + // assertion for valid result + if (col_pos != std::string::npos) + { + char character = sass.at(col_pos); + + if (character == '(') + { + if (!quoted && !apoed) brackets ++; + } + else if (character == ')') + { + if (!quoted && !apoed) brackets --; + } + else if (character == '\"') + { + // invert quote bool + if (!apoed && !comment) quoted = !quoted; + } + else if (character == '\'') + { + // invert quote bool + if (!quoted && !comment) apoed = !apoed; + } + else if (col_pos > 0 && character == '/') + { + if (sass.at(col_pos - 1) == '*') + { + comment = false; + } + // next needs to be a slash too + else if (sass.at(col_pos - 1) == '/') + { + // only found if not in single or double quote, bracket or comment + if (!quoted && !apoed && !comment && brackets == 0) return col_pos - 1; + } + } + else if (character == '\\') + { + // skip next char if in quote + if (quoted || apoed) col_pos ++; + } + // this might be a comment opener + else if (col_pos > 0 && character == '*') + { + // opening a multiline comment + if (sass.at(col_pos - 1) == '/') + { + // we are now in a comment + if (!quoted && !apoed) comment = true; + } + } + + // skip char + col_pos ++; + + } + + } + // EO while + + return col_pos; + + } + // EO findCommentOpener + + // remove multiline comments from sass string + // correctly skips quoted strings + static std::string removeMultilineComment (std::string &sass) + { + + std::string clean = ""; + size_t col_pos = 0; + size_t open_pos = 0; + size_t close_pos = 0; + bool apoed = false; + bool quoted = false; + bool comment = false; + + // process sass til string end + while (col_pos != std::string::npos) + { + + // process all interesting chars + col_pos = sass.find_first_of("\"\'/\\*", col_pos); + + // assertion for valid result + if (col_pos != std::string::npos) + { + char character = sass.at(col_pos); + + // found quoted string delimiter + if (character == '\"') + { + if (!apoed && !comment) quoted = !quoted; + } + else if (character == '\'') + { + if (!quoted && !comment) apoed = !apoed; + } + // found possible comment closer + else if (character == '/') + { + // look back to see if it is actually a closer + if (comment && col_pos > 0 && sass.at(col_pos - 1) == '*') + { + close_pos = col_pos + 1; comment = false; + } + } + else if (character == '\\') + { + // skip escaped char + if (quoted || apoed) col_pos ++; + } + // this might be a comment opener + else if (character == '*') + { + // look back to see if it is actually an opener + if (!quoted && !apoed && col_pos > 0 && sass.at(col_pos - 1) == '/') + { + comment = true; open_pos = col_pos - 1; + clean += sass.substr(close_pos, open_pos - close_pos); + } + } + + // skip char + col_pos ++; + + } + + } + // EO while + + // add final parts (add half open comment text) + if (comment) clean += sass.substr(open_pos); + else clean += sass.substr(close_pos); + + // return 
string + return clean; + + } + // EO removeMultilineComment + + // right trim a given string + std::string rtrim(const std::string &sass) + { + std::string trimmed = sass; + size_t pos_ws = trimmed.find_last_not_of(" \t\n\v\f\r"); + if (pos_ws != std::string::npos) + { trimmed.erase(pos_ws + 1); } + else { trimmed.clear(); } + return trimmed; + } + // EO rtrim + + // flush whitespace and print additional text, but + // only print additional chars and buffer whitespace + std::string flush (std::string& sass, converter& converter) + { + + // return flushed + std::string scss = ""; + + // print whitespace buffer + scss += PRETTIFY(converter) > 0 ? + converter.whitespace : ""; + // reset whitespace buffer + converter.whitespace = ""; + + // remove possible newlines from string + size_t pos_right = sass.find_last_not_of("\n\r"); + if (pos_right == std::string::npos) return scss; + + // get the linefeeds from the string + std::string lfs = sass.substr(pos_right + 1); + sass = sass.substr(0, pos_right + 1); + + // find some source comment opener + size_t comment_pos = findCommentOpener(sass); + // check if there was a source comment + if (comment_pos != std::string::npos) + { + // convert comment (but only outside other coments) + if (CONVERT_COMMENT(converter) && !IS_COMMENT(converter)) + { + // convert to multiline comment + sass.at(comment_pos + 1) = '*'; + // add comment node to the whitespace + sass += " */"; + } + // not at line start + if (comment_pos > 0) + { + // also include whitespace before the actual comment opener + size_t ws_pos = sass.find_last_not_of(SASS2SCSS_FIND_WHITESPACE, comment_pos - 1); + comment_pos = ws_pos == std::string::npos ? 0 : ws_pos + 1; + } + if (!STRIP_COMMENT(converter)) + { + // add comment node to the whitespace + converter.whitespace += sass.substr(comment_pos); + } + else + { + // sass = removeMultilineComments(sass); + } + // update the actual sass code + sass = sass.substr(0, comment_pos); + } + + // add newline as getline discharged it + converter.whitespace += lfs + "\n"; + + // maybe remove any leading whitespace + if (PRETTIFY(converter) == 0) + { + // remove leading whitespace and update string + size_t pos_left = sass.find_first_not_of(SASS2SCSS_FIND_WHITESPACE); + if (pos_left != std::string::npos) sass = sass.substr(pos_left); + } + + // add flushed data + scss += sass; + + // return string + return scss; + + } + // EO flush + + // process a line of the sass text + std::string process (std::string& sass, converter& converter) + { + + // resulting string + std::string scss = ""; + + // strip multi line comments + if (STRIP_COMMENT(converter)) + { + sass = removeMultilineComment(sass); + } + + // right trim input + sass = rtrim(sass); + + // get postion of first meaningfull character in string + size_t pos_left = sass.find_first_not_of(SASS2SCSS_FIND_WHITESPACE); + + // special case for final run + if (converter.end_of_file) pos_left = 0; + + // maybe has only whitespace + if (pos_left == std::string::npos) + { + // just add complete whitespace + converter.whitespace += sass + "\n"; + } + // have meaningfull first char + else + { + + // extract and store indentation string + std::string indent = sass.substr(0, pos_left); + + // check if current line starts a comment + std::string open = sass.substr(pos_left, 2); + + // line has less or same indentation + // finalize previous open parser context + if (indent.length() <= INDENT(converter).length()) + { + + // close multilinie comment + if (IS_CSS_COMMENT(converter)) + { + // check if comments will 
be stripped anyway + if (!STRIP_COMMENT(converter)) scss += " */"; + } + // close src comment comment + else if (IS_SRC_COMMENT(converter)) + { + // add a newline to avoid closer on same line + // this would put the bracket in the comment node + // no longer needed since we parse them correctly + // if (KEEP_COMMENT(converter)) scss += "\n"; + } + // close css properties + else if (converter.property) + { + // add closer unless in concat mode + if (!converter.comma) + { + // if there was no colon we have a selector + // looks like there were no inner properties + if (converter.selector) scss += " {}"; + // add final semicolon + else if (!converter.semicolon) scss += ";"; + } + } + + // reset comment state + converter.comment = ""; + + } + + // make sure we close every "higher" block + while (indent.length() < INDENT(converter).length()) + { + // pop stacked context + converter.indents.pop(); + // print close bracket + if (IS_PARSING(converter)) + { scss += closer(converter); } + else { scss += " */"; } + // reset comment state + converter.comment = ""; + } + + // reset converter state + converter.selector = false; + + // looks like some undocumented behavior ... + // https://github.com/mgreter/sass2scss/issues/29 + if (sass.substr(pos_left, 1) == "\\") { + converter.selector = true; + sass[pos_left] = ' '; + } + + // check if we have sass property syntax + if (sass.substr(pos_left, 1) == ":" && sass.substr(pos_left, 2) != "::") + { + + // default to a selector + // change back if property found + converter.selector = true; + // get postion of first whitespace char + size_t pos_wspace = sass.find_first_of(SASS2SCSS_FIND_WHITESPACE, pos_left); + // assertion check for valid result + if (pos_wspace != std::string::npos) + { + // get the possible pseudo selector + std::string pseudo = sass.substr(pos_left, pos_wspace - pos_left); + // get position of the first real property value char + // pseudo selectors get this far, but have no actual value + size_t pos_value = sass.find_first_not_of(SASS2SCSS_FIND_WHITESPACE, pos_wspace); + // assertion check for valid result + if (pos_value != std::string::npos) + { + // only process if not (fallowed by a semicolon or is a pseudo selector) + if (!(sass.at(pos_value) == ':' || isPseudoSelector(pseudo))) + { + // create new string by interchanging the colon sign for property and value + sass = indent + sass.substr(pos_left + 1, pos_wspace - pos_left - 1) + ":" + sass.substr(pos_wspace); + // try to find a colon in the current line, but only ... + size_t pos_colon = sass.find_first_not_of(":", pos_left); + // assertion for valid result + if (pos_colon != std::string::npos) + { + // ... 
after the first word (skip begining colons) + pos_colon = sass.find_first_of(":", pos_colon); + // it is a selector if there was no colon found + converter.selector = pos_colon == std::string::npos; + } + } + } + } + + // check if we have a BEM property (one colon and no selector) + if (sass.substr(pos_left, 1) == ":" && converter.selector == true) { + size_t pos_wspace = sass.find_first_of(SASS2SCSS_FIND_WHITESPACE, pos_left); + sass = indent + sass.substr(pos_left + 1, pos_wspace) + ":"; + } + + } + + // terminate some statements immediately + else if ( + sass.substr(pos_left, 5) == "@warn" || + sass.substr(pos_left, 6) == "@debug" || + sass.substr(pos_left, 6) == "@error" || + sass.substr(pos_left, 8) == "@charset" || + sass.substr(pos_left, 10) == "@namespace" + ) { sass = indent + sass.substr(pos_left); } + // replace some specific sass shorthand directives (if not fallowed by a white space character) + else if (sass.substr(pos_left, 1) == "=") + { sass = indent + "@mixin " + sass.substr(pos_left + 1); } + else if (sass.substr(pos_left, 1) == "+") + { + // must be followed by a mixin call (no whitespace afterwards or at ending directly) + if (sass[pos_left+1] != 0 && sass[pos_left+1] != ' ' && sass[pos_left+1] != '\t') { + sass = indent + "@include " + sass.substr(pos_left + 1); + } + } + + // add quotes for import if needed + else if (sass.substr(pos_left, 7) == "@import") + { + // get positions for the actual import url + size_t pos_import = sass.find_first_of(SASS2SCSS_FIND_WHITESPACE, pos_left + 7); + size_t pos_quote = sass.find_first_not_of(SASS2SCSS_FIND_WHITESPACE, pos_import); + // leave proper urls untouched + if (sass.substr(pos_quote, 4) != "url(") + { + // check if the url appears to be already quoted + if (sass.substr(pos_quote, 1) != "\"" && sass.substr(pos_quote, 1) != "\'") + { + // get position of the last char on the line + size_t pos_end = sass.find_last_not_of(SASS2SCSS_FIND_WHITESPACE); + // assertion check for valid result + if (pos_end != std::string::npos) + { + // add quotes around the full line after the import statement + sass = sass.substr(0, pos_quote) + "\"" + sass.substr(pos_quote, pos_end - pos_quote + 1) + "\""; + } + } + } + + } + else if ( + sass.substr(pos_left, 7) != "@return" && + sass.substr(pos_left, 7) != "@extend" && + sass.substr(pos_left, 8) != "@include" && + sass.substr(pos_left, 8) != "@content" + ) { + + // probably a selector anyway + converter.selector = true; + // try to find first colon in the current line + size_t pos_colon = sass.find_first_of(":", pos_left); + // assertion that we have a colon + if (pos_colon != std::string::npos) + { + // it is not a selector if we have a space after a colon + if (sass[pos_colon+1] == ' ') converter.selector = false; + if (sass[pos_colon+1] == ' ') converter.selector = false; + } + + } + + // current line has more indentation + if (indent.length() >= INDENT(converter).length()) + { + // not in comment mode + if (IS_PARSING(converter)) + { + // has meaningfull chars + if (hasCharData(sass)) + { + // is probably a property + // also true for selectors + converter.property = true; + } + } + } + // current line has more indentation + if (indent.length() > INDENT(converter).length()) + { + // not in comment mode + if (IS_PARSING(converter)) + { + // had meaningfull chars + if (converter.property) + { + // print block opener + scss += opener(converter); + // push new stack context + converter.indents.push(""); + // store block indentation + INDENT(converter) = indent; + } + } + // is and will be a src 
comment + else if (!IS_CSS_COMMENT(converter)) + { + // scss does not allow multiline src comments + // therefore add forward slashes to all lines + sass.at(INDENT(converter).length()+0) = '/'; + // there is an edge case here if indentation + // is minimal (will overwrite the fist char) + sass.at(INDENT(converter).length()+1) = '/'; + // could code around that, but I dont' think + // this will ever be the cause for any trouble + } + } + + // line is opening a new comment + if (open == "/*" || open == "//") + { + // reset the property state + converter.property = false; + // close previous comment + if (IS_CSS_COMMENT(converter) && open != "") + { + if (!STRIP_COMMENT(converter) && !CONVERT_COMMENT(converter)) scss += " */"; + } + // force single line comments + // into a correct css comment + if (CONVERT_COMMENT(converter)) + { + if (IS_PARSING(converter)) + { sass.at(pos_left + 1) = '*'; } + } + // set comment flag + converter.comment = open; + + } + + // flush data only under certain conditions + if (!( + // strip css and src comments if option is set + (IS_COMMENT(converter) && STRIP_COMMENT(converter)) || + // strip src comment even if strip option is not set + // but only if the keep src comment option is not set + (IS_SRC_COMMENT(converter) && ! KEEP_COMMENT(converter)) + )) + { + // flush data and buffer whitespace + scss += flush(sass, converter); + } + + // get postion of last meaningfull char + size_t pos_right = sass.find_last_not_of(SASS2SCSS_FIND_WHITESPACE); + + // check for invalid result + if (pos_right != std::string::npos) + { + + // get the last meaningfull char + std::string close = sass.substr(pos_right, 1); + + // check if next line should be concatenated (list mode) + converter.comma = IS_PARSING(converter) && close == ","; + converter.semicolon = IS_PARSING(converter) && close == ";"; + + // check if we have more than + // one meaningfull char + if (pos_right > 0) + { + + // get the last two chars from string + std::string close = sass.substr(pos_right - 1, 2); + // update parser status for expicitly closed comment + if (close == "*/") converter.comment = ""; + + } + + } + // EO have meaningfull chars from end + + } + // EO have meaningfull chars from start + + // return scss + return scss; + + } + // EO process + + // read line with either CR, LF or CR LF format + // http://stackoverflow.com/a/6089413/1550314 + static std::istream& safeGetline(std::istream& is, std::string& t) + { + t.clear(); + + // The characters in the stream are read one-by-one using a std::streambuf. + // That is faster than reading them one-by-one using the std::istream. + // Code that uses streambuf this way must be guarded by a sentry object. + // The sentry object performs various tasks, + // such as thread synchronization and updating the stream state. 
+ + std::istream::sentry se(is, true); + std::streambuf* sb = is.rdbuf(); + + for(;;) { + int c = sb->sbumpc(); + switch (c) { + case '\n': + return is; + case '\r': + if(sb->sgetc() == '\n') + sb->sbumpc(); + return is; + case EOF: + // Also handle the case when the last line has no line ending + if(t.empty()) + is.setstate(std::ios::eofbit); + return is; + default: + t += (char)c; + } + } + } + + // the main converter function for c++ + char* sass2scss (const std::string& sass, const int options) + { + + // local variables + std::string line; + std::string scss = ""; + std::stringstream stream(sass); + + // create converter variable + converter converter; + // initialise all options + converter.comma = false; + converter.property = false; + converter.selector = false; + converter.semicolon = false; + converter.end_of_file = false; + converter.comment = ""; + converter.whitespace = ""; + converter.indents.push(""); + converter.options = options; + + // read line by line and process them + while(safeGetline(stream, line) && !stream.eof()) + { scss += process(line, converter); } + + // create mutable string + std::string closer = ""; + // set the end of file flag + converter.end_of_file = true; + // process to close all open blocks + scss += process(closer, converter); + + // allocate new memory on the heap + // caller has to free it after use + char * cstr = (char*) malloc (scss.length() + 1); + // create a copy of the string + strcpy (cstr, scss.c_str()); + // return pointer + return &cstr[0]; + + } + // EO sass2scss + +} +// EO namespace + +// implement for c +extern "C" +{ + + char* ADDCALL sass2scss (const char* sass, const int options) + { + return Sass::sass2scss(sass, options); + } + + // Get compiled sass2scss version + const char* ADDCALL sass2scss_version(void) { + return SASS2SCSS_VERSION; + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/sass_context.cpp b/mybulma/node_modules/node-sass/src/libsass/src/sass_context.cpp new file mode 100644 index 0000000..afadc66 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/sass_context.cpp @@ -0,0 +1,769 @@ +#include "sass.hpp" +#include +#include +#include +#include +#include + +#include "sass.h" +#include "ast.hpp" +#include "file.hpp" +#include "json.hpp" +#include "util.hpp" +#include "context.hpp" +#include "sass_context.hpp" +#include "sass_functions.hpp" +#include "ast_fwd_decl.hpp" +#include "error_handling.hpp" + +#define LFEED "\n" + +// C++ helper +namespace Sass { + // see sass_copy_c_string(std::string str) + static inline JsonNode* json_mkstream(const std::stringstream& stream) + { + // hold on to string on stack! 
+ std::string str(stream.str()); + return json_mkstring(str.c_str()); + } + + static int handle_error(Sass_Context* c_ctx) { + try { + throw; + } + catch (Exception::Base& e) { + std::stringstream msg_stream; + std::string cwd(Sass::File::get_cwd()); + std::string msg_prefix(e.errtype()); + bool got_newline = false; + msg_stream << msg_prefix << ": "; + const char* msg = e.what(); + while (msg && *msg) { + if (*msg == '\r') { + got_newline = true; + } + else if (*msg == '\n') { + got_newline = true; + } + else if (got_newline) { + msg_stream << std::string(msg_prefix.size() + 2, ' '); + got_newline = false; + } + msg_stream << *msg; + ++msg; + } + if (!got_newline) msg_stream << "\n"; + + if (e.traces.empty()) { + // we normally should have some traces, still here as a fallback + std::string rel_path(Sass::File::abs2rel(e.pstate.path, cwd, cwd)); + msg_stream << std::string(msg_prefix.size() + 2, ' '); + msg_stream << " on line " << e.pstate.line + 1 << " of " << rel_path << "\n"; + } + else { + std::string rel_path(Sass::File::abs2rel(e.pstate.path, cwd, cwd)); + msg_stream << traces_to_string(e.traces, " "); + } + + // now create the code trace (ToDo: maybe have util functions?) + if (e.pstate.line != std::string::npos && e.pstate.column != std::string::npos) { + size_t lines = e.pstate.line; + const char* line_beg = e.pstate.src; + // scan through src until target line + // move line_beg pointer to line start + while (line_beg && *line_beg && lines != 0) { + if (*line_beg == '\n') --lines; + utf8::unchecked::next(line_beg); + } + const char* line_end = line_beg; + // move line_end before next newline character + while (line_end && *line_end && *line_end != '\n') { + if (*line_end == '\n') break; + if (*line_end == '\r') break; + utf8::unchecked::next(line_end); + } + if (line_end && *line_end != 0) ++ line_end; + size_t line_len = line_end - line_beg; + size_t move_in = 0; size_t shorten = 0; + size_t left_chars = 42; size_t max_chars = 76; + // reported excerpt should not exceed `max_chars` chars + if (e.pstate.column > line_len) left_chars = e.pstate.column; + if (e.pstate.column > left_chars) move_in = e.pstate.column - left_chars; + if (line_len > max_chars + move_in) shorten = line_len - move_in - max_chars; + utf8::advance(line_beg, move_in, line_end); + utf8::retreat(line_end, shorten, line_beg); + std::string sanitized; std::string marker(e.pstate.column - move_in, '-'); + utf8::replace_invalid(line_beg, line_end, std::back_inserter(sanitized)); + msg_stream << ">> " << sanitized << "\n"; + msg_stream << " " << marker << "^\n"; + } + + JsonNode* json_err = json_mkobject(); + json_append_member(json_err, "status", json_mknumber(1)); + json_append_member(json_err, "file", json_mkstring(e.pstate.path)); + json_append_member(json_err, "line", json_mknumber((double)(e.pstate.line + 1))); + json_append_member(json_err, "column", json_mknumber((double)(e.pstate.column + 1))); + json_append_member(json_err, "message", json_mkstring(e.what())); + json_append_member(json_err, "formatted", json_mkstream(msg_stream)); + try { c_ctx->error_json = json_stringify(json_err, " "); } + catch (...) 
{} + c_ctx->error_message = sass_copy_string(msg_stream.str()); + c_ctx->error_text = sass_copy_c_string(e.what()); + c_ctx->error_status = 1; + c_ctx->error_file = sass_copy_c_string(e.pstate.path); + c_ctx->error_line = e.pstate.line + 1; + c_ctx->error_column = e.pstate.column + 1; + c_ctx->error_src = e.pstate.src; + c_ctx->output_string = 0; + c_ctx->source_map_string = 0; + json_delete(json_err); + } + catch (std::bad_alloc& ba) { + std::stringstream msg_stream; + JsonNode* json_err = json_mkobject(); + msg_stream << "Unable to allocate memory: " << ba.what() << std::endl; + json_append_member(json_err, "status", json_mknumber(2)); + json_append_member(json_err, "message", json_mkstring(ba.what())); + json_append_member(json_err, "formatted", json_mkstream(msg_stream)); + try { c_ctx->error_json = json_stringify(json_err, " "); } + catch (...) {} + c_ctx->error_message = sass_copy_string(msg_stream.str()); + c_ctx->error_text = sass_copy_c_string(ba.what()); + c_ctx->error_status = 2; + c_ctx->output_string = 0; + c_ctx->source_map_string = 0; + json_delete(json_err); + } + catch (std::exception& e) { + std::stringstream msg_stream; + JsonNode* json_err = json_mkobject(); + msg_stream << "Internal Error: " << e.what() << std::endl; + json_append_member(json_err, "status", json_mknumber(3)); + json_append_member(json_err, "message", json_mkstring(e.what())); + json_append_member(json_err, "formatted", json_mkstream(msg_stream)); + try { c_ctx->error_json = json_stringify(json_err, " "); } + catch (...) {} + c_ctx->error_message = sass_copy_string(msg_stream.str()); + c_ctx->error_text = sass_copy_c_string(e.what()); + c_ctx->error_status = 3; + c_ctx->output_string = 0; + c_ctx->source_map_string = 0; + json_delete(json_err); + } + catch (std::string& e) { + std::stringstream msg_stream; + JsonNode* json_err = json_mkobject(); + msg_stream << "Internal Error: " << e << std::endl; + json_append_member(json_err, "status", json_mknumber(4)); + json_append_member(json_err, "message", json_mkstring(e.c_str())); + json_append_member(json_err, "formatted", json_mkstream(msg_stream)); + try { c_ctx->error_json = json_stringify(json_err, " "); } + catch (...) {} + c_ctx->error_message = sass_copy_string(msg_stream.str()); + c_ctx->error_text = sass_copy_c_string(e.c_str()); + c_ctx->error_status = 4; + c_ctx->output_string = 0; + c_ctx->source_map_string = 0; + json_delete(json_err); + } + catch (const char* e) { + std::stringstream msg_stream; + JsonNode* json_err = json_mkobject(); + msg_stream << "Internal Error: " << e << std::endl; + json_append_member(json_err, "status", json_mknumber(4)); + json_append_member(json_err, "message", json_mkstring(e)); + json_append_member(json_err, "formatted", json_mkstream(msg_stream)); + try { c_ctx->error_json = json_stringify(json_err, " "); } + catch (...) {} + c_ctx->error_message = sass_copy_string(msg_stream.str()); + c_ctx->error_text = sass_copy_c_string(e); + c_ctx->error_status = 4; + c_ctx->output_string = 0; + c_ctx->source_map_string = 0; + json_delete(json_err); + } + catch (...) { + std::stringstream msg_stream; + JsonNode* json_err = json_mkobject(); + msg_stream << "Unknown error occurred" << std::endl; + json_append_member(json_err, "status", json_mknumber(5)); + json_append_member(json_err, "message", json_mkstring("unknown")); + try { c_ctx->error_json = json_stringify(json_err, " "); } + catch (...) 
{} + c_ctx->error_message = sass_copy_string(msg_stream.str()); + c_ctx->error_text = sass_copy_c_string("unknown"); + c_ctx->error_status = 5; + c_ctx->output_string = 0; + c_ctx->source_map_string = 0; + json_delete(json_err); + } + return c_ctx->error_status; + } + + // allow one error handler to throw another error + // this can happen with invalid utf8 and json lib + static int handle_errors(Sass_Context* c_ctx) { + try { return handle_error(c_ctx); } + catch (...) { return handle_error(c_ctx); } + } + + static Block_Obj sass_parse_block(Sass_Compiler* compiler) throw() + { + + // assert valid pointer + if (compiler == 0) return 0; + // The cpp context must be set by now + Context* cpp_ctx = compiler->cpp_ctx; + Sass_Context* c_ctx = compiler->c_ctx; + // We will take care to wire up the rest + compiler->cpp_ctx->c_compiler = compiler; + compiler->state = SASS_COMPILER_PARSED; + + try { + + // get input/output path from options + std::string input_path = safe_str(c_ctx->input_path); + std::string output_path = safe_str(c_ctx->output_path); + + // maybe skip some entries of included files + // we do not include stdin for data contexts + bool skip = c_ctx->type == SASS_CONTEXT_DATA; + + // dispatch parse call + Block_Obj root(cpp_ctx->parse()); + // abort on errors + if (!root) return 0; + + // skip all prefixed files? (ToDo: check srcmap) + // IMO source-maps should point to headers already + // therefore don't skip it for now. re-enable or + // remove completely once this is tested + size_t headers = cpp_ctx->head_imports; + + // copy the included files on to the context (dont forget to free later) + if (copy_strings(cpp_ctx->get_included_files(skip, headers), &c_ctx->included_files) == NULL) + throw(std::bad_alloc()); + + // return parsed block + return root; + + } + // pass errors to generic error handler + catch (...) { handle_errors(c_ctx); } + + // error + return 0; + + } + +} + +extern "C" { + using namespace Sass; + + static void sass_clear_options (struct Sass_Options* options); + static void sass_reset_options (struct Sass_Options* options); + static void copy_options(struct Sass_Options* to, struct Sass_Options* from) { + // do not overwrite ourself + if (to == from) return; + // free assigned memory + sass_clear_options(to); + // move memory + *to = *from; + // Reset pointers on source + sass_reset_options(from); + } + + #define IMPLEMENT_SASS_OPTION_ACCESSOR(type, option) \ + type ADDCALL sass_option_get_##option (struct Sass_Options* options) { return options->option; } \ + void ADDCALL sass_option_set_##option (struct Sass_Options* options, type option) { options->option = option; } + #define IMPLEMENT_SASS_OPTION_STRING_GETTER(type, option, def) \ + type ADDCALL sass_option_get_##option (struct Sass_Options* options) { return safe_str(options->option, def); } + #define IMPLEMENT_SASS_OPTION_STRING_SETTER(type, option, def) \ + void ADDCALL sass_option_set_##option (struct Sass_Options* options, type option) \ + { free(options->option); options->option = option || def ? sass_copy_c_string(option ? 
option : def) : 0; } + #define IMPLEMENT_SASS_OPTION_STRING_ACCESSOR(type, option, def) \ + IMPLEMENT_SASS_OPTION_STRING_GETTER(type, option, def) \ + IMPLEMENT_SASS_OPTION_STRING_SETTER(type, option, def) + + #define IMPLEMENT_SASS_CONTEXT_GETTER(type, option) \ + type ADDCALL sass_context_get_##option (struct Sass_Context* ctx) { return ctx->option; } + #define IMPLEMENT_SASS_CONTEXT_TAKER(type, option) \ + type sass_context_take_##option (struct Sass_Context* ctx) \ + { type foo = ctx->option; ctx->option = 0; return foo; } + + + // generic compilation function (not exported, use file/data compile instead) + static Sass_Compiler* sass_prepare_context (Sass_Context* c_ctx, Context* cpp_ctx) throw() + { + try { + // register our custom functions + if (c_ctx->c_functions) { + auto this_func_data = c_ctx->c_functions; + while (this_func_data && *this_func_data) { + cpp_ctx->add_c_function(*this_func_data); + ++this_func_data; + } + } + + // register our custom headers + if (c_ctx->c_headers) { + auto this_head_data = c_ctx->c_headers; + while (this_head_data && *this_head_data) { + cpp_ctx->add_c_header(*this_head_data); + ++this_head_data; + } + } + + // register our custom importers + if (c_ctx->c_importers) { + auto this_imp_data = c_ctx->c_importers; + while (this_imp_data && *this_imp_data) { + cpp_ctx->add_c_importer(*this_imp_data); + ++this_imp_data; + } + } + + // reset error status + c_ctx->error_json = 0; + c_ctx->error_text = 0; + c_ctx->error_message = 0; + c_ctx->error_status = 0; + // reset error position + c_ctx->error_src = 0; + c_ctx->error_file = 0; + c_ctx->error_line = std::string::npos; + c_ctx->error_column = std::string::npos; + + // allocate a new compiler instance + void* ctxmem = calloc(1, sizeof(struct Sass_Compiler)); + if (ctxmem == 0) { std::cerr << "Error allocating memory for context" << std::endl; return 0; } + Sass_Compiler* compiler = (struct Sass_Compiler*) ctxmem; + compiler->state = SASS_COMPILER_CREATED; + + // store in sass compiler + compiler->c_ctx = c_ctx; + compiler->cpp_ctx = cpp_ctx; + cpp_ctx->c_compiler = compiler; + + // use to parse block + return compiler; + + } + // pass errors to generic error handler + catch (...) { handle_errors(c_ctx); } + + // error + return 0; + + } + + // generic compilation function (not exported, use file/data compile instead) + static int sass_compile_context (Sass_Context* c_ctx, Context* cpp_ctx) + { + + // prepare sass compiler with context and options + Sass_Compiler* compiler = sass_prepare_context(c_ctx, cpp_ctx); + + try { + // call each compiler step + sass_compiler_parse(compiler); + sass_compiler_execute(compiler); + } + // pass errors to generic error handler + catch (...) 
{ handle_errors(c_ctx); } + + sass_delete_compiler(compiler); + + return c_ctx->error_status; + } + + inline void init_options (struct Sass_Options* options) + { + options->precision = 5; + options->indent = " "; + options->linefeed = LFEED; + } + + Sass_Options* ADDCALL sass_make_options (void) + { + struct Sass_Options* options = (struct Sass_Options*) calloc(1, sizeof(struct Sass_Options)); + if (options == 0) { std::cerr << "Error allocating memory for options" << std::endl; return 0; } + init_options(options); + return options; + } + + Sass_File_Context* ADDCALL sass_make_file_context(const char* input_path) + { + SharedObj::setTaint(true); // needed for static colors + struct Sass_File_Context* ctx = (struct Sass_File_Context*) calloc(1, sizeof(struct Sass_File_Context)); + if (ctx == 0) { std::cerr << "Error allocating memory for file context" << std::endl; return 0; } + ctx->type = SASS_CONTEXT_FILE; + init_options(ctx); + try { + if (input_path == 0) { throw(std::runtime_error("File context created without an input path")); } + if (*input_path == 0) { throw(std::runtime_error("File context created with empty input path")); } + sass_option_set_input_path(ctx, input_path); + } catch (...) { + handle_errors(ctx); + } + return ctx; + } + + Sass_Data_Context* ADDCALL sass_make_data_context(char* source_string) + { + struct Sass_Data_Context* ctx = (struct Sass_Data_Context*) calloc(1, sizeof(struct Sass_Data_Context)); + if (ctx == 0) { std::cerr << "Error allocating memory for data context" << std::endl; return 0; } + ctx->type = SASS_CONTEXT_DATA; + init_options(ctx); + try { + if (source_string == 0) { throw(std::runtime_error("Data context created without a source string")); } + if (*source_string == 0) { throw(std::runtime_error("Data context created with empty source string")); } + ctx->source_string = source_string; + } catch (...) { + handle_errors(ctx); + } + return ctx; + } + + struct Sass_Compiler* ADDCALL sass_make_data_compiler (struct Sass_Data_Context* data_ctx) + { + if (data_ctx == 0) return 0; + Context* cpp_ctx = new Data_Context(*data_ctx); + return sass_prepare_context(data_ctx, cpp_ctx); + } + + struct Sass_Compiler* ADDCALL sass_make_file_compiler (struct Sass_File_Context* file_ctx) + { + if (file_ctx == 0) return 0; + Context* cpp_ctx = new File_Context(*file_ctx); + return sass_prepare_context(file_ctx, cpp_ctx); + } + + int ADDCALL sass_compile_data_context(Sass_Data_Context* data_ctx) + { + if (data_ctx == 0) return 1; + if (data_ctx->error_status) + return data_ctx->error_status; + try { + if (data_ctx->source_string == 0) { throw(std::runtime_error("Data context has no source string")); } + // empty source string is a valid case, even if not really usefull (different than with file context) + // if (*data_ctx->source_string == 0) { throw(std::runtime_error("Data context has empty source string")); } + } + catch (...) { return handle_errors(data_ctx) | 1; } + Context* cpp_ctx = new Data_Context(*data_ctx); + return sass_compile_context(data_ctx, cpp_ctx); + } + + int ADDCALL sass_compile_file_context(Sass_File_Context* file_ctx) + { + if (file_ctx == 0) return 1; + if (file_ctx->error_status) + return file_ctx->error_status; + try { + if (file_ctx->input_path == 0) { throw(std::runtime_error("File context has no input path")); } + if (*file_ctx->input_path == 0) { throw(std::runtime_error("File context has empty input path")); } + } + catch (...) 
{ return handle_errors(file_ctx) | 1; } + Context* cpp_ctx = new File_Context(*file_ctx); + return sass_compile_context(file_ctx, cpp_ctx); + } + + int ADDCALL sass_compiler_parse(struct Sass_Compiler* compiler) + { + if (compiler == 0) return 1; + if (compiler->state == SASS_COMPILER_PARSED) return 0; + if (compiler->state != SASS_COMPILER_CREATED) return -1; + if (compiler->c_ctx == NULL) return 1; + if (compiler->cpp_ctx == NULL) return 1; + if (compiler->c_ctx->error_status) + return compiler->c_ctx->error_status; + // parse the context we have set up (file or data) + compiler->root = sass_parse_block(compiler); + // success + return 0; + } + + int ADDCALL sass_compiler_execute(struct Sass_Compiler* compiler) + { + if (compiler == 0) return 1; + if (compiler->state == SASS_COMPILER_EXECUTED) return 0; + if (compiler->state != SASS_COMPILER_PARSED) return -1; + if (compiler->c_ctx == NULL) return 1; + if (compiler->cpp_ctx == NULL) return 1; + if (compiler->root.isNull()) return 1; + if (compiler->c_ctx->error_status) + return compiler->c_ctx->error_status; + compiler->state = SASS_COMPILER_EXECUTED; + Context* cpp_ctx = compiler->cpp_ctx; + Block_Obj root = compiler->root; + // compile the parsed root block + try { compiler->c_ctx->output_string = cpp_ctx->render(root); } + // pass catched errors to generic error handler + catch (...) { return handle_errors(compiler->c_ctx) | 1; } + // generate source map json and store on context + compiler->c_ctx->source_map_string = cpp_ctx->render_srcmap(); + // success + return 0; + } + + // helper function, not exported, only accessible locally + static void sass_reset_options (struct Sass_Options* options) + { + // free pointer before + // or copy/move them + options->input_path = 0; + options->output_path = 0; + options->plugin_path = 0; + options->include_path = 0; + options->source_map_file = 0; + options->source_map_root = 0; + options->c_functions = 0; + options->c_importers = 0; + options->c_headers = 0; + options->plugin_paths = 0; + options->include_paths = 0; + } + + // helper function, not exported, only accessible locally + static void sass_clear_options (struct Sass_Options* options) + { + if (options == 0) return; + // Deallocate custom functions, headers and importes + sass_delete_function_list(options->c_functions); + sass_delete_importer_list(options->c_importers); + sass_delete_importer_list(options->c_headers); + // Deallocate inc paths + if (options->plugin_paths) { + struct string_list* cur; + struct string_list* next; + cur = options->plugin_paths; + while (cur) { + next = cur->next; + free(cur->string); + free(cur); + cur = next; + } + } + // Deallocate inc paths + if (options->include_paths) { + struct string_list* cur; + struct string_list* next; + cur = options->include_paths; + while (cur) { + next = cur->next; + free(cur->string); + free(cur); + cur = next; + } + } + // Free options strings + free(options->input_path); + free(options->output_path); + free(options->plugin_path); + free(options->include_path); + free(options->source_map_file); + free(options->source_map_root); + // Reset our pointers + options->input_path = 0; + options->output_path = 0; + options->plugin_path = 0; + options->include_path = 0; + options->source_map_file = 0; + options->source_map_root = 0; + options->c_functions = 0; + options->c_importers = 0; + options->c_headers = 0; + options->plugin_paths = 0; + options->include_paths = 0; + } + + // helper function, not exported, only accessible locally + // sass_free_context is also defined in old 
sass_interface + static void sass_clear_context (struct Sass_Context* ctx) + { + if (ctx == 0) return; + // release the allocated memory (mostly via sass_copy_c_string) + if (ctx->output_string) free(ctx->output_string); + if (ctx->source_map_string) free(ctx->source_map_string); + if (ctx->error_message) free(ctx->error_message); + if (ctx->error_text) free(ctx->error_text); + if (ctx->error_json) free(ctx->error_json); + if (ctx->error_file) free(ctx->error_file); + free_string_array(ctx->included_files); + // play safe and reset properties + ctx->output_string = 0; + ctx->source_map_string = 0; + ctx->error_message = 0; + ctx->error_text = 0; + ctx->error_json = 0; + ctx->error_file = 0; + ctx->included_files = 0; + // debug leaked memory + #ifdef DEBUG_SHARED_PTR + SharedObj::dumpMemLeaks(); + #endif + // now clear the options + sass_clear_options(ctx); + } + + void ADDCALL sass_delete_compiler (struct Sass_Compiler* compiler) + { + if (compiler == 0) { + return; + } + Context* cpp_ctx = compiler->cpp_ctx; + if (cpp_ctx) delete(cpp_ctx); + compiler->cpp_ctx = NULL; + compiler->c_ctx = NULL; + compiler->root = NULL; + free(compiler); + } + + void ADDCALL sass_delete_options (struct Sass_Options* options) + { + sass_clear_options(options); free(options); + } + + // Deallocate all associated memory with file context + void ADDCALL sass_delete_file_context (struct Sass_File_Context* ctx) + { + // clear the context and free it + sass_clear_context(ctx); free(ctx); + } + // Deallocate all associated memory with data context + void ADDCALL sass_delete_data_context (struct Sass_Data_Context* ctx) + { + // clean the source string if it was not passed + // we reset this member once we start parsing + if (ctx->source_string) free(ctx->source_string); + // clear the context and free it + sass_clear_context(ctx); free(ctx); + } + + // Getters for sass context from specific implementations + struct Sass_Context* ADDCALL sass_file_context_get_context(struct Sass_File_Context* ctx) { return ctx; } + struct Sass_Context* ADDCALL sass_data_context_get_context(struct Sass_Data_Context* ctx) { return ctx; } + + // Getters for context options from Sass_Context + struct Sass_Options* ADDCALL sass_context_get_options(struct Sass_Context* ctx) { return ctx; } + struct Sass_Options* ADDCALL sass_file_context_get_options(struct Sass_File_Context* ctx) { return ctx; } + struct Sass_Options* ADDCALL sass_data_context_get_options(struct Sass_Data_Context* ctx) { return ctx; } + void ADDCALL sass_file_context_set_options (struct Sass_File_Context* ctx, struct Sass_Options* opt) { copy_options(ctx, opt); } + void ADDCALL sass_data_context_set_options (struct Sass_Data_Context* ctx, struct Sass_Options* opt) { copy_options(ctx, opt); } + + // Getters for Sass_Compiler options (get conected sass context) + enum Sass_Compiler_State ADDCALL sass_compiler_get_state(struct Sass_Compiler* compiler) { return compiler->state; } + struct Sass_Context* ADDCALL sass_compiler_get_context(struct Sass_Compiler* compiler) { return compiler->c_ctx; } + struct Sass_Options* ADDCALL sass_compiler_get_options(struct Sass_Compiler* compiler) { return compiler->c_ctx; } + // Getters for Sass_Compiler options (query import stack) + size_t ADDCALL sass_compiler_get_import_stack_size(struct Sass_Compiler* compiler) { return compiler->cpp_ctx->import_stack.size(); } + Sass_Import_Entry ADDCALL sass_compiler_get_last_import(struct Sass_Compiler* compiler) { return compiler->cpp_ctx->import_stack.back(); } + Sass_Import_Entry ADDCALL 
sass_compiler_get_import_entry(struct Sass_Compiler* compiler, size_t idx) { return compiler->cpp_ctx->import_stack[idx]; } + // Getters for Sass_Compiler options (query function stack) + size_t ADDCALL sass_compiler_get_callee_stack_size(struct Sass_Compiler* compiler) { return compiler->cpp_ctx->callee_stack.size(); } + Sass_Callee_Entry ADDCALL sass_compiler_get_last_callee(struct Sass_Compiler* compiler) { return &compiler->cpp_ctx->callee_stack.back(); } + Sass_Callee_Entry ADDCALL sass_compiler_get_callee_entry(struct Sass_Compiler* compiler, size_t idx) { return &compiler->cpp_ctx->callee_stack[idx]; } + + // Calculate the size of the stored null terminated array + size_t ADDCALL sass_context_get_included_files_size (struct Sass_Context* ctx) + { size_t l = 0; auto i = ctx->included_files; while (i && *i) { ++i; ++l; } return l; } + + // Create getter and setters for options + IMPLEMENT_SASS_OPTION_ACCESSOR(int, precision); + IMPLEMENT_SASS_OPTION_ACCESSOR(enum Sass_Output_Style, output_style); + IMPLEMENT_SASS_OPTION_ACCESSOR(bool, source_comments); + IMPLEMENT_SASS_OPTION_ACCESSOR(bool, source_map_embed); + IMPLEMENT_SASS_OPTION_ACCESSOR(bool, source_map_contents); + IMPLEMENT_SASS_OPTION_ACCESSOR(bool, source_map_file_urls); + IMPLEMENT_SASS_OPTION_ACCESSOR(bool, omit_source_map_url); + IMPLEMENT_SASS_OPTION_ACCESSOR(bool, is_indented_syntax_src); + IMPLEMENT_SASS_OPTION_ACCESSOR(Sass_Function_List, c_functions); + IMPLEMENT_SASS_OPTION_ACCESSOR(Sass_Importer_List, c_importers); + IMPLEMENT_SASS_OPTION_ACCESSOR(Sass_Importer_List, c_headers); + IMPLEMENT_SASS_OPTION_ACCESSOR(const char*, indent); + IMPLEMENT_SASS_OPTION_ACCESSOR(const char*, linefeed); + IMPLEMENT_SASS_OPTION_STRING_SETTER(const char*, plugin_path, 0); + IMPLEMENT_SASS_OPTION_STRING_SETTER(const char*, include_path, 0); + IMPLEMENT_SASS_OPTION_STRING_ACCESSOR(const char*, input_path, 0); + IMPLEMENT_SASS_OPTION_STRING_ACCESSOR(const char*, output_path, 0); + IMPLEMENT_SASS_OPTION_STRING_ACCESSOR(const char*, source_map_file, 0); + IMPLEMENT_SASS_OPTION_STRING_ACCESSOR(const char*, source_map_root, 0); + + // Create getter and setters for context + IMPLEMENT_SASS_CONTEXT_GETTER(int, error_status); + IMPLEMENT_SASS_CONTEXT_GETTER(const char*, error_json); + IMPLEMENT_SASS_CONTEXT_GETTER(const char*, error_message); + IMPLEMENT_SASS_CONTEXT_GETTER(const char*, error_text); + IMPLEMENT_SASS_CONTEXT_GETTER(const char*, error_file); + IMPLEMENT_SASS_CONTEXT_GETTER(size_t, error_line); + IMPLEMENT_SASS_CONTEXT_GETTER(size_t, error_column); + IMPLEMENT_SASS_CONTEXT_GETTER(const char*, error_src); + IMPLEMENT_SASS_CONTEXT_GETTER(const char*, output_string); + IMPLEMENT_SASS_CONTEXT_GETTER(const char*, source_map_string); + IMPLEMENT_SASS_CONTEXT_GETTER(char**, included_files); + + // Take ownership of memory (value on context is set to 0) + IMPLEMENT_SASS_CONTEXT_TAKER(char*, error_json); + IMPLEMENT_SASS_CONTEXT_TAKER(char*, error_message); + IMPLEMENT_SASS_CONTEXT_TAKER(char*, error_text); + IMPLEMENT_SASS_CONTEXT_TAKER(char*, error_file); + IMPLEMENT_SASS_CONTEXT_TAKER(char*, output_string); + IMPLEMENT_SASS_CONTEXT_TAKER(char*, source_map_string); + IMPLEMENT_SASS_CONTEXT_TAKER(char**, included_files); + + // Push function for include paths (no manipulation support for now) + void ADDCALL sass_option_push_include_path(struct Sass_Options* options, const char* path) + { + + struct string_list* include_path = (struct string_list*) calloc(1, sizeof(struct string_list)); + if (include_path == 0) return; + 
include_path->string = path ? sass_copy_c_string(path) : 0; + struct string_list* last = options->include_paths; + if (!options->include_paths) { + options->include_paths = include_path; + } else { + while (last->next) + last = last->next; + last->next = include_path; + } + + } + + // Push function for include paths (no manipulation support for now) + size_t ADDCALL sass_option_get_include_path_size(struct Sass_Options* options) + { + size_t len = 0; + struct string_list* cur = options->include_paths; + while (cur) { len ++; cur = cur->next; } + return len; + } + + // Push function for include paths (no manipulation support for now) + const char* ADDCALL sass_option_get_include_path(struct Sass_Options* options, size_t i) + { + struct string_list* cur = options->include_paths; + while (i) { i--; cur = cur->next; } + return cur->string; + } + + // Push function for plugin paths (no manipulation support for now) + void ADDCALL sass_option_push_plugin_path(struct Sass_Options* options, const char* path) + { + + struct string_list* plugin_path = (struct string_list*) calloc(1, sizeof(struct string_list)); + if (plugin_path == 0) return; + plugin_path->string = path ? sass_copy_c_string(path) : 0; + struct string_list* last = options->plugin_paths; + if (!options->plugin_paths) { + options->plugin_paths = plugin_path; + } else { + while (last->next) + last = last->next; + last->next = plugin_path; + } + + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/sass_context.hpp b/mybulma/node_modules/node-sass/src/libsass/src/sass_context.hpp new file mode 100644 index 0000000..8ae1fb1 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/sass_context.hpp @@ -0,0 +1,129 @@ +#ifndef SASS_SASS_CONTEXT_H +#define SASS_SASS_CONTEXT_H + +#include "sass/base.h" +#include "sass/context.h" +#include "ast_fwd_decl.hpp" + +// sass config options structure +struct Sass_Options : Sass_Output_Options { + + // embed sourceMappingUrl as data uri + bool source_map_embed; + + // embed include contents in maps + bool source_map_contents; + + // create file urls for sources + bool source_map_file_urls; + + // Disable sourceMappingUrl in css output + bool omit_source_map_url; + + // Treat source_string as sass (as opposed to scss) + bool is_indented_syntax_src; + + // The input path is used for source map + // generation. It can be used to define + // something with string compilation or to + // overload the input file path. It is + // set to "stdin" for data contexts and + // to the input file on file contexts. + char* input_path; + + // The output path is used for source map + // generation. LibSass will not write to + // this file, it is just used to create + // information in source-maps etc. + char* output_path; + + // Colon-separated list of paths + // Semicolon-separated on Windows + // Maybe use array interface instead? 
+ char* include_path; + char* plugin_path; + + // Include paths (linked string list) + struct string_list* include_paths; + // Plugin paths (linked string list) + struct string_list* plugin_paths; + + // Path to source map file + // Enables source map generation + // Used to create sourceMappingUrl + char* source_map_file; + + // Directly inserted in source maps + char* source_map_root; + + // Custom functions that can be called from sccs code + Sass_Function_List c_functions; + + // List of custom importers + Sass_Importer_List c_importers; + + // List of custom headers + Sass_Importer_List c_headers; + +}; + + +// base for all contexts +struct Sass_Context : Sass_Options +{ + + // store context type info + enum Sass_Input_Style type; + + // generated output data + char* output_string; + + // generated source map json + char* source_map_string; + + // error status + int error_status; + char* error_json; + char* error_text; + char* error_message; + // error position + char* error_file; + size_t error_line; + size_t error_column; + const char* error_src; + + // report imported files + char** included_files; + +}; + +// struct for file compilation +struct Sass_File_Context : Sass_Context { + + // no additional fields required + // input_path is already on options + +}; + +// struct for data compilation +struct Sass_Data_Context : Sass_Context { + + // provided source string + char* source_string; + char* srcmap_string; + +}; + +// link c and cpp context +struct Sass_Compiler { + // progress status + Sass_Compiler_State state; + // original c context + Sass_Context* c_ctx; + // Sass::Context + Sass::Context* cpp_ctx; + // Sass::Block + Sass::Block_Obj root; +}; + +#endif \ No newline at end of file diff --git a/mybulma/node_modules/node-sass/src/libsass/src/sass_functions.cpp b/mybulma/node_modules/node-sass/src/libsass/src/sass_functions.cpp new file mode 100644 index 0000000..bfbf258 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/sass_functions.cpp @@ -0,0 +1,207 @@ +#include "sass.hpp" +#include +#include "util.hpp" +#include "context.hpp" +#include "values.hpp" +#include "sass/functions.h" +#include "sass_functions.hpp" + +extern "C" { + using namespace Sass; + + Sass_Function_List ADDCALL sass_make_function_list(size_t length) + { + return (Sass_Function_List) calloc(length + 1, sizeof(Sass_Function_Entry)); + } + + Sass_Function_Entry ADDCALL sass_make_function(const char* signature, Sass_Function_Fn function, void* cookie) + { + Sass_Function_Entry cb = (Sass_Function_Entry) calloc(1, sizeof(Sass_Function)); + if (cb == 0) return 0; + cb->signature = sass_copy_c_string(signature); + cb->function = function; + cb->cookie = cookie; + return cb; + } + + void ADDCALL sass_delete_function(Sass_Function_Entry entry) + { + free(entry->signature); + free(entry); + } + + // Deallocator for the allocated memory + void ADDCALL sass_delete_function_list(Sass_Function_List list) + { + Sass_Function_List it = list; + if (list == 0) return; + while(*list) { + sass_delete_function(*list); + ++list; + } + free(it); + } + + // Setters and getters for callbacks on function lists + Sass_Function_Entry ADDCALL sass_function_get_list_entry(Sass_Function_List list, size_t pos) { return list[pos]; } + void sass_function_set_list_entry(Sass_Function_List list, size_t pos, Sass_Function_Entry cb) { list[pos] = cb; } + + const char* ADDCALL sass_function_get_signature(Sass_Function_Entry cb) { return cb->signature; } + Sass_Function_Fn ADDCALL sass_function_get_function(Sass_Function_Entry 
cb) { return cb->function; } + void* ADDCALL sass_function_get_cookie(Sass_Function_Entry cb) { return cb->cookie; } + + Sass_Importer_Entry ADDCALL sass_make_importer(Sass_Importer_Fn importer, double priority, void* cookie) + { + Sass_Importer_Entry cb = (Sass_Importer_Entry) calloc(1, sizeof(Sass_Importer)); + if (cb == 0) return 0; + cb->importer = importer; + cb->priority = priority; + cb->cookie = cookie; + return cb; + } + + Sass_Importer_Fn ADDCALL sass_importer_get_function(Sass_Importer_Entry cb) { return cb->importer; } + double ADDCALL sass_importer_get_priority (Sass_Importer_Entry cb) { return cb->priority; } + void* ADDCALL sass_importer_get_cookie(Sass_Importer_Entry cb) { return cb->cookie; } + + // Just in case we have some stray import structs + void ADDCALL sass_delete_importer (Sass_Importer_Entry cb) + { + free(cb); + } + + // Creator for sass custom importer function list + Sass_Importer_List ADDCALL sass_make_importer_list(size_t length) + { + return (Sass_Importer_List) calloc(length + 1, sizeof(Sass_Importer_Entry)); + } + + // Deallocator for the allocated memory + void ADDCALL sass_delete_importer_list(Sass_Importer_List list) + { + Sass_Importer_List it = list; + if (list == 0) return; + while(*list) { + sass_delete_importer(*list); + ++list; + } + free(it); + } + + Sass_Importer_Entry ADDCALL sass_importer_get_list_entry(Sass_Importer_List list, size_t idx) { return list[idx]; } + void ADDCALL sass_importer_set_list_entry(Sass_Importer_List list, size_t idx, Sass_Importer_Entry cb) { list[idx] = cb; } + + // Creator for sass custom importer return argument list + Sass_Import_List ADDCALL sass_make_import_list(size_t length) + { + return (Sass_Import**) calloc(length + 1, sizeof(Sass_Import*)); + } + + // Creator for a single import entry returned by the custom importer inside the list + // We take ownership of the memory for source and srcmap (freed when context is destroyd) + Sass_Import_Entry ADDCALL sass_make_import(const char* imp_path, const char* abs_path, char* source, char* srcmap) + { + Sass_Import* v = (Sass_Import*) calloc(1, sizeof(Sass_Import)); + if (v == 0) return 0; + v->imp_path = imp_path ? sass_copy_c_string(imp_path) : 0; + v->abs_path = abs_path ? sass_copy_c_string(abs_path) : 0; + v->source = source; + v->srcmap = srcmap; + v->error = 0; + v->line = -1; + v->column = -1; + return v; + } + + // Older style, but somehow still valid - keep around or deprecate? + Sass_Import_Entry ADDCALL sass_make_import_entry(const char* path, char* source, char* srcmap) + { + return sass_make_import(path, path, source, srcmap); + } + + // Upgrade a normal import entry to throw an error (original path can be re-used by error reporting) + Sass_Import_Entry ADDCALL sass_import_set_error(Sass_Import_Entry import, const char* error, size_t line, size_t col) + { + if (import == 0) return 0; + if (import->error) free(import->error); + import->error = error ? sass_copy_c_string(error) : 0; + import->line = line ? line : -1; + import->column = col ? 
col : -1; + return import; + } + + // Setters and getters for entries on the import list + void ADDCALL sass_import_set_list_entry(Sass_Import_List list, size_t idx, Sass_Import_Entry entry) { list[idx] = entry; } + Sass_Import_Entry ADDCALL sass_import_get_list_entry(Sass_Import_List list, size_t idx) { return list[idx]; } + + // Deallocator for the allocated memory + void ADDCALL sass_delete_import_list(Sass_Import_List list) + { + Sass_Import_List it = list; + if (list == 0) return; + while(*list) { + sass_delete_import(*list); + ++list; + } + free(it); + } + + // Just in case we have some stray import structs + void ADDCALL sass_delete_import(Sass_Import_Entry import) + { + free(import->imp_path); + free(import->abs_path); + free(import->source); + free(import->srcmap); + free(import->error); + free(import); + } + + // Getter for callee entry + const char* ADDCALL sass_callee_get_name(Sass_Callee_Entry entry) { return entry->name; } + const char* ADDCALL sass_callee_get_path(Sass_Callee_Entry entry) { return entry->path; } + size_t ADDCALL sass_callee_get_line(Sass_Callee_Entry entry) { return entry->line; } + size_t ADDCALL sass_callee_get_column(Sass_Callee_Entry entry) { return entry->column; } + enum Sass_Callee_Type ADDCALL sass_callee_get_type(Sass_Callee_Entry entry) { return entry->type; } + Sass_Env_Frame ADDCALL sass_callee_get_env (Sass_Callee_Entry entry) { return &entry->env; } + + // Getters and Setters for environments (lexical, local and global) + union Sass_Value* ADDCALL sass_env_get_lexical (Sass_Env_Frame env, const char* name) { + Expression_Ptr ex = Cast((*env->frame)[name]); + return ex != NULL ? ast_node_to_sass_value(ex) : NULL; + } + void ADDCALL sass_env_set_lexical (Sass_Env_Frame env, const char* name, union Sass_Value* val) { + (*env->frame)[name] = sass_value_to_ast_node(val); + } + union Sass_Value* ADDCALL sass_env_get_local (Sass_Env_Frame env, const char* name) { + Expression_Ptr ex = Cast(env->frame->get_local(name)); + return ex != NULL ? ast_node_to_sass_value(ex) : NULL; + } + void ADDCALL sass_env_set_local (Sass_Env_Frame env, const char* name, union Sass_Value* val) { + env->frame->set_local(name, sass_value_to_ast_node(val)); + } + union Sass_Value* ADDCALL sass_env_get_global (Sass_Env_Frame env, const char* name) { + Expression_Ptr ex = Cast(env->frame->get_global(name)); + return ex != NULL ? 
ast_node_to_sass_value(ex) : NULL; + } + void ADDCALL sass_env_set_global (Sass_Env_Frame env, const char* name, union Sass_Value* val) { + env->frame->set_global(name, sass_value_to_ast_node(val)); + } + + // Getter for import entry + const char* ADDCALL sass_import_get_imp_path(Sass_Import_Entry entry) { return entry->imp_path; } + const char* ADDCALL sass_import_get_abs_path(Sass_Import_Entry entry) { return entry->abs_path; } + const char* ADDCALL sass_import_get_source(Sass_Import_Entry entry) { return entry->source; } + const char* ADDCALL sass_import_get_srcmap(Sass_Import_Entry entry) { return entry->srcmap; } + + // Getter for import error entry + size_t ADDCALL sass_import_get_error_line(Sass_Import_Entry entry) { return entry->line; } + size_t ADDCALL sass_import_get_error_column(Sass_Import_Entry entry) { return entry->column; } + const char* ADDCALL sass_import_get_error_message(Sass_Import_Entry entry) { return entry->error; } + + // Explicit functions to take ownership of the memory + // Resets our own property since we do not know if it is still alive + char* ADDCALL sass_import_take_source(Sass_Import_Entry entry) { char* ptr = entry->source; entry->source = 0; return ptr; } + char* ADDCALL sass_import_take_srcmap(Sass_Import_Entry entry) { char* ptr = entry->srcmap; entry->srcmap = 0; return ptr; } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/sass_functions.hpp b/mybulma/node_modules/node-sass/src/libsass/src/sass_functions.hpp new file mode 100644 index 0000000..3b646d6 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/sass_functions.hpp @@ -0,0 +1,50 @@ +#ifndef SASS_SASS_FUNCTIONS_H +#define SASS_SASS_FUNCTIONS_H + +#include "sass.h" +#include "environment.hpp" +#include "functions.hpp" + +// Struct to hold custom function callback +struct Sass_Function { + char* signature; + Sass_Function_Fn function; + void* cookie; +}; + +// External import entry +struct Sass_Import { + char* imp_path; // path as found in the import statement + char *abs_path; // path after importer has resolved it + char* source; + char* srcmap; + // error handling + char* error; + size_t line; + size_t column; +}; + +// External environments +struct Sass_Env { + // links to parent frames + Sass::Env* frame; +}; + +// External call entry +struct Sass_Callee { + const char* name; + const char* path; + size_t line; + size_t column; + enum Sass_Callee_Type type; + struct Sass_Env env; +}; + +// Struct to hold importer callback +struct Sass_Importer { + Sass_Importer_Fn importer; + double priority; + void* cookie; +}; + +#endif \ No newline at end of file diff --git a/mybulma/node_modules/node-sass/src/libsass/src/sass_util.cpp b/mybulma/node_modules/node-sass/src/libsass/src/sass_util.cpp new file mode 100644 index 0000000..3aef2bc --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/sass_util.cpp @@ -0,0 +1,149 @@ +#include "sass.hpp" +#include "node.hpp" + +namespace Sass { + + + /* + # This is the equivalent of ruby's Sass::Util.paths. + # + # Return an array of all possible paths through the given arrays. + # + # @param arrs [NodeCollection>] + # @return [NodeCollection>] + # + # @example + # paths([[1, 2], [3, 4], [5]]) #=> + # # [[1, 3, 5], + # # [2, 3, 5], + # # [1, 4, 5], + # # [2, 4, 5]] + + The following is the modified version of the ruby code that was more portable to C++. You + should be able to drop it into ruby 3.2.19 and get the same results from ruby sass. 
+ + def paths(arrs) + // I changed the inject and maps to an iterative approach to make it easier to implement in C++ + loopStart = [[]] + + for arr in arrs do + permutations = [] + for e in arr do + for path in loopStart do + permutations.push(path + [e]) + end + end + loopStart = permutations + end + end + */ + Node paths(const Node& arrs) { + + Node loopStart = Node::createCollection(); + loopStart.collection()->push_back(Node::createCollection()); + + for (NodeDeque::iterator arrsIter = arrs.collection()->begin(), arrsEndIter = arrs.collection()->end(); + arrsIter != arrsEndIter; ++arrsIter) { + + Node& arr = *arrsIter; + + Node permutations = Node::createCollection(); + + for (NodeDeque::iterator arrIter = arr.collection()->begin(), arrIterEnd = arr.collection()->end(); + arrIter != arrIterEnd; ++arrIter) { + + Node& e = *arrIter; + + for (NodeDeque::iterator loopStartIter = loopStart.collection()->begin(), loopStartIterEnd = loopStart.collection()->end(); + loopStartIter != loopStartIterEnd; ++loopStartIter) { + + Node& path = *loopStartIter; + + Node newPermutation = Node::createCollection(); + newPermutation.got_line_feed = arr.got_line_feed; + newPermutation.plus(path); + newPermutation.collection()->push_back(e); + + permutations.collection()->push_back(newPermutation); + } + } + + loopStart = permutations; + } + + return loopStart; + } + + + /* + This is the equivalent of ruby sass' Sass::Util.flatten and [].flatten. + Sass::Util.flatten requires the number of levels to flatten, while + [].flatten doesn't and will flatten the entire array. This function + supports both. + + # Flattens the first `n` nested arrays. If n == -1, all arrays will be flattened + # + # @param arr [NodeCollection] The array to flatten + # @param n [int] The number of levels to flatten + # @return [NodeCollection] The flattened array + + The following is the modified version of the ruby code that was more portable to C++. You + should be able to drop it into ruby 3.2.19 and get the same results from ruby sass. 
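As an extra illustration before the ruby version, here is a minimal self-contained C++ sketch of the same semantics; the Item type and flatten_sketch name are hypothetical stand-ins, not the Node class used in the implementation further down:

  #include <vector>

  struct Item {
    bool is_list = false;
    int value = 0;               // used when is_list == false
    std::vector<Item> items;     // used when is_list == true
  };

  // Flattens the first n nesting levels; n == -1 never reaches 0 while
  // recursing, so it flattens everything, matching the behaviour described above.
  std::vector<Item> flatten_sketch(const std::vector<Item>& arr, int n = -1) {
    if (n == 0) return arr;      // no levels left to flatten
    std::vector<Item> flattened;
    for (const Item& e : arr) {
      if (e.is_list) {
        std::vector<Item> inner = flatten_sketch(e.items, n - 1);
        flattened.insert(flattened.end(), inner.begin(), inner.end());
      } else {
        flattened.push_back(e);
      }
    }
    return flattened;
  }

For instance, flattening [[1, [2]], [3]] one level gives [1, [2], 3], while n == -1 gives [1, 2, 3].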
+ + def flatten(arr, n = -1) + if n != -1 and n == 0 then + return arr + end + + flattened = [] + + for e in arr do + if e.is_a?(Array) then + flattened.concat(flatten(e, n - 1)) + else + flattened << e + end + end + + return flattened + end + */ + Node flatten(Node& arr, int n) { + if (n != -1 && n == 0) { + return arr; + } + + Node flattened = Node::createCollection(); + if (arr.got_line_feed) flattened.got_line_feed = true; + + for (NodeDeque::iterator iter = arr.collection()->begin(), iterEnd = arr.collection()->end(); + iter != iterEnd; iter++) { + Node& e = *iter; + + // e has the lf set + if (e.isCollection()) { + + // e.collection().got_line_feed = e.got_line_feed; + Node recurseFlattened = flatten(e, n - 1); + + if(e.got_line_feed) { + flattened.got_line_feed = e.got_line_feed; + recurseFlattened.got_line_feed = e.got_line_feed; + } + + for(auto i : (*recurseFlattened.collection())) { + if (recurseFlattened.got_line_feed) { + + i.got_line_feed = true; + } + flattened.collection()->push_back(i); + } + + } else { + flattened.collection()->push_back(e); + } + } + + return flattened; + } +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/sass_util.hpp b/mybulma/node_modules/node-sass/src/libsass/src/sass_util.hpp new file mode 100644 index 0000000..816da5f --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/sass_util.hpp @@ -0,0 +1,256 @@ +#ifndef SASS_SASS_UTIL_H +#define SASS_SASS_UTIL_H + +#include "ast.hpp" +#include "node.hpp" +#include "debug.hpp" + +namespace Sass { + + + + + /* + This is for ports of functions in the Sass:Util module. + */ + + + /* + # Return a Node collection of all possible paths through the given Node collection of Node collections. + # + # @param arrs [NodeCollection>] + # @return [NodeCollection>] + # + # @example + # paths([[1, 2], [3, 4], [5]]) #=> + # # [[1, 3, 5], + # # [2, 3, 5], + # # [1, 4, 5], + # # [2, 4, 5]] + */ + Node paths(const Node& arrs); + + + /* + This class is a default implementation of a Node comparator that can be passed to the lcs function below. + It uses operator== for equality comparision. It then returns one if the Nodes are equal. + */ + class DefaultLcsComparator { + public: + bool operator()(const Node& one, const Node& two, Node& out) const { + // TODO: Is this the correct C++ interpretation? + // block ||= proc {|a, b| a == b && a} + if (one == two) { + out = one; + return true; + } + + return false; + } + }; + + + typedef std::vector > LCSTable; + + + /* + This is the equivalent of ruby's Sass::Util.lcs_backtrace. + + # Computes a single longest common subsequence for arrays x and y. 
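  # As a small worked example: with x = [a, b, c] and y = [b, c, d], the length
  # table (with an extra all-zero row and column for the empty prefixes) is
  #
  #          b  c  d
  #       0  0  0  0
  #    a  0  0  0  0
  #    b  0  1  1  1
  #    c  0  1  2  2
  #
  # The backtrace then walks from the bottom-right cell towards the top-left,
  # emitting an element whenever the comparator reports a match, and recovers
  # the subsequence [b, c].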
+ # Algorithm from http://en.wikipedia.org/wiki/Longest_common_subsequence_problem#Reading_out_an_LCS + */ + template + Node lcs_backtrace(const LCSTable& c, const Node& x, const Node& y, int i, int j, const ComparatorType& comparator) { + DEBUG_PRINTLN(LCS, "LCSBACK: X=" << x << " Y=" << y << " I=" << i << " J=" << j) + + if (i == 0 || j == 0) { + DEBUG_PRINTLN(LCS, "RETURNING EMPTY") + return Node::createCollection(); + } + + NodeDeque& xChildren = *(x.collection()); + NodeDeque& yChildren = *(y.collection()); + + Node compareOut = Node::createNil(); + if (comparator(xChildren[i], yChildren[j], compareOut)) { + DEBUG_PRINTLN(LCS, "RETURNING AFTER ELEM COMPARE") + Node result = lcs_backtrace(c, x, y, i - 1, j - 1, comparator); + result.collection()->push_back(compareOut); + return result; + } + + if (c[i][j - 1] > c[i - 1][j]) { + DEBUG_PRINTLN(LCS, "RETURNING AFTER TABLE COMPARE") + return lcs_backtrace(c, x, y, i, j - 1, comparator); + } + + DEBUG_PRINTLN(LCS, "FINAL RETURN") + return lcs_backtrace(c, x, y, i - 1, j, comparator); + } + + + /* + This is the equivalent of ruby's Sass::Util.lcs_table. + + # Calculates the memoization table for the Least Common Subsequence algorithm. + # Algorithm from http://en.wikipedia.org/wiki/Longest_common_subsequence_problem#Computing_the_length_of_the_LCS + */ + template + void lcs_table(const Node& x, const Node& y, const ComparatorType& comparator, LCSTable& out) { + DEBUG_PRINTLN(LCS, "LCSTABLE: X=" << x << " Y=" << y) + + NodeDeque& xChildren = *(x.collection()); + NodeDeque& yChildren = *(y.collection()); + + LCSTable c(xChildren.size(), std::vector(yChildren.size())); + + // These shouldn't be necessary since the vector will be initialized to 0 already. + // x.size.times {|i| c[i][0] = 0} + // y.size.times {|j| c[0][j] = 0} + + for (size_t i = 1; i < xChildren.size(); i++) { + for (size_t j = 1; j < yChildren.size(); j++) { + Node compareOut = Node::createNil(); + + if (comparator(xChildren[i], yChildren[j], compareOut)) { + c[i][j] = c[i - 1][j - 1] + 1; + } else { + c[i][j] = std::max(c[i][j - 1], c[i - 1][j]); + } + } + } + + out = c; + } + + + /* + This is the equivalent of ruby's Sass::Util.lcs. + + # Computes a single longest common subsequence for `x` and `y`. + # If there are more than one longest common subsequences, + # the one returned is that which starts first in `x`. + + # @param x [NodeCollection] + # @param y [NodeCollection] + # @comparator An equality check between elements of `x` and `y`. + # @return [NodeCollection] The LCS + + http://en.wikipedia.org/wiki/Longest_common_subsequence_problem + */ + template + Node lcs(Node& x, Node& y, const ComparatorType& comparator) { + DEBUG_PRINTLN(LCS, "LCS: X=" << x << " Y=" << y) + + Node newX = Node::createCollection(); + newX.collection()->push_back(Node::createNil()); + newX.plus(x); + + Node newY = Node::createCollection(); + newY.collection()->push_back(Node::createNil()); + newY.plus(y); + + LCSTable table; + lcs_table(newX, newY, comparator, table); + + return lcs_backtrace(table, newX, newY, static_cast(newX.collection()->size()) - 1, static_cast(newY.collection()->size()) - 1, comparator); + } + + + /* + This is the equivalent of ruby sass' Sass::Util.flatten and [].flatten. + Sass::Util.flatten requires the number of levels to flatten, while + [].flatten doesn't and will flatten the entire array. This function + supports both. + + # Flattens the first `n` nested arrays. 
If n == -1, all arrays will be flattened + # + # @param arr [NodeCollection] The array to flatten + # @param n [int] The number of levels to flatten + # @return [NodeCollection] The flattened array + */ + Node flatten(Node& arr, int n = -1); + + + /* + This is the equivalent of ruby's Sass::Util.group_by_to_a. + + # Performs the equivalent of `enum.group_by.to_a`, but with a guaranteed + # order. Unlike [#hash_to_a], the resulting order isn't sorted key order; + # instead, it's the same order as `#group_by` has under Ruby 1.9 (key + # appearance order). + # + # @param enum [Enumerable] + # @return [Array<[Object, Array]>] An array of pairs. + + TODO: update @param and @return once I know what those are. + + The following is the modified version of the ruby code that was more portable to C++. You + should be able to drop it into ruby 3.2.19 and get the same results from ruby sass. + + def group_by_to_a(enum, &block) + order = {} + + arr = [] + + grouped = {} + + for e in enum do + key = block[e] + unless order.include?(key) + order[key] = order.size + end + + if not grouped.has_key?(key) then + grouped[key] = [e] + else + grouped[key].push(e) + end + end + + grouped.each do |key, vals| + arr[order[key]] = [key, vals] + end + + arr + end + + */ + template + void group_by_to_a(std::vector& enumeration, KeyFunctorType& keyFunc, std::vector > >& arr /*out*/) { + + std::map order; + + std::map > grouped; + + for (typename std::vector::iterator enumIter = enumeration.begin(), enumIterEnd = enumeration.end(); enumIter != enumIterEnd; enumIter++) { + EnumType& e = *enumIter; + + KeyType key = keyFunc(e); + + if (grouped.find(key->hash()) == grouped.end()) { + order.insert(std::make_pair((unsigned int)order.size(), key)); + + std::vector newCollection; + newCollection.push_back(e); + grouped.insert(std::make_pair(key->hash(), newCollection)); + } else { + std::vector& collection = grouped.at(key->hash()); + collection.push_back(e); + } + } + + for (unsigned int index = 0; index < order.size(); index++) { + KeyType& key = order.at(index); + std::vector& values = grouped.at(key->hash()); + + std::pair > grouping = std::make_pair(key, values); + + arr.push_back(grouping); + } + } + + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/sass_values.cpp b/mybulma/node_modules/node-sass/src/libsass/src/sass_values.cpp new file mode 100644 index 0000000..34c591a --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/sass_values.cpp @@ -0,0 +1,357 @@ +#include "sass.hpp" +#include +#include +#include "util.hpp" +#include "eval.hpp" +#include "values.hpp" +#include "operators.hpp" +#include "sass/values.h" +#include "sass_values.hpp" + +extern "C" { + using namespace Sass; + + // Return the sass tag for a generic sass value + enum Sass_Tag ADDCALL sass_value_get_tag(const union Sass_Value* v) { return v->unknown.tag; } + + // Check value for specified type + bool ADDCALL sass_value_is_null(const union Sass_Value* v) { return v->unknown.tag == SASS_NULL; } + bool ADDCALL sass_value_is_number(const union Sass_Value* v) { return v->unknown.tag == SASS_NUMBER; } + bool ADDCALL sass_value_is_string(const union Sass_Value* v) { return v->unknown.tag == SASS_STRING; } + bool ADDCALL sass_value_is_boolean(const union Sass_Value* v) { return v->unknown.tag == SASS_BOOLEAN; } + bool ADDCALL sass_value_is_color(const union Sass_Value* v) { return v->unknown.tag == SASS_COLOR; } + bool ADDCALL sass_value_is_list(const union Sass_Value* v) { return v->unknown.tag == SASS_LIST; } + bool 
ADDCALL sass_value_is_map(const union Sass_Value* v) { return v->unknown.tag == SASS_MAP; } + bool ADDCALL sass_value_is_error(const union Sass_Value* v) { return v->unknown.tag == SASS_ERROR; } + bool ADDCALL sass_value_is_warning(const union Sass_Value* v) { return v->unknown.tag == SASS_WARNING; } + + // Getters and setters for Sass_Number + double ADDCALL sass_number_get_value(const union Sass_Value* v) { return v->number.value; } + void ADDCALL sass_number_set_value(union Sass_Value* v, double value) { v->number.value = value; } + const char* ADDCALL sass_number_get_unit(const union Sass_Value* v) { return v->number.unit; } + void ADDCALL sass_number_set_unit(union Sass_Value* v, char* unit) { v->number.unit = unit; } + + // Getters and setters for Sass_String + const char* ADDCALL sass_string_get_value(const union Sass_Value* v) { return v->string.value; } + void ADDCALL sass_string_set_value(union Sass_Value* v, char* value) { v->string.value = value; } + bool ADDCALL sass_string_is_quoted(const union Sass_Value* v) { return v->string.quoted; } + void ADDCALL sass_string_set_quoted(union Sass_Value* v, bool quoted) { v->string.quoted = quoted; } + + // Getters and setters for Sass_Boolean + bool ADDCALL sass_boolean_get_value(const union Sass_Value* v) { return v->boolean.value; } + void ADDCALL sass_boolean_set_value(union Sass_Value* v, bool value) { v->boolean.value = value; } + + // Getters and setters for Sass_Color + double ADDCALL sass_color_get_r(const union Sass_Value* v) { return v->color.r; } + void ADDCALL sass_color_set_r(union Sass_Value* v, double r) { v->color.r = r; } + double ADDCALL sass_color_get_g(const union Sass_Value* v) { return v->color.g; } + void ADDCALL sass_color_set_g(union Sass_Value* v, double g) { v->color.g = g; } + double ADDCALL sass_color_get_b(const union Sass_Value* v) { return v->color.b; } + void ADDCALL sass_color_set_b(union Sass_Value* v, double b) { v->color.b = b; } + double ADDCALL sass_color_get_a(const union Sass_Value* v) { return v->color.a; } + void ADDCALL sass_color_set_a(union Sass_Value* v, double a) { v->color.a = a; } + + // Getters and setters for Sass_List + size_t ADDCALL sass_list_get_length(const union Sass_Value* v) { return v->list.length; } + enum Sass_Separator ADDCALL sass_list_get_separator(const union Sass_Value* v) { return v->list.separator; } + void ADDCALL sass_list_set_separator(union Sass_Value* v, enum Sass_Separator separator) { v->list.separator = separator; } + bool ADDCALL sass_list_get_is_bracketed(const union Sass_Value* v) { return v->list.is_bracketed; } + void ADDCALL sass_list_set_is_bracketed(union Sass_Value* v, bool is_bracketed) { v->list.is_bracketed = is_bracketed; } + // Getters and setters for Sass_List values + union Sass_Value* ADDCALL sass_list_get_value(const union Sass_Value* v, size_t i) { return v->list.values[i]; } + void ADDCALL sass_list_set_value(union Sass_Value* v, size_t i, union Sass_Value* value) { v->list.values[i] = value; } + + // Getters and setters for Sass_Map + size_t ADDCALL sass_map_get_length(const union Sass_Value* v) { return v->map.length; } + // Getters and setters for Sass_List keys and values + union Sass_Value* ADDCALL sass_map_get_key(const union Sass_Value* v, size_t i) { return v->map.pairs[i].key; } + union Sass_Value* ADDCALL sass_map_get_value(const union Sass_Value* v, size_t i) { return v->map.pairs[i].value; } + void ADDCALL sass_map_set_key(union Sass_Value* v, size_t i, union Sass_Value* key) { v->map.pairs[i].key = key; } + void ADDCALL 
sass_map_set_value(union Sass_Value* v, size_t i, union Sass_Value* val) { v->map.pairs[i].value = val; } + + // Getters and setters for Sass_Error + char* ADDCALL sass_error_get_message(const union Sass_Value* v) { return v->error.message; }; + void ADDCALL sass_error_set_message(union Sass_Value* v, char* msg) { v->error.message = msg; }; + + // Getters and setters for Sass_Warning + char* ADDCALL sass_warning_get_message(const union Sass_Value* v) { return v->warning.message; }; + void ADDCALL sass_warning_set_message(union Sass_Value* v, char* msg) { v->warning.message = msg; }; + + // Creator functions for all value types + + union Sass_Value* ADDCALL sass_make_boolean(bool val) + { + union Sass_Value* v = (Sass_Value*) calloc(1, sizeof(Sass_Value)); + if (v == 0) return 0; + v->boolean.tag = SASS_BOOLEAN; + v->boolean.value = val; + return v; + } + + union Sass_Value* ADDCALL sass_make_number(double val, const char* unit) + { + union Sass_Value* v = (Sass_Value*) calloc(1, sizeof(Sass_Value)); + if (v == 0) return 0; + v->number.tag = SASS_NUMBER; + v->number.value = val; + v->number.unit = unit ? sass_copy_c_string(unit) : 0; + if (v->number.unit == 0) { free(v); return 0; } + return v; + } + + union Sass_Value* ADDCALL sass_make_color(double r, double g, double b, double a) + { + union Sass_Value* v = (Sass_Value*) calloc(1, sizeof(Sass_Value)); + if (v == 0) return 0; + v->color.tag = SASS_COLOR; + v->color.r = r; + v->color.g = g; + v->color.b = b; + v->color.a = a; + return v; + } + + union Sass_Value* ADDCALL sass_make_string(const char* val) + { + union Sass_Value* v = (Sass_Value*) calloc(1, sizeof(Sass_Value)); + if (v == 0) return 0; + v->string.quoted = false; + v->string.tag = SASS_STRING; + v->string.value = val ? sass_copy_c_string(val) : 0; + if (v->string.value == 0) { free(v); return 0; } + return v; + } + + union Sass_Value* ADDCALL sass_make_qstring(const char* val) + { + union Sass_Value* v = (Sass_Value*) calloc(1, sizeof(Sass_Value)); + if (v == 0) return 0; + v->string.quoted = true; + v->string.tag = SASS_STRING; + v->string.value = val ? sass_copy_c_string(val) : 0; + if (v->string.value == 0) { free(v); return 0; } + return v; + } + + union Sass_Value* ADDCALL sass_make_list(size_t len, enum Sass_Separator sep, bool is_bracketed) + { + union Sass_Value* v = (Sass_Value*) calloc(1, sizeof(Sass_Value)); + if (v == 0) return 0; + v->list.tag = SASS_LIST; + v->list.length = len; + v->list.separator = sep; + v->list.is_bracketed = is_bracketed; + v->list.values = (union Sass_Value**) calloc(len, sizeof(union Sass_Value*)); + if (v->list.values == 0) { free(v); return 0; } + return v; + } + + union Sass_Value* ADDCALL sass_make_map(size_t len) + { + union Sass_Value* v = (Sass_Value*) calloc(1, sizeof(Sass_Value)); + if (v == 0) return 0; + v->map.tag = SASS_MAP; + v->map.length = len; + v->map.pairs = (struct Sass_MapPair*) calloc(len, sizeof(struct Sass_MapPair)); + if (v->map.pairs == 0) { free(v); return 0; } + return v; + } + + union Sass_Value* ADDCALL sass_make_null(void) + { + union Sass_Value* v = (Sass_Value*) calloc(1, sizeof(Sass_Value)); + if (v == 0) return 0; + v->null.tag = SASS_NULL; + return v; + } + + union Sass_Value* ADDCALL sass_make_error(const char* msg) + { + union Sass_Value* v = (Sass_Value*) calloc(1, sizeof(Sass_Value)); + if (v == 0) return 0; + v->error.tag = SASS_ERROR; + v->error.message = msg ? 
sass_copy_c_string(msg) : 0; + if (v->error.message == 0) { free(v); return 0; } + return v; + } + + union Sass_Value* ADDCALL sass_make_warning(const char* msg) + { + union Sass_Value* v = (Sass_Value*) calloc(1, sizeof(Sass_Value)); + if (v == 0) return 0; + v->warning.tag = SASS_WARNING; + v->warning.message = msg ? sass_copy_c_string(msg) : 0; + if (v->warning.message == 0) { free(v); return 0; } + return v; + } + + // will free all associated sass values + void ADDCALL sass_delete_value(union Sass_Value* val) { + + size_t i; + if (val == 0) return; + switch(val->unknown.tag) { + case SASS_NULL: { + } break; + case SASS_BOOLEAN: { + } break; + case SASS_NUMBER: { + free(val->number.unit); + } break; + case SASS_COLOR: { + } break; + case SASS_STRING: { + free(val->string.value); + } break; + case SASS_LIST: { + for (i=0; ilist.length; i++) { + sass_delete_value(val->list.values[i]); + } + free(val->list.values); + } break; + case SASS_MAP: { + for (i=0; imap.length; i++) { + sass_delete_value(val->map.pairs[i].key); + sass_delete_value(val->map.pairs[i].value); + } + free(val->map.pairs); + } break; + case SASS_ERROR: { + free(val->error.message); + } break; + case SASS_WARNING: { + free(val->error.message); + } break; + default: break; + } + + free(val); + + } + + // Make a deep cloned copy of the given sass value + union Sass_Value* ADDCALL sass_clone_value (const union Sass_Value* val) + { + + size_t i; + if (val == 0) return 0; + switch(val->unknown.tag) { + case SASS_NULL: { + return sass_make_null(); + } + case SASS_BOOLEAN: { + return sass_make_boolean(val->boolean.value); + } + case SASS_NUMBER: { + return sass_make_number(val->number.value, val->number.unit); + } + case SASS_COLOR: { + return sass_make_color(val->color.r, val->color.g, val->color.b, val->color.a); + } + case SASS_STRING: { + return sass_string_is_quoted(val) ? sass_make_qstring(val->string.value) : sass_make_string(val->string.value); + } + case SASS_LIST: { + union Sass_Value* list = sass_make_list(val->list.length, val->list.separator, val->list.is_bracketed); + for (i = 0; i < list->list.length; i++) { + list->list.values[i] = sass_clone_value(val->list.values[i]); + } + return list; + } + case SASS_MAP: { + union Sass_Value* map = sass_make_map(val->map.length); + for (i = 0; i < val->map.length; i++) { + map->map.pairs[i].key = sass_clone_value(val->map.pairs[i].key); + map->map.pairs[i].value = sass_clone_value(val->map.pairs[i].value); + } + return map; + } + case SASS_ERROR: { + return sass_make_error(val->error.message); + } + case SASS_WARNING: { + return sass_make_warning(val->warning.message); + } + default: break; + } + + return 0; + + } + + union Sass_Value* ADDCALL sass_value_stringify (const union Sass_Value* v, bool compressed, int precision) + { + Value_Obj val = sass_value_to_ast_node(v); + Sass_Inspect_Options options(compressed ? 
COMPRESSED : NESTED, precision); + std::string str(val->to_string(options)); + return sass_make_qstring(str.c_str()); + } + + union Sass_Value* ADDCALL sass_value_op (enum Sass_OP op, const union Sass_Value* a, const union Sass_Value* b) + { + + Sass::Value_Ptr rv; + + try { + + Value_Obj lhs = sass_value_to_ast_node(a); + Value_Obj rhs = sass_value_to_ast_node(b); + struct Sass_Inspect_Options options(NESTED, 5); + + // see if it's a relational expression + switch(op) { + case Sass_OP::EQ: return sass_make_boolean(Operators::eq(lhs, rhs)); + case Sass_OP::NEQ: return sass_make_boolean(Operators::neq(lhs, rhs)); + case Sass_OP::GT: return sass_make_boolean(Operators::gt(lhs, rhs)); + case Sass_OP::GTE: return sass_make_boolean(Operators::gte(lhs, rhs)); + case Sass_OP::LT: return sass_make_boolean(Operators::lt(lhs, rhs)); + case Sass_OP::LTE: return sass_make_boolean(Operators::lte(lhs, rhs)); + case Sass_OP::AND: return ast_node_to_sass_value(lhs->is_false() ? lhs : rhs); + case Sass_OP::OR: return ast_node_to_sass_value(lhs->is_false() ? rhs : lhs); + default: break; + } + + if (sass_value_is_number(a) && sass_value_is_number(b)) { + Number_Ptr_Const l_n = Cast(lhs); + Number_Ptr_Const r_n = Cast(rhs); + rv = Operators::op_numbers(op, *l_n, *r_n, options, l_n->pstate()); + } + else if (sass_value_is_number(a) && sass_value_is_color(a)) { + Number_Ptr_Const l_n = Cast(lhs); + Color_Ptr_Const r_c = Cast(rhs); + rv = Operators::op_number_color(op, *l_n, *r_c, options, l_n->pstate()); + } + else if (sass_value_is_color(a) && sass_value_is_number(b)) { + Color_Ptr_Const l_c = Cast(lhs); + Number_Ptr_Const r_n = Cast(rhs); + rv = Operators::op_color_number(op, *l_c, *r_n, options, l_c->pstate()); + } + else if (sass_value_is_color(a) && sass_value_is_color(b)) { + Color_Ptr_Const l_c = Cast(lhs); + Color_Ptr_Const r_c = Cast(rhs); + rv = Operators::op_colors(op, *l_c, *r_c, options, l_c->pstate()); + } + else /* convert other stuff to string and apply operation */ { + Value_Ptr l_v = Cast(lhs); + Value_Ptr r_v = Cast(rhs); + rv = Operators::op_strings(op, *l_v, *r_v, options, l_v->pstate()); + } + + // ToDo: maybe we should should return null value? + if (!rv) return sass_make_error("invalid return value"); + + // convert result back to ast node + return ast_node_to_sass_value(rv); + + } + + // simply pass the error message back to the caller for now + catch (Exception::InvalidSass& e) { return sass_make_error(e.what()); } + catch (std::bad_alloc&) { return sass_make_error("memory exhausted"); } + catch (std::exception& e) { return sass_make_error(e.what()); } + catch (std::string& e) { return sass_make_error(e.c_str()); } + catch (const char* e) { return sass_make_error(e); } + catch (...) 
{ return sass_make_error("unknown"); } + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/sass_values.hpp b/mybulma/node_modules/node-sass/src/libsass/src/sass_values.hpp new file mode 100644 index 0000000..9aa5cdb --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/sass_values.hpp @@ -0,0 +1,82 @@ +#ifndef SASS_SASS_VALUES_H +#define SASS_SASS_VALUES_H + +#include "sass.h" + +struct Sass_Unknown { + enum Sass_Tag tag; +}; + +struct Sass_Boolean { + enum Sass_Tag tag; + bool value; +}; + +struct Sass_Number { + enum Sass_Tag tag; + double value; + char* unit; +}; + +struct Sass_Color { + enum Sass_Tag tag; + double r; + double g; + double b; + double a; +}; + +struct Sass_String { + enum Sass_Tag tag; + bool quoted; + char* value; +}; + +struct Sass_List { + enum Sass_Tag tag; + enum Sass_Separator separator; + bool is_bracketed; + size_t length; + // null terminated "array" + union Sass_Value** values; +}; + +struct Sass_Map { + enum Sass_Tag tag; + size_t length; + struct Sass_MapPair* pairs; +}; + +struct Sass_Null { + enum Sass_Tag tag; +}; + +struct Sass_Error { + enum Sass_Tag tag; + char* message; +}; + +struct Sass_Warning { + enum Sass_Tag tag; + char* message; +}; + +union Sass_Value { + struct Sass_Unknown unknown; + struct Sass_Boolean boolean; + struct Sass_Number number; + struct Sass_Color color; + struct Sass_String string; + struct Sass_List list; + struct Sass_Map map; + struct Sass_Null null; + struct Sass_Error error; + struct Sass_Warning warning; +}; + +struct Sass_MapPair { + union Sass_Value* key; + union Sass_Value* value; +}; + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/source_map.cpp b/mybulma/node_modules/node-sass/src/libsass/src/source_map.cpp new file mode 100644 index 0000000..c171a3f --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/source_map.cpp @@ -0,0 +1,195 @@ +#include "sass.hpp" +#include +#include +#include +#include + +#include "ast.hpp" +#include "json.hpp" +#include "context.hpp" +#include "position.hpp" +#include "source_map.hpp" + +namespace Sass { + SourceMap::SourceMap() : current_position(0, 0, 0), file("stdin") { } + SourceMap::SourceMap(const std::string& file) : current_position(0, 0, 0), file(file) { } + + std::string SourceMap::render_srcmap(Context &ctx) { + + const bool include_sources = ctx.c_options.source_map_contents; + const std::vector links = ctx.srcmap_links; + const std::vector& sources(ctx.resources); + + JsonNode* json_srcmap = json_mkobject(); + + json_append_member(json_srcmap, "version", json_mknumber(3)); + + const char *file_name = file.c_str(); + JsonNode *json_file_name = json_mkstring(file_name); + json_append_member(json_srcmap, "file", json_file_name); + + // pass-through sourceRoot option + if (!ctx.source_map_root.empty()) { + JsonNode* root = json_mkstring(ctx.source_map_root.c_str()); + json_append_member(json_srcmap, "sourceRoot", root); + } + + JsonNode *json_sources = json_mkarray(); + for (size_t i = 0; i < source_index.size(); ++i) { + std::string source(links[source_index[i]]); + if (ctx.c_options.source_map_file_urls) { + source = File::rel2abs(source); + // check for windows abs path + if (source[0] == '/') { + // ends up with three slashes + source = "file://" + source; + } else { + // needs an additional slash + source = "file:///" + source; + } + } + const char* source_name = source.c_str(); + JsonNode *json_source_name = json_mkstring(source_name); + json_append_element(json_sources, json_source_name); + } + 
json_append_member(json_srcmap, "sources", json_sources); + + if (include_sources && source_index.size()) { + JsonNode *json_contents = json_mkarray(); + for (size_t i = 0; i < source_index.size(); ++i) { + const Resource& resource(sources[source_index[i]]); + JsonNode *json_content = json_mkstring(resource.contents); + json_append_element(json_contents, json_content); + } + json_append_member(json_srcmap, "sourcesContent", json_contents); + } + + JsonNode *json_names = json_mkarray(); + // so far we have no implementation for names + // no problem as we do not alter any identifiers + json_append_member(json_srcmap, "names", json_names); + + std::string mappings = serialize_mappings(); + JsonNode *json_mappings = json_mkstring(mappings.c_str()); + json_append_member(json_srcmap, "mappings", json_mappings); + + char *str = json_stringify(json_srcmap, "\t"); + std::string result = std::string(str); + free(str); + json_delete(json_srcmap); + return result; + } + + std::string SourceMap::serialize_mappings() { + std::string result = ""; + + size_t previous_generated_line = 0; + size_t previous_generated_column = 0; + size_t previous_original_line = 0; + size_t previous_original_column = 0; + size_t previous_original_file = 0; + for (size_t i = 0; i < mappings.size(); ++i) { + const size_t generated_line = mappings[i].generated_position.line; + const size_t generated_column = mappings[i].generated_position.column; + const size_t original_line = mappings[i].original_position.line; + const size_t original_column = mappings[i].original_position.column; + const size_t original_file = mappings[i].original_position.file; + + if (generated_line != previous_generated_line) { + previous_generated_column = 0; + if (generated_line > previous_generated_line) { + result += std::string(generated_line - previous_generated_line, ';'); + previous_generated_line = generated_line; + } + } + else if (i > 0) { + result += ","; + } + + // generated column + result += base64vlq.encode(static_cast(generated_column) - static_cast(previous_generated_column)); + previous_generated_column = generated_column; + // file + result += base64vlq.encode(static_cast(original_file) - static_cast(previous_original_file)); + previous_original_file = original_file; + // source line + result += base64vlq.encode(static_cast(original_line) - static_cast(previous_original_line)); + previous_original_line = original_line; + // source column + result += base64vlq.encode(static_cast(original_column) - static_cast(previous_original_column)); + previous_original_column = original_column; + } + + return result; + } + + void SourceMap::prepend(const OutputBuffer& out) + { + Offset size(out.smap.current_position); + for (Mapping mapping : out.smap.mappings) { + if (mapping.generated_position.line > size.line) { + throw(std::runtime_error("prepend sourcemap has illegal line")); + } + if (mapping.generated_position.line == size.line) { + if (mapping.generated_position.column > size.column) { + throw(std::runtime_error("prepend sourcemap has illegal column")); + } + } + } + // adjust the buffer offset + prepend(Offset(out.buffer)); + // now add the new mappings + VECTOR_UNSHIFT(mappings, out.smap.mappings); + } + + void SourceMap::append(const OutputBuffer& out) + { + append(Offset(out.buffer)); + } + + void SourceMap::prepend(const Offset& offset) + { + if (offset.line != 0 || offset.column != 0) { + for (Mapping& mapping : mappings) { + // move stuff on the first old line + if (mapping.generated_position.line == 0) { + 
mapping.generated_position.column += offset.column; + } + // make place for the new lines + mapping.generated_position.line += offset.line; + } + } + if (current_position.line == 0) { + current_position.column += offset.column; + } + current_position.line += offset.line; + } + + void SourceMap::append(const Offset& offset) + { + current_position += offset; + } + + void SourceMap::add_open_mapping(const AST_Node_Ptr node) + { + mappings.push_back(Mapping(node->pstate(), current_position)); + } + + void SourceMap::add_close_mapping(const AST_Node_Ptr node) + { + mappings.push_back(Mapping(node->pstate() + node->pstate().offset, current_position)); + } + + ParserState SourceMap::remap(const ParserState& pstate) { + for (size_t i = 0; i < mappings.size(); ++i) { + if ( + mappings[i].generated_position.file == pstate.file && + mappings[i].generated_position.line == pstate.line && + mappings[i].generated_position.column == pstate.column + ) return ParserState(pstate.path, pstate.src, mappings[i].original_position, pstate.offset); + } + return ParserState(pstate.path, pstate.src, Position(-1, -1, -1), Offset(0, 0)); + + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/source_map.hpp b/mybulma/node_modules/node-sass/src/libsass/src/source_map.hpp new file mode 100644 index 0000000..0778564 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/source_map.hpp @@ -0,0 +1,62 @@ +#ifndef SASS_SOURCE_MAP_H +#define SASS_SOURCE_MAP_H + +#include +#include + +#include "ast_fwd_decl.hpp" +#include "base64vlq.hpp" +#include "position.hpp" +#include "mapping.hpp" + +#define VECTOR_PUSH(vec, ins) vec.insert(vec.end(), ins.begin(), ins.end()) +#define VECTOR_UNSHIFT(vec, ins) vec.insert(vec.begin(), ins.begin(), ins.end()) + +namespace Sass { + + class Context; + class OutputBuffer; + + class SourceMap { + + public: + std::vector source_index; + SourceMap(); + SourceMap(const std::string& file); + + void append(const Offset& offset); + void prepend(const Offset& offset); + void append(const OutputBuffer& out); + void prepend(const OutputBuffer& out); + void add_open_mapping(const AST_Node_Ptr node); + void add_close_mapping(const AST_Node_Ptr node); + + std::string render_srcmap(Context &ctx); + ParserState remap(const ParserState& pstate); + + private: + + std::string serialize_mappings(); + + std::vector mappings; + Position current_position; +public: + std::string file; +private: + Base64VLQ base64vlq; + }; + + class OutputBuffer { + public: + OutputBuffer(void) + : buffer(""), + smap() + { } + public: + std::string buffer; + SourceMap smap; + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/subset_map.cpp b/mybulma/node_modules/node-sass/src/libsass/src/subset_map.cpp new file mode 100644 index 0000000..24513e4 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/subset_map.cpp @@ -0,0 +1,55 @@ +#include "sass.hpp" +#include "ast.hpp" +#include "subset_map.hpp" + +namespace Sass { + + void Subset_Map::put(const Compound_Selector_Obj& sel, const SubSetMapPair& value) + { + if (sel->empty()) throw std::runtime_error("internal error: subset map keys may not be empty"); + size_t index = values_.size(); + values_.push_back(value); + for (size_t i = 0, S = sel->length(); i < S; ++i) + { + hash_[(*sel)[i]].push_back(std::make_pair(sel, index)); + } + } + + std::vector Subset_Map::get_kv(const Compound_Selector_Obj& sel) + { + SimpleSelectorDict dict(sel->begin(), sel->end()); // XXX Set + std::vector indices; + for (size_t i = 0, S = 
sel->length(); i < S; ++i) { + if (!hash_.count((*sel)[i])) { + continue; + } + const std::vector >& subsets = hash_[(*sel)[i]]; + for (const std::pair& item : subsets) { + bool include = true; + for (const Simple_Selector_Obj& it : item.first->elements()) { + auto found = dict.find(it); + if (found == dict.end()) { + include = false; + break; + } + } + if (include) indices.push_back(item.second); + } + } + sort(indices.begin(), indices.end()); + std::vector::iterator indices_end = unique(indices.begin(), indices.end()); + indices.resize(distance(indices.begin(), indices_end)); + + std::vector results; + for (size_t i = 0, S = indices.size(); i < S; ++i) { + results.push_back(values_[indices[i]]); + } + return results; + } + + std::vector Subset_Map::get_v(const Compound_Selector_Obj& sel) + { + return get_kv(sel); + } + +} \ No newline at end of file diff --git a/mybulma/node_modules/node-sass/src/libsass/src/subset_map.hpp b/mybulma/node_modules/node-sass/src/libsass/src/subset_map.hpp new file mode 100644 index 0000000..5c091e6 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/subset_map.hpp @@ -0,0 +1,76 @@ +#ifndef SASS_SUBSET_MAP_H +#define SASS_SUBSET_MAP_H + +#include +#include +#include +#include +#include + +#include "ast_fwd_decl.hpp" + + +// #include +// #include +// template +// std::string vector_to_string(std::vector v) +// { +// std::stringstream buffer; +// buffer << "["; + +// if (!v.empty()) +// { buffer << v[0]; } +// else +// { buffer << "]"; } + +// if (v.size() == 1) +// { buffer << "]"; } +// else +// { +// for (size_t i = 1, S = v.size(); i < S; ++i) buffer << ", " << v[i]; +// buffer << "]"; +// } + +// return buffer.str(); +// } + +// template +// std::string set_to_string(set v) +// { +// std::stringstream buffer; +// buffer << "["; +// typename std::set::iterator i = v.begin(); +// if (!v.empty()) +// { buffer << *i; } +// else +// { buffer << "]"; } + +// if (v.size() == 1) +// { buffer << "]"; } +// else +// { +// for (++i; i != v.end(); ++i) buffer << ", " << *i; +// buffer << "]"; +// } + +// return buffer.str(); +// } + +namespace Sass { + + class Subset_Map { + private: + std::vector values_; + std::map >, OrderNodes > hash_; + public: + void put(const Compound_Selector_Obj& sel, const SubSetMapPair& value); + std::vector get_kv(const Compound_Selector_Obj& s); + std::vector get_v(const Compound_Selector_Obj& s); + bool empty() { return values_.empty(); } + void clear() { values_.clear(); hash_.clear(); } + const std::vector values(void) { return values_; } + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/support/libsass.pc.in b/mybulma/node_modules/node-sass/src/libsass/src/support/libsass.pc.in new file mode 100644 index 0000000..d201bfa --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/support/libsass.pc.in @@ -0,0 +1,11 @@ +prefix=@prefix@ +exec_prefix=@exec_prefix@ +libdir=@libdir@ +includedir=@includedir@ + +Name: libsass +URL: https://github.com/sass/libsass +Description: A C implementation of a Sass compiler +Version: @VERSION@ +Libs: -L${libdir} -lsass +Cflags: -I${includedir} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/to_c.cpp b/mybulma/node_modules/node-sass/src/libsass/src/to_c.cpp new file mode 100644 index 0000000..8a6ea8d --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/to_c.cpp @@ -0,0 +1,74 @@ +#include "sass.hpp" +#include "to_c.hpp" +#include "ast.hpp" + +namespace Sass { + + union Sass_Value* To_C::fallback_impl(AST_Node_Ptr n) + { 
return sass_make_error("unknown type for C-API"); } + + union Sass_Value* To_C::operator()(Boolean_Ptr b) + { return sass_make_boolean(b->value()); } + + union Sass_Value* To_C::operator()(Number_Ptr n) + { return sass_make_number(n->value(), n->unit().c_str()); } + + union Sass_Value* To_C::operator()(Custom_Warning_Ptr w) + { return sass_make_warning(w->message().c_str()); } + + union Sass_Value* To_C::operator()(Custom_Error_Ptr e) + { return sass_make_error(e->message().c_str()); } + + union Sass_Value* To_C::operator()(Color_Ptr c) + { return sass_make_color(c->r(), c->g(), c->b(), c->a()); } + + union Sass_Value* To_C::operator()(String_Constant_Ptr s) + { + if (s->quote_mark()) { + return sass_make_qstring(s->value().c_str()); + } else { + return sass_make_string(s->value().c_str()); + } + } + + union Sass_Value* To_C::operator()(String_Quoted_Ptr s) + { return sass_make_qstring(s->value().c_str()); } + + union Sass_Value* To_C::operator()(List_Ptr l) + { + union Sass_Value* v = sass_make_list(l->length(), l->separator(), l->is_bracketed()); + for (size_t i = 0, L = l->length(); i < L; ++i) { + sass_list_set_value(v, i, (*l)[i]->perform(this)); + } + return v; + } + + union Sass_Value* To_C::operator()(Map_Ptr m) + { + union Sass_Value* v = sass_make_map(m->length()); + int i = 0; + for (auto key : m->keys()) { + sass_map_set_key(v, i, key->perform(this)); + sass_map_set_value(v, i, m->at(key)->perform(this)); + i++; + } + return v; + } + + union Sass_Value* To_C::operator()(Arguments_Ptr a) + { + union Sass_Value* v = sass_make_list(a->length(), SASS_COMMA, false); + for (size_t i = 0, L = a->length(); i < L; ++i) { + sass_list_set_value(v, i, (*a)[i]->perform(this)); + } + return v; + } + + union Sass_Value* To_C::operator()(Argument_Ptr a) + { return a->value()->perform(this); } + + // not strictly necessary because of the fallback + union Sass_Value* To_C::operator()(Null_Ptr n) + { return sass_make_null(); } + +}; diff --git a/mybulma/node_modules/node-sass/src/libsass/src/to_c.hpp b/mybulma/node_modules/node-sass/src/libsass/src/to_c.hpp new file mode 100644 index 0000000..a5331e3 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/to_c.hpp @@ -0,0 +1,39 @@ +#ifndef SASS_TO_C_H +#define SASS_TO_C_H + +#include "ast_fwd_decl.hpp" +#include "operation.hpp" +#include "sass/values.h" + +namespace Sass { + + class To_C : public Operation_CRTP { + // override this to define a catch-all + union Sass_Value* fallback_impl(AST_Node_Ptr n); + + public: + + To_C() { } + ~To_C() { } + + union Sass_Value* operator()(Boolean_Ptr); + union Sass_Value* operator()(Number_Ptr); + union Sass_Value* operator()(Color_Ptr); + union Sass_Value* operator()(String_Constant_Ptr); + union Sass_Value* operator()(String_Quoted_Ptr); + union Sass_Value* operator()(Custom_Warning_Ptr); + union Sass_Value* operator()(Custom_Error_Ptr); + union Sass_Value* operator()(List_Ptr); + union Sass_Value* operator()(Map_Ptr); + union Sass_Value* operator()(Null_Ptr); + union Sass_Value* operator()(Arguments_Ptr); + union Sass_Value* operator()(Argument_Ptr); + + // dispatch to fallback implementation + union Sass_Value* fallback(AST_Node_Ptr x) + { return fallback_impl(x); } + }; + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/to_value.cpp b/mybulma/node_modules/node-sass/src/libsass/src/to_value.cpp new file mode 100644 index 0000000..3912c55 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/to_value.cpp @@ -0,0 +1,112 @@ +#include "sass.hpp" +#include 
"ast.hpp" +#include "to_value.hpp" + +namespace Sass { + + Value_Ptr To_Value::fallback_impl(AST_Node_Ptr n) + { + // throw a runtime error if this happens + // we want a well defined set of possible nodes + throw std::runtime_error("invalid node for to_value"); + } + + // Custom_Error is a valid value + Value_Ptr To_Value::operator()(Custom_Error_Ptr e) + { + return e; + } + + // Custom_Warning is a valid value + Value_Ptr To_Value::operator()(Custom_Warning_Ptr w) + { + return w; + } + + // Boolean is a valid value + Value_Ptr To_Value::operator()(Boolean_Ptr b) + { + return b; + } + + // Number is a valid value + Value_Ptr To_Value::operator()(Number_Ptr n) + { + return n; + } + + // Color is a valid value + Value_Ptr To_Value::operator()(Color_Ptr c) + { + return c; + } + + // String_Constant is a valid value + Value_Ptr To_Value::operator()(String_Constant_Ptr s) + { + return s; + } + + // String_Quoted is a valid value + Value_Ptr To_Value::operator()(String_Quoted_Ptr s) + { + return s; + } + + // List is a valid value + Value_Ptr To_Value::operator()(List_Ptr l) + { + List_Obj ll = SASS_MEMORY_NEW(List, + l->pstate(), + l->length(), + l->separator(), + l->is_arglist(), + l->is_bracketed()); + for (size_t i = 0, L = l->length(); i < L; ++i) { + ll->append((*l)[i]->perform(this)); + } + return ll.detach(); + } + + // Map is a valid value + Value_Ptr To_Value::operator()(Map_Ptr m) + { + return m; + } + + // Null is a valid value + Value_Ptr To_Value::operator()(Null_Ptr n) + { + return n; + } + + // Function is a valid value + Value_Ptr To_Value::operator()(Function_Ptr n) + { + return n; + } + + // Argument returns its value + Value_Ptr To_Value::operator()(Argument_Ptr arg) + { + if (!arg->name().empty()) return 0; + return arg->value()->perform(this); + } + + // Selector_List is converted to a string + Value_Ptr To_Value::operator()(Selector_List_Ptr s) + { + return SASS_MEMORY_NEW(String_Quoted, + s->pstate(), + s->to_string(ctx.c_options)); + } + + // Binary_Expression is converted to a string + Value_Ptr To_Value::operator()(Binary_Expression_Ptr s) + { + return SASS_MEMORY_NEW(String_Quoted, + s->pstate(), + s->to_string(ctx.c_options)); + } + +}; diff --git a/mybulma/node_modules/node-sass/src/libsass/src/to_value.hpp b/mybulma/node_modules/node-sass/src/libsass/src/to_value.hpp new file mode 100644 index 0000000..8f64128 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/to_value.hpp @@ -0,0 +1,50 @@ +#ifndef SASS_TO_VALUE_H +#define SASS_TO_VALUE_H + +#include "operation.hpp" +#include "sass/values.h" +#include "ast_fwd_decl.hpp" + +namespace Sass { + + class To_Value : public Operation_CRTP { + + Value_Ptr fallback_impl(AST_Node_Ptr n); + + private: + + Context& ctx; + + public: + + To_Value(Context& ctx) + : ctx(ctx) + { } + ~To_Value() { } + using Operation::operator(); + + Value_Ptr operator()(Argument_Ptr); + Value_Ptr operator()(Boolean_Ptr); + Value_Ptr operator()(Number_Ptr); + Value_Ptr operator()(Color_Ptr); + Value_Ptr operator()(String_Constant_Ptr); + Value_Ptr operator()(String_Quoted_Ptr); + Value_Ptr operator()(Custom_Warning_Ptr); + Value_Ptr operator()(Custom_Error_Ptr); + Value_Ptr operator()(List_Ptr); + Value_Ptr operator()(Map_Ptr); + Value_Ptr operator()(Null_Ptr); + Value_Ptr operator()(Function_Ptr); + + // convert to string via `To_String` + Value_Ptr operator()(Selector_List_Ptr); + Value_Ptr operator()(Binary_Expression_Ptr); + + // fallback throws error + template + Value_Ptr fallback(U x) { return fallback_impl(x); } + }; + +} + 
+#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/units.cpp b/mybulma/node_modules/node-sass/src/libsass/src/units.cpp new file mode 100644 index 0000000..779f1d2 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/units.cpp @@ -0,0 +1,501 @@ +#include "sass.hpp" +#include +#include "units.hpp" +#include "error_handling.hpp" + +namespace Sass { + + /* the conversion matrix can be readed the following way */ + /* if you go down, the factor is for the numerator (multiply) */ + /* if you go right, the factor is for the denominator (divide) */ + /* and yes, we actually use both, not sure why, but why not!? */ + + const double size_conversion_factors[6][6] = + { + /* in cm pc mm pt px */ + /* in */ { 1, 2.54, 6, 25.4, 72, 96, }, + /* cm */ { 1.0/2.54, 1, 6.0/2.54, 10, 72.0/2.54, 96.0/2.54 }, + /* pc */ { 1.0/6.0, 2.54/6.0, 1, 25.4/6.0, 72.0/6.0, 96.0/6.0 }, + /* mm */ { 1.0/25.4, 1.0/10.0, 6.0/25.4, 1, 72.0/25.4, 96.0/25.4 }, + /* pt */ { 1.0/72.0, 2.54/72.0, 6.0/72.0, 25.4/72.0, 1, 96.0/72.0 }, + /* px */ { 1.0/96.0, 2.54/96.0, 6.0/96.0, 25.4/96.0, 72.0/96.0, 1, } + }; + + const double angle_conversion_factors[4][4] = + { + /* deg grad rad turn */ + /* deg */ { 1, 40.0/36.0, PI/180.0, 1.0/360.0 }, + /* grad */ { 36.0/40.0, 1, PI/200.0, 1.0/400.0 }, + /* rad */ { 180.0/PI, 200.0/PI, 1, 0.5/PI }, + /* turn */ { 360.0, 400.0, 2.0*PI, 1 } + }; + + const double time_conversion_factors[2][2] = + { + /* s ms */ + /* s */ { 1, 1000.0 }, + /* ms */ { 1/1000.0, 1 } + }; + const double frequency_conversion_factors[2][2] = + { + /* Hz kHz */ + /* Hz */ { 1, 1/1000.0 }, + /* kHz */ { 1000.0, 1 } + }; + const double resolution_conversion_factors[3][3] = + { + /* dpi dpcm dppx */ + /* dpi */ { 1, 1/2.54, 1/96.0 }, + /* dpcm */ { 2.54, 1, 2.54/96 }, + /* dppx */ { 96, 96/2.54, 1 } + }; + + UnitClass get_unit_type(UnitType unit) + { + switch (unit & 0xFF00) + { + case UnitClass::LENGTH: return UnitClass::LENGTH; + case UnitClass::ANGLE: return UnitClass::ANGLE; + case UnitClass::TIME: return UnitClass::TIME; + case UnitClass::FREQUENCY: return UnitClass::FREQUENCY; + case UnitClass::RESOLUTION: return UnitClass::RESOLUTION; + default: return UnitClass::INCOMMENSURABLE; + } + }; + + std::string get_unit_class(UnitType unit) + { + switch (unit & 0xFF00) + { + case UnitClass::LENGTH: return "LENGTH"; + case UnitClass::ANGLE: return "ANGLE"; + case UnitClass::TIME: return "TIME"; + case UnitClass::FREQUENCY: return "FREQUENCY"; + case UnitClass::RESOLUTION: return "RESOLUTION"; + default: return "INCOMMENSURABLE"; + } + }; + + UnitType get_main_unit(const UnitClass unit) + { + switch (unit) + { + case UnitClass::LENGTH: return UnitType::PX; + case UnitClass::ANGLE: return UnitType::DEG; + case UnitClass::TIME: return UnitType::SEC; + case UnitClass::FREQUENCY: return UnitType::HERTZ; + case UnitClass::RESOLUTION: return UnitType::DPI; + default: return UnitType::UNKNOWN; + } + }; + + UnitType string_to_unit(const std::string& s) + { + // size units + if (s == "px") return UnitType::PX; + else if (s == "pt") return UnitType::PT; + else if (s == "pc") return UnitType::PC; + else if (s == "mm") return UnitType::MM; + else if (s == "cm") return UnitType::CM; + else if (s == "in") return UnitType::IN; + // angle units + else if (s == "deg") return UnitType::DEG; + else if (s == "grad") return UnitType::GRAD; + else if (s == "rad") return UnitType::RAD; + else if (s == "turn") return UnitType::TURN; + // time units + else if (s == "s") return UnitType::SEC; + else if (s == "ms") return 
UnitType::MSEC; + // frequency units + else if (s == "Hz") return UnitType::HERTZ; + else if (s == "kHz") return UnitType::KHERTZ; + // resolutions units + else if (s == "dpi") return UnitType::DPI; + else if (s == "dpcm") return UnitType::DPCM; + else if (s == "dppx") return UnitType::DPPX; + // for unknown units + else return UnitType::UNKNOWN; + } + + const char* unit_to_string(UnitType unit) + { + switch (unit) { + // size units + case UnitType::PX: return "px"; + case UnitType::PT: return "pt"; + case UnitType::PC: return "pc"; + case UnitType::MM: return "mm"; + case UnitType::CM: return "cm"; + case UnitType::IN: return "in"; + // angle units + case UnitType::DEG: return "deg"; + case UnitType::GRAD: return "grad"; + case UnitType::RAD: return "rad"; + case UnitType::TURN: return "turn"; + // time units + case UnitType::SEC: return "s"; + case UnitType::MSEC: return "ms"; + // frequency units + case UnitType::HERTZ: return "Hz"; + case UnitType::KHERTZ: return "kHz"; + // resolutions units + case UnitType::DPI: return "dpi"; + case UnitType::DPCM: return "dpcm"; + case UnitType::DPPX: return "dppx"; + // for unknown units + default: return ""; + } + } + + std::string unit_to_class(const std::string& s) + { + if (s == "px") return "LENGTH"; + else if (s == "pt") return "LENGTH"; + else if (s == "pc") return "LENGTH"; + else if (s == "mm") return "LENGTH"; + else if (s == "cm") return "LENGTH"; + else if (s == "in") return "LENGTH"; + // angle units + else if (s == "deg") return "ANGLE"; + else if (s == "grad") return "ANGLE"; + else if (s == "rad") return "ANGLE"; + else if (s == "turn") return "ANGLE"; + // time units + else if (s == "s") return "TIME"; + else if (s == "ms") return "TIME"; + // frequency units + else if (s == "Hz") return "FREQUENCY"; + else if (s == "kHz") return "FREQUENCY"; + // resolutions units + else if (s == "dpi") return "RESOLUTION"; + else if (s == "dpcm") return "RESOLUTION"; + else if (s == "dppx") return "RESOLUTION"; + // for unknown units + return "CUSTOM:" + s; + } + + // throws incompatibleUnits exceptions + double conversion_factor(const std::string& s1, const std::string& s2) + { + // assert for same units + if (s1 == s2) return 1; + // get unit enum from string + UnitType u1 = string_to_unit(s1); + UnitType u2 = string_to_unit(s2); + // query unit group types + UnitClass t1 = get_unit_type(u1); + UnitClass t2 = get_unit_type(u2); + // return the conversion factor + return conversion_factor(u1, u2, t1, t2); + } + + // throws incompatibleUnits exceptions + double conversion_factor(UnitType u1, UnitType u2, UnitClass t1, UnitClass t2) + { + // can't convert between groups + if (t1 != t2) return 0; + // get absolute offset + // used for array acces + size_t i1 = u1 - t1; + size_t i2 = u2 - t2; + // process known units + switch (t1) { + case LENGTH: + return size_conversion_factors[i1][i2]; + case ANGLE: + return angle_conversion_factors[i1][i2]; + case TIME: + return time_conversion_factors[i1][i2]; + case FREQUENCY: + return frequency_conversion_factors[i1][i2]; + case RESOLUTION: + return resolution_conversion_factors[i1][i2]; + case INCOMMENSURABLE: + return 0; + } + // fallback + return 0; + } + + double convert_units(const std::string& lhs, const std::string& rhs, int& lhsexp, int& rhsexp) + { + double f = 0; + // do not convert same ones + if (lhs == rhs) return 0; + // skip already canceled out unit + if (lhsexp == 0) return 0; + if (rhsexp == 0) return 0; + // check if it can be converted + UnitType ulhs = string_to_unit(lhs); + UnitType urhs 
= string_to_unit(rhs); + // skip units we cannot convert + if (ulhs == UNKNOWN) return 0; + if (urhs == UNKNOWN) return 0; + // query unit group types + UnitClass clhs = get_unit_type(ulhs); + UnitClass crhs = get_unit_type(urhs); + // skip units we cannot convert + if (clhs != crhs) return 0; + // if right denominator is bigger than lhs, we want to keep it in rhs unit + if (rhsexp < 0 && lhsexp > 0 && - rhsexp > lhsexp) { + // get the conversion factor for units + f = conversion_factor(urhs, ulhs, clhs, crhs); + // left hand side has been consumned + f = std::pow(f, lhsexp); + rhsexp += lhsexp; + lhsexp = 0; + } + else { + // get the conversion factor for units + f = conversion_factor(ulhs, urhs, clhs, crhs); + // right hand side has been consumned + f = std::pow(f, rhsexp); + lhsexp += rhsexp; + rhsexp = 0; + } + return f; + } + + bool Units::operator< (const Units& rhs) const + { + return (numerators < rhs.numerators) && + (denominators < rhs.denominators); + } + bool Units::operator== (const Units& rhs) const + { + return (numerators == rhs.numerators) && + (denominators == rhs.denominators); + } + + double Units::normalize() + { + + size_t iL = numerators.size(); + size_t nL = denominators.size(); + + // the final conversion factor + double factor = 1; + + for (size_t i = 0; i < iL; i++) { + std::string &lhs = numerators[i]; + UnitType ulhs = string_to_unit(lhs); + if (ulhs == UNKNOWN) continue; + UnitClass clhs = get_unit_type(ulhs); + UnitType umain = get_main_unit(clhs); + if (ulhs == umain) continue; + double f(conversion_factor(umain, ulhs, clhs, clhs)); + if (f == 0) throw std::runtime_error("INVALID"); + numerators[i] = unit_to_string(umain); + factor /= f; + } + + for (size_t n = 0; n < nL; n++) { + std::string &rhs = denominators[n]; + UnitType urhs = string_to_unit(rhs); + if (urhs == UNKNOWN) continue; + UnitClass crhs = get_unit_type(urhs); + UnitType umain = get_main_unit(crhs); + if (urhs == umain) continue; + double f(conversion_factor(umain, urhs, crhs, crhs)); + if (f == 0) throw std::runtime_error("INVALID"); + denominators[n] = unit_to_string(umain); + factor /= f; + } + + std::sort (numerators.begin(), numerators.end()); + std::sort (denominators.begin(), denominators.end()); + + // return for conversion + return factor; + } + + double Units::reduce() + { + + size_t iL = numerators.size(); + size_t nL = denominators.size(); + + // have less than two units? + if (iL + nL < 2) return 1; + + // first make sure same units cancel each other out + // it seems that a map table will fit nicely to do this + // we basically construct exponents for each unit + // has the advantage that they will be pre-sorted + std::map exponents; + + // initialize by summing up occurences in unit vectors + // this will already cancel out equivalent units (e.q. 
px/px) + for (size_t i = 0; i < iL; i ++) exponents[numerators[i]] += 1; + for (size_t n = 0; n < nL; n ++) exponents[denominators[n]] -= 1; + + // the final conversion factor + double factor = 1; + + // convert between compatible units + for (size_t i = 0; i < iL; i++) { + for (size_t n = 0; n < nL; n++) { + std::string &lhs = numerators[i], &rhs = denominators[n]; + int &lhsexp = exponents[lhs], &rhsexp = exponents[rhs]; + double f(convert_units(lhs, rhs, lhsexp, rhsexp)); + if (f == 0) continue; + factor /= f; + } + } + + // now we can build up the new unit arrays + numerators.clear(); + denominators.clear(); + + // recreate sorted units vectors + for (auto exp : exponents) { + int &exponent = exp.second; + while (exponent > 0 && exponent --) + numerators.push_back(exp.first); + while (exponent < 0 && exponent ++) + denominators.push_back(exp.first); + } + + // return for conversion + return factor; + + } + + std::string Units::unit() const + { + std::string u; + size_t iL = numerators.size(); + size_t nL = denominators.size(); + for (size_t i = 0; i < iL; i += 1) { + if (i) u += '*'; + u += numerators[i]; + } + if (nL != 0) u += '/'; + for (size_t n = 0; n < nL; n += 1) { + if (n) u += '*'; + u += denominators[n]; + } + return u; + } + + bool Units::is_unitless() const + { + return numerators.empty() && + denominators.empty(); + } + + bool Units::is_valid_css_unit() const + { + return numerators.size() <= 1 && + denominators.size() == 0; + } + + // this does not cover all cases (multiple prefered units) + double Units::convert_factor(const Units& r) const + { + + std::vector miss_nums(0); + std::vector miss_dens(0); + // create copy since we need these for state keeping + std::vector r_nums(r.numerators); + std::vector r_dens(r.denominators); + + auto l_num_it = numerators.begin(); + auto l_num_end = numerators.end(); + + bool l_unitless = is_unitless(); + auto r_unitless = r.is_unitless(); + + // overall conversion + double factor = 1; + + // process all left numerators + while (l_num_it != l_num_end) + { + // get and increment afterwards + const std::string l_num = *(l_num_it ++); + + auto r_num_it = r_nums.begin(), r_num_end = r_nums.end(); + + bool found = false; + // search for compatible numerator + while (r_num_it != r_num_end) + { + // get and increment afterwards + const std::string r_num = *(r_num_it); + // get possible conversion factor for units + double conversion = conversion_factor(l_num, r_num); + // skip incompatible numerator + if (conversion == 0) { + ++ r_num_it; + continue; + } + // apply to global factor + factor *= conversion; + // remove item from vector + r_nums.erase(r_num_it); + // found numerator + found = true; + break; + } + // maybe we did not find any + // left numerator is leftover + if (!found) miss_nums.push_back(l_num); + } + + auto l_den_it = denominators.begin(); + auto l_den_end = denominators.end(); + + // process all left denominators + while (l_den_it != l_den_end) + { + // get and increment afterwards + const std::string l_den = *(l_den_it ++); + + auto r_den_it = r_dens.begin(); + auto r_den_end = r_dens.end(); + + bool found = false; + // search for compatible denominator + while (r_den_it != r_den_end) + { + // get and increment afterwards + const std::string r_den = *(r_den_it); + // get possible converstion factor for units + double conversion = conversion_factor(l_den, r_den); + // skip incompatible denominator + if (conversion == 0) { + ++ r_den_it; + continue; + } + // apply to global factor + factor /= conversion; + // remove item 
from vector + r_dens.erase(r_den_it); + // found denominator + found = true; + break; + } + // maybe we did not find any + // left denominator is leftover + if (!found) miss_dens.push_back(l_den); + } + + // check left-overs (ToDo: might cancel out?) + if (miss_nums.size() > 0 && !r_unitless) { + throw Exception::IncompatibleUnits(r, *this); + } + else if (miss_dens.size() > 0 && !r_unitless) { + throw Exception::IncompatibleUnits(r, *this); + } + else if (r_nums.size() > 0 && !l_unitless) { + throw Exception::IncompatibleUnits(r, *this); + } + else if (r_dens.size() > 0 && !l_unitless) { + throw Exception::IncompatibleUnits(r, *this); + } + + return factor; + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/units.hpp b/mybulma/node_modules/node-sass/src/libsass/src/units.hpp new file mode 100644 index 0000000..306f534 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/units.hpp @@ -0,0 +1,109 @@ +#ifndef SASS_UNITS_H +#define SASS_UNITS_H + +#include +#include +#include +#include + +namespace Sass { + + const double PI = std::acos(-1); + + enum UnitClass { + LENGTH = 0x000, + ANGLE = 0x100, + TIME = 0x200, + FREQUENCY = 0x300, + RESOLUTION = 0x400, + INCOMMENSURABLE = 0x500 + }; + + enum UnitType { + + // size units + IN = UnitClass::LENGTH, + CM, + PC, + MM, + PT, + PX, + + // angle units + DEG = ANGLE, + GRAD, + RAD, + TURN, + + // time units + SEC = TIME, + MSEC, + + // frequency units + HERTZ = FREQUENCY, + KHERTZ, + + // resolutions units + DPI = RESOLUTION, + DPCM, + DPPX, + + // for unknown units + UNKNOWN = INCOMMENSURABLE + + }; + + class Units { + public: + std::vector numerators; + std::vector denominators; + public: + // default constructor + Units() : + numerators(), + denominators() + { } + // copy constructor + Units(const Units* ptr) : + numerators(ptr->numerators), + denominators(ptr->denominators) + { } + // convert to string + std::string unit() const; + // get if units are empty + bool is_unitless() const; + // return if valid for css + bool is_valid_css_unit() const; + // reduce units for output + // returns conversion factor + double reduce(); + // normalize units for compare + // returns conversion factor + double normalize(); + // compare operations + bool operator< (const Units& rhs) const; + bool operator== (const Units& rhs) const; + // factor to convert into given units + double convert_factor(const Units&) const; + }; + + extern const double size_conversion_factors[6][6]; + extern const double angle_conversion_factors[4][4]; + extern const double time_conversion_factors[2][2]; + extern const double frequency_conversion_factors[2][2]; + extern const double resolution_conversion_factors[3][3]; + + UnitType get_main_unit(const UnitClass unit); + enum Sass::UnitType string_to_unit(const std::string&); + const char* unit_to_string(Sass::UnitType unit); + enum Sass::UnitClass get_unit_type(Sass::UnitType unit); + std::string get_unit_class(Sass::UnitType unit); + std::string unit_to_class(const std::string&); + // throws incompatibleUnits exceptions + double conversion_factor(const std::string&, const std::string&); + double conversion_factor(UnitType, UnitType, UnitClass, UnitClass); + double convert_units(const std::string&, const std::string&, int&, int&); + +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/utf8.h b/mybulma/node_modules/node-sass/src/libsass/src/utf8.h new file mode 100644 index 0000000..82b13f5 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/utf8.h @@ -0,0 +1,34 @@ +// 
Copyright 2006 Nemanja Trifunovic + +/* +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +*/ + + +#ifndef UTF8_FOR_CPP_2675DCD0_9480_4c0c_B92A_CC14C027B731 +#define UTF8_FOR_CPP_2675DCD0_9480_4c0c_B92A_CC14C027B731 + +#include "utf8/checked.h" +#include "utf8/unchecked.h" + +#endif // header guard diff --git a/mybulma/node_modules/node-sass/src/libsass/src/utf8/checked.h b/mybulma/node_modules/node-sass/src/libsass/src/utf8/checked.h new file mode 100644 index 0000000..693aee9 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/utf8/checked.h @@ -0,0 +1,334 @@ +// Copyright 2006 Nemanja Trifunovic + +/* +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+*/ + + +#ifndef UTF8_FOR_CPP_CHECKED_H_2675DCD0_9480_4c0c_B92A_CC14C027B731 +#define UTF8_FOR_CPP_CHECKED_H_2675DCD0_9480_4c0c_B92A_CC14C027B731 + +#include "core.h" +#include + +namespace utf8 +{ + // Base for the exceptions that may be thrown from the library + class exception : public ::std::exception { + }; + + // Exceptions that may be thrown from the library functions. + class invalid_code_point : public exception { + uint32_t cp; + public: + invalid_code_point(uint32_t cp) : cp(cp) {} + virtual const char* what() const throw() { return "Invalid code point"; } + uint32_t code_point() const {return cp;} + }; + + class invalid_utf8 : public exception { + uint8_t u8; + public: + invalid_utf8 (uint8_t u) : u8(u) {} + virtual const char* what() const throw() { return "Invalid UTF-8"; } + uint8_t utf8_octet() const {return u8;} + }; + + class invalid_utf16 : public exception { + uint16_t u16; + public: + invalid_utf16 (uint16_t u) : u16(u) {} + virtual const char* what() const throw() { return "Invalid UTF-16"; } + uint16_t utf16_word() const {return u16;} + }; + + class not_enough_room : public exception { + public: + virtual const char* what() const throw() { return "Not enough space"; } + }; + + /// The library API - functions intended to be called by the users + + template + octet_iterator append(uint32_t cp, octet_iterator result) + { + if (!utf8::internal::is_code_point_valid(cp)) + throw invalid_code_point(cp); + + if (cp < 0x80) // one octet + *(result++) = static_cast(cp); + else if (cp < 0x800) { // two octets + *(result++) = static_cast((cp >> 6) | 0xc0); + *(result++) = static_cast((cp & 0x3f) | 0x80); + } + else if (cp < 0x10000) { // three octets + *(result++) = static_cast((cp >> 12) | 0xe0); + *(result++) = static_cast(((cp >> 6) & 0x3f) | 0x80); + *(result++) = static_cast((cp & 0x3f) | 0x80); + } + else { // four octets + *(result++) = static_cast((cp >> 18) | 0xf0); + *(result++) = static_cast(((cp >> 12) & 0x3f) | 0x80); + *(result++) = static_cast(((cp >> 6) & 0x3f) | 0x80); + *(result++) = static_cast((cp & 0x3f) | 0x80); + } + return result; + } + + template + output_iterator replace_invalid(octet_iterator start, octet_iterator end, output_iterator out, uint32_t replacement) + { + while (start != end) { + octet_iterator sequence_start = start; + internal::utf_error err_code = utf8::internal::validate_next(start, end); + switch (err_code) { + case internal::UTF8_OK : + for (octet_iterator it = sequence_start; it != start; ++it) + *out++ = *it; + break; + case internal::NOT_ENOUGH_ROOM: + throw not_enough_room(); + case internal::INVALID_LEAD: + out = utf8::append (replacement, out); + ++start; + break; + case internal::INCOMPLETE_SEQUENCE: + case internal::OVERLONG_SEQUENCE: + case internal::INVALID_CODE_POINT: + out = utf8::append (replacement, out); + ++start; + // just one replacement mark for the sequence + while (start != end && utf8::internal::is_trail(*start)) + ++start; + break; + } + } + return out; + } + + template + inline output_iterator replace_invalid(octet_iterator start, octet_iterator end, output_iterator out) + { + static const uint32_t replacement_marker = utf8::internal::mask16(0xfffd); + return utf8::replace_invalid(start, end, out, replacement_marker); + } + + template + uint32_t next(octet_iterator& it, octet_iterator end) + { + uint32_t cp = 0; + internal::utf_error err_code = utf8::internal::validate_next(it, end, cp); + switch (err_code) { + case internal::UTF8_OK : + break; + case internal::NOT_ENOUGH_ROOM : + throw not_enough_room(); + 
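          // For illustration (editor's note, not upstream code): a lone continuation
          // octet such as 0x80 has no valid lead pattern, so sequence_length() returns
          // 0 and validate_next() reports INVALID_LEAD. An "overlong" sequence encodes
          // a code point in more octets than necessary, e.g. 0xC0 0x80 decodes to
          // U+0000, which only needs one octet, so OVERLONG_SEQUENCE is reported.
          // Both kinds of error surface here as invalid_utf8 carrying the offending octet.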
case internal::INVALID_LEAD : + case internal::INCOMPLETE_SEQUENCE : + case internal::OVERLONG_SEQUENCE : + throw invalid_utf8(*it); + case internal::INVALID_CODE_POINT : + throw invalid_code_point(cp); + } + return cp; + } + + template + uint32_t peek_next(octet_iterator it, octet_iterator end) + { + return utf8::next(it, end); + } + + template + uint32_t prior(octet_iterator& it, octet_iterator start) + { + // can't do much if it == start + if (it == start) + throw not_enough_room(); + + octet_iterator end = it; + // Go back until we hit either a lead octet or start + while (utf8::internal::is_trail(*(--it))) + if (it == start) + throw invalid_utf8(*it); // error - no lead byte in the sequence + return utf8::peek_next(it, end); + } + + /// Deprecated in versions that include "prior" + template + uint32_t previous(octet_iterator& it, octet_iterator pass_start) + { + octet_iterator end = it; + while (utf8::internal::is_trail(*(--it))) + if (it == pass_start) + throw invalid_utf8(*it); // error - no lead byte in the sequence + octet_iterator temp = it; + return utf8::next(temp, end); + } + + template + void advance (octet_iterator& it, distance_type n, octet_iterator end) + { + for (distance_type i = 0; i < n; ++i) + utf8::next(it, end); + } + + template + void retreat (octet_iterator& it, distance_type n, octet_iterator start) + { + for (distance_type i = 0; i < n; ++i) + utf8::prior(it, start); + } + + template + typename std::iterator_traits::difference_type + distance (octet_iterator first, octet_iterator last) + { + typename std::iterator_traits::difference_type dist; + for (dist = 0; first < last; ++dist) + utf8::next(first, last); + return dist; + } + + template + octet_iterator utf16to8 (u16bit_iterator start, u16bit_iterator end, octet_iterator result) + { + while (start != end) { + uint32_t cp = utf8::internal::mask16(*start++); + // Take care of surrogate pairs first + if (utf8::internal::is_lead_surrogate(cp)) { + if (start != end) { + uint32_t trail_surrogate = utf8::internal::mask16(*start++); + if (utf8::internal::is_trail_surrogate(trail_surrogate)) + cp = (cp << 10) + trail_surrogate + internal::SURROGATE_OFFSET; + else + throw invalid_utf16(static_cast(trail_surrogate)); + } + else + throw invalid_utf16(static_cast(cp)); + + } + // Lone trail surrogate + else if (utf8::internal::is_trail_surrogate(cp)) + throw invalid_utf16(static_cast(cp)); + + result = utf8::append(cp, result); + } + return result; + } + + template + u16bit_iterator utf8to16 (octet_iterator start, octet_iterator end, u16bit_iterator result) + { + while (start != end) { + uint32_t cp = utf8::next(start, end); + if (cp > 0xffff) { //make a surrogate pair + *result++ = static_cast((cp >> 10) + internal::LEAD_OFFSET); + *result++ = static_cast((cp & 0x3ff) + internal::TRAIL_SURROGATE_MIN); + } + else + *result++ = static_cast(cp); + } + return result; + } + + template + octet_iterator utf32to8 (u32bit_iterator start, u32bit_iterator end, octet_iterator result) + { + while (start != end) + result = utf8::append(*(start++), result); + + return result; + } + + template + u32bit_iterator utf8to32 (octet_iterator start, octet_iterator end, u32bit_iterator result) + { + while (start != end) + (*result++) = utf8::next(start, end); + + return result; + } + + // The iterator class + template + class iterator : public std::iterator { + octet_iterator it; + octet_iterator range_start; + octet_iterator range_end; + public: + iterator () {} + explicit iterator (const octet_iterator& octet_it, + const octet_iterator& 
range_start, + const octet_iterator& range_end) : + it(octet_it), range_start(range_start), range_end(range_end) + { + if (it < range_start || it > range_end) + throw std::out_of_range("Invalid utf-8 iterator position"); + } + // the default "big three" are OK + octet_iterator base () const { return it; } + uint32_t operator * () const + { + octet_iterator temp = it; + return utf8::next(temp, range_end); + } + bool operator == (const iterator& rhs) const + { + if (range_start != rhs.range_start || range_end != rhs.range_end) + throw std::logic_error("Comparing utf-8 iterators defined with different ranges"); + return (it == rhs.it); + } + bool operator != (const iterator& rhs) const + { + return !(operator == (rhs)); + } + iterator& operator ++ () + { + utf8::next(it, range_end); + return *this; + } + iterator operator ++ (int) + { + iterator temp = *this; + utf8::next(it, range_end); + return temp; + } + iterator& operator -- () + { + utf8::prior(it, range_start); + return *this; + } + iterator operator -- (int) + { + iterator temp = *this; + utf8::prior(it, range_start); + return temp; + } + }; // class iterator + +} // namespace utf8 + +#endif //header guard + + diff --git a/mybulma/node_modules/node-sass/src/libsass/src/utf8/core.h b/mybulma/node_modules/node-sass/src/libsass/src/utf8/core.h new file mode 100644 index 0000000..f85081f --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/utf8/core.h @@ -0,0 +1,329 @@ +// Copyright 2006 Nemanja Trifunovic + +/* +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +*/ + + +#ifndef UTF8_FOR_CPP_CORE_H_2675DCD0_9480_4c0c_B92A_CC14C027B731 +#define UTF8_FOR_CPP_CORE_H_2675DCD0_9480_4c0c_B92A_CC14C027B731 + +#include + +namespace utf8 +{ + // The typedefs for 8-bit, 16-bit and 32-bit unsigned integers + // You may need to change them to match your system. + // These typedefs have the same names as ones from cstdint, or boost/cstdint + typedef unsigned char uint8_t; + typedef unsigned short uint16_t; + typedef unsigned int uint32_t; + +// Helper code - not intended to be directly called by the library users. 
May be changed at any time +namespace internal +{ + // Unicode constants + // Leading (high) surrogates: 0xd800 - 0xdbff + // Trailing (low) surrogates: 0xdc00 - 0xdfff + const uint16_t LEAD_SURROGATE_MIN = 0xd800u; + const uint16_t LEAD_SURROGATE_MAX = 0xdbffu; + const uint16_t TRAIL_SURROGATE_MIN = 0xdc00u; + const uint16_t TRAIL_SURROGATE_MAX = 0xdfffu; + const uint16_t LEAD_OFFSET = LEAD_SURROGATE_MIN - (0x10000 >> 10); + const uint32_t SURROGATE_OFFSET = 0x10000u - (LEAD_SURROGATE_MIN << 10) - TRAIL_SURROGATE_MIN; + + // Maximum valid value for a Unicode code point + const uint32_t CODE_POINT_MAX = 0x0010ffffu; + + template + inline uint8_t mask8(octet_type oc) + { + return static_cast(0xff & oc); + } + template + inline uint16_t mask16(u16_type oc) + { + return static_cast(0xffff & oc); + } + template + inline bool is_trail(octet_type oc) + { + return ((utf8::internal::mask8(oc) >> 6) == 0x2); + } + + template + inline bool is_lead_surrogate(u16 cp) + { + return (cp >= LEAD_SURROGATE_MIN && cp <= LEAD_SURROGATE_MAX); + } + + template + inline bool is_trail_surrogate(u16 cp) + { + return (cp >= TRAIL_SURROGATE_MIN && cp <= TRAIL_SURROGATE_MAX); + } + + template + inline bool is_surrogate(u16 cp) + { + return (cp >= LEAD_SURROGATE_MIN && cp <= TRAIL_SURROGATE_MAX); + } + + template + inline bool is_code_point_valid(u32 cp) + { + return (cp <= CODE_POINT_MAX && !utf8::internal::is_surrogate(cp)); + } + + template + inline typename std::iterator_traits::difference_type + sequence_length(octet_iterator lead_it) + { + uint8_t lead = utf8::internal::mask8(*lead_it); + if (lead < 0x80) + return 1; + else if ((lead >> 5) == 0x6) + return 2; + else if ((lead >> 4) == 0xe) + return 3; + else if ((lead >> 3) == 0x1e) + return 4; + else + return 0; + } + + template + inline bool is_overlong_sequence(uint32_t cp, octet_difference_type length) + { + if (cp < 0x80) { + if (length != 1) + return true; + } + else if (cp < 0x800) { + if (length != 2) + return true; + } + else if (cp < 0x10000) { + if (length != 3) + return true; + } + + return false; + } + + enum utf_error {UTF8_OK, NOT_ENOUGH_ROOM, INVALID_LEAD, INCOMPLETE_SEQUENCE, OVERLONG_SEQUENCE, INVALID_CODE_POINT}; + + /// Helper for get_sequence_x + template + utf_error increase_safely(octet_iterator& it, octet_iterator end) + { + if (++it == end) + return NOT_ENOUGH_ROOM; + + if (!utf8::internal::is_trail(*it)) + return INCOMPLETE_SEQUENCE; + + return UTF8_OK; + } + + #define UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR(IT, END) {utf_error ret = increase_safely(IT, END); if (ret != UTF8_OK) return ret;} + + /// get_sequence_x functions decode utf-8 sequences of the length x + template + utf_error get_sequence_1(octet_iterator& it, octet_iterator end, uint32_t& code_point) + { + if (it == end) + return NOT_ENOUGH_ROOM; + + code_point = utf8::internal::mask8(*it); + + return UTF8_OK; + } + + template + utf_error get_sequence_2(octet_iterator& it, octet_iterator end, uint32_t& code_point) + { + if (it == end) + return NOT_ENOUGH_ROOM; + + code_point = utf8::internal::mask8(*it); + + UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR(it, end) + + code_point = ((code_point << 6) & 0x7ff) + ((*it) & 0x3f); + + return UTF8_OK; + } + + template + utf_error get_sequence_3(octet_iterator& it, octet_iterator end, uint32_t& code_point) + { + if (it == end) + return NOT_ENOUGH_ROOM; + + code_point = utf8::internal::mask8(*it); + + UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR(it, end) + + code_point = ((code_point << 12) & 0xffff) + ((utf8::internal::mask8(*it) << 6) & 0xfff); + + 
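      // For illustration: tracing the Euro sign U+20AC (UTF-8 bytes E2 82 AC)
      // through this decoder, code_point == 0xE2 after the lead octet, the
      // assignment above folds in the first continuation octet giving 0x2080,
      // and once the next continuation octet is read below, adding
      // (0xAC & 0x3f) == 0x2C yields the final value 0x20AC.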
UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR(it, end) + + code_point += (*it) & 0x3f; + + return UTF8_OK; + } + + template + utf_error get_sequence_4(octet_iterator& it, octet_iterator end, uint32_t& code_point) + { + if (it == end) + return NOT_ENOUGH_ROOM; + + code_point = utf8::internal::mask8(*it); + + UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR(it, end) + + code_point = ((code_point << 18) & 0x1fffff) + ((utf8::internal::mask8(*it) << 12) & 0x3ffff); + + UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR(it, end) + + code_point += (utf8::internal::mask8(*it) << 6) & 0xfff; + + UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR(it, end) + + code_point += (*it) & 0x3f; + + return UTF8_OK; + } + + #undef UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR + + template + utf_error validate_next(octet_iterator& it, octet_iterator end, uint32_t& code_point) + { + // Save the original value of it so we can go back in case of failure + // Of course, it does not make much sense with i.e. stream iterators + octet_iterator original_it = it; + + uint32_t cp = 0; + // Determine the sequence length based on the lead octet + typedef typename std::iterator_traits::difference_type octet_difference_type; + const octet_difference_type length = utf8::internal::sequence_length(it); + + // Get trail octets and calculate the code point + utf_error err = UTF8_OK; + switch (length) { + case 0: + return INVALID_LEAD; + case 1: + err = utf8::internal::get_sequence_1(it, end, cp); + break; + case 2: + err = utf8::internal::get_sequence_2(it, end, cp); + break; + case 3: + err = utf8::internal::get_sequence_3(it, end, cp); + break; + case 4: + err = utf8::internal::get_sequence_4(it, end, cp); + break; + } + + if (err == UTF8_OK) { + // Decoding succeeded. Now, security checks... + if (utf8::internal::is_code_point_valid(cp)) { + if (!utf8::internal::is_overlong_sequence(cp, length)){ + // Passed! Return here. 
+ code_point = cp; + ++it; + return UTF8_OK; + } + else + err = OVERLONG_SEQUENCE; + } + else + err = INVALID_CODE_POINT; + } + + // Failure branch - restore the original value of the iterator + it = original_it; + return err; + } + + template + inline utf_error validate_next(octet_iterator& it, octet_iterator end) { + uint32_t ignored; + return utf8::internal::validate_next(it, end, ignored); + } + +} // namespace internal + + /// The library API - functions intended to be called by the users + + // Byte order mark + const uint8_t bom[] = {0xef, 0xbb, 0xbf}; + + template + octet_iterator find_invalid(octet_iterator start, octet_iterator end) + { + octet_iterator result = start; + while (result != end) { + utf8::internal::utf_error err_code = utf8::internal::validate_next(result, end); + if (err_code != internal::UTF8_OK) + return result; + } + return result; + } + + template + inline bool is_valid(octet_iterator start, octet_iterator end) + { + return (utf8::find_invalid(start, end) == end); + } + + template + inline bool starts_with_bom (octet_iterator it, octet_iterator end) + { + return ( + ((it != end) && (utf8::internal::mask8(*it++)) == bom[0]) && + ((it != end) && (utf8::internal::mask8(*it++)) == bom[1]) && + ((it != end) && (utf8::internal::mask8(*it)) == bom[2]) + ); + } + + //Deprecated in release 2.3 + template + inline bool is_bom (octet_iterator it) + { + return ( + (utf8::internal::mask8(*it++)) == bom[0] && + (utf8::internal::mask8(*it++)) == bom[1] && + (utf8::internal::mask8(*it)) == bom[2] + ); + } +} // namespace utf8 + +#endif // header guard + + diff --git a/mybulma/node_modules/node-sass/src/libsass/src/utf8/unchecked.h b/mybulma/node_modules/node-sass/src/libsass/src/utf8/unchecked.h new file mode 100644 index 0000000..01bdd07 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/utf8/unchecked.h @@ -0,0 +1,235 @@ +// Copyright 2006 Nemanja Trifunovic + +/* +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+*/ + + +#ifndef UTF8_FOR_CPP_UNCHECKED_H_2675DCD0_9480_4c0c_B92A_CC14C027B731 +#define UTF8_FOR_CPP_UNCHECKED_H_2675DCD0_9480_4c0c_B92A_CC14C027B731 + +#include "core.h" + +namespace utf8 +{ + namespace unchecked + { + template + octet_iterator append(uint32_t cp, octet_iterator result) + { + if (cp < 0x80) // one octet + *(result++) = static_cast(cp); + else if (cp < 0x800) { // two octets + *(result++) = static_cast((cp >> 6) | 0xc0); + *(result++) = static_cast((cp & 0x3f) | 0x80); + } + else if (cp < 0x10000) { // three octets + *(result++) = static_cast((cp >> 12) | 0xe0); + *(result++) = static_cast(((cp >> 6) & 0x3f) | 0x80); + *(result++) = static_cast((cp & 0x3f) | 0x80); + } + else { // four octets + *(result++) = static_cast((cp >> 18) | 0xf0); + *(result++) = static_cast(((cp >> 12) & 0x3f)| 0x80); + *(result++) = static_cast(((cp >> 6) & 0x3f) | 0x80); + *(result++) = static_cast((cp & 0x3f) | 0x80); + } + return result; + } + + template + uint32_t next(octet_iterator& it) + { + uint32_t cp = utf8::internal::mask8(*it); + typename std::iterator_traits::difference_type length = utf8::internal::sequence_length(it); + switch (length) { + case 1: + break; + case 2: + it++; + cp = ((cp << 6) & 0x7ff) + ((*it) & 0x3f); + break; + case 3: + ++it; + cp = ((cp << 12) & 0xffff) + ((utf8::internal::mask8(*it) << 6) & 0xfff); + ++it; + cp += (*it) & 0x3f; + break; + case 4: + ++it; + cp = ((cp << 18) & 0x1fffff) + ((utf8::internal::mask8(*it) << 12) & 0x3ffff); + ++it; + cp += (utf8::internal::mask8(*it) << 6) & 0xfff; + ++it; + cp += (*it) & 0x3f; + break; + } + ++it; + return cp; + } + + template + uint32_t peek_next(octet_iterator it) + { + return utf8::unchecked::next(it); + } + + template + uint32_t prior(octet_iterator& it) + { + while (utf8::internal::is_trail(*(--it))) ; + octet_iterator temp = it; + return utf8::unchecked::next(temp); + } + + // Deprecated in versions that include prior, but only for the sake of consistency (see utf8::previous) + template + inline uint32_t previous(octet_iterator& it) + { + return utf8::unchecked::prior(it); + } + + template + void advance (octet_iterator& it, distance_type n) + { + for (distance_type i = 0; i < n; ++i) + utf8::unchecked::next(it); + } + + template + void retreat (octet_iterator& it, distance_type n) + { + for (distance_type i = 0; i < n; ++i) + utf8::unchecked::prior(it); + } + + template + typename std::iterator_traits::difference_type + distance (octet_iterator first, octet_iterator last) + { + typename std::iterator_traits::difference_type dist; + for (dist = 0; first < last; ++dist) + utf8::unchecked::next(first); + return dist; + } + + template + octet_iterator utf16to8 (u16bit_iterator start, u16bit_iterator end, octet_iterator result) + { + while (start != end) { + uint32_t cp = utf8::internal::mask16(*start++); + // Take care of surrogate pairs first + if (utf8::internal::is_lead_surrogate(cp)) { + uint32_t trail_surrogate = utf8::internal::mask16(*start++); + cp = (cp << 10) + trail_surrogate + internal::SURROGATE_OFFSET; + } + result = utf8::unchecked::append(cp, result); + } + return result; + } + + template + u16bit_iterator utf8to16 (octet_iterator start, octet_iterator end, u16bit_iterator result) + { + while (start < end) { + uint32_t cp = utf8::unchecked::next(start); + if (cp > 0xffff) { //make a surrogate pair + *result++ = static_cast((cp >> 10) + internal::LEAD_OFFSET); + *result++ = static_cast((cp & 0x3ff) + internal::TRAIL_SURROGATE_MIN); + } + else + *result++ = static_cast(cp); + } + return result; + } 
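    // Worked example (for illustration only): U+1F600 is stored in UTF-16 as the
    // surrogate pair 0xD83D 0xDE00 and in UTF-8 as F0 9F 98 80. LEAD_OFFSET and
    // SURROGATE_OFFSET are chosen so that the formulas in utf16to8/utf8to16 above
    // are exact inverses of each other. A minimal usage sketch, assuming a
    // well-formed std::u16string `w` (the name is hypothetical):
    //
    //   std::string out;
    //   utf8::unchecked::utf16to8(w.begin(), w.end(), std::back_inserter(out));
    //
    // Unlike the checked variants, these unchecked functions assume valid input
    // and never throw.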
+ + template + octet_iterator utf32to8 (u32bit_iterator start, u32bit_iterator end, octet_iterator result) + { + while (start != end) + result = utf8::unchecked::append(*(start++), result); + + return result; + } + + template + u32bit_iterator utf8to32 (octet_iterator start, octet_iterator end, u32bit_iterator result) + { + while (start < end) + (*result++) = utf8::unchecked::next(start); + + return result; + } + + // The iterator class + template + class iterator : public std::iterator { + octet_iterator it; + public: + iterator () {} + explicit iterator (const octet_iterator& octet_it): it(octet_it) {} + // the default "big three" are OK + octet_iterator base () const { return it; } + uint32_t operator * () const + { + octet_iterator temp = it; + return utf8::unchecked::next(temp); + } + bool operator == (const iterator& rhs) const + { + return (it == rhs.it); + } + bool operator != (const iterator& rhs) const + { + return !(operator == (rhs)); + } + iterator& operator ++ () + { + ::std::advance(it, utf8::internal::sequence_length(it)); + return *this; + } + iterator operator ++ (int) + { + iterator temp = *this; + ::std::advance(it, utf8::internal::sequence_length(it)); + return temp; + } + iterator& operator -- () + { + utf8::unchecked::prior(it); + return *this; + } + iterator operator -- (int) + { + iterator temp = *this; + utf8::unchecked::prior(it); + return temp; + } + }; // class iterator + + } // namespace utf8::unchecked +} // namespace utf8 + + +#endif // header guard + diff --git a/mybulma/node_modules/node-sass/src/libsass/src/utf8_string.cpp b/mybulma/node_modules/node-sass/src/libsass/src/utf8_string.cpp new file mode 100644 index 0000000..1942552 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/utf8_string.cpp @@ -0,0 +1,102 @@ +#include "sass.hpp" +#include +#include +#include +#include + +#include "utf8.h" + +namespace Sass { + namespace UTF_8 { + using std::string; + + // naming conventions: + // offset: raw byte offset (0 based) + // position: code point offset (0 based) + // index: code point offset (1 based or negative) + + // function that will count the number of code points (utf-8 characters) from the given beginning to the given end + size_t code_point_count(const string& str, size_t start, size_t end) { + return utf8::distance(str.begin() + start, str.begin() + end); + } + + size_t code_point_count(const string& str) { + return utf8::distance(str.begin(), str.end()); + } + + // function that will return the byte offset at a code point position + size_t offset_at_position(const string& str, size_t position) { + string::const_iterator it = str.begin(); + utf8::advance(it, position, str.end()); + return std::distance(str.begin(), it); + } + + // function that returns number of bytes in a character at offset + size_t code_point_size_at_offset(const string& str, size_t offset) { + // get iterator from string and forward by offset + string::const_iterator stop = str.begin() + offset; + // check if beyond boundary + if (stop == str.end()) return 0; + // advance by one code point + utf8::advance(stop, 1, str.end()); + // calculate offset for code point + return stop - str.begin() - offset; + } + + // function that will return a normalized index, given a crazy one + size_t normalize_index(int index, size_t len) { + long signed_len = static_cast(len); + // assuming the index is 1-based + // we are returning a 0-based index + if (index > 0 && index <= signed_len) { + // positive and within string length + return index-1; + } + else if (index > 
signed_len) { + // positive and past string length + return len; + } + else if (index == 0) { + return 0; + } + else if (std::abs((double)index) <= signed_len) { + // negative and within string length + return index + signed_len; + } + else { + // negative and past string length + return 0; + } + } + + #ifdef _WIN32 + + // utf16 functions + using std::wstring; + + // convert from utf16/wide string to utf8 string + string convert_from_utf16(const wstring& utf16) + { + string utf8; + // pre-allocate expected memory + utf8.reserve(sizeof(utf16)/2); + utf8::utf16to8(utf16.begin(), utf16.end(), + back_inserter(utf8)); + return utf8; + } + + // convert from utf8 string to utf16/wide string + wstring convert_to_utf16(const string& utf8) + { + wstring utf16; + // pre-allocate expected memory + utf16.reserve(code_point_count(utf8)*2); + utf8::utf8to16(utf8.begin(), utf8.end(), + back_inserter(utf16)); + return utf16; + } + + #endif + + } +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/utf8_string.hpp b/mybulma/node_modules/node-sass/src/libsass/src/utf8_string.hpp new file mode 100644 index 0000000..5e879be --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/utf8_string.hpp @@ -0,0 +1,37 @@ +#ifndef SASS_UTF8_STRING_H +#define SASS_UTF8_STRING_H + +#include +#include "utf8.h" + +namespace Sass { + namespace UTF_8 { + + // naming conventions: + // offset: raw byte offset (0 based) + // position: code point offset (0 based) + // index: code point offset (1 based or negative) + + // function that will count the number of code points (utf-8 characters) from the beginning to the given end + size_t code_point_count(const std::string& str, size_t start, size_t end); + size_t code_point_count(const std::string& str); + + // function that will return the byte offset of a code point in a + size_t offset_at_position(const std::string& str, size_t position); + + // function that returns number of bytes in a character in a string + size_t code_point_size_at_offset(const std::string& str, size_t offset); + + // function that will return a normalized index, given a crazy one + size_t normalize_index(int index, size_t len); + + #ifdef _WIN32 + // functions to handle unicode paths on windows + std::string convert_from_utf16(const std::wstring& wstr); + std::wstring convert_to_utf16(const std::string& str); + #endif + + } +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/util.cpp b/mybulma/node_modules/node-sass/src/libsass/src/util.cpp new file mode 100644 index 0000000..60f69ab --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/util.cpp @@ -0,0 +1,733 @@ +#include "sass.hpp" +#include "sass.h" +#include "ast.hpp" +#include "util.hpp" +#include "lexer.hpp" +#include "prelexer.hpp" +#include "constants.hpp" +#include "utf8/checked.h" + +#include +#include +#if defined(_MSC_VER) && _MSC_VER >= 1800 && _MSC_VER < 1900 && defined(_M_X64) +#include +#endif + +namespace Sass { + + double round(double val, size_t precision) + { + // Disable FMA3-optimized implementation when compiling with VS2013 for x64 targets + // See https://github.com/sass/node-sass/issues/1854 for details + // FIXME: Remove this workaround when we switch to VS2015+ + #if defined(_MSC_VER) && _MSC_VER >= 1800 && _MSC_VER < 1900 && defined(_M_X64) + static std::once_flag flag; + std::call_once(flag, []() { _set_FMA3_enable(0); }); + #endif + + // https://github.com/sass/sass/commit/4e3e1d5684cc29073a507578fc977434ff488c93 + if (fmod(val, 1) - 0.5 > - std::pow(0.1, precision + 1)) 
return std::ceil(val); + else if (fmod(val, 1) - 0.5 > std::pow(0.1, precision)) return std::floor(val); + // work around some compiler issue + // cygwin has it not defined in std + using namespace std; + return ::round(val); + } + + /* Locale unspecific atof function. */ + double sass_strtod(const char *str) + { + char separator = *(localeconv()->decimal_point); + if(separator != '.'){ + // The current locale specifies another + // separator. convert the separator to the + // one understood by the locale if needed + const char *found = strchr(str, '.'); + if(found != NULL){ + // substitution is required. perform the substitution on a copy + // of the string. This is slower but it is thread safe. + char *copy = sass_copy_c_string(str); + *(copy + (found - str)) = separator; + double res = strtod(copy, NULL); + free(copy); + return res; + } + } + + return strtod(str, NULL); + } + + // helper for safe access to c_ctx + const char* safe_str (const char* str, const char* alt) { + return str == NULL ? alt : str; + } + + void free_string_array(char ** arr) { + if(!arr) + return; + + char **it = arr; + while (it && (*it)) { + free(*it); + ++it; + } + + free(arr); + } + + char **copy_strings(const std::vector& strings, char*** array, int skip) { + int num = static_cast(strings.size()) - skip; + char** arr = (char**) calloc(num + 1, sizeof(char*)); + if (arr == 0) + return *array = (char **)NULL; + + for(int i = 0; i < num; i++) { + arr[i] = (char*) malloc(sizeof(char) * (strings[i + skip].size() + 1)); + if (arr[i] == 0) { + free_string_array(arr); + return *array = (char **)NULL; + } + std::copy(strings[i + skip].begin(), strings[i + skip].end(), arr[i]); + arr[i][strings[i + skip].size()] = '\0'; + } + + arr[num] = 0; + return *array = arr; + } + + // read css string (handle multiline DELIM) + std::string read_css_string(const std::string& str, bool css) + { + if (!css) return str; + std::string out(""); + bool esc = false; + for (auto i : str) { + if (i == '\\') { + esc = ! esc; + } else if (esc && i == '\r') { + continue; + } else if (esc && i == '\n') { + out.resize (out.size () - 1); + esc = false; + continue; + } else { + esc = false; + } + out.push_back(i); + } + // happens when parsing does not correctly skip + // over escaped sequences for ie. interpolations + // one example: foo\#{interpolate} + // if (esc) out += '\\'; + return out; + } + + // double escape all escape sequences + // keep unescaped quotes and backslashes + std::string evacuate_escapes(const std::string& str) + { + std::string out(""); + bool esc = false; + for (auto i : str) { + if (i == '\\' && !esc) { + out += '\\'; + out += '\\'; + esc = true; + } else if (esc && i == '"') { + out += '\\'; + out += i; + esc = false; + } else if (esc && i == '\'') { + out += '\\'; + out += i; + esc = false; + } else if (esc && i == '\\') { + out += '\\'; + out += i; + esc = false; + } else { + esc = false; + out += i; + } + } + // happens when parsing does not correctly skip + // over escaped sequences for ie. 
interpolations + // one example: foo\#{interpolate} + // if (esc) out += '\\'; + return out; + } + + // bell characters are replaced with spaces + void newline_to_space(std::string& str) + { + std::replace(str.begin(), str.end(), '\n', ' '); + } + + // bell characters are replaced with spaces + // also eats spaces after line-feeds (ltrim) + std::string string_to_output(const std::string& str) + { + std::string out(""); + bool lf = false; + for (auto i : str) { + if (i == '\n') { + out += ' '; + lf = true; + } else if (!(lf && isspace(i))) { + out += i; + lf = false; + } + } + return out; + } + + std::string escape_string(const std::string& str) + { + std::string out(""); + for (auto i : str) { + if (i == '\n') { + out += "\\n"; + } else if (i == '\r') { + out += "\\r"; + } else if (i == '\t') { + out += "\\t"; + } else { + out += i; + } + } + return out; + } + + std::string comment_to_string(const std::string& text) + { + std::string str = ""; + size_t has = 0; + char prev = 0; + bool clean = false; + for (auto i : text) { + if (clean) { + if (i == '\n') { has = 0; } + else if (i == '\r') { has = 0; } + else if (i == '\t') { ++ has; } + else if (i == ' ') { ++ has; } + else if (i == '*') {} + else { + clean = false; + str += ' '; + if (prev == '*' && i == '/') str += "*/"; + else str += i; + } + } else if (i == '\n') { + clean = true; + } else if (i == '\r') { + clean = true; + } else { + str += i; + } + prev = i; + } + if (has) return str; + else return text; + } + + // find best quote_mark by detecting if the string contains any single + // or double quotes. When a single quote is found, we not we want a double + // quote as quote_mark. Otherwise we check if the string cotains any double + // quotes, which will trigger the use of single quotes as best quote_mark. + char detect_best_quotemark(const char* s, char qm) + { + // ensure valid fallback quote_mark + char quote_mark = qm && qm != '*' ? qm : '"'; + while (*s) { + // force double quotes as soon + // as one single quote is found + if (*s == '\'') { return '"'; } + // a single does not force quote_mark + // maybe we see a double quote later + else if (*s == '"') { quote_mark = '\''; } + ++ s; + } + return quote_mark; + } + + std::string read_hex_escapes(const std::string& s) + { + + std::string result; + bool skipped = false; + + for (size_t i = 0, L = s.length(); i < L; ++i) { + + // implement the same strange ruby sass behavior + // an escape sequence can also mean a unicode char + if (s[i] == '\\' && !skipped) { + + // remember + skipped = true; + + // escape length + size_t len = 1; + + // parse as many sequence chars as possible + // ToDo: Check if ruby aborts after possible max + while (i + len < L && s[i + len] && isxdigit(s[i + len])) ++ len; + + if (len > 1) { + + // convert the extracted hex string to code point value + // ToDo: Maybe we could do this without creating a substring + uint32_t cp = strtol(s.substr (i + 1, len - 1).c_str(), NULL, 16); + + if (s[i + len] == ' ') ++ len; + + // assert invalid code points + if (cp == 0) cp = 0xFFFD; + // replace bell character + // if (cp == '\n') cp = 32; + + // use a very simple approach to convert via utf8 lib + // maybe there is a more elegant way; maybe we shoud + // convert the whole output from string to a stream!? + // allocate memory for utf8 char and convert to utf8 + unsigned char u[5] = {0,0,0,0,0}; utf8::append(cp, u); + for(size_t m = 0; m < 5 && u[m]; m++) result.push_back(u[m]); + + // skip some more chars? 
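          // For illustration: if `s` contains the escape `\0041 ` followed by `x`,
          // `len` grows to 5 for the backslash plus four hex digits and to 6 for
          // the trailing space terminator; cp == 0x41, so "A" was appended above,
          // and the statement below advances `i` past the whole escape so the
          // plain `x` is copied normally on the next iteration.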
+ i += len - 1; skipped = false; + + } + + else { + + skipped = false; + + result.push_back(s[i]); + + } + + } + + else { + + result.push_back(s[i]); + + } + + } + + return result; + + } + + std::string unquote(const std::string& s, char* qd, bool keep_utf8_sequences, bool strict) + { + + // not enough room for quotes + // no possibility to unquote + if (s.length() < 2) return s; + + char q; + bool skipped = false; + + // this is no guarantee that the unquoting will work + // what about whitespace before/after the quote_mark? + if (*s.begin() == '"' && *s.rbegin() == '"') q = '"'; + else if (*s.begin() == '\'' && *s.rbegin() == '\'') q = '\''; + else return s; + + std::string unq; + unq.reserve(s.length()-2); + + for (size_t i = 1, L = s.length() - 1; i < L; ++i) { + + // implement the same strange ruby sass behavior + // an escape sequence can also mean a unicode char + if (s[i] == '\\' && !skipped) { + // remember + skipped = true; + + // skip it + // ++ i; + + // if (i == L) break; + + // escape length + size_t len = 1; + + // parse as many sequence chars as possible + // ToDo: Check if ruby aborts after possible max + while (i + len < L && s[i + len] && isxdigit(s[i + len])) ++ len; + + // hex string? + if (keep_utf8_sequences) { + unq.push_back(s[i]); + } else if (len > 1) { + + // convert the extracted hex string to code point value + // ToDo: Maybe we could do this without creating a substring + uint32_t cp = strtol(s.substr (i + 1, len - 1).c_str(), NULL, 16); + + if (s[i + len] == ' ') ++ len; + + // assert invalid code points + if (cp == 0) cp = 0xFFFD; + // replace bell character + // if (cp == '\n') cp = 32; + + // use a very simple approach to convert via utf8 lib + // maybe there is a more elegant way; maybe we shoud + // convert the whole output from string to a stream!? + // allocate memory for utf8 char and convert to utf8 + unsigned char u[5] = {0,0,0,0,0}; utf8::append(cp, u); + for(size_t m = 0; m < 5 && u[m]; m++) unq.push_back(u[m]); + + // skip some more chars? + i += len - 1; skipped = false; + + } + + + } + // check for unexpected delimiter + // be strict and throw error back + // else if (!skipped && q == s[i]) { + // // don't be that strict + // return s; + // // this basically always means an internal error and not users fault + // error("Unescaped delimiter in string to unquote found. [" + s + "]", ParserState("[UNQUOTE]")); + // } + else { + if (strict && !skipped) { + if (s[i] == q) return s; + } + skipped = false; + unq.push_back(s[i]); + } + + } + if (skipped) { return s; } + if (qd) *qd = q; + return unq; + + } + + std::string quote(const std::string& s, char q) + { + + // autodetect with fallback to given quote + q = detect_best_quotemark(s.c_str(), q); + + // return an empty quoted string + if (s.empty()) return std::string(2, q ? 
q : '"'); + + std::string quoted; + quoted.reserve(s.length()+2); + quoted.push_back(q); + + const char* it = s.c_str(); + const char* end = it + strlen(it) + 1; + while (*it && it < end) { + const char* now = it; + + if (*it == q) { + quoted.push_back('\\'); + } else if (*it == '\\') { + quoted.push_back('\\'); + } + + int cp = utf8::next(it, end); + + // in case of \r, check if the next in sequence + // is \n and then advance the iterator and skip \r + if (cp == '\r' && it < end && utf8::peek_next(it, end) == '\n') { + cp = utf8::next(it, end); + } + + if (cp == '\n') { + quoted.push_back('\\'); + quoted.push_back('a'); + // we hope we can remove this flag once we figure out + // why ruby sass has these different output behaviors + // gsub(/\n(?![a-fA-F0-9\s])/, "\\a").gsub("\n", "\\a ") + using namespace Prelexer; + if (alternatives < + Prelexer::char_range<'a', 'f'>, + Prelexer::char_range<'A', 'F'>, + Prelexer::char_range<'0', '9'>, + space + >(it) != NULL) { + quoted.push_back(' '); + } + } else if (cp < 127) { + quoted.push_back((char) cp); + } else { + while (now < it) { + quoted.push_back(*now); + ++ now; + } + } + } + + quoted.push_back(q); + return quoted; + } + + bool is_hex_doublet(double n) + { + return n == 0x00 || n == 0x11 || n == 0x22 || n == 0x33 || + n == 0x44 || n == 0x55 || n == 0x66 || n == 0x77 || + n == 0x88 || n == 0x99 || n == 0xAA || n == 0xBB || + n == 0xCC || n == 0xDD || n == 0xEE || n == 0xFF ; + } + + bool is_color_doublet(double r, double g, double b) + { + return is_hex_doublet(r) && is_hex_doublet(g) && is_hex_doublet(b); + } + + bool peek_linefeed(const char* start) + { + using namespace Prelexer; + using namespace Constants; + return sequence < + zero_plus < + alternatives < + exactly <' '>, + exactly <'\t'>, + line_comment, + block_comment, + delimited_by < + slash_star, + star_slash, + false + > + > + >, + re_linebreak + >(start) != 0; + } + + namespace Util { + using std::string; + + std::string rtrim(const std::string &str) { + std::string trimmed = str; + size_t pos_ws = trimmed.find_last_not_of(" \t\n\v\f\r"); + if (pos_ws != std::string::npos) + { trimmed.erase(pos_ws + 1); } + else { trimmed.clear(); } + return trimmed; + } + + std::string normalize_underscores(const std::string& str) { + std::string normalized = str; + for(size_t i = 0, L = normalized.length(); i < L; ++i) { + if(normalized[i] == '_') { + normalized[i] = '-'; + } + } + return normalized; + } + + std::string normalize_decimals(const std::string& str) { + std::string prefix = "0"; + std::string normalized = str; + + return normalized[0] == '.' ? normalized.insert(0, prefix) : normalized; + } + + bool isPrintable(Ruleset_Ptr r, Sass_Output_Style style) { + if (r == NULL) { + return false; + } + + Block_Obj b = r->block(); + + Selector_List_Ptr sl = Cast(r->selector()); + bool hasSelectors = sl ? 
sl->length() > 0 : false; + + if (!hasSelectors) { + return false; + } + + bool hasDeclarations = false; + bool hasPrintableChildBlocks = false; + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Obj stm = b->at(i); + if (Cast(stm)) { + return true; + } else if (Declaration_Ptr d = Cast(stm)) { + return isPrintable(d, style); + } else if (Has_Block_Ptr p = Cast(stm)) { + Block_Obj pChildBlock = p->block(); + if (isPrintable(pChildBlock, style)) { + hasPrintableChildBlocks = true; + } + } else if (Comment_Ptr c = Cast(stm)) { + // keep for uncompressed + if (style != COMPRESSED) { + hasDeclarations = true; + } + // output style compressed + if (c->is_important()) { + hasDeclarations = c->is_important(); + } + } else { + hasDeclarations = true; + } + + if (hasDeclarations || hasPrintableChildBlocks) { + return true; + } + } + + return false; + } + + bool isPrintable(String_Constant_Ptr s, Sass_Output_Style style) + { + return ! s->value().empty(); + } + + bool isPrintable(String_Quoted_Ptr s, Sass_Output_Style style) + { + return true; + } + + bool isPrintable(Declaration_Ptr d, Sass_Output_Style style) + { + Expression_Obj val = d->value(); + if (String_Quoted_Obj sq = Cast(val)) return isPrintable(sq.ptr(), style); + if (String_Constant_Obj sc = Cast(val)) return isPrintable(sc.ptr(), style); + return true; + } + + bool isPrintable(Supports_Block_Ptr f, Sass_Output_Style style) { + if (f == NULL) { + return false; + } + + Block_Obj b = f->block(); + + bool hasDeclarations = false; + bool hasPrintableChildBlocks = false; + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Obj stm = b->at(i); + if (Cast(stm) || Cast(stm)) { + hasDeclarations = true; + } + else if (Has_Block_Ptr b = Cast(stm)) { + Block_Obj pChildBlock = b->block(); + if (!b->is_invisible()) { + if (isPrintable(pChildBlock, style)) { + hasPrintableChildBlocks = true; + } + } + } + + if (hasDeclarations || hasPrintableChildBlocks) { + return true; + } + } + + return false; + } + + bool isPrintable(Media_Block_Ptr m, Sass_Output_Style style) + { + if (m == 0) return false; + Block_Obj b = m->block(); + if (b == 0) return false; + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Obj stm = b->at(i); + if (Cast(stm)) return true; + else if (Cast(stm)) return true; + else if (Comment_Ptr c = Cast(stm)) { + if (isPrintable(c, style)) { + return true; + } + } + else if (Ruleset_Ptr r = Cast(stm)) { + if (isPrintable(r, style)) { + return true; + } + } + else if (Supports_Block_Ptr f = Cast(stm)) { + if (isPrintable(f, style)) { + return true; + } + } + else if (Media_Block_Ptr mb = Cast(stm)) { + if (isPrintable(mb, style)) { + return true; + } + } + else if (Has_Block_Ptr b = Cast(stm)) { + if (isPrintable(b->block(), style)) { + return true; + } + } + } + return false; + } + + bool isPrintable(Comment_Ptr c, Sass_Output_Style style) + { + // keep for uncompressed + if (style != COMPRESSED) { + return true; + } + // output style compressed + if (c->is_important()) { + return true; + } + // not printable + return false; + }; + + bool isPrintable(Block_Obj b, Sass_Output_Style style) { + if (!b) { + return false; + } + + for (size_t i = 0, L = b->length(); i < L; ++i) { + Statement_Obj stm = b->at(i); + if (Cast(stm) || Cast(stm)) { + return true; + } + else if (Comment_Ptr c = Cast(stm)) { + if (isPrintable(c, style)) { + return true; + } + } + else if (Ruleset_Ptr r = Cast(stm)) { + if (isPrintable(r, style)) { + return true; + } + } + else if (Supports_Block_Ptr f = Cast(stm)) { + if 
(isPrintable(f, style)) { + return true; + } + } + else if (Media_Block_Ptr m = Cast(stm)) { + if (isPrintable(m, style)) { + return true; + } + } + else if (Has_Block_Ptr b = Cast(stm)) { + if (isPrintable(b->block(), style)) { + return true; + } + } + } + + return false; + } + + bool isAscii(const char chr) { + return unsigned(chr) < 128; + } + + } +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/util.hpp b/mybulma/node_modules/node-sass/src/libsass/src/util.hpp new file mode 100644 index 0000000..f23475f --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/util.hpp @@ -0,0 +1,56 @@ +#ifndef SASS_UTIL_H +#define SASS_UTIL_H + +#include +#include +#include +#include "sass.hpp" +#include "sass/base.h" +#include "ast_fwd_decl.hpp" + +#define SASS_ASSERT(cond, msg) assert(cond && msg) + +namespace Sass { + + double round(double val, size_t precision = 0); + double sass_strtod(const char* str); + const char* safe_str(const char *, const char* = ""); + void free_string_array(char **); + char **copy_strings(const std::vector&, char ***, int = 0); + std::string read_css_string(const std::string& str, bool css = true); + std::string evacuate_escapes(const std::string& str); + std::string string_to_output(const std::string& str); + std::string comment_to_string(const std::string& text); + std::string read_hex_escapes(const std::string& str); + std::string escape_string(const std::string& str); + void newline_to_space(std::string& str); + + std::string quote(const std::string&, char q = 0); + std::string unquote(const std::string&, char* q = 0, bool keep_utf8_sequences = false, bool strict = true); + char detect_best_quotemark(const char* s, char qm = '"'); + + bool is_hex_doublet(double n); + bool is_color_doublet(double r, double g, double b); + + bool peek_linefeed(const char* start); + + namespace Util { + + std::string rtrim(const std::string& str); + + std::string normalize_underscores(const std::string& str); + std::string normalize_decimals(const std::string& str); + + bool isPrintable(Ruleset_Ptr r, Sass_Output_Style style = NESTED); + bool isPrintable(Supports_Block_Ptr r, Sass_Output_Style style = NESTED); + bool isPrintable(Media_Block_Ptr r, Sass_Output_Style style = NESTED); + bool isPrintable(Comment_Ptr b, Sass_Output_Style style = NESTED); + bool isPrintable(Block_Obj b, Sass_Output_Style style = NESTED); + bool isPrintable(String_Constant_Ptr s, Sass_Output_Style style = NESTED); + bool isPrintable(String_Quoted_Ptr s, Sass_Output_Style style = NESTED); + bool isPrintable(Declaration_Ptr d, Sass_Output_Style style = NESTED); + bool isAscii(const char chr); + + } +} +#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/src/values.cpp b/mybulma/node_modules/node-sass/src/libsass/src/values.cpp new file mode 100644 index 0000000..0f2fd48 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/values.cpp @@ -0,0 +1,131 @@ +#include "sass.hpp" +#include "sass.h" +#include "values.hpp" + +#include + +namespace Sass { + + // convert value from C++ side to C-API + union Sass_Value* ast_node_to_sass_value (const Expression_Ptr val) + { + if (val->concrete_type() == Expression::NUMBER) + { + Number_Ptr_Const res = Cast(val); + return sass_make_number(res->value(), res->unit().c_str()); + } + else if (val->concrete_type() == Expression::COLOR) + { + Color_Ptr_Const col = Cast(val); + return sass_make_color(col->r(), col->g(), col->b(), col->a()); + } + else if (val->concrete_type() == Expression::LIST) + { + List_Ptr_Const l = Cast(val); + 
union Sass_Value* list = sass_make_list(l->size(), l->separator(), l->is_bracketed()); + for (size_t i = 0, L = l->length(); i < L; ++i) { + Expression_Obj obj = l->at(i); + auto val = ast_node_to_sass_value(obj); + sass_list_set_value(list, i, val); + } + return list; + } + else if (val->concrete_type() == Expression::MAP) + { + Map_Ptr_Const m = Cast(val); + union Sass_Value* map = sass_make_map(m->length()); + size_t i = 0; for (Expression_Obj key : m->keys()) { + sass_map_set_key(map, i, ast_node_to_sass_value(key)); + sass_map_set_value(map, i, ast_node_to_sass_value(m->at(key))); + ++ i; + } + return map; + } + else if (val->concrete_type() == Expression::NULL_VAL) + { + return sass_make_null(); + } + else if (val->concrete_type() == Expression::BOOLEAN) + { + Boolean_Ptr_Const res = Cast(val); + return sass_make_boolean(res->value()); + } + else if (val->concrete_type() == Expression::STRING) + { + if (String_Quoted_Ptr_Const qstr = Cast(val)) + { + return sass_make_qstring(qstr->value().c_str()); + } + else if (String_Constant_Ptr_Const cstr = Cast(val)) + { + return sass_make_string(cstr->value().c_str()); + } + } + return sass_make_error("unknown sass value type"); + } + + // convert value from C-API to C++ side + Value_Ptr sass_value_to_ast_node (const union Sass_Value* val) + { + switch (sass_value_get_tag(val)) { + case SASS_NUMBER: + return SASS_MEMORY_NEW(Number, + ParserState("[C-VALUE]"), + sass_number_get_value(val), + sass_number_get_unit(val)); + case SASS_BOOLEAN: + return SASS_MEMORY_NEW(Boolean, + ParserState("[C-VALUE]"), + sass_boolean_get_value(val)); + case SASS_COLOR: + return SASS_MEMORY_NEW(Color, + ParserState("[C-VALUE]"), + sass_color_get_r(val), + sass_color_get_g(val), + sass_color_get_b(val), + sass_color_get_a(val)); + case SASS_STRING: + if (sass_string_is_quoted(val)) { + return SASS_MEMORY_NEW(String_Quoted, + ParserState("[C-VALUE]"), + sass_string_get_value(val)); + } + return SASS_MEMORY_NEW(String_Constant, + ParserState("[C-VALUE]"), + sass_string_get_value(val)); + case SASS_LIST: { + List_Ptr l = SASS_MEMORY_NEW(List, + ParserState("[C-VALUE]"), + sass_list_get_length(val), + sass_list_get_separator(val)); + for (size_t i = 0, L = sass_list_get_length(val); i < L; ++i) { + l->append(sass_value_to_ast_node(sass_list_get_value(val, i))); + } + l->is_bracketed(sass_list_get_is_bracketed(val)); + return l; + } + case SASS_MAP: { + Map_Ptr m = SASS_MEMORY_NEW(Map, ParserState("[C-VALUE]")); + for (size_t i = 0, L = sass_map_get_length(val); i < L; ++i) { + *m << std::make_pair( + sass_value_to_ast_node(sass_map_get_key(val, i)), + sass_value_to_ast_node(sass_map_get_value(val, i))); + } + return m; + } + case SASS_NULL: + return SASS_MEMORY_NEW(Null, ParserState("[C-VALUE]")); + case SASS_ERROR: + return SASS_MEMORY_NEW(Custom_Error, + ParserState("[C-VALUE]"), + sass_error_get_message(val)); + case SASS_WARNING: + return SASS_MEMORY_NEW(Custom_Warning, + ParserState("[C-VALUE]"), + sass_warning_get_message(val)); + default: break; + } + return 0; + } + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/src/values.hpp b/mybulma/node_modules/node-sass/src/libsass/src/values.hpp new file mode 100644 index 0000000..f78ca12 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/src/values.hpp @@ -0,0 +1,12 @@ +#ifndef SASS_VALUES_H +#define SASS_VALUES_H + +#include "ast.hpp" + +namespace Sass { + + union Sass_Value* ast_node_to_sass_value (const Expression_Ptr val); + Value_Ptr sass_value_to_ast_node (const union Sass_Value* val); + +} 
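// Usage sketch (illustrative, assuming the C API declared in sass/values.h):
// a custom function implementation typically converts its incoming arguments
// with sass_value_to_ast_node(), works on the AST types, and converts the
// result back with ast_node_to_sass_value(). For example, a value created with
// sass_make_number(42.0, "px") round-trips to a Sass::Number with value 42 and
// unit "px", and back again.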
+#endif diff --git a/mybulma/node_modules/node-sass/src/libsass/test/test_node.cpp b/mybulma/node_modules/node-sass/src/libsass/test/test_node.cpp new file mode 100644 index 0000000..905dc18 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/test/test_node.cpp @@ -0,0 +1,94 @@ +#include +#include + +#include "node.hpp" +#include "parser.hpp" + + +#define STATIC_ARRAY_SIZE(array) (sizeof((array))/sizeof((array[0]))) + + +namespace Sass { + + Context ctx = Context::Data(); + + const char* const ROUNDTRIP_TESTS[] = { + NULL, + "~", + "CMPD", + "~ CMPD", + "CMPD >", + "> > CMPD", + "CMPD ~ ~", + "> + CMPD1.CMPD2 > ~", + "> + CMPD1.CMPD2 CMPD3.CMPD4 > ~", + "+ CMPD1 CMPD2 ~ CMPD3 + CMPD4 > CMPD5 > ~" + }; + + + + static Complex_Selector* createComplexSelector(std::string src) { + std::string temp(src); + temp += ";"; + return (*Parser::from_c_str(temp.c_str(), ctx, "", Position()).parse_selector_list())[0]; + } + + + void roundtripTest(const char* toTest) { + + // Create the initial selector + + Complex_Selector* pOrigSelector = NULL; + if (toTest) { + pOrigSelector = createComplexSelector(toTest); + } + + std::string expected(pOrigSelector ? pOrigSelector->to_string() : "NULL"); + + + // Roundtrip the selector into a node and back + + Node node = complexSelectorToNode(pOrigSelector, ctx); + + std::stringstream nodeStringStream; + nodeStringStream << node; + std::string nodeString = nodeStringStream.str(); + cout << "ASNODE: " << node << endl; + + Complex_Selector* pNewSelector = nodeToComplexSelector(node, ctx); + + // Show the result + + std::string result(pNewSelector ? pNewSelector->to_string() : "NULL"); + + cout << "SELECTOR: " << expected << endl; + cout << "NEW SELECTOR: " << result << endl; + + + // Test that they are equal using the equality operator + + assert( (!pOrigSelector && !pNewSelector ) || (pOrigSelector && pNewSelector) ); + if (pOrigSelector) { + assert( *pOrigSelector == *pNewSelector ); + } + + + // Test that they are equal by comparing the string versions of the selectors + + assert(expected == result); + + } + + + int main() { + for (int index = 0; index < STATIC_ARRAY_SIZE(ROUNDTRIP_TESTS); index++) { + const char* const toTest = ROUNDTRIP_TESTS[index]; + cout << "\nINPUT STRING: " << (toTest ? 
toTest : "NULL") << endl; + roundtripTest(toTest); + } + + cout << "\nTesting Done.\n"; + } + + +} diff --git a/mybulma/node_modules/node-sass/src/libsass/test/test_paths.cpp b/mybulma/node_modules/node-sass/src/libsass/test/test_paths.cpp new file mode 100644 index 0000000..bfcf8ec --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/test/test_paths.cpp @@ -0,0 +1,28 @@ +#include +#include "../paths.hpp" + +using namespace Sass; + +template +std::vector& operator<<(std::vector& v, const T& e) +{ + v.push_back(e); + return v; +} + +int main() +{ + std::vector v1, v2, v3; + v1 << 1 << 2; + v2 << 3; + v3 << 4 << 5 << 6; + + std::vector > ss; + ss << v1 << v2 << v3; + + std::vector > ps = paths(ss); + for (size_t i = 0, S = ps.size(); i < S; ++i) { + std::cout << vector_to_string(ps[i]) << std::endl; + } + return 0; +} diff --git a/mybulma/node_modules/node-sass/src/libsass/test/test_selector_difference.cpp b/mybulma/node_modules/node-sass/src/libsass/test/test_selector_difference.cpp new file mode 100644 index 0000000..e2880c0 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/test/test_selector_difference.cpp @@ -0,0 +1,25 @@ +#include "../ast.hpp" +#include "../context.hpp" +#include "../parser.hpp" +#include +#include + +using namespace Sass; + +Context ctx = Context::Data(); + +Compound_Selector* selector(std::string src) +{ return Parser::from_c_str(src.c_str(), ctx, "", Position()).parse_compound_selector(); } + +void diff(std::string s, std::string t) +{ + std::cout << s << " - " << t << " = " << selector(s + ";")->minus(selector(t + ";"), ctx)->to_string() << std::endl; +} + +int main() +{ + diff(".a.b.c", ".c.b"); + diff(".a.b.c", ".fludge.b"); + + return 0; +} diff --git a/mybulma/node_modules/node-sass/src/libsass/test/test_specificity.cpp b/mybulma/node_modules/node-sass/src/libsass/test/test_specificity.cpp new file mode 100644 index 0000000..ba9bbfc --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/test/test_specificity.cpp @@ -0,0 +1,25 @@ +#include "../ast.hpp" +#include "../context.hpp" +#include "../parser.hpp" +#include +#include + +using namespace Sass; + +Context ctx = Context::Data(); + +Selector* selector(std::string src) +{ return Parser::from_c_str(src.c_str(), ctx, "", Position()).parse_selector_list(); } + +void spec(std::string sel) +{ std::cout << sel << "\t::\t" << selector(sel + ";")->specificity() << std::endl; } + +int main() +{ + spec("foo bar hux"); + spec(".foo .bar hux"); + spec("#foo .bar[hux='mux']"); + spec("a b c d e f"); + + return 0; +} diff --git a/mybulma/node_modules/node-sass/src/libsass/test/test_subset_map.cpp b/mybulma/node_modules/node-sass/src/libsass/test/test_subset_map.cpp new file mode 100644 index 0000000..3794514 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/test/test_subset_map.cpp @@ -0,0 +1,472 @@ +#include +#include +#include +#include "../subset_map.hpp" + +Subset_Map ssm; + +string toString(std::vector v); +string toString(std::vector>> v); +void assertEqual(string std::sExpected, std::string sResult); + +void setup() { + ssm.clear(); + + //@ssm[Set[1, 2]] = "Foo" + std::vector s1; + s1.push_back("1"); + s1.push_back("2"); + ssm.put(s1, "Foo"); + + //@ssm[Set["fizz", "fazz"]] = "Bar" + std::vector s2; + s2.push_back("fizz"); + s2.push_back("fazz"); + ssm.put(s2, "Bar"); + + //@ssm[Set[:foo, :bar]] = "Baz" + std::vector s3; + s3.push_back(":foo"); + s3.push_back(":bar"); + ssm.put(s3, "Baz"); + + //@ssm[Set[:foo, :bar, :baz]] = "Bang" + std::vector s4; + s4.push_back(":foo"); + 
s4.push_back(":bar"); + s4.push_back(":baz"); + ssm.put(s4, "Bang"); + + //@ssm[Set[:bip, :bop, :blip]] = "Qux" + std::vector s5; + s5.push_back(":bip"); + s5.push_back(":bop"); + s5.push_back(":blip"); + ssm.put(s5, "Qux"); + + //@ssm[Set[:bip, :bop]] = "Thram" + std::vector s6; + s6.push_back(":bip"); + s6.push_back(":bop"); + ssm.put(s6, "Thram"); +} + +void testEqualKeys() { + std::cout << "testEqualKeys" << std::endl; + + //assert_equal [["Foo", Set[1, 2]]], @ssm.get(Set[1, 2]) + std::vector k1; + k1.push_back("1"); + k1.push_back("2"); + assertEqual("[[Foo, Set[1, 2]]]", toString(ssm.get_kv(k1))); + + //assert_equal [["Bar", Set["fizz", "fazz"]]], @ssm.get(Set["fizz", "fazz"]) + std::vector k2; + k2.push_back("fizz"); + k2.push_back("fazz"); + assertEqual("[[Bar, Set[fizz, fazz]]]", toString(ssm.get_kv(k2))); + + std::cout << std::endl; +} + +void testSubsetKeys() { + std::cout << "testSubsetKeys" << std::endl; + + //assert_equal [["Foo", Set[1, 2]]], @ssm.get(Set[1, 2, "fuzz"]) + std::vector k1; + k1.push_back("1"); + k1.push_back("2"); + k1.push_back("fuzz"); + assertEqual("[[Foo, Set[1, 2]]]", toString(ssm.get_kv(k1))); + + //assert_equal [["Bar", Set["fizz", "fazz"]]], @ssm.get(Set["fizz", "fazz", 3]) + std::vector k2; + k2.push_back("fizz"); + k2.push_back("fazz"); + k2.push_back("3"); + assertEqual("[[Bar, Set[fizz, fazz]]]", toString(ssm.get_kv(k2))); + + std::cout << std::endl; +} + +void testSupersetKeys() { + std::cout << "testSupersetKeys" << std::endl; + + //assert_equal [], @ssm.get(Set[1]) + std::vector k1; + k1.push_back("1"); + assertEqual("[]", toString(ssm.get_kv(k1))); + + //assert_equal [], @ssm.get(Set[2]) + std::vector k2; + k2.push_back("2"); + assertEqual("[]", toString(ssm.get_kv(k2))); + + //assert_equal [], @ssm.get(Set["fizz"]) + std::vector k3; + k3.push_back("fizz"); + assertEqual("[]", toString(ssm.get_kv(k3))); + + //assert_equal [], @ssm.get(Set["fazz"]) + std::vector k4; + k4.push_back("fazz"); + assertEqual("[]", toString(ssm.get_kv(k4))); + + std::cout << std::endl; +} + +void testDisjointKeys() { + std::cout << "testDisjointKeys" << std::endl; + + //assert_equal [], @ssm.get(Set[3, 4]) + std::vector k1; + k1.push_back("3"); + k1.push_back("4"); + assertEqual("[]", toString(ssm.get_kv(k1))); + + //assert_equal [], @ssm.get(Set["fuzz", "frizz"]) + std::vector k2; + k2.push_back("fuzz"); + k2.push_back("frizz"); + assertEqual("[]", toString(ssm.get_kv(k2))); + + //assert_equal [], @ssm.get(Set["gran", 15]) + std::vector k3; + k3.push_back("gran"); + k3.push_back("15"); + assertEqual("[]", toString(ssm.get_kv(k3))); + + std::cout << std::endl; +} + +void testSemiDisjointKeys() { + std::cout << "testSemiDisjointKeys" << std::endl; + + //assert_equal [], @ssm.get(Set[2, 3]) + std::vector k1; + k1.push_back("2"); + k1.push_back("3"); + assertEqual("[]", toString(ssm.get_kv(k1))); + + //assert_equal [], @ssm.get(Set["fizz", "fuzz"]) + std::vector k2; + k2.push_back("fizz"); + k2.push_back("fuzz"); + assertEqual("[]", toString(ssm.get_kv(k2))); + + //assert_equal [], @ssm.get(Set[1, "fazz"]) + std::vector k3; + k3.push_back("1"); + k3.push_back("fazz"); + assertEqual("[]", toString(ssm.get_kv(k3))); + + std::cout << std::endl; +} + +void testEmptyKeySet() { + std::cout << "testEmptyKeySet" << std::endl; + + //assert_raises(ArgumentError) {@ssm[Set[]] = "Fail"} + std::vector s1; + try { + ssm.put(s1, "Fail"); + } + catch (const char* &e) { + assertEqual("internal error: subset map keys may not be empty", e); + } +} + +void testEmptyKeyGet() { + std::cout << 
"testEmptyKeyGet" << std::endl; + + //assert_equal [], @ssm.get(Set[]) + std::vector k1; + assertEqual("[]", toString(ssm.get_kv(k1))); + + std::cout << std::endl; +} +void testMultipleSubsets() { + std::cout << "testMultipleSubsets" << std::endl; + + //assert_equal [["Foo", Set[1, 2]], ["Bar", Set["fizz", "fazz"]]], @ssm.get(Set[1, 2, "fizz", "fazz"]) + std::vector k1; + k1.push_back("1"); + k1.push_back("2"); + k1.push_back("fizz"); + k1.push_back("fazz"); + assertEqual("[[Foo, Set[1, 2]], [Bar, Set[fizz, fazz]]]", toString(ssm.get_kv(k1))); + + //assert_equal [["Foo", Set[1, 2]], ["Bar", Set["fizz", "fazz"]]], @ssm.get(Set[1, 2, 3, "fizz", "fazz", "fuzz"]) + std::vector k2; + k2.push_back("1"); + k2.push_back("2"); + k2.push_back("3"); + k2.push_back("fizz"); + k2.push_back("fazz"); + k2.push_back("fuzz"); + assertEqual("[[Foo, Set[1, 2]], [Bar, Set[fizz, fazz]]]", toString(ssm.get_kv(k2))); + + //assert_equal [["Baz", Set[:foo, :bar]]], @ssm.get(Set[:foo, :bar]) + std::vector k3; + k3.push_back(":foo"); + k3.push_back(":bar"); + assertEqual("[[Baz, Set[:foo, :bar]]]", toString(ssm.get_kv(k3))); + + //assert_equal [["Baz", Set[:foo, :bar]], ["Bang", Set[:foo, :bar, :baz]]], @ssm.get(Set[:foo, :bar, :baz]) + std::vector k4; + k4.push_back(":foo"); + k4.push_back(":bar"); + k4.push_back(":baz"); + assertEqual("[[Baz, Set[:foo, :bar]], [Bang, Set[:foo, :bar, :baz]]]", toString(ssm.get_kv(k4))); + + std::cout << std::endl; +} +void testBracketBracket() { + std::cout << "testBracketBracket" << std::endl; + + //assert_equal ["Foo"], @ssm[Set[1, 2, "fuzz"]] + std::vector k1; + k1.push_back("1"); + k1.push_back("2"); + k1.push_back("fuzz"); + assertEqual("[Foo]", toString(ssm.get_v(k1))); + + //assert_equal ["Baz", "Bang"], @ssm[Set[:foo, :bar, :baz]] + std::vector k2; + k2.push_back(":foo"); + k2.push_back(":bar"); + k2.push_back(":baz"); + assertEqual("[Baz, Bang]", toString(ssm.get_v(k2))); + + std::cout << std::endl; +} + +void testKeyOrder() { + std::cout << "testEqualKeys" << std::endl; + + //assert_equal [["Foo", Set[1, 2]]], @ssm.get(Set[2, 1]) + std::vector k1; + k1.push_back("2"); + k1.push_back("1"); + assertEqual("[[Foo, Set[1, 2]]]", toString(ssm.get_kv(k1))); + + std::cout << std::endl; +} + +void testOrderPreserved() { + std::cout << "testOrderPreserved" << std::endl; + //@ssm[Set[10, 11, 12]] = 1 + std::vector s1; + s1.push_back("10"); + s1.push_back("11"); + s1.push_back("12"); + ssm.put(s1, "1"); + + //@ssm[Set[10, 11]] = 2 + std::vector s2; + s2.push_back("10"); + s2.push_back("11"); + ssm.put(s2, "2"); + + //@ssm[Set[11]] = 3 + std::vector s3; + s3.push_back("11"); + ssm.put(s3, "3"); + + //@ssm[Set[11, 12]] = 4 + std::vector s4; + s4.push_back("11"); + s4.push_back("12"); + ssm.put(s4, "4"); + + //@ssm[Set[9, 10, 11, 12, 13]] = 5 + std::vector s5; + s5.push_back("9"); + s5.push_back("10"); + s5.push_back("11"); + s5.push_back("12"); + s5.push_back("13"); + ssm.put(s5, "5"); + + //@ssm[Set[10, 13]] = 6 + std::vector s6; + s6.push_back("10"); + s6.push_back("13"); + ssm.put(s6, "6"); + + //assert_equal([[1, Set[10, 11, 12]], [2, Set[10, 11]], [3, Set[11]], [4, Set[11, 12]], [5, Set[9, 10, 11, 12, 13]], [6, Set[10, 13]]], @ssm.get(Set[9, 10, 11, 12, 13])) + std::vector k1; + k1.push_back("9"); + k1.push_back("10"); + k1.push_back("11"); + k1.push_back("12"); + k1.push_back("13"); + assertEqual("[[1, Set[10, 11, 12]], [2, Set[10, 11]], [3, Set[11]], [4, Set[11, 12]], [5, Set[9, 10, 11, 12, 13]], [6, Set[10, 13]]]", toString(ssm.get_kv(k1))); + + std::cout << std::endl; +} +void 
testMultipleEqualValues() { + std::cout << "testMultipleEqualValues" << std::endl; + //@ssm[Set[11, 12]] = 1 + std::vector s1; + s1.push_back("11"); + s1.push_back("12"); + ssm.put(s1, "1"); + + //@ssm[Set[12, 13]] = 2 + std::vector s2; + s2.push_back("12"); + s2.push_back("13"); + ssm.put(s2, "2"); + + //@ssm[Set[13, 14]] = 1 + std::vector s3; + s3.push_back("13"); + s3.push_back("14"); + ssm.put(s3, "1"); + + //@ssm[Set[14, 15]] = 1 + std::vector s4; + s4.push_back("14"); + s4.push_back("15"); + ssm.put(s4, "1"); + + //assert_equal([[1, Set[11, 12]], [2, Set[12, 13]], [1, Set[13, 14]], [1, Set[14, 15]]], @ssm.get(Set[11, 12, 13, 14, 15])) + std::vector k1; + k1.push_back("11"); + k1.push_back("12"); + k1.push_back("13"); + k1.push_back("14"); + k1.push_back("15"); + assertEqual("[[1, Set[11, 12]], [2, Set[12, 13]], [1, Set[13, 14]], [1, Set[14, 15]]]", toString(ssm.get_kv(k1))); + + std::cout << std::endl; +} + +int main() +{ + std::vector s1; + s1.push_back("1"); + s1.push_back("2"); + + std::vector s2; + s2.push_back("2"); + s2.push_back("3"); + + std::vector s3; + s3.push_back("3"); + s3.push_back("4"); + + ssm.put(s1, "value1"); + ssm.put(s2, "value2"); + ssm.put(s3, "value3"); + + std::vector s4; + s4.push_back("1"); + s4.push_back("2"); + s4.push_back("3"); + + std::vector > > fetched(ssm.get_kv(s4)); + + std::cout << "PRINTING RESULTS:" << std::endl; + for (size_t i = 0, S = fetched.size(); i < S; ++i) { + std::cout << fetched[i].first << std::endl; + } + + Subset_Map ssm2; + ssm2.put(s1, "foo"); + ssm2.put(s2, "bar"); + ssm2.put(s4, "hux"); + + std::vector > > fetched2(ssm2.get_kv(s4)); + + std::cout << std::endl << "PRINTING RESULTS:" << std::endl; + for (size_t i = 0, S = fetched2.size(); i < S; ++i) { + std::cout << fetched2[i].first << std::endl; + } + + std::cout << "TRYING ON A SELECTOR-LIKE OBJECT" << std::endl; + + Subset_Map sel_ssm; + std::vector target; + target.push_back("desk"); + target.push_back(".wood"); + + std::vector actual; + actual.push_back("desk"); + actual.push_back(".wood"); + actual.push_back(".mine"); + + sel_ssm.put(target, "has-aquarium"); + std::vector > > fetched3(sel_ssm.get_kv(actual)); + std::cout << "RESULTS:" << std::endl; + for (size_t i = 0, S = fetched3.size(); i < S; ++i) { + std::cout << fetched3[i].first << std::endl; + } + + std::cout << std::endl; + + // BEGIN PORTED RUBY TESTS FROM /test/sass/util/subset_map_test.rb + + setup(); + testEqualKeys(); + testSubsetKeys(); + testSupersetKeys(); + testDisjointKeys(); + testSemiDisjointKeys(); + testEmptyKeySet(); + testEmptyKeyGet(); + testMultipleSubsets(); + testBracketBracket(); + testKeyOrder(); + + setup(); + testOrderPreserved(); + + setup(); + testMultipleEqualValues(); + + return 0; +} + +string toString(std::vector>> v) +{ + std::stringstream buffer; + buffer << "["; + for (size_t i = 0, S = v.size(); i < S; ++i) { + buffer << "[" << v[i].first; + buffer << ", Set["; + for (size_t j = 0, S = v[i].second.size(); j < S; ++j) { + buffer << v[i].second[j]; + if (j < S-1) { + buffer << ", "; + } + } + buffer << "]]"; + if (i < S-1) { + buffer << ", "; + } + } + buffer << "]"; + return buffer.str(); +} + +string toString(std::vector v) +{ + std::stringstream buffer; + buffer << "["; + for (size_t i = 0, S = v.size(); i < S; ++i) { + buffer << v[i]; + if (i < S-1) { + buffer << ", "; + } + } + buffer << "]"; + return buffer.str(); +} + +void assertEqual(string sExpected, string sResult) { + std::cout << "Expected: " << sExpected << std::endl; + std::cout << "Result: " << sResult << 
std::endl; + assert(sExpected == sResult); +} diff --git a/mybulma/node_modules/node-sass/src/libsass/test/test_superselector.cpp b/mybulma/node_modules/node-sass/src/libsass/test/test_superselector.cpp new file mode 100644 index 0000000..bf21c7c --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/test/test_superselector.cpp @@ -0,0 +1,69 @@ +#include "../ast.hpp" +#include "../context.hpp" +#include "../parser.hpp" +#include + +using namespace Sass; + +Context ctx = Context(Context::Data()); + +Compound_Selector* compound_selector(std::string src) +{ return Parser::from_c_str(src.c_str(), ctx, "", Position()).parse_compound_selector(); } + +Complex_Selector* complex_selector(std::string src) +{ return Parser::from_c_str(src.c_str(), ctx, "", Position()).parse_complex_selector(false); } + +void check_compound(std::string s1, std::string s2) +{ + std::cout << "Is " + << s1 + << " a superselector of " + << s2 + << "?\t" + << compound_selector(s1 + ";")->is_superselector_of(compound_selector(s2 + ";")) + << std::endl; +} + +void check_complex(std::string s1, std::string s2) +{ + std::cout << "Is " + << s1 + << " a superselector of " + << s2 + << "?\t" + << complex_selector(s1 + ";")->is_superselector_of(complex_selector(s2 + ";")) + << std::endl; +} + +int main() +{ + check_compound(".foo", ".foo.bar"); + check_compound(".foo.bar", ".foo"); + check_compound(".foo.bar", "div.foo"); + check_compound(".foo", "div.foo"); + check_compound("div.foo", ".foo"); + check_compound("div.foo", "div.bar.foo"); + check_compound("p.foo", "div.bar.foo"); + check_compound(".hux", ".mumble"); + + std::cout << std::endl; + + check_complex(".foo ~ .bar", ".foo + .bar"); + check_complex(".foo .bar", ".foo + .bar"); + check_complex(".foo .bar", ".foo > .bar"); + check_complex(".foo .bar > .hux", ".foo.a .bar.b > .hux"); + check_complex(".foo ~ .bar .hux", ".foo.a + .bar.b > .hux"); + check_complex(".foo", ".bar .foo"); + check_complex(".foo", ".foo.a"); + check_complex(".foo.bar", ".foo"); + check_complex(".foo .bar .hux", ".bar .hux"); + check_complex(".foo ~ .bar .hux.x", ".foo.a + .bar.b > .hux.y"); + check_complex(".foo ~ .bar .hux", ".foo.a + .bar.b > .mumble"); + check_complex(".foo + .bar", ".foo ~ .bar"); + check_complex("a c e", "a b c d e"); + check_complex("c a e", "a b c d e"); + + return 0; +} + + diff --git a/mybulma/node_modules/node-sass/src/libsass/test/test_unification.cpp b/mybulma/node_modules/node-sass/src/libsass/test/test_unification.cpp new file mode 100644 index 0000000..5c663ee --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/test/test_unification.cpp @@ -0,0 +1,31 @@ +#include "../ast.hpp" +#include "../context.hpp" +#include "../parser.hpp" +#include + +using namespace Sass; + +Context ctx = Context(Context::Data()); + +Compound_Selector* selector(std::string src) +{ return Parser::from_c_str(src.c_str(), ctx, "", Position()).parse_compound_selector(); } + +void unify(std::string lhs, std::string rhs) +{ + Compound_Selector* unified = selector(lhs + ";")->unify_with(selector(rhs + ";"), ctx); + std::cout << lhs << " UNIFIED WITH " << rhs << " =\t" << (unified ? 
unified->to_string() : "NOTHING") << std::endl; +} + +int main() +{ + unify(".foo", ".foo.bar"); + unify("div:nth-of-type(odd)", "div:first-child"); + unify("div", "span:whatever"); + unify("div", "span"); + unify("foo:bar::after", "foo:bar::first-letter"); + unify(".foo#bar.hux", ".hux.foo#bar"); + unify(".foo#bar.hux", ".hux.foo#baz"); + unify("*:blah:fudge", "p:fudge:blah"); + + return 0; +} diff --git a/mybulma/node_modules/node-sass/src/libsass/version.sh b/mybulma/node_modules/node-sass/src/libsass/version.sh new file mode 100644 index 0000000..281de74 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/version.sh @@ -0,0 +1,10 @@ +if test "x$LIBSASS_VERSION" = "x"; then + LIBSASS_VERSION=`git describe --abbrev=4 --dirty --always --tags 2>/dev/null` +fi +if test "x$LIBSASS_VERSION" = "x"; then + LIBSASS_VERSION=`cat VERSION 2>/dev/null` +fi +if test "x$LIBSASS_VERSION" = "x"; then + LIBSASS_VERSION="[na]" +fi +echo $LIBSASS_VERSION diff --git a/mybulma/node_modules/node-sass/src/libsass/win/libsass.sln b/mybulma/node_modules/node-sass/src/libsass/win/libsass.sln new file mode 100644 index 0000000..2a55ad8 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/win/libsass.sln @@ -0,0 +1,39 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 14 +VisualStudioVersion = 14.0.25420.1 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libsass", "libsass.vcxproj", "{E4030474-AFC9-4CC6-BEB6-D846F631502B}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = ".SolutionItems", ".SolutionItems", "{33318C77-2391-4399-8118-C109155A4A75}" + ProjectSection(SolutionItems) = preProject + ..\.editorconfig = ..\.editorconfig + ..\.gitattributes = ..\.gitattributes + ..\.gitignore = ..\.gitignore + ..\.travis.yml = ..\.travis.yml + ..\appveyor.yml = ..\appveyor.yml + ..\Readme.md = ..\Readme.md + ..\res\resource.rc = ..\res\resource.rc + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Win32 = Debug|Win32 + Debug|Win64 = Debug|Win64 + Release|Win32 = Release|Win32 + Release|Win64 = Release|Win64 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {E4030474-AFC9-4CC6-BEB6-D846F631502B}.Debug|Win32.ActiveCfg = Debug|Win32 + {E4030474-AFC9-4CC6-BEB6-D846F631502B}.Debug|Win32.Build.0 = Debug|Win32 + {E4030474-AFC9-4CC6-BEB6-D846F631502B}.Debug|Win64.ActiveCfg = Debug|x64 + {E4030474-AFC9-4CC6-BEB6-D846F631502B}.Debug|Win64.Build.0 = Debug|x64 + {E4030474-AFC9-4CC6-BEB6-D846F631502B}.Release|Win32.ActiveCfg = Release|Win32 + {E4030474-AFC9-4CC6-BEB6-D846F631502B}.Release|Win32.Build.0 = Release|Win32 + {E4030474-AFC9-4CC6-BEB6-D846F631502B}.Release|Win64.ActiveCfg = Release|x64 + {E4030474-AFC9-4CC6-BEB6-D846F631502B}.Release|Win64.Build.0 = Release|x64 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/mybulma/node_modules/node-sass/src/libsass/win/libsass.sln.DotSettings b/mybulma/node_modules/node-sass/src/libsass/win/libsass.sln.DotSettings new file mode 100644 index 0000000..405024e --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/win/libsass.sln.DotSettings @@ -0,0 +1,9 @@ + + ExplicitlyExcluded + ExplicitlyExcluded + ExplicitlyExcluded + ExplicitlyExcluded + ExplicitlyExcluded + ExplicitlyExcluded + ExplicitlyExcluded + ExplicitlyExcluded \ No newline at end of file diff --git 
a/mybulma/node_modules/node-sass/src/libsass/win/libsass.targets b/mybulma/node_modules/node-sass/src/libsass/win/libsass.targets new file mode 100644 index 0000000..c1c7d45 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/win/libsass.targets @@ -0,0 +1,118 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/mybulma/node_modules/node-sass/src/libsass/win/libsass.vcxproj b/mybulma/node_modules/node-sass/src/libsass/win/libsass.vcxproj new file mode 100644 index 0000000..8cfd61f --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/win/libsass.vcxproj @@ -0,0 +1,188 @@ + + + + [NA] + ..\src + ..\src + ..\include + + + + + + + + + + %(PreprocessorDefinitions);LIBSASS_VERSION="$(LIBSASS_VERSION)"; + + + + + + + + + + + + + Debug + Win32 + + + Debug + x64 + + + Release + Win32 + + + Release + x64 + + + + + {E4030474-AFC9-4CC6-BEB6-D846F631502B} + Win32Proj + libsass + + + libsass + Unicode + + + DynamicLibrary + ADD_EXPORTS;$(PreprocessorDefinitions); + + + StaticLibrary + + + v120 + + + v140 + + + true + + + true + + + false + true + + + false + true + + + + + + + + + + + + + + + + + + + true + $(SolutionDir)bin\Debug\ + $(SolutionDir)bin\Debug\obj\ + + + true + $(SolutionDir)bin\Debug\ + $(SolutionDir)bin\Debug\obj\ + + + false + $(SolutionDir)bin\ + $(SolutionDir)bin\obj\ + + + false + $(SolutionDir)bin\ + $(SolutionDir)bin\obj\ + + + + ..\include;%(AdditionalIncludeDirectories) + + + + + + + Level3 + Disabled + WIN32;_DEBUG;_CONSOLE;_LIB;$(PreprocessorDefinitions); + + + Console + true + + + + + + + Level3 + Disabled + WIN32;_DEBUG;_CONSOLE;_LIB;$(PreprocessorDefinitions); + + + Console + true + + + + + Level3 + + + MaxSpeed + true + true + WIN32;NDEBUG;_CONSOLE;_LIB;$(PreprocessorDefinitions); + + + Console + true + true + true + + + + + Level3 + + + MaxSpeed + true + true + WIN32;NDEBUG;_CONSOLE;_LIB;$(PreprocessorDefinitions); + + + Console + true + true + true + + + + + + + diff --git a/mybulma/node_modules/node-sass/src/libsass/win/libsass.vcxproj.filters b/mybulma/node_modules/node-sass/src/libsass/win/libsass.vcxproj.filters new file mode 100644 index 0000000..980f00f --- /dev/null +++ b/mybulma/node_modules/node-sass/src/libsass/win/libsass.vcxproj.filters @@ -0,0 +1,357 @@ + + + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hh;hpp;hxx;hm;inl;inc;xsd + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {bb9c270d-e9f5-49bf-afda-771a1a4bb5b7} + h;hh;hpp;hxx;hm;in;inl;inc;xsd + + + + + Include Headers + + + Include Headers + + + Include Headers + + + Include Headers + + + Include Headers + + + Include Headers + + + Include Headers + + + Include Headers + + + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + 
+ + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + Headers + + + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Source Files + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + Sources + + + diff --git a/mybulma/node_modules/node-sass/src/sass_context_wrapper.cpp b/mybulma/node_modules/node-sass/src/sass_context_wrapper.cpp new file mode 100644 index 0000000..aa25c79 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_context_wrapper.cpp @@ -0,0 +1,67 @@ +#include "sass_context_wrapper.h" + +extern "C" { + using namespace std; + + void compile_it(uv_work_t* req) { + sass_context_wrapper* ctx_w = (sass_context_wrapper*)req->data; + + if (ctx_w->dctx) { + compile_data(ctx_w->dctx); + } + else if (ctx_w->fctx) { + compile_file(ctx_w->fctx); + } + } + + void compile_data(struct Sass_Data_Context* dctx) { + sass_compile_data_context(dctx); + } + + void compile_file(struct Sass_File_Context* fctx) { + sass_compile_file_context(fctx); + } + + sass_context_wrapper* sass_make_context_wrapper() { + return (sass_context_wrapper*)calloc(1, sizeof(sass_context_wrapper)); + } + + void sass_free_context_wrapper(sass_context_wrapper* ctx_w) { + if (ctx_w->dctx) { + sass_delete_data_context(ctx_w->dctx); + } + else if (ctx_w->fctx) { + sass_delete_file_context(ctx_w->fctx); + } + if (ctx_w->async_resource) { + delete ctx_w->async_resource; + } + + delete ctx_w->error_callback; + delete ctx_w->success_callback; + + ctx_w->result.Reset(); + + free(ctx_w->include_path); + free(ctx_w->linefeed); + free(ctx_w->out_file); + free(ctx_w->source_map); + free(ctx_w->source_map_root); + free(ctx_w->indent); + + std::vector::iterator imp_it = ctx_w->importer_bridges.begin(); + while (imp_it != ctx_w->importer_bridges.end()) { + CustomImporterBridge* p = *imp_it; + imp_it = ctx_w->importer_bridges.erase(imp_it); + delete p; + } + std::vector::iterator func_it = ctx_w->function_bridges.begin(); + while (func_it != ctx_w->function_bridges.end()) { + CustomFunctionBridge* p = *func_it; + func_it = ctx_w->function_bridges.erase(func_it); + delete p; + } + + free(ctx_w); + } +} diff --git a/mybulma/node_modules/node-sass/src/sass_context_wrapper.h b/mybulma/node_modules/node-sass/src/sass_context_wrapper.h new file mode 100644 index 0000000..4aa3568 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_context_wrapper.h @@ -0,0 +1,57 @@ +#ifndef SASS_CONTEXT_WRAPPER +#define SASS_CONTEXT_WRAPPER + +#include +#include +#include +#include +#include +#include "custom_function_bridge.h" +#include "custom_importer_bridge.h" + +#ifdef __cplusplus +extern "C" { +#endif + + void compile_data(struct Sass_Data_Context* dctx); + void compile_file(struct Sass_File_Context* fctx); + void compile_it(uv_work_t* req); + + struct sass_context_wrapper { + // binding related + bool is_sync; + void* cookie; + char* file; + char* 
include_path; + char* out_file; + char* source_map; + char* source_map_root; + char* linefeed; + char* indent; + + // libsass related + Sass_Data_Context* dctx; + Sass_File_Context* fctx; + + // libuv related + uv_async_t async; + uv_work_t request; + + // v8 and nan related + Nan::Persistent result; + Nan::AsyncResource* async_resource; + Nan::Callback* error_callback; + Nan::Callback* success_callback; + + std::vector function_bridges; + std::vector importer_bridges; + }; + + struct sass_context_wrapper* sass_make_context_wrapper(void); + void sass_free_context_wrapper(struct sass_context_wrapper*); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/mybulma/node_modules/node-sass/src/sass_types/boolean.cpp b/mybulma/node_modules/node-sass/src/sass_types/boolean.cpp new file mode 100644 index 0000000..2d47932 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/boolean.cpp @@ -0,0 +1,73 @@ +#include +#include "boolean.h" + +namespace SassTypes +{ + Nan::Persistent Boolean::constructor; + bool Boolean::constructor_locked = false; + + Boolean::Boolean(bool _value) { + value = sass_make_boolean(_value); + } + + Boolean& Boolean::get_singleton(bool v) { + static Boolean instance_false(false), instance_true(true); + return v ? instance_true : instance_false; + } + + v8::Local Boolean::get_constructor() { + Nan::EscapableHandleScope scope; + v8::Local conslocal; + if (constructor.IsEmpty()) { + v8::Local tpl = Nan::New(New); + + tpl->SetClassName(Nan::New("SassBoolean").ToLocalChecked()); + tpl->InstanceTemplate()->SetInternalFieldCount(1); + Nan::SetPrototypeTemplate(tpl, "getValue", Nan::New(GetValue)); + + conslocal = Nan::GetFunction(tpl).ToLocalChecked(); + constructor.Reset(conslocal); + + get_singleton(false).js_object.Reset(Nan::NewInstance(conslocal).ToLocalChecked()); + Nan::SetInternalFieldPointer(Nan::New(get_singleton(false).js_object), 0, &get_singleton(false)); + Nan::Set(conslocal, Nan::New("FALSE").ToLocalChecked(), Nan::New(get_singleton(false).js_object)); + + get_singleton(true).js_object.Reset(Nan::NewInstance(conslocal).ToLocalChecked()); + Nan::SetInternalFieldPointer(Nan::New(get_singleton(true).js_object), 0, &get_singleton(true)); + Nan::Set(conslocal, Nan::New("TRUE").ToLocalChecked(), Nan::New(get_singleton(true).js_object)); + + constructor_locked = true; + } else { + conslocal = Nan::New(constructor); + } + + return scope.Escape(conslocal); + } + + v8::Local Boolean::get_js_object() { + return Nan::New(this->js_object); + } + + v8::Local Boolean::get_js_boolean() { + return sass_boolean_get_value(this->value) ? 
Nan::True() : Nan::False(); + } + + NAN_METHOD(Boolean::New) { + if (info.IsConstructCall()) { + if (constructor_locked) { + return Nan::ThrowTypeError("Cannot instantiate SassBoolean"); + } + } + else { + if (info.Length() != 1 || !info[0]->IsBoolean()) { + return Nan::ThrowTypeError("Expected one boolean argument"); + } + + info.GetReturnValue().Set(get_singleton(Nan::To(info[0]).FromJust()).get_js_object()); + } + } + + NAN_METHOD(Boolean::GetValue) { + info.GetReturnValue().Set(Boolean::Unwrap(info.This())->get_js_boolean()); + } +} diff --git a/mybulma/node_modules/node-sass/src/sass_types/boolean.h b/mybulma/node_modules/node-sass/src/sass_types/boolean.h new file mode 100644 index 0000000..721a41c --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/boolean.h @@ -0,0 +1,31 @@ +#ifndef SASS_TYPES_BOOLEAN_H +#define SASS_TYPES_BOOLEAN_H + +#include +#include "value.h" +#include "sass_value_wrapper.h" + +namespace SassTypes +{ + class Boolean : public SassTypes::Value { + public: + static Boolean& get_singleton(bool); + static v8::Local get_constructor(); + + v8::Local get_js_object(); + + static NAN_METHOD(New); + static NAN_METHOD(GetValue); + + private: + Boolean(bool); + + Nan::Persistent js_object; + + static Nan::Persistent constructor; + static bool constructor_locked; + v8::Local get_js_boolean(); + }; +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/sass_types/color.cpp b/mybulma/node_modules/node-sass/src/sass_types/color.cpp new file mode 100644 index 0000000..40358a2 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/color.cpp @@ -0,0 +1,127 @@ +#include +#include "color.h" + +namespace SassTypes +{ + Color::Color(Sass_Value* v) : SassValueWrapper(v) {} + + Sass_Value* Color::construct(const std::vector> raw_val, Sass_Value **out) { + double a = 1.0, r = 0, g = 0, b = 0; + unsigned argb; + + switch (raw_val.size()) { + case 1: + if (!raw_val[0]->IsNumber()) { + return fail("Only argument should be an integer.", out); + } + + argb = Nan::To(raw_val[0]).FromJust(); + a = (double)((argb >> 030) & 0xff) / 0xff; + r = (double)((argb >> 020) & 0xff); + g = (double)((argb >> 010) & 0xff); + b = (double)(argb & 0xff); + break; + + case 4: + if (!raw_val[3]->IsNumber()) { + return fail("Constructor arguments should be numbers exclusively.", out); + } + + a = Nan::To(raw_val[3]).FromJust(); + NODE_SASS_FALLTHROUGH; + + case 3: + if (!raw_val[0]->IsNumber() || !raw_val[1]->IsNumber() || !raw_val[2]->IsNumber()) { + return fail("Constructor arguments should be numbers exclusively.", out); + } + + r = Nan::To(raw_val[0]).FromJust(); + g = Nan::To(raw_val[1]).FromJust(); + b = Nan::To(raw_val[2]).FromJust(); + break; + + case 0: + break; + + default: + return fail("Constructor should be invoked with either 0, 1, 3 or 4 arguments.", out); + } + + return *out = sass_make_color(r, g, b, a); + } + + void Color::initPrototype(v8::Local proto) { + Nan::SetPrototypeMethod(proto, "getR", GetR); + Nan::SetPrototypeMethod(proto, "getG", GetG); + Nan::SetPrototypeMethod(proto, "getB", GetB); + Nan::SetPrototypeMethod(proto, "getA", GetA); + Nan::SetPrototypeMethod(proto, "setR", SetR); + Nan::SetPrototypeMethod(proto, "setG", SetG); + Nan::SetPrototypeMethod(proto, "setB", SetB); + Nan::SetPrototypeMethod(proto, "setA", SetA); + } + + NAN_METHOD(Color::GetR) { + info.GetReturnValue().Set(sass_color_get_r(Color::Unwrap(info.This())->value)); + } + + NAN_METHOD(Color::GetG) { + info.GetReturnValue().Set(sass_color_get_g(Color::Unwrap(info.This())->value)); + } 
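// --- Illustration only; not part of the node-sass patch. ---
// Color::construct's single-argument branch (above) unpacks a packed
// 0xAARRGGBB integer; the 030/020/010 shift amounts are octal for 24, 16
// and 8. A standalone sketch of the same unpacking, assuming only <cstdio>:
#include <cstdio>
static void unpack_argb_example(unsigned argb) {   // e.g. argb = 0x80ff0000
  double a = ((argb >> 24) & 0xff) / 255.0;        // alpha scaled to 0..1 (~0.5 here)
  double r = (argb >> 16) & 0xff;                  // red   (255 here)
  double g = (argb >> 8) & 0xff;                   // green (0 here)
  double b = argb & 0xff;                          // blue  (0 here)
  std::printf("rgba(%g, %g, %g, %g)\n", r, g, b, a);
}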
+ + NAN_METHOD(Color::GetB) { + info.GetReturnValue().Set(sass_color_get_b(Color::Unwrap(info.This())->value)); + } + + NAN_METHOD(Color::GetA) { + info.GetReturnValue().Set(sass_color_get_a(Color::Unwrap(info.This())->value)); + } + + NAN_METHOD(Color::SetR) { + if (info.Length() != 1) { + return Nan::ThrowTypeError("Expected just one argument"); + } + + if (!info[0]->IsNumber()) { + return Nan::ThrowTypeError("Supplied value should be a number"); + } + + sass_color_set_r(Color::Unwrap(info.This())->value, Nan::To(info[0]).FromJust()); + } + + NAN_METHOD(Color::SetG) { + if (info.Length() != 1) { + return Nan::ThrowTypeError("Expected just one argument"); + } + + if (!info[0]->IsNumber()) { + return Nan::ThrowTypeError("Supplied value should be a number"); + } + + sass_color_set_g(Color::Unwrap(info.This())->value, Nan::To(info[0]).FromJust()); + } + + NAN_METHOD(Color::SetB) { + if (info.Length() != 1) { + return Nan::ThrowTypeError("Expected just one argument"); + } + + if (!info[0]->IsNumber()) { + return Nan::ThrowTypeError("Supplied value should be a number"); + } + + sass_color_set_b(Color::Unwrap(info.This())->value, Nan::To(info[0]).FromJust()); + } + + NAN_METHOD(Color::SetA) { + if (info.Length() != 1) { + return Nan::ThrowTypeError("Expected just one argument"); + } + + if (!info[0]->IsNumber()) { + return Nan::ThrowTypeError("Supplied value should be a number"); + } + + sass_color_set_a(Color::Unwrap(info.This())->value, Nan::To(info[0]).FromJust()); + } +} diff --git a/mybulma/node_modules/node-sass/src/sass_types/color.h b/mybulma/node_modules/node-sass/src/sass_types/color.h new file mode 100644 index 0000000..1bf9043 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/color.h @@ -0,0 +1,34 @@ +#ifndef SASS_TYPES_COLOR_H +#define SASS_TYPES_COLOR_H + +#include +#include "sass_value_wrapper.h" + +#if defined(__GNUC__) && __GNUC__ >= 7 +#define NODE_SASS_FALLTHROUGH __attribute__ ((fallthrough)) +#else +#define NODE_SASS_FALLTHROUGH +#endif + +namespace SassTypes +{ + class Color : public SassValueWrapper { + public: + Color(Sass_Value*); + static char const* get_constructor_name() { return "SassColor"; } + static Sass_Value* construct(const std::vector>, Sass_Value **); + + static void initPrototype(v8::Local); + + static NAN_METHOD(GetR); + static NAN_METHOD(GetG); + static NAN_METHOD(GetB); + static NAN_METHOD(GetA); + static NAN_METHOD(SetR); + static NAN_METHOD(SetG); + static NAN_METHOD(SetB); + static NAN_METHOD(SetA); + }; +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/sass_types/error.cpp b/mybulma/node_modules/node-sass/src/sass_types/error.cpp new file mode 100644 index 0000000..03c6307 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/error.cpp @@ -0,0 +1,24 @@ +#include +#include "error.h" +#include "../create_string.h" + +namespace SassTypes +{ + Error::Error(Sass_Value* v) : SassValueWrapper(v) {} + + Sass_Value* Error::construct(const std::vector> raw_val, Sass_Value **out) { + char const* value = ""; + + if (raw_val.size() >= 1) { + if (!raw_val[0]->IsString()) { + return fail("Argument should be a string.", out); + } + + value = create_string(raw_val[0]); + } + + return *out = sass_make_error(value); + } + + void Error::initPrototype(v8::Local) {} +} diff --git a/mybulma/node_modules/node-sass/src/sass_types/error.h b/mybulma/node_modules/node-sass/src/sass_types/error.h new file mode 100644 index 0000000..01786fd --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/error.h @@ -0,0 +1,19 @@ +#ifndef 
SASS_TYPES_ERROR_H +#define SASS_TYPES_ERROR_H + +#include +#include "sass_value_wrapper.h" + +namespace SassTypes +{ + class Error : public SassValueWrapper { + public: + Error(Sass_Value*); + static char const* get_constructor_name() { return "SassError"; } + static Sass_Value* construct(const std::vector>, Sass_Value **); + + static void initPrototype(v8::Local); + }; +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/sass_types/factory.cpp b/mybulma/node_modules/node-sass/src/sass_types/factory.cpp new file mode 100644 index 0000000..c650710 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/factory.cpp @@ -0,0 +1,72 @@ +#include +#include "factory.h" +#include "value.h" +#include "number.h" +#include "string.h" +#include "color.h" +#include "boolean.h" +#include "list.h" +#include "map.h" +#include "null.h" +#include "error.h" + +namespace SassTypes +{ + SassTypes::Value* Factory::create(Sass_Value* v) { + switch (sass_value_get_tag(v)) { + case SASS_NUMBER: + return new Number(v); + + case SASS_STRING: + return new String(v); + + case SASS_COLOR: + return new Color(v); + + case SASS_BOOLEAN: + return &Boolean::get_singleton(sass_boolean_get_value(v)); + + case SASS_LIST: + return new List(v); + + case SASS_MAP: + return new Map(v); + + case SASS_NULL: + return &Null::get_singleton(); + + case SASS_ERROR: + return new Error(v); + + default: + const char *msg = "Unknown type encountered."; + Nan::ThrowTypeError(msg); + return new Error(sass_make_error(msg)); + } + } + + NAN_MODULE_INIT(Factory::initExports) { + Nan::HandleScope scope; + v8::Local types = Nan::New(); + + Nan::Set(types, Nan::New("Number").ToLocalChecked(), Number::get_constructor()); + Nan::Set(types, Nan::New("String").ToLocalChecked(), String::get_constructor()); + Nan::Set(types, Nan::New("Color").ToLocalChecked(), Color::get_constructor()); + Nan::Set(types, Nan::New("Boolean").ToLocalChecked(), Boolean::get_constructor()); + Nan::Set(types, Nan::New("List").ToLocalChecked(), List::get_constructor()); + Nan::Set(types, Nan::New("Map").ToLocalChecked(), Map::get_constructor()); + Nan::Set(types, Nan::New("Null").ToLocalChecked(), Null::get_constructor()); + Nan::Set(types, Nan::New("Error").ToLocalChecked(), Error::get_constructor()); + Nan::Set(target, Nan::New("types").ToLocalChecked(), types); + } + + Value* Factory::unwrap(v8::Local obj) { + if (obj->IsObject()) { + v8::Local v8_obj = obj.As(); + if (v8_obj->InternalFieldCount() == 1) { + return SassTypes::Value::Unwrap(v8_obj); + } + } + return NULL; + } +} diff --git a/mybulma/node_modules/node-sass/src/sass_types/factory.h b/mybulma/node_modules/node-sass/src/sass_types/factory.h new file mode 100644 index 0000000..27b7e3f --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/factory.h @@ -0,0 +1,20 @@ +#ifndef SASS_TYPES_FACTORY_H +#define SASS_TYPES_FACTORY_H + +#include +#include +#include "value.h" + +namespace SassTypes +{ + // This is the guru that knows everything about instantiating the right subclass of SassTypes::Value + // to wrap a given Sass_Value object. 
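// Usage sketch (illustration only, not part of this header). The helper
// names to_js/from_js are hypothetical; only Factory and Value members
// declared elsewhere in this diff are used:
//
//   v8::Local<v8::Value> to_js(Sass_Value* v) {
//     return SassTypes::Factory::create(v)->get_js_object();   // tag-based dispatch
//   }
//   Sass_Value* from_js(v8::Local<v8::Value> js) {
//     SassTypes::Value* wrapped = SassTypes::Factory::unwrap(js);
//     return wrapped ? wrapped->get_sass_value()               // returns a clone
//                    : sass_make_error("expected a SassValue object");
//   }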
+ class Factory { + public: + static NAN_MODULE_INIT(initExports); + static Value* create(Sass_Value*); + static Value* unwrap(v8::Local); + }; +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/sass_types/list.cpp b/mybulma/node_modules/node-sass/src/sass_types/list.cpp new file mode 100644 index 0000000..4c946ec --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/list.cpp @@ -0,0 +1,101 @@ +#include +#include "list.h" + +namespace SassTypes +{ + List::List(Sass_Value* v) : SassValueWrapper(v) {} + + Sass_Value* List::construct(const std::vector> raw_val, Sass_Value **out) { + size_t length = 0; + bool comma = true; + bool is_bracketed = false; + + if (raw_val.size() >= 1) { + if (!raw_val[0]->IsNumber()) { + return fail("First argument should be an integer.", out); + } + + length = Nan::To(raw_val[0]).FromJust(); + + if (raw_val.size() >= 2) { + if (!raw_val[1]->IsBoolean()) { + return fail("Second argument should be a boolean.", out); + } + + comma = Nan::To(raw_val[1]).FromJust(); + } + } + + return *out = sass_make_list(length, comma ? SASS_COMMA : SASS_SPACE, is_bracketed); + } + + void List::initPrototype(v8::Local proto) { + Nan::SetPrototypeMethod(proto, "getLength", GetLength); + Nan::SetPrototypeMethod(proto, "getSeparator", GetSeparator); + Nan::SetPrototypeMethod(proto, "setSeparator", SetSeparator); + Nan::SetPrototypeMethod(proto, "getValue", GetValue); + Nan::SetPrototypeMethod(proto, "setValue", SetValue); + } + + NAN_METHOD(List::GetValue) { + + if (info.Length() != 1) { + return Nan::ThrowTypeError("Expected just one argument"); + } + + if (!info[0]->IsNumber()) { + return Nan::ThrowTypeError("Supplied index should be an integer"); + } + + Sass_Value* list = List::Unwrap(info.This())->value; + size_t index = Nan::To(info[0]).FromJust(); + + + if (index >= sass_list_get_length(list)) { + return Nan::ThrowRangeError(Nan::New("Out of bound index").ToLocalChecked()); + } + + info.GetReturnValue().Set(Factory::create(sass_list_get_value(list, Nan::To(info[0]).FromJust()))->get_js_object()); + } + + NAN_METHOD(List::SetValue) { + if (info.Length() != 2) { + return Nan::ThrowTypeError("Expected two arguments"); + } + + if (!info[0]->IsNumber()) { + return Nan::ThrowTypeError("Supplied index should be an integer"); + } + + if (!info[1]->IsObject()) { + return Nan::ThrowTypeError("Supplied value should be a SassValue object"); + } + + Value* sass_value = Factory::unwrap(info[1]); + if (sass_value) { + sass_list_set_value(List::Unwrap(info.This())->value, Nan::To(info[0]).FromJust(), sass_value->get_sass_value()); + } else { + Nan::ThrowTypeError("A SassValue is expected as the list item"); + } + } + + NAN_METHOD(List::GetSeparator) { + info.GetReturnValue().Set(sass_list_get_separator(List::Unwrap(info.This())->value) == SASS_COMMA); + } + + NAN_METHOD(List::SetSeparator) { + if (info.Length() != 1) { + return Nan::ThrowTypeError("Expected just one argument"); + } + + if (!info[0]->IsBoolean()) { + return Nan::ThrowTypeError("Supplied value should be a boolean"); + } + + sass_list_set_separator(List::Unwrap(info.This())->value, Nan::To(info[0]).FromJust() ? 
SASS_COMMA : SASS_SPACE); + } + + NAN_METHOD(List::GetLength) { + info.GetReturnValue().Set(Nan::New(sass_list_get_length(List::Unwrap(info.This())->value))); + } +} diff --git a/mybulma/node_modules/node-sass/src/sass_types/list.h b/mybulma/node_modules/node-sass/src/sass_types/list.h new file mode 100644 index 0000000..c43b754 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/list.h @@ -0,0 +1,25 @@ +#ifndef SASS_TYPES_LIST_H +#define SASS_TYPES_LIST_H + +#include +#include "sass_value_wrapper.h" + +namespace SassTypes +{ + class List : public SassValueWrapper { + public: + List(Sass_Value*); + static char const* get_constructor_name() { return "SassList"; } + static Sass_Value* construct(const std::vector>, Sass_Value **); + + static void initPrototype(v8::Local); + + static NAN_METHOD(GetValue); + static NAN_METHOD(SetValue); + static NAN_METHOD(GetSeparator); + static NAN_METHOD(SetSeparator); + static NAN_METHOD(GetLength); + }; +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/sass_types/map.cpp b/mybulma/node_modules/node-sass/src/sass_types/map.cpp new file mode 100644 index 0000000..ae4a260 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/map.cpp @@ -0,0 +1,118 @@ +#include +#include "map.h" + +namespace SassTypes +{ + Map::Map(Sass_Value* v) : SassValueWrapper(v) {} + + Sass_Value* Map::construct(const std::vector> raw_val, Sass_Value **out) { + size_t length = 0; + + if (raw_val.size() >= 1) { + if (!raw_val[0]->IsNumber()) { + return fail("First argument should be an integer.", out); + } + + length = Nan::To(raw_val[0]).FromJust(); + } + + return *out = sass_make_map(length); + } + + void Map::initPrototype(v8::Local proto) { + Nan::SetPrototypeMethod(proto, "getLength", GetLength); + Nan::SetPrototypeMethod(proto, "getKey", GetKey); + Nan::SetPrototypeMethod(proto, "setKey", SetKey); + Nan::SetPrototypeMethod(proto, "getValue", GetValue); + Nan::SetPrototypeMethod(proto, "setValue", SetValue); + } + + NAN_METHOD(Map::GetValue) { + + if (info.Length() != 1) { + return Nan::ThrowTypeError("Expected just one argument"); + } + + if (!info[0]->IsNumber()) { + return Nan::ThrowTypeError("Supplied index should be an integer"); + } + + Sass_Value* map = Map::Unwrap(info.This())->value; + size_t index = Nan::To(info[0]).FromJust(); + + + if (index >= sass_map_get_length(map)) { + return Nan::ThrowRangeError(Nan::New("Out of bound index").ToLocalChecked()); + } + + info.GetReturnValue().Set(Factory::create(sass_map_get_value(map, Nan::To(info[0]).FromJust()))->get_js_object()); + } + + NAN_METHOD(Map::SetValue) { + if (info.Length() != 2) { + return Nan::ThrowTypeError("Expected two arguments"); + } + + if (!info[0]->IsNumber()) { + return Nan::ThrowTypeError("Supplied index should be an integer"); + } + + if (!info[1]->IsObject()) { + return Nan::ThrowTypeError("Supplied value should be a SassValue object"); + } + + Value* sass_value = Factory::unwrap(info[1]); + if (sass_value) { + sass_map_set_value(Map::Unwrap(info.This())->value, Nan::To(info[0]).FromJust(), sass_value->get_sass_value()); + } else { + Nan::ThrowTypeError("A SassValue is expected as a map value"); + } + } + + NAN_METHOD(Map::GetKey) { + if (info.Length() != 1) { + return Nan::ThrowTypeError("Expected just one argument"); + } + + if (!info[0]->IsNumber()) { + return Nan::ThrowTypeError("Supplied index should be an integer"); + } + + Sass_Value* map = Map::Unwrap(info.This())->value; + size_t index = Nan::To(info[0]).FromJust(); + + + if (index >= sass_map_get_length(map)) 
{ + return Nan::ThrowRangeError(Nan::New("Out of bound index").ToLocalChecked()); + } + + SassTypes::Value* obj = Factory::create(sass_map_get_key(map, Nan::To(info[0]).FromJust())); + v8::Local js_obj = obj->get_js_object(); + info.GetReturnValue().Set(js_obj); + } + + NAN_METHOD(Map::SetKey) { + if (info.Length() != 2) { + return Nan::ThrowTypeError("Expected two arguments"); + } + + if (!info[0]->IsNumber()) { + return Nan::ThrowTypeError("Supplied index should be an integer"); + } + + if (!info[1]->IsObject()) { + return Nan::ThrowTypeError("Supplied value should be a SassValue object"); + } + + Value* sass_value = Factory::unwrap(info[1]); + if (sass_value) { + sass_map_set_key(Map::Unwrap(info.This())->value, Nan::To(info[0]).FromJust(), sass_value->get_sass_value()); + } else { + Nan::ThrowTypeError("A SassValue is expected as a map key"); + } + } + + NAN_METHOD(Map::GetLength) { + info.GetReturnValue().Set(Nan::New(sass_map_get_length(Map::Unwrap(info.This())->value))); + } +} diff --git a/mybulma/node_modules/node-sass/src/sass_types/map.h b/mybulma/node_modules/node-sass/src/sass_types/map.h new file mode 100644 index 0000000..832585d --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/map.h @@ -0,0 +1,25 @@ +#ifndef SASS_TYPES_MAP_H +#define SASS_TYPES_MAP_H + +#include +#include "sass_value_wrapper.h" + +namespace SassTypes +{ + class Map : public SassValueWrapper { + public: + Map(Sass_Value*); + static char const* get_constructor_name() { return "SassMap"; } + static Sass_Value* construct(const std::vector>, Sass_Value **); + + static void initPrototype(v8::Local); + + static NAN_METHOD(GetValue); + static NAN_METHOD(SetValue); + static NAN_METHOD(GetKey); + static NAN_METHOD(SetKey); + static NAN_METHOD(GetLength); + }; +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/sass_types/null.cpp b/mybulma/node_modules/node-sass/src/sass_types/null.cpp new file mode 100644 index 0000000..69f4c21 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/null.cpp @@ -0,0 +1,57 @@ +#include +#include "null.h" + +namespace SassTypes +{ + Nan::Persistent Null::constructor; + bool Null::constructor_locked = false; + + Null::Null() { + value = sass_make_null(); + } + + Null& Null::get_singleton() { + static Null singleton_instance; + return singleton_instance; + } + + v8::Local Null::get_constructor() { + Nan::EscapableHandleScope scope; + v8::Local conslocal; + if (constructor.IsEmpty()) { + v8::Local tpl = Nan::New(New); + + tpl->SetClassName(Nan::New("SassNull").ToLocalChecked()); + tpl->InstanceTemplate()->SetInternalFieldCount(1); + + conslocal = Nan::GetFunction(tpl).ToLocalChecked(); + constructor.Reset(conslocal); + + get_singleton().js_object.Reset(Nan::NewInstance(conslocal).ToLocalChecked()); + Nan::SetInternalFieldPointer(Nan::New(get_singleton().js_object), 0, &get_singleton()); + Nan::Set(conslocal, Nan::New("NULL").ToLocalChecked(), Nan::New(get_singleton().js_object)); + + constructor_locked = true; + } else { + conslocal = Nan::New(constructor); + } + + return scope.Escape(conslocal); + } + + v8::Local Null::get_js_object() { + return Nan::New(this->js_object); + } + + NAN_METHOD(Null::New) { + + if (info.IsConstructCall()) { + if (constructor_locked) { + return Nan::ThrowTypeError("Cannot instantiate SassNull"); + } + } + else { + info.GetReturnValue().Set(get_singleton().get_js_object()); + } + } +} diff --git a/mybulma/node_modules/node-sass/src/sass_types/null.h b/mybulma/node_modules/node-sass/src/sass_types/null.h new file mode 100644 
index 0000000..15b64ba --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/null.h @@ -0,0 +1,29 @@ +#ifndef SASS_TYPES_NULL_H +#define SASS_TYPES_NULL_H + +#include +#include "value.h" + +namespace SassTypes +{ + class Null : public SassTypes::Value { + public: + static Null& get_singleton(); + static v8::Local get_constructor(); + + Sass_Value* get_sass_value(); + v8::Local get_js_object(); + + static NAN_METHOD(New); + + private: + Null(); + + Nan::Persistent js_object; + + static Nan::Persistent constructor; + static bool constructor_locked; + }; +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/sass_types/number.cpp b/mybulma/node_modules/node-sass/src/sass_types/number.cpp new file mode 100644 index 0000000..d8d303e --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/number.cpp @@ -0,0 +1,75 @@ +#include +#include "number.h" +#include "../create_string.h" + +namespace SassTypes +{ + Number::Number(Sass_Value* v) : SassValueWrapper(v) {} + + Sass_Value* Number::construct(const std::vector> raw_val, Sass_Value **out) { + double value = 0; + char const* unit = ""; + + if (raw_val.size() >= 1) { + if (!raw_val[0]->IsNumber()) { + return fail("First argument should be a number.", out); + } + + value = Nan::To(raw_val[0]).FromJust(); + + if (raw_val.size() >= 2) { + if (!raw_val[1]->IsString()) { + return fail("Second argument should be a string.", out); + } + + unit = create_string(raw_val[1]); + *out = sass_make_number(value, unit); + delete unit; + return *out; + + } + } + + return *out = sass_make_number(value, unit); + } + + void Number::initPrototype(v8::Local proto) { + Nan::SetPrototypeMethod(proto, "getValue", GetValue); + Nan::SetPrototypeMethod(proto, "getUnit", GetUnit); + Nan::SetPrototypeMethod(proto, "setValue", SetValue); + Nan::SetPrototypeMethod(proto, "setUnit", SetUnit); + } + + NAN_METHOD(Number::GetValue) { + info.GetReturnValue().Set(Nan::New(sass_number_get_value(Number::Unwrap(info.This())->value))); + } + + NAN_METHOD(Number::GetUnit) { + info.GetReturnValue().Set(Nan::New(sass_number_get_unit(Number::Unwrap(info.This())->value)).ToLocalChecked()); + } + + NAN_METHOD(Number::SetValue) { + + if (info.Length() != 1) { + return Nan::ThrowTypeError("Expected just one argument"); + } + + if (!info[0]->IsNumber()) { + return Nan::ThrowTypeError("Supplied value should be a number"); + } + + sass_number_set_value(Number::Unwrap(info.This())->value, Nan::To(info[0]).FromJust()); + } + + NAN_METHOD(Number::SetUnit) { + if (info.Length() != 1) { + return Nan::ThrowTypeError("Expected just one argument"); + } + + if (!info[0]->IsString()) { + return Nan::ThrowTypeError("Supplied value should be a string"); + } + + sass_number_set_unit(Number::Unwrap(info.This())->value, create_string(info[0])); + } +} diff --git a/mybulma/node_modules/node-sass/src/sass_types/number.h b/mybulma/node_modules/node-sass/src/sass_types/number.h new file mode 100644 index 0000000..48a0236 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/number.h @@ -0,0 +1,25 @@ +#ifndef SASS_TYPES_NUMBER_H +#define SASS_TYPES_NUMBER_H + +#include +#include "sass_value_wrapper.h" + +namespace SassTypes +{ + + class Number : public SassValueWrapper { + public: + Number(Sass_Value*); + static char const* get_constructor_name() { return "SassNumber"; } + static Sass_Value* construct(const std::vector>, Sass_Value **out); + + static void initPrototype(v8::Local); + + static NAN_METHOD(GetValue); + static NAN_METHOD(GetUnit); + static NAN_METHOD(SetValue); + static 
NAN_METHOD(SetUnit); + }; +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/sass_types/sass_value_wrapper.h b/mybulma/node_modules/node-sass/src/sass_types/sass_value_wrapper.h new file mode 100644 index 0000000..52a3511 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/sass_value_wrapper.h @@ -0,0 +1,101 @@ +#ifndef SASS_TYPES_SASS_VALUE_WRAPPER_H +#define SASS_TYPES_SASS_VALUE_WRAPPER_H + +#include +#include +#include +#include "value.h" +#include "factory.h" + +namespace SassTypes +{ + // Include this in any SassTypes::Value subclasses to handle all the heavy lifting of constructing JS + // objects and wrapping sass values inside them + template + /* class SassValueWrapper : public SassTypes::Value { */ + class SassValueWrapper : public SassTypes::Value { + public: + static char const* get_constructor_name() { return "SassValue"; } + + SassValueWrapper(Sass_Value* v) : Value(v) { } + v8::Local get_js_object(); + + static v8::Local get_constructor(); + static v8::Local get_constructor_template(); + static NAN_METHOD(New); + static Sass_Value *fail(const char *, Sass_Value **); + + /* private: */ + static Nan::Persistent constructor; + }; + + template + Nan::Persistent SassValueWrapper::constructor; + + template + v8::Local SassValueWrapper::get_js_object() { + if (this->persistent().IsEmpty()) { + v8::Local wrapper = Nan::NewInstance(T::get_constructor()).ToLocalChecked(); + this->Wrap(wrapper); + } + + return this->handle(); + } + + template + v8::Local SassValueWrapper::get_constructor_template() { + Nan::EscapableHandleScope scope; + v8::Local tpl = Nan::New(New); + tpl->SetClassName(Nan::New(T::get_constructor_name()).ToLocalChecked()); + tpl->InstanceTemplate()->SetInternalFieldCount(1); + T::initPrototype(tpl); + + return scope.Escape(tpl); + } + + template + v8::Local SassValueWrapper::get_constructor() { + if (constructor.IsEmpty()) { + constructor.Reset(Nan::GetFunction(T::get_constructor_template()).ToLocalChecked()); + } + + return Nan::New(constructor); + } + + template + NAN_METHOD(SassValueWrapper::New) { + std::vector> localArgs(info.Length()); + + for (auto i = 0; i < info.Length(); ++i) { + localArgs[i] = info[i]; + } + if (info.IsConstructCall()) { + Sass_Value* value; + if (T::construct(localArgs, &value) != NULL) { + T* obj = new T(value); + sass_delete_value(value); + + obj->Wrap(info.This()); + info.GetReturnValue().Set(info.This()); + } else { + return Nan::ThrowError(Nan::New(sass_error_get_message(value)).ToLocalChecked()); + } + } else { + v8::Local cons = T::get_constructor(); + v8::Local inst; + if (Nan::NewInstance(cons, info.Length(), &localArgs[0]).ToLocal(&inst)) { + info.GetReturnValue().Set(inst); + } else { + info.GetReturnValue().Set(Nan::Undefined()); + } + } + } + + template + Sass_Value *SassValueWrapper::fail(const char *reason, Sass_Value **out) { + *out = sass_make_error(reason); + return NULL; + } +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/sass_types/string.cpp b/mybulma/node_modules/node-sass/src/sass_types/string.cpp new file mode 100644 index 0000000..c6f2c48 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/string.cpp @@ -0,0 +1,48 @@ +#include +#include "string.h" +#include "../create_string.h" + +namespace SassTypes +{ + String::String(Sass_Value* v) : SassValueWrapper(v) {} + + Sass_Value* String::construct(const std::vector> raw_val, Sass_Value **out) { + char const* value = ""; + + if (raw_val.size() >= 1) { + if (!raw_val[0]->IsString()) { + return fail("Argument should be a 
string.", out); + } + + value = create_string(raw_val[0]); + *out = sass_make_string(value); + delete value; + return *out; + + } else { + return *out = sass_make_string(value); + } + + } + + void String::initPrototype(v8::Local proto) { + Nan::SetPrototypeMethod(proto, "getValue", GetValue); + Nan::SetPrototypeMethod(proto, "setValue", SetValue); + } + + NAN_METHOD(String::GetValue) { + info.GetReturnValue().Set(Nan::New(sass_string_get_value(String::Unwrap(info.This())->value)).ToLocalChecked()); + } + + NAN_METHOD(String::SetValue) { + if (info.Length() != 1) { + return Nan::ThrowTypeError("Expected just one argument"); + } + + if (!info[0]->IsString()) { + return Nan::ThrowTypeError("Supplied value should be a string"); + } + + sass_string_set_value(String::Unwrap(info.This())->value, create_string(info[0])); + } +} diff --git a/mybulma/node_modules/node-sass/src/sass_types/string.h b/mybulma/node_modules/node-sass/src/sass_types/string.h new file mode 100644 index 0000000..2e72c82 --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/string.h @@ -0,0 +1,22 @@ +#ifndef SASS_TYPES_STRING_H +#define SASS_TYPES_STRING_H + +#include +#include "sass_value_wrapper.h" + +namespace SassTypes +{ + class String : public SassValueWrapper { + public: + String(Sass_Value*); + static char const* get_constructor_name() { return "SassString"; } + static Sass_Value* construct(const std::vector>, Sass_Value **); + + static void initPrototype(v8::Local); + + static NAN_METHOD(GetValue); + static NAN_METHOD(SetValue); + }; +} + +#endif diff --git a/mybulma/node_modules/node-sass/src/sass_types/value.h b/mybulma/node_modules/node-sass/src/sass_types/value.h new file mode 100644 index 0000000..fa4703c --- /dev/null +++ b/mybulma/node_modules/node-sass/src/sass_types/value.h @@ -0,0 +1,42 @@ +#ifndef SASS_TYPES_VALUE_H +#define SASS_TYPES_VALUE_H + +#include +#include + +namespace SassTypes +{ + // This is the interface that all sass values must comply with + class Value : public Nan::ObjectWrap { + + public: + virtual v8::Local get_js_object() =0; + + Value() { + + } + + Sass_Value* get_sass_value() { + return sass_clone_value(this->value); + } + + protected: + + Sass_Value* value; + + Value(Sass_Value* v) { + this->value = sass_clone_value(v); + } + + ~Value() { + sass_delete_value(this->value); + } + + static Sass_Value* fail(const char *reason, Sass_Value **out) { + *out = sass_make_error(reason); + return NULL; + } + }; +} + +#endif diff --git a/mybulma/node_modules/node-sass/test/api.js b/mybulma/node_modules/node-sass/test/api.js new file mode 100644 index 0000000..164a1c7 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/api.js @@ -0,0 +1,2035 @@ +/*eslint new-cap: ["error", {"capIsNewExceptions": ["Color"]}]*/ + +var assert = require('assert').strict, + fs = require('fs'), + path = require('path'), + read = fs.readFileSync, + sassPath = process.env.NODESASS_COV + ? 
require.resolve('../lib-cov') + : require.resolve('../lib'), + sass = require(sassPath), + fixture = path.join.bind(null, __dirname, 'fixtures'), + resolveFixture = path.resolve.bind(null, __dirname, 'fixtures'); + +describe('api', function() { + + describe('.render(options, callback)', function() { + + beforeEach(function() { + delete process.env.SASS_PATH; + }); + + it('should compile sass to css with file', function(done) { + var expected = read(fixture('simple/expected.css'), 'utf8').trim(); + + sass.render({ + file: fixture('simple/index.scss') + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + }); + + it('should compile sass to css with outFile set to absolute url', function(done) { + sass.render({ + file: fixture('simple/index.scss'), + sourceMap: true, + outFile: fixture('simple/index-test.css') + }, function(error, result) { + assert.strictEqual(JSON.parse(result.map).file, 'index-test.css'); + done(); + }); + }); + + it('should compile sass to css with outFile set to relative url', function(done) { + sass.render({ + file: fixture('simple/index.scss'), + sourceMap: true, + outFile: './index-test.css' + }, function(error, result) { + assert.strictEqual(JSON.parse(result.map).file, 'index-test.css'); + done(); + }); + }); + + it('should compile sass to css with outFile and sourceMap set to relative url', function(done) { + sass.render({ + file: fixture('simple/index.scss'), + sourceMap: './deep/nested/index.map', + outFile: './index-test.css' + }, function(error, result) { + assert.strictEqual(JSON.parse(result.map).file, '../../index-test.css'); + done(); + }); + }); + + it('should not generate source map when not requested', function(done) { + sass.render({ + file: fixture('simple/index.scss'), + sourceMap: false + }, function(error, result) { + assert.strictEqual(Object.prototype.hasOwnProperty.call(result, 'map'), false, 'result has a map property'); + done(); + }); + }); + + it('should not generate source map without outFile and no explicit path given', function(done) { + sass.render({ + file: fixture('simple/index.scss'), + sourceMap: true + }, function(error, result) { + assert.strictEqual(Object.prototype.hasOwnProperty.call(result, 'map'), false, 'result has a map property'); + done(); + }); + }); + + it('should compile generate map with sourceMapRoot pass-through option', function(done) { + sass.render({ + file: fixture('simple/index.scss'), + sourceMap: './deep/nested/index.map', + sourceMapRoot: 'http://test.com/', + outFile: './index-test.css' + }, function(error, result) { + assert.strictEqual(JSON.parse(result.map).sourceRoot, 'http://test.com/'); + done(); + }); + }); + + it('should compile sass to css with data', function(done) { + var src = read(fixture('simple/index.scss'), 'utf8'); + var expected = read(fixture('simple/expected.css'), 'utf8').trim(); + + sass.render({ + data: src + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + }); + + it('should compile sass to css using indented syntax', function(done) { + var src = read(fixture('indent/index.sass'), 'utf8'); + var expected = read(fixture('indent/expected.css'), 'utf8').trim(); + + sass.render({ + data: src, + indentedSyntax: true + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + }); + + it('should NOT compile empty data string', function(done) { + sass.render({ + 
data: '' + }, function(error) { + assert.strictEqual(error.message, 'No input specified: provide a file name or a source string to process'); + done(); + }); + }); + + it('should NOT compile without any input', function(done) { + sass.render({ }, function(error) { + assert.strictEqual(error.message, 'No input specified: provide a file name or a source string to process'); + done(); + }); + }); + + it('should returnn error status 1 for bad input', function(done) { + sass.render({ + data: '#navbar width 80%;' + }, function(error) { + assert(error.message); + assert.strictEqual(error.status, 1); + done(); + }); + }); + + it('should compile with include paths', function(done) { + var src = read(fixture('include-path/index.scss'), 'utf8'); + var expected = read(fixture('include-path/expected.css'), 'utf8').trim(); + + sass.render({ + data: src, + includePaths: [ + fixture('include-path/functions'), + fixture('include-path/lib') + ] + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + }); + + it('should add cwd to the front on include paths', function(done) { + var src = fixture('cwd-include-path/root/index.scss'); + var expected = read(fixture('cwd-include-path/expected.css'), 'utf8').trim(); + var cwd = process.cwd(); + + process.chdir(fixture('cwd-include-path')); + sass.render({ + file: src, + includePaths: [] + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), expected.replace(/\r\n/g, '\n')); + + process.chdir(cwd); + done(); + }); + }); + + it('should check SASS_PATH in the specified order', function(done) { + var src = read(fixture('sass-path/index.scss'), 'utf8'); + var expectedRed = read(fixture('sass-path/expected-red.css'), 'utf8').trim(); + var expectedOrange = read(fixture('sass-path/expected-orange.css'), 'utf8').trim(); + + var envIncludes = [ + fixture('sass-path/red'), + fixture('sass-path/orange') + ]; + + process.env.SASS_PATH = envIncludes.join(path.delimiter); + sass.render({ + data: src, + includePaths: [] + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), expectedRed.replace(/\r\n/g, '\n')); + }); + + process.env.SASS_PATH = envIncludes.reverse().join(path.delimiter); + sass.render({ + data: src, + includePaths: [] + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), expectedOrange.replace(/\r\n/g, '\n')); + done(); + }); + }); + + it('should prefer include path over SASS_PATH', function(done) { + var src = read(fixture('sass-path/index.scss'), 'utf8'); + var expectedRed = read(fixture('sass-path/expected-red.css'), 'utf8').trim(); + var expectedOrange = read(fixture('sass-path/expected-orange.css'), 'utf8').trim(); + + var envIncludes = [ + fixture('sass-path/red') + ]; + process.env.SASS_PATH = envIncludes.join(path.delimiter); + + sass.render({ + data: src, + includePaths: [] + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), expectedRed.replace(/\r\n/g, '\n')); + }); + sass.render({ + data: src, + includePaths: [fixture('sass-path/orange')] + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), expectedOrange.replace(/\r\n/g, '\n')); + done(); + }); + }); + + it('should render with precision option', function(done) { + var src = read(fixture('precision/index.scss'), 'utf8'); + var expected = read(fixture('precision/expected.css'), 'utf8').trim(); + + sass.render({ + data: src, + precision: 10 + }, function(error, result) { + 
assert.strictEqual(result.css.toString().trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + }); + + it('should contain all included files in stats when data is passed', function(done) { + var src = read(fixture('include-files/index.scss'), 'utf8'); + var expected = [ + fixture('include-files/bar.scss').replace(/\\/g, '/'), + fixture('include-files/foo.scss').replace(/\\/g, '/') + ]; + + sass.render({ + data: src, + includePaths: [fixture('include-files')] + }, function(error, result) { + assert.deepStrictEqual(result.stats.includedFiles, expected); + done(); + }); + }); + + it('should render with indentWidth and indentType options', function(done) { + sass.render({ + data: 'div { color: transparent; }', + indentWidth: 7, + indentType: 'tab' + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n\t\t\t\t\t\t\tcolor: transparent; }'); + done(); + }); + }); + + it('should render with linefeed option', function(done) { + sass.render({ + data: 'div { color: transparent; }', + linefeed: 'lfcr' + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n\r color: transparent; }'); + done(); + }); + }); + }); + + describe('.render(importer)', function() { + var src = read(fixture('include-files/index.scss'), 'utf8'); + + it('should respect the order of chained imports when using custom importers and one file is custom imported and the other is not.', function(done) { + sass.render({ + file: fixture('include-files/chained-imports-with-custom-importer.scss'), + importer: function(url, prev, done) { + // NOTE: to see that this test failure is only due to the stated + // issue do each of the following and see that the tests pass. + // + // a) add `return sass.NULL;` as the first line in this function to + // cause non-custom importers to always be used. + // b) comment out the conditional below to force our custom + // importer to always be used. + // + // You will notice that the tests pass when either all native, or + // all custom importers are used, but not when a native + custom + // import chain is used. 
+ if (url !== 'file-processed-by-loader') { + return sass.NULL; + } + done({ + file: fixture('include-files/' + url + '.scss') + }); + } + }, function(err, data) { + assert.strictEqual(err, null); + + assert.strictEqual( + data.css.toString().trim(), + 'body {\n color: "red"; }' + ); + + done(); + }); + }); + + it('should still call the next importer with the resolved prev path when the previous importer returned both a file and contents property - issue #1219', function(done) { + sass.render({ + data: '@import "a";', + importer: function(url, prev, done) { + if (url === 'a') { + done({ + file: '/Users/me/sass/lib/a.scss', + contents: '@import "b"' + }); + } else { + assert.strictEqual(prev, '/Users/me/sass/lib/a.scss'); + done({ + file: '/Users/me/sass/lib/b.scss', + contents: 'div {color: yellow;}' + }); + } + } + }, function() { + done(); + }); + }); + + it('should override imports with "data" as input and fires callback with file and contents', function(done) { + sass.render({ + data: src, + importer: function(url, prev, done) { + done({ + file: '/some/other/path.scss', + contents: 'div {color: yellow;}' + }); + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: yellow; }\n\ndiv {\n color: yellow; }'); + done(); + }); + }); + + it('should should resolve imports depth first', function (done) { + var actualImportOrder = []; + var expectedImportOrder = [ + 'a', '_common', 'vars', 'struct', 'a1', 'common', 'vars', 'struct', 'b', 'b1' + ]; + var expected = read(fixture('depth-first/expected.css'), 'utf-8'); + + sass.render({ + file: fixture('depth-first/index.scss'), + importer: function (url, prev, done) { + actualImportOrder.push(url); + done(); + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), expected); + assert.deepStrictEqual(actualImportOrder, expectedImportOrder); + done(); + }); + }); + + it('should override imports with "file" as input and fires callback with file and contents', function(done) { + sass.render({ + file: fixture('include-files/index.scss'), + importer: function(url, prev, done) { + done({ + file: '/some/other/path.scss', + contents: 'div {color: yellow;}' + }); + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: yellow; }\n\ndiv {\n color: yellow; }'); + done(); + }); + }); + + it('should override imports with "data" as input and returns file and contents', function(done) { + sass.render({ + data: src, + importer: function(url, prev) { + return { + file: prev + url, + contents: 'div {color: yellow;}' + }; + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: yellow; }\n\ndiv {\n color: yellow; }'); + done(); + }); + }); + + it('should override imports with "file" as input and returns file and contents', function(done) { + sass.render({ + file: fixture('include-files/index.scss'), + importer: function(url, prev) { + return { + file: prev + url, + contents: 'div {color: yellow;}' + }; + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: yellow; }\n\ndiv {\n color: yellow; }'); + done(); + }); + }); + + it('should override imports with "data" as input and fires callback with file', function(done) { + sass.render({ + data: src, + importer: function(url, /* jshint unused:false */ prev, done) { + done({ + file: path.resolve(path.dirname(fixture('include-files/index.scss')), url + (path.extname(url) ? 
'' : '.scss')) + }); + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), '/* foo.scss */\n/* bar.scss */'); + done(); + }); + }); + + it('should override imports with "file" as input and fires callback with file', function(done) { + sass.render({ + file: fixture('include-files/index.scss'), + importer: function(url, prev, done) { + done({ + file: path.resolve(path.dirname(prev), url + (path.extname(url) ? '' : '.scss')) + }); + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), '/* foo.scss */\n/* bar.scss */'); + done(); + }); + }); + + it('should override imports with "data" as input and returns file', function(done) { + sass.render({ + data: src, + importer: function(url) { + return { + file: path.resolve(path.dirname(fixture('include-files/index.scss')), url + (path.extname(url) ? '' : '.scss')) + }; + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), '/* foo.scss */\n/* bar.scss */'); + done(); + }); + }); + + it('should override imports with "file" as input and returns file', function(done) { + sass.render({ + file: fixture('include-files/index.scss'), + importer: function(url, prev) { + return { + file: path.resolve(path.dirname(prev), url + (path.extname(url) ? '' : '.scss')) + }; + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), '/* foo.scss */\n/* bar.scss */'); + done(); + }); + }); + + it('should fallback to default import behaviour if importer returns sass.NULL', function(done) { + sass.render({ + file: fixture('include-files/index.scss'), + importer: function(url, prev, done) { + done(sass.NULL); + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), '/* foo.scss */\n/* bar.scss */'); + done(); + }); + }); + + it('should fallback to default import behaviour if importer returns null for backwards compatibility', function(done) { + sass.render({ + file: fixture('include-files/index.scss'), + importer: function(url, prev, done) { + done(null); + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), '/* foo.scss */\n/* bar.scss */'); + done(); + }); + }); + + it('should fallback to default import behaviour if importer returns undefined for backwards compatibility', function(done) { + sass.render({ + file: fixture('include-files/index.scss'), + importer: function(url, prev, done) { + done(undefined); + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), '/* foo.scss */\n/* bar.scss */'); + done(); + }); + }); + + it('should fallback to default import behaviour if importer returns false for backwards compatibility', function(done) { + sass.render({ + file: fixture('include-files/index.scss'), + importer: function(url, prev, done) { + done(false); + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), '/* foo.scss */\n/* bar.scss */'); + done(); + }); + }); + + it('should override imports with "data" as input and fires callback with contents', function(done) { + sass.render({ + data: src, + importer: function(url, prev, done) { + done({ + contents: 'div {color: yellow;}' + }); + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: yellow; }\n\ndiv {\n color: yellow; }'); + done(); + }); + }); + + it('should override imports with "file" as input and fires callback with contents', function(done) { + sass.render({ + file: fixture('include-files/index.scss'), + importer: function(url, 
prev, done) { + done({ + contents: 'div {color: yellow;}' + }); + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: yellow; }\n\ndiv {\n color: yellow; }'); + done(); + }); + }); + + it('should override imports with "data" as input and returns contents', function(done) { + sass.render({ + data: src, + importer: function() { + return { + contents: 'div {color: yellow;}' + }; + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: yellow; }\n\ndiv {\n color: yellow; }'); + done(); + }); + }); + + it('should override imports with "file" as input and returns contents', function(done) { + sass.render({ + file: fixture('include-files/index.scss'), + importer: function() { + return { + contents: 'div {color: yellow;}' + }; + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: yellow; }\n\ndiv {\n color: yellow; }'); + done(); + }); + }); + + it('should accept arrays of importers and return respect the order', function(done) { + sass.render({ + file: fixture('include-files/index.scss'), + importer: [ + function() { + return sass.NULL; + }, + function() { + return { + contents: 'div {color: yellow;}' + }; + } + ] + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: yellow; }\n\ndiv {\n color: yellow; }'); + done(); + }); + }); + + it('should be able to see its options in this.options', function(done) { + var fxt = fixture('include-files/index.scss'); + sass.render({ + file: fxt, + importer: function() { + assert.strictEqual(fxt, this.options.file); + return {}; + } + }, function() { + assert.strictEqual(fxt, this.options.file); + done(); + }); + }); + + it('should be able to access a persistent options object', function(done) { + sass.render({ + data: src, + importer: function() { + this.state = this.state || 0; + this.state++; + return { + contents: 'div {color: yellow;}' + }; + } + }, function() { + assert.strictEqual(this.state, 2); + done(); + }); + }); + + it('should wrap importer options', function(done) { + var options; + options = { + data: src, + importer: function() { + assert.notStrictEqual(this.options.importer, options.importer); + return { + contents: 'div {color: yellow;}' + }; + } + }; + sass.render(options, function() { + done(); + }); + }); + + it('should reflect user-defined error when returned as callback', function(done) { + sass.render({ + data: src, + importer: function(url, prev, done) { + done(new Error('doesn\'t exist!')); + } + }, function(error) { + assert(/doesn't exist!/.test(error.message)); + done(); + }); + }); + + it('should reflect user-defined error with return', function(done) { + sass.render({ + data: src, + importer: function() { + return new Error('doesn\'t exist!'); + } + }, function(error) { + assert(/doesn't exist!/.test(error.message)); + done(); + }); + }); + + it('should throw exception when importer returns an invalid value', function(done) { + sass.render({ + data: src, + importer: function() { + return { contents: new Buffer('i am not a string!') }; + } + }, function(error) { + assert(/returned value of `contents` must be a string/.test(error.message)); + done(); + }); + }); + }); + + describe('.render(functions)', function() { + it('should call custom defined nullary function', function(done) { + sass.render({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + return new sass.types.Number(42, 'px'); + } + } + }, function(error, result) { + 
assert.strictEqual(result.css.toString().trim(), 'div {\n color: 42px; }'); + done(); + }); + }); + + it('should call custom function with multiple args', function(done) { + sass.render({ + data: 'div { color: foo(3, 42px); }', + functions: { + 'foo($a, $b)': function(factor, size) { + return new sass.types.Number(factor.getValue() * size.getValue(), size.getUnit()); + } + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: 126px; }'); + done(); + }); + }); + + it('should work with custom functions that return data asynchronously', function(done) { + sass.render({ + data: 'div { color: foo(42px); }', + functions: { + 'foo($a)': function(size, done) { + setTimeout(function() { + done(new sass.types.Number(66, 'em')); + }, 50); + } + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: 66em; }'); + done(); + }); + }); + + it('should let custom functions call setter methods on wrapped sass values (number)', function(done) { + sass.render({ + data: 'div { width: foo(42px); height: bar(42px); }', + functions: { + 'foo($a)': function(size) { + size.setUnit('rem'); + return size; + }, + 'bar($a)': function(size) { + size.setValue(size.getValue() * 2); + return size; + } + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n width: 42rem;\n height: 84px; }'); + done(); + }); + }); + + it('should properly convert strings when calling custom functions', function(done) { + sass.render({ + data: 'div { color: foo("bar"); }', + functions: { + 'foo($a)': function(str) { + str = str.getValue().replace(/['"]/g, ''); + return new sass.types.String('"' + str + str + '"'); + } + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: "barbar"; }'); + done(); + }); + }); + + it('should let custom functions call setter methods on wrapped sass values (string)', function(done) { + sass.render({ + data: 'div { width: foo("bar"); }', + functions: { + 'foo($a)': function(str) { + var unquoted = str.getValue().replace(/['"]/g, ''); + str.setValue('"' + unquoted + unquoted + '"'); + return str; + } + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n width: "barbar"; }'); + done(); + }); + }); + + it('should properly convert colors when calling custom functions', function(done) { + sass.render({ + data: 'div { color: foo(#f00); background-color: bar(); border-color: baz(); }', + functions: { + 'foo($a)': function(color) { + assert.strictEqual(color.getR(), 255); + assert.strictEqual(color.getG(), 0); + assert.strictEqual(color.getB(), 0); + assert.strictEqual(color.getA(), 1.0); + + return new sass.types.Color(255, 255, 0, 0.5); + }, + 'bar()': function() { + return new sass.types.Color(0x33ff00ff); + }, + 'baz()': function() { + return new sass.types.Color(0xffff0000); + } + } + }, function(error, result) { + assert.strictEqual( + result.css.toString().trim(), + 'div {\n color: rgba(255, 255, 0, 0.5);' + + '\n background-color: rgba(255, 0, 255, 0.2);' + + '\n border-color: red; }' + ); + done(); + }); + }); + + it('should properly convert boolean when calling custom functions', function(done) { + sass.render({ + data: 'div { color: if(foo(true, false), #fff, #000);' + + '\n background-color: if(foo(true, true), #fff, #000); }', + functions: { + 'foo($a, $b)': function(a, b) { + return sass.types.Boolean(a.getValue() && b.getValue()); + } + } + }, function(error, result) { + 
assert.strictEqual(result.css.toString().trim(), 'div {\n color: #000;\n background-color: #fff; }'); + done(); + }); + }); + + it('should let custom functions call setter methods on wrapped sass values (boolean)', function(done) { + sass.render({ + data: 'div { color: if(foo(false), #fff, #000); background-color: if(foo(true), #fff, #000); }', + functions: { + 'foo($a)': function(a) { + return a.getValue() ? sass.types.Boolean.FALSE : sass.types.Boolean.TRUE; + } + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: #fff;\n background-color: #000; }'); + done(); + }); + }); + + it('should properly convert lists when calling custom functions', function(done) { + sass.render({ + data: '$test-list: (bar, #f00, 123em); @each $item in foo($test-list) { .#{$item} { color: #fff; } }', + functions: { + 'foo($l)': function(list) { + assert.strictEqual(list.getLength(), 3); + assert.ok(list.getValue(0) instanceof sass.types.String); + assert.strictEqual(list.getValue(0).getValue(), 'bar'); + assert.ok(list.getValue(1) instanceof sass.types.Color); + assert.strictEqual(list.getValue(1).getR(), 0xff); + assert.strictEqual(list.getValue(1).getG(), 0); + assert.strictEqual(list.getValue(1).getB(), 0); + assert.ok(list.getValue(2) instanceof sass.types.Number); + assert.strictEqual(list.getValue(2).getValue(), 123); + assert.strictEqual(list.getValue(2).getUnit(), 'em'); + + var out = new sass.types.List(3); + out.setValue(0, new sass.types.String('foo')); + out.setValue(1, new sass.types.String('bar')); + out.setValue(2, new sass.types.String('baz')); + return out; + } + } + }, function(error, result) { + assert.strictEqual( + result.css.toString().trim(), + '.foo {\n color: #fff; }\n\n.bar {\n color: #fff; }\n\n.baz {\n color: #fff; }' + ); + done(); + }); + }); + + it('should properly convert maps when calling custom functions', function(done) { + sass.render({ + data: '$test-map: foo((abc: 123, #def: true)); div { color: if(map-has-key($test-map, hello), #fff, #000); }' + + 'span { color: map-get($test-map, baz); }', + functions: { + 'foo($m)': function(map) { + assert.strictEqual(map.getLength(), 2); + assert.ok(map.getKey(0) instanceof sass.types.String); + assert.ok(map.getKey(1) instanceof sass.types.Color); + assert.ok(map.getValue(0) instanceof sass.types.Number); + assert.ok(map.getValue(1) instanceof sass.types.Boolean); + assert.strictEqual(map.getKey(0).getValue(), 'abc'); + assert.strictEqual(map.getValue(0).getValue(), 123); + assert.strictEqual(map.getKey(1).getR(), 0xdd); + assert.strictEqual(map.getValue(1).getValue(), true); + + var out = new sass.types.Map(3); + out.setKey(0, new sass.types.String('hello')); + out.setValue(0, new sass.types.String('world')); + out.setKey(1, new sass.types.String('foo')); + out.setValue(1, new sass.types.String('bar')); + out.setKey(2, new sass.types.String('baz')); + out.setValue(2, new sass.types.String('qux')); + return out; + } + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: #fff; }\n\nspan {\n color: qux; }'); + done(); + }); + }); + + it('should properly convert null when calling custom functions', function(done) { + sass.render({ + data: 'div { color: if(foo("bar"), #fff, #000); } ' + + 'span { color: if(foo(null), #fff, #000); }' + + 'table { color: if(bar() == null, #fff, #000); }', + functions: { + 'foo($a)': function(a) { + return sass.types.Boolean(a instanceof sass.types.Null); + }, + 'bar()': function() { + return sass.NULL; + } + } + 
}, function(error, result) { + assert.strictEqual( + result.css.toString().trim(), + 'div {\n color: #000; }\n\nspan {\n color: #fff; }\n\ntable {\n color: #fff; }' + ); + done(); + }); + }); + + it('should be possible to carry sass values across different renders', function(done) { + var persistentMap; + + sass.render({ + data: 'div { color: foo((abc: #112233, #ddeeff: true)); }', + functions: { + foo: function(m) { + persistentMap = m; + return sass.types.Color(0, 0, 0); + } + } + }, function() { + sass.render({ + data: 'div { color: map-get(bar(), abc); background-color: baz(); }', + functions: { + bar: function() { + return persistentMap; + }, + baz: function() { + return persistentMap.getKey(1); + } + } + }, function(errror, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: #112233;\n background-color: #ddeeff; }'); + done(); + }); + }); + }); + + it('should let us register custom functions without signatures', function(done) { + sass.render({ + data: 'div { color: foo(20, 22); }', + functions: { + foo: function(a, b) { + return new sass.types.Number(a.getValue() + b.getValue(), 'em'); + } + } + }, function(error, result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n color: 42em; }'); + done(); + }); + }); + + it('should fail when returning anything other than a sass value from a custom function', function(done) { + sass.render({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + return {}; + } + } + }, function(error) { + assert.ok(/A SassValue object was expected/.test(error.message)); + done(); + }); + }); + + it('should properly bubble up standard JS errors thrown by custom functions', function(done) { + sass.render({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + throw new RangeError('This is a test error'); + } + } + }, function(error) { + assert.ok(/This is a test error/.test(error.message)); + done(); + }); + }); + + it('should properly bubble up unknown errors thrown by custom functions', function(done) { + sass.render({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + throw {}; + } + } + }, function(error) { + assert.ok(/unexpected error/.test(error.message)); + done(); + }); + }); + + it('should call custom functions with correct context', function(done) { + function assertExpected(result) { + assert.strictEqual(result.css.toString().trim(), 'div {\n foo1: 1;\n foo2: 2; }'); + } + var options = { + data: 'div { foo1: foo(); foo2: foo(); }', + functions: { + // foo() is stateful and will persist an incrementing counter + 'foo()': function() { + assert(this); + this.fooCounter = (this.fooCounter || 0) + 1; + return new sass.types.Number(this.fooCounter); + } + } + }; + + sass.render(options, function(error, result) { + assertExpected(result); + done(); + }); + }); + + describe('should properly bubble up errors from sass color constructor', function() { + it('four booleans', function(done) { + sass.render({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + return new sass.types.Color(false, false, false, false); + } + } + }, function(error) { + assert.ok(/Constructor arguments should be numbers exclusively/.test(error.message)); + done(); + }); + }); + + it('two arguments', function(done) { + sass.render({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + return sass.types.Color(2,3); + } + } + }, function(error) { + assert.ok(/Constructor should be invoked with either 0, 1, 3 or 4 arguments/.test(error.message)); 
+ done(); + }); + }); + + it('single string argument', function(done) { + sass.render({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + return sass.types.Color('foo'); + } + } + }, function(error) { + assert.ok(/Only argument should be an integer/.test(error.message)); + done(); + }); + }); + }); + + it('should properly bubble up errors from sass value constructors', function(done) { + sass.render({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + return sass.types.Boolean('foo'); + } + } + }, function(error) { + assert.ok(/Expected one boolean argument/.test(error.message)); + done(); + }); + }); + + it('should properly bubble up errors from sass value setters', function(done) { + sass.render({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + var ret = new sass.types.Number(42); + ret.setUnit(123); + return ret; + } + } + }, function(error) { + assert.ok(/Supplied value should be a string/.test(error.message)); + done(); + }); + }); + + it('should fail when trying to set a bare number as the List item', function(done) { + sass.render({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + var out = new sass.types.List(1); + out.setValue(0, 2); + return out; + } + } + }, function(error) { + assert.ok(/Supplied value should be a SassValue object/.test(error.message)); + done(); + }); + }); + + it('should fail when trying to set a bare Object as the List item', function(done) { + sass.render({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + var out = new sass.types.List(1); + out.setValue(0, {}); + return out; + } + } + }, function(error) { + assert.ok(/A SassValue is expected as the list item/.test(error.message)); + done(); + }); + }); + + it('should fail when trying to set a bare Object as the Map key', function(done) { + sass.render({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + var out = new sass.types.Map(1); + out.setKey(0, {}); + out.setValue(0, new sass.types.String('aaa')); + return out; + } + } + }, function(error) { + assert.ok(/A SassValue is expected as a map key/.test(error.message)); + done(); + }); + }); + + it('should fail when trying to set a bare Object as the Map value', function(done) { + sass.render({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + var out = new sass.types.Map(1); + out.setKey(0, new sass.types.String('aaa')); + out.setValue(0, {}); + return out; + } + } + }, function(error) { + assert.ok(/A SassValue is expected as a map value/.test(error.message)); + done(); + }); + }); + + it('should always map null, true and false to the same (immutable) object', function(done) { + var counter = 0; + + sass.render({ + data: 'div { color: foo(bar(null)); background-color: baz("foo" == "bar"); }', + functions: { + foo: function(a) { + assert.strictEqual(a, sass.TRUE, + 'Supplied value should be the same instance as sass.TRUE' + ); + + assert.strictEqual( + sass.types.Boolean(true), sass.types.Boolean(true), + 'sass.types.Boolean(true) should return a singleton'); + + assert.strictEqual( + sass.types.Boolean(true), sass.TRUE, + 'sass.types.Boolean(true) should be the same instance as sass.TRUE'); + + counter++; + + return sass.types.String('foo'); + }, + bar: function(a) { + assert.strictEqual(a, sass.NULL, + 'Supplied value should be the same instance as sass.NULL'); + + assert.throws(function() { + return new sass.types.Null(); + }, /Cannot instantiate SassNull/); + + counter++; + + return 
sass.TRUE; + }, + baz: function(a) { + assert.strictEqual(a, sass.FALSE, + 'Supplied value should be the same instance as sass.FALSE'); + + assert.throws(function() { + return new sass.types.Boolean(false); + }, /Cannot instantiate SassBoolean/); + + assert.strictEqual( + sass.types.Boolean(false), sass.types.Boolean(false), + 'sass.types.Boolean(false) should return a singleton'); + + assert.strictEqual( + sass.types.Boolean(false), sass.FALSE, + 'sass.types.Boolean(false) should return singleton identical to sass.FALSE'); + + counter++; + + return sass.types.String('baz'); + } + } + }, function() { + assert.strictEqual(counter, 3); + done(); + }); + }); + }); + + describe('.render({stats: {}})', function() { + var start = Date.now(); + + it('should provide a start timestamp', function(done) { + sass.render({ + file: fixture('include-files/index.scss') + }, function(error, result) { + assert(!error); + assert.strictEqual(typeof result.stats.start, 'number'); + assert(result.stats.start >= start); + done(); + }); + }); + + it('should provide an end timestamp', function(done) { + sass.render({ + file: fixture('include-files/index.scss') + }, function(error, result) { + assert(!error); + assert.strictEqual(typeof result.stats.end, 'number'); + assert(result.stats.end >= result.stats.start); + done(); + }); + }); + + it('should provide a duration', function(done) { + sass.render({ + file: fixture('include-files/index.scss') + }, function(error, result) { + assert(!error); + assert.strictEqual(typeof result.stats.duration, 'number'); + assert.strictEqual(result.stats.end - result.stats.start, result.stats.duration); + done(); + }); + }); + + it('should contain the given entry file', function(done) { + sass.render({ + file: fixture('include-files/index.scss') + }, function(error, result) { + assert(!error); + assert.strictEqual(result.stats.entry, fixture('include-files/index.scss')); + done(); + }); + }); + + it('should contain an array of all included files', function(done) { + var expected = [ + fixture('include-files/bar.scss').replace(/\\/g, '/'), + fixture('include-files/foo.scss').replace(/\\/g, '/'), + fixture('include-files/index.scss').replace(/\\/g, '/') + ]; + + sass.render({ + file: fixture('include-files/index.scss') + }, function(error, result) { + assert(!error); + assert.deepStrictEqual(result.stats.includedFiles.sort(), expected.sort()); + done(); + }); + }); + + it('should contain array with the entry if there are no import statements', function(done) { + var expected = fixture('simple/index.scss').replace(/\\/g, '/'); + + sass.render({ + file: fixture('simple/index.scss') + }, function(error, result) { + assert.deepStrictEqual(result.stats.includedFiles, [expected]); + done(); + }); + }); + + it('should state `data` as entry file', function(done) { + sass.render({ + data: read(fixture('simple/index.scss'), 'utf8') + }, function(error, result) { + assert.strictEqual(result.stats.entry, 'data'); + done(); + }); + }); + + it('should contain an empty array as includedFiles', function(done) { + sass.render({ + data: read(fixture('simple/index.scss'), 'utf8') + }, function(error, result) { + assert.deepStrictEqual(result.stats.includedFiles, []); + done(); + }); + }); + }); + + describe('.renderSync(options)', function() { + it('should compile sass to css with file', function(done) { + var expected = read(fixture('simple/expected.css'), 'utf8').trim(); + var result = sass.renderSync({ file: fixture('simple/index.scss') }); + + assert.strictEqual(result.css.toString().trim(), 
expected.replace(/\r\n/g, '\n')); + done(); + }); + + it('should compile sass to css with outFile set to absolute url', function(done) { + var result = sass.renderSync({ + file: fixture('simple/index.scss'), + sourceMap: true, + outFile: fixture('simple/index-test.css') + }); + + assert.strictEqual(JSON.parse(result.map).file, 'index-test.css'); + done(); + }); + + it('should compile sass to css with outFile set to relative url', function(done) { + var result = sass.renderSync({ + file: fixture('simple/index.scss'), + sourceMap: true, + outFile: './index-test.css' + }); + + assert.strictEqual(JSON.parse(result.map).file, 'index-test.css'); + done(); + }); + + it('should compile sass to css with outFile and sourceMap set to relative url', function(done) { + var result = sass.renderSync({ + file: fixture('simple/index.scss'), + sourceMap: './deep/nested/index.map', + outFile: './index-test.css' + }); + + assert.strictEqual(JSON.parse(result.map).file, '../../index-test.css'); + done(); + }); + + it('should not generate source map when not requested', function(done) { + var result = sass.renderSync({ + file: fixture('simple/index.scss'), + sourceMap: false + }); + + assert.strictEqual(Object.prototype.hasOwnProperty.call(result, 'map'), false, 'result has a map property'); + done(); + }); + + it('should not generate source map without outFile and no explicit path given', function(done) { + var result = sass.renderSync({ + file: fixture('simple/index.scss'), + sourceMap: true + }); + + assert.strictEqual(Object.prototype.hasOwnProperty.call(result, 'map'), false, 'result has a map property'); + done(); + }); + + it('should compile generate map with sourceMapRoot pass-through option', function(done) { + var result = sass.renderSync({ + file: fixture('simple/index.scss'), + sourceMap: './deep/nested/index.map', + sourceMapRoot: 'http://test.com/', + outFile: './index-test.css' + }); + + assert.strictEqual(JSON.parse(result.map).sourceRoot, 'http://test.com/'); + done(); + }); + + it('should compile sass to css with data', function(done) { + var src = read(fixture('simple/index.scss'), 'utf8'); + var expected = read(fixture('simple/expected.css'), 'utf8').trim(); + var result = sass.renderSync({ data: src }); + + assert.strictEqual(result.css.toString().trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + + it('should compile sass to css using indented syntax', function(done) { + var src = read(fixture('indent/index.sass'), 'utf8'); + var expected = read(fixture('indent/expected.css'), 'utf8').trim(); + var result = sass.renderSync({ + data: src, + indentedSyntax: true + }); + + assert.strictEqual(result.css.toString().trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + + it('should NOT compile empty data string', function(done) { + assert.throws(function() { + sass.renderSync({ data: '' }); + }, /No input specified: provide a file name or a source string to process/ ); + done(); + }); + + it('should NOT compile without any input', function(done) { + assert.throws(function() { + sass.renderSync({}); + }, /No input specified: provide a file name or a source string to process/); + done(); + }); + + it('should throw error for bad input', function(done) { + assert.throws(function() { + sass.renderSync('somestring'); + }); + assert.throws(function() { + sass.renderSync({ data: '#navbar width 80%;' }); + }); + + done(); + }); + + it('should compile with include paths', function(done) { + var src = read(fixture('include-path/index.scss'), 'utf8'); + var expected = 
read(fixture('include-path/expected.css'), 'utf8').trim(); + var result = sass.renderSync({ + data: src, + includePaths: [ + fixture('include-path/functions'), + fixture('include-path/lib') + ] + }); + + assert.strictEqual(result.css.toString().trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + + it('should add cwd to the front on include paths', function(done) { + var src = fixture('cwd-include-path/root/index.scss'); + var expected = read(fixture('cwd-include-path/expected.css'), 'utf8').trim(); + var cwd = process.cwd(); + + process.chdir(fixture('cwd-include-path')); + var result = sass.renderSync({ + file: src, + includePaths: [ + fixture('include-path/functions'), + fixture('include-path/lib') + ] + }); + process.chdir(cwd); + + assert.strictEqual(result.css.toString().trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + + it('should check SASS_PATH in the specified order', function(done) { + var src = read(fixture('sass-path/index.scss'), 'utf8'); + var expectedRed = read(fixture('sass-path/expected-red.css'), 'utf8').trim(); + var expectedOrange = read(fixture('sass-path/expected-orange.css'), 'utf8').trim(); + + var envIncludes = [ + fixture('sass-path/red'), + fixture('sass-path/orange') + ]; + + process.env.SASS_PATH = envIncludes.join(path.delimiter); + var result = sass.renderSync({ + data: src, + includePaths: [] + }); + + assert.strictEqual(result.css.toString().trim(), expectedRed.replace(/\r\n/g, '\n')); + + process.env.SASS_PATH = envIncludes.reverse().join(path.delimiter); + result = sass.renderSync({ + data: src, + includePaths: [] + }); + + assert.strictEqual(result.css.toString().trim(), expectedOrange.replace(/\r\n/g, '\n')); + done(); + }); + + it('should prefer include path over SASS_PATH', function(done) { + var src = read(fixture('sass-path/index.scss'), 'utf8'); + var expectedRed = read(fixture('sass-path/expected-red.css'), 'utf8').trim(); + var expectedOrange = read(fixture('sass-path/expected-orange.css'), 'utf8').trim(); + + var envIncludes = [ + fixture('sass-path/red') + ]; + process.env.SASS_PATH = envIncludes.join(path.delimiter); + + var result = sass.renderSync({ + data: src, + includePaths: [] + }); + + assert.strictEqual(result.css.toString().trim(), expectedRed.replace(/\r\n/g, '\n')); + + result = sass.renderSync({ + data: src, + includePaths: [fixture('sass-path/orange')] + }); + + assert.strictEqual(result.css.toString().trim(), expectedOrange.replace(/\r\n/g, '\n')); + done(); + }); + + it('should render with precision option', function(done) { + var src = read(fixture('precision/index.scss'), 'utf8'); + var expected = read(fixture('precision/expected.css'), 'utf8').trim(); + var result = sass.renderSync({ + data: src, + precision: 10 + }); + + assert.strictEqual(result.css.toString().trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + + it('should contain all included files in stats when data is passed', function(done) { + var src = read(fixture('include-files/index.scss'), 'utf8'); + var expected = [ + fixture('include-files/bar.scss').replace(/\\/g, '/'), + fixture('include-files/foo.scss').replace(/\\/g, '/') + ]; + + var result = sass.renderSync({ + data: src, + includePaths: [fixture('include-files')] + }); + + assert.deepStrictEqual(result.stats.includedFiles, expected); + done(); + }); + + it('should render with indentWidth and indentType options', function(done) { + var result = sass.renderSync({ + data: 'div { color: transparent; }', + indentWidth: 7, + indentType: 'tab' + }); + + 
assert.strictEqual(result.css.toString().trim(), 'div {\n\t\t\t\t\t\t\tcolor: transparent; }'); + done(); + }); + + it('should render with linefeed option', function(done) { + var result = sass.renderSync({ + data: 'div { color: transparent; }', + linefeed: 'lfcr' + }); + + assert.strictEqual(result.css.toString().trim(), 'div {\n\r color: transparent; }'); + done(); + }); + }); + + describe('.renderSync(importer)', function() { + var src = read(fixture('include-files/index.scss'), 'utf8'); + + it('should override imports with "data" as input and returns file and contents', function(done) { + var result = sass.renderSync({ + data: src, + importer: function(url, prev) { + return { + file: prev + url, + contents: 'div {color: yellow;}' + }; + } + }); + + assert.strictEqual(result.css.toString().trim(), 'div {\n color: yellow; }\n\ndiv {\n color: yellow; }'); + done(); + }); + + it('should override imports with "file" as input and returns file and contents', function(done) { + var result = sass.renderSync({ + file: fixture('include-files/index.scss'), + importer: function(url, prev) { + return { + file: prev + url, + contents: 'div {color: yellow;}' + }; + } + }); + + assert.strictEqual(result.css.toString().trim(), 'div {\n color: yellow; }\n\ndiv {\n color: yellow; }'); + done(); + }); + + it('should override imports with "data" as input and returns file', function(done) { + var result = sass.renderSync({ + data: src, + importer: function(url) { + return { + file: path.resolve(path.dirname(fixture('include-files/index.scss')), url + (path.extname(url) ? '' : '.scss')) + }; + } + }); + + assert.strictEqual(result.css.toString().trim(), '/* foo.scss */\n/* bar.scss */'); + done(); + }); + + it('should override imports with "file" as input and returns file', function(done) { + var result = sass.renderSync({ + file: fixture('include-files/index.scss'), + importer: function(url, prev) { + return { + file: path.resolve(path.dirname(prev), url + (path.extname(url) ? 
'' : '.scss')) + }; + } + }); + + assert.strictEqual(result.css.toString().trim(), '/* foo.scss */\n/* bar.scss */'); + done(); + }); + + it('should override imports with "data" as input and returns contents', function(done) { + var result = sass.renderSync({ + data: src, + importer: function() { + return { + contents: 'div {color: yellow;}' + }; + } + }); + + assert.strictEqual(result.css.toString().trim(), 'div {\n color: yellow; }\n\ndiv {\n color: yellow; }'); + done(); + }); + + it('should override imports with "file" as input and returns contents', function(done) { + var result = sass.renderSync({ + file: fixture('include-files/index.scss'), + importer: function() { + return { + contents: 'div {color: yellow;}' + }; + } + }); + + assert.strictEqual(result.css.toString().trim(), 'div {\n color: yellow; }\n\ndiv {\n color: yellow; }'); + done(); + }); + + + + it('should fallback to default import behaviour if importer returns sass.NULL', function(done) { + var result = sass.renderSync({ + file: fixture('include-files/index.scss'), + importer: function() { + return sass.NULL; + } + }); + + assert.strictEqual(result.css.toString().trim(), '/* foo.scss */\n/* bar.scss */'); + done(); + }); + + it('should fallback to default import behaviour if importer returns null for backwards compatibility', function(done) { + var result = sass.renderSync({ + file: fixture('include-files/index.scss'), + importer: function() { + return null; + } + }); + + assert.strictEqual(result.css.toString().trim(), '/* foo.scss */\n/* bar.scss */'); + done(); + }); + + it('should fallback to default import behaviour if importer returns undefined for backwards compatibility', function(done) { + var result = sass.renderSync({ + file: fixture('include-files/index.scss'), + importer: function() { + return undefined; + } + }); + + assert.strictEqual(result.css.toString().trim(), '/* foo.scss */\n/* bar.scss */'); + done(); + }); + + it('should fallback to default import behaviour if importer returns false for backwards compatibility', function(done) { + var result = sass.renderSync({ + file: fixture('include-files/index.scss'), + importer: function() { + return false; + } + }); + + assert.strictEqual(result.css.toString().trim(), '/* foo.scss */\n/* bar.scss */'); + done(); + }); + + it('should accept arrays of importers and return respect the order', function(done) { + var result = sass.renderSync({ + file: fixture('include-files/index.scss'), + importer: [ + function() { + return sass.NULL; + }, + function() { + return { + contents: 'div {color: yellow;}' + }; + } + ] + }); + + assert.strictEqual(result.css.toString().trim(), 'div {\n color: yellow; }\n\ndiv {\n color: yellow; }'); + done(); + }); + + it('should be able to see its options in this.options', function(done) { + var fxt = fixture('include-files/index.scss'); + var sync = false; + sass.renderSync({ + file: fixture('include-files/index.scss'), + importer: function() { + assert.strictEqual(fxt, this.options.file); + sync = true; + return {}; + } + }); + assert.strictEqual(sync, true); + done(); + }); + + it('should throw user-defined error', function(done) { + assert.throws(function() { + sass.renderSync({ + data: src, + importer: function() { + return new Error('doesn\'t exist!'); + } + }); + }, /doesn't exist!/); + + done(); + }); + + it('should throw exception when importer returns an invalid value', function(done) { + assert.throws(function() { + sass.renderSync({ + data: src, + importer: function() { + return { contents: new Buffer('i am not a string!') 
}; + } + }); + }, /returned value of `contents` must be a string/); + + done(); + }); + }); + + describe('.renderSync(functions)', function() { + it('should call custom function in sync mode', function(done) { + var result = sass.renderSync({ + data: 'div { width: cos(0) * 50px; }', + functions: { + 'cos($a)': function(angle) { + if (!(angle instanceof sass.types.Number)) { + throw new TypeError('Unexpected type for "angle"'); + } + return new sass.types.Number(Math.cos(angle.getValue())); + } + } + }); + + assert.strictEqual(result.css.toString().trim(), 'div {\n width: 50px; }'); + done(); + }); + + it('should return a list of selectors after calling the headings custom function', function(done) { + var result = sass.renderSync({ + data: '#{headings(2,5)} { color: #08c; }', + functions: { + 'headings($from: 0, $to: 6)': function(from, to) { + var i, f = from.getValue(), t = to.getValue(), + list = new sass.types.List(t - f + 1); + + for (i = f; i <= t; i++) { + list.setValue(i - f, new sass.types.String('h' + i)); + } + + return list; + } + } + }); + + assert.strictEqual(result.css.toString().trim(), 'h2, h3, h4, h5 {\n color: #08c; }'); + done(); + }); + + it('should let custom function invoke sass types constructors without the `new` keyword', function(done) { + var result = sass.renderSync({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + return sass.types.Number(42, 'em'); + } + } + }); + + assert.strictEqual(result.css.toString().trim(), 'div {\n color: 42em; }'); + done(); + }); + + it('should let us register custom functions without signatures', function(done) { + var result = sass.renderSync({ + data: 'div { color: foo(20, 22); }', + functions: { + foo: function(a, b) { + return new sass.types.Number(a.getValue() + b.getValue(), 'em'); + } + } + }); + + assert.strictEqual(result.css.toString().trim(), 'div {\n color: 42em; }'); + done(); + }); + + it('should fail when returning anything other than a sass value from a custom function', function(done) { + assert.throws(function() { + sass.renderSync({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + return {}; + } + } + }); + }, /A SassValue object was expected/); + + done(); + }); + + it('should properly bubble up standard JS errors thrown by custom functions', function(done) { + assert.throws(function() { + sass.renderSync({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + throw new RangeError('This is a test error'); + } + } + }); + }, /This is a test error/); + + done(); + }); + + it('should properly bubble up unknown errors thrown by custom functions', function(done) { + assert.throws(function() { + sass.renderSync({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + throw {}; + } + } + }); + }, /unexpected error/); + + done(); + }); + + it('should properly bubble up errors from sass value getters/setters/constructors', function(done) { + assert.throws(function() { + sass.renderSync({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + return sass.types.Boolean('foo'); + } + } + }); + }, /Expected one boolean argument/); + + assert.throws(function() { + sass.renderSync({ + data: 'div { color: foo(); }', + functions: { + 'foo()': function() { + var ret = new sass.types.Number(42); + ret.setUnit(123); + return ret; + } + } + }); + }, /Supplied value should be a string/); + + done(); + }); + + it('should call custom functions with correct context', function(done) { + function assertExpected(result) { + 
assert.strictEqual(result.css.toString().trim(), 'div {\n foo1: 1;\n foo2: 2; }'); + } + var options = { + data: 'div { foo1: foo(); foo2: foo(); }', + functions: { + // foo() is stateful and will persist an incrementing counter + 'foo()': function() { + assert(this); + this.fooCounter = (this.fooCounter || 0) + 1; + return new sass.types.Number(this.fooCounter); + } + } + }; + assertExpected(sass.renderSync(options)); + done(); + }); + }); + + describe('.renderSync({stats: {}})', function() { + var start = Date.now(); + var result = sass.renderSync({ + file: fixture('include-files/index.scss') + }); + + it('should provide a start timestamp', function(done) { + assert.strictEqual(typeof result.stats.start, 'number'); + assert(result.stats.start >= start); + done(); + }); + + it('should provide an end timestamp', function(done) { + assert.strictEqual(typeof result.stats.end, 'number'); + assert(result.stats.end >= result.stats.start); + done(); + }); + + it('should provide a duration', function(done) { + assert.strictEqual(typeof result.stats.duration, 'number'); + assert.strictEqual(result.stats.end - result.stats.start, result.stats.duration); + done(); + }); + + it('should contain the given entry file', function(done) { + assert.strictEqual(result.stats.entry, resolveFixture('include-files/index.scss')); + done(); + }); + + it('should contain an array of all included files', function(done) { + var expected = [ + fixture('include-files/bar.scss').replace(/\\/g, '/'), + fixture('include-files/foo.scss').replace(/\\/g, '/'), + fixture('include-files/index.scss').replace(/\\/g, '/') + ].sort(); + var actual = result.stats.includedFiles.sort(); + + assert.strictEqual(actual[0], expected[0]); + assert.strictEqual(actual[1], expected[1]); + assert.strictEqual(actual[2], expected[2]); + done(); + }); + + it('should contain array with the entry if there are no import statements', function(done) { + var expected = fixture('simple/index.scss').replace(/\\/g, '/'); + + var result = sass.renderSync({ + file: fixture('simple/index.scss') + }); + + assert.deepStrictEqual(result.stats.includedFiles, [expected]); + done(); + }); + + it('should state `data` as entry file', function(done) { + var result = sass.renderSync({ + data: read(fixture('simple/index.scss'), 'utf8') + }); + + assert.strictEqual(result.stats.entry, 'data'); + done(); + }); + + it('should contain an empty array as includedFiles', function(done) { + var result = sass.renderSync({ + data: read(fixture('simple/index.scss'), 'utf8') + }); + + assert.deepStrictEqual(result.stats.includedFiles, []); + done(); + }); + }); + + describe('.info', function() { + var package = require('../package.json'), + info = sass.info; + + it('should return a correct version info', function(done) { + assert(info.indexOf(package.version) > 0); + assert(info.indexOf('(Wrapper)') > 0); + assert(info.indexOf('[JavaScript]') > 0); + assert(info.indexOf('[NA]') < 0); + assert(info.indexOf('(Sass Compiler)') > 0); + assert(info.indexOf('[C/C++]') > 0); + + done(); + }); + }); +}); diff --git a/mybulma/node_modules/node-sass/test/binding.js b/mybulma/node_modules/node-sass/test/binding.js new file mode 100644 index 0000000..5963463 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/binding.js @@ -0,0 +1,129 @@ +/*eslint new-cap: ["error", {"capIsNewExceptions": ["Color"]}]*/ + +var assert = require('assert').strict, + path = require('path'), + etx = require('../lib/extensions'), + binding = process.env.NODESASS_COV + ? 
require('../lib-cov/binding') + : require('../lib/binding'); + +describe('binding', function() { + describe('missing error', function() { + it('should be useful', function() { + process.env.SASS_BINARY_NAME = 'unknown-x64-48'; + + assert.throws( + function() { binding(etx); }, + function(err) { + var re = new RegExp('Missing binding.*?\\' + path.sep + 'vendor\\' + path.sep); + if ((err instanceof Error)) { + return re.test(err); + } + } + ); + }); + + it('should list currently installed bindings', function() { + assert.throws( + function() { binding(etx); }, + function(err) { + var etx = require('../lib/extensions'); + + delete process.env.SASS_BINARY_NAME; + + if ((err instanceof Error)) { + return err.message.indexOf( + etx.getHumanEnvironment(etx.getBinaryName()) + ) !== -1; + } + } + ); + }); + }); + + describe('on unsupported environment', function() { + describe('with an unsupported architecture', function() { + beforeEach(function() { + Object.defineProperty(process, 'arch', { + value: 'foo', + }); + }); + + afterEach(function() { + Object.defineProperty(process, 'arch', { + value: 'x64', + }); + }); + + it('should error', function() { + assert.throws( + function() { binding(etx); }, + 'Node Sass does not yet support your current environment' + ); + }); + + it('should inform the user the architecture is unsupported', function() { + assert.throws( + function() { binding(etx); }, + 'Unsupported architecture (foo)' + ); + }); + }); + + describe('with an unsupported platform', function() { + beforeEach(function() { + Object.defineProperty(process, 'platform', { + value: 'bar', + }); + }); + + afterEach(function() { + Object.defineProperty(process, 'platform', { + value: 'darwin', + }); + }); + + it('should error', function() { + assert.throws( + function() { binding(etx); }, + 'Node Sass does not yet support your current environment' + ); + }); + + it('should inform the user the platform is unsupported', function() { + assert.throws( + function() { binding(etx); }, + 'Unsupported platform (bar)' + ); + }); + }); + + describe('with an unsupported runtime', function() { + beforeEach(function() { + Object.defineProperty(process.versions, 'modules', { + value: 'baz', + }); + }); + + afterEach(function() { + Object.defineProperty(process.versions, 'modules', { + value: 51, + }); + }); + + it('should error', function() { + assert.throws( + function() { binding(etx); }, + 'Node Sass does not yet support your current environment' + ); + }); + + it('should inform the user the runtime is unsupported', function() { + assert.throws( + function() { binding(etx); }, + 'Unsupported runtime (baz)' + ); + }); + }); + }); +}); diff --git a/mybulma/node_modules/node-sass/test/cli.js b/mybulma/node_modules/node-sass/test/cli.js new file mode 100644 index 0000000..c4d5e95 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/cli.js @@ -0,0 +1,793 @@ +var assert = require('assert').strict, + fs = require('fs'), + path = require('path'), + read = require('fs').readFileSync, + glob = require('glob'), + rimraf = require('rimraf'), + stream = require('stream'), + spawn = require('cross-spawn'), + cli = path.join(__dirname, '..', 'bin', 'node-sass'), + fixture = path.join.bind(null, __dirname, 'fixtures'); + +describe('cli', function() { + // For some reason we experience random timeout failures in CI + // due to spawn hanging/failing silently. See #1692. 
+ this.retries(4); + + describe('node-sass < in.scss', function() { + it('should read data from stdin', function(done) { + var src = fs.createReadStream(fixture('simple/index.scss')); + var expected = read(fixture('simple/expected.css'), 'utf8').trim(); + var bin = spawn(cli); + + bin.stdout.setEncoding('utf8'); + bin.stdout.once('data', function(data) { + assert.strictEqual(data.trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + + src.pipe(bin.stdin); + }); + + it('should compile sass using the --indented-syntax option', function(done) { + var src = fs.createReadStream(fixture('indent/index.sass')); + var expected = read(fixture('indent/expected.css'), 'utf8').trim(); + var bin = spawn(cli, ['--indented-syntax']); + + bin.stdout.setEncoding('utf8'); + bin.stdout.once('data', function(data) { + assert.strictEqual(data.trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + + src.pipe(bin.stdin); + }); + + it('should compile with the --quiet option', function(done) { + var src = fs.createReadStream(fixture('simple/index.scss')); + var expected = read(fixture('simple/expected.css'), 'utf8').trim(); + var bin = spawn(cli, ['--quiet']); + + bin.stdout.setEncoding('utf8'); + bin.stdout.once('data', function(data) { + assert.strictEqual(data.trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + + src.pipe(bin.stdin); + }); + + it('should compile with the --output-style option', function(done) { + var src = fs.createReadStream(fixture('compressed/index.scss')); + var expected = read(fixture('compressed/expected.css'), 'utf8').trim(); + var bin = spawn(cli, ['--output-style', 'compressed']); + + bin.stdout.setEncoding('utf8'); + bin.stdout.once('data', function(data) { + assert.strictEqual(data.trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + + src.pipe(bin.stdin); + }); + + it('should compile with the --source-comments option', function(done) { + var src = fs.createReadStream(fixture('source-comments/index.scss')); + var expected = read(fixture('source-comments/expected.css'), 'utf8').trim(); + var bin = spawn(cli, ['--source-comments']); + + bin.stdout.setEncoding('utf8'); + bin.stdout.once('data', function(data) { + assert.strictEqual(data.trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + + src.pipe(bin.stdin); + }); + + it('should render with indentWidth and indentType options', function(done) { + var src = new stream.Readable(); + var bin = spawn(cli, ['--indent-width', 7, '--indent-type', 'tab']); + + src._read = function() { }; + src.push('div { color: transparent; }'); + src.push(null); + + bin.stdout.setEncoding('utf8'); + bin.stdout.once('data', function(data) { + assert.strictEqual(data.trim(), 'div {\n\t\t\t\t\t\t\tcolor: transparent; }'); + done(); + }); + + src.pipe(bin.stdin); + }); + + it('should render with linefeed option', function(done) { + var src = new stream.Readable(); + var bin = spawn(cli, ['--linefeed', 'lfcr']); + + src._read = function() { }; + src.push('div { color: transparent; }'); + src.push(null); + + bin.stdout.setEncoding('utf8'); + bin.stdout.once('data', function(data) { + assert.strictEqual(data.trim(), 'div {\n\r color: transparent; }'); + done(); + }); + + src.pipe(bin.stdin); + }); + }); + + describe('node-sass in.scss', function() { + it('should compile a scss file', function(done) { + var src = fixture('simple/index.scss'); + var dest = fixture('simple/index.css'); + var bin = spawn(cli, [src, dest]); + + bin.once('close', function() { + assert(fs.existsSync(dest)); + fs.unlinkSync(dest); + done(); + }); + }); + + 
it('should compile a scss file to custom destination', function(done) { + var src = fixture('simple/index.scss'); + var dest = fixture('simple/index-custom.css'); + var bin = spawn(cli, [src, dest]); + + bin.once('close', function() { + assert(fs.existsSync(dest)); + fs.unlinkSync(dest); + done(); + }); + }); + + it('should compile with the --include-path option', function(done) { + var includePaths = [ + '--include-path', fixture('include-path/functions'), + '--include-path', fixture('include-path/lib') + ]; + + var src = fixture('include-path/index.scss'); + var expected = read(fixture('include-path/expected.css'), 'utf8').trim(); + var bin = spawn(cli, [src].concat(includePaths)); + + bin.stdout.setEncoding('utf8'); + bin.stdout.once('data', function(data) { + assert.strictEqual(data.trim(), expected.replace(/\r\n/g, '\n')); + done(); + }); + }); + + it('should compile silently using the --quiet option', function(done) { + var src = fixture('simple/index.scss'); + var dest = fixture('simple/index.css'); + var bin = spawn(cli, [src, dest, '--quiet']); + var didEmit = false; + + bin.stderr.once('data', function() { + didEmit = true; + }); + + bin.once('close', function() { + assert.strictEqual(didEmit, false); + fs.unlinkSync(dest); + done(); + }); + }); + + it('should still report errors with the --quiet option', function(done) { + var src = fixture('invalid/index.scss'); + var dest = fixture('invalid/index.css'); + var bin = spawn(cli, [src, dest, '--quiet']); + var didEmit = false; + + bin.stderr.once('data', function() { + didEmit = true; + }); + + bin.once('close', function() { + assert.strictEqual(didEmit, true); + done(); + }); + }); + + it('should not exit with the --watch option', function(done) { + var src = fixture('simple/index.scss'); + var bin = spawn(cli, [src, '--watch']); + var exited; + + bin.once('close', function() { + exited = true; + }); + + setTimeout(function() { + if (exited) { + throw new Error('Watch ended too early!'); + } else { + bin.kill(); + done(); + } + }, 100); + }); + + it.skip('should emit `warn` on file change when using --watch option', function(done) { + var src = fixture('simple/tmp.scss'); + + fs.writeFileSync(src, ''); + + var bin = spawn(cli, ['--watch', src]); + + bin.stderr.setEncoding('utf8'); + bin.stderr.once('data', function(data) { + assert.strictEqual(data.trim(), '=> changed: ' + src); + fs.unlinkSync(src); + bin.kill(); + done(); + }); + + setTimeout(function() { + fs.appendFileSync(src, 'body {}'); + }, 500); + }); + + it.skip('should emit nothing on file change when using --watch and --quiet options', function(done) { + var src = fixture('simple/tmp.scss'); + var didEmit = false; + fs.writeFileSync(src, ''); + + var bin = spawn(cli, ['--watch', '--quiet', src]); + + bin.stderr.setEncoding('utf8'); + bin.stderr.once('data', function() { + didEmit = true; + }); + + setTimeout(function() { + fs.appendFileSync(src, 'body {}'); + setTimeout(function() { + assert.strictEqual(didEmit, false); + bin.kill(); + done(); + fs.unlinkSync(src); + }, 200); + }, 500); + }); + + it.skip('should render all watched files', function(done) { + var src = fixture('simple/bar.scss'); + + fs.writeFileSync(src, ''); + + var bin = spawn(cli, [ + '--output-style', 'compressed', + '--watch', src + ]); + + bin.stdout.setEncoding('utf8'); + bin.stdout.once('data', function(data) { + assert.strictEqual(data.trim(), 'body{background:white}'); + fs.unlinkSync(src); + bin.kill(); + done(); + }); + + setTimeout(function() { + fs.appendFileSync(src, 
'body{background:white}'); + }, 500); + }); + + it.skip('should watch the full scss dep tree for a single file (scss)', function(done) { + var src = fixture('watching/index.scss'); + var foo = fixture('watching/white.scss'); + + fs.writeFileSync(foo, ''); + + var bin = spawn(cli, [ + '--output-style', 'compressed', + '--watch', src + ]); + + bin.stdout.setEncoding('utf8'); + bin.stdout.once('data', function(data) { + assert.strictEqual(data.trim(), 'body{background:blue}'); + bin.kill(); + done(); + }); + + setTimeout(function() { + fs.appendFileSync(foo, 'body{background:blue}\n'); + }, 500); + }); + + it.skip('should watch the full sass dep tree for a single file (sass)', function(done) { + var src = fixture('watching/index.sass'); + var foo = fixture('watching/bar.sass'); + + fs.writeFileSync(foo, ''); + + var bin = spawn(cli, [ + '--output-style', 'compressed', + '--watch', src + ]); + + bin.stdout.setEncoding('utf8'); + bin.stdout.once('data', function(data) { + assert.strictEqual(data.trim(), 'body{background:red}'); + bin.kill(); + done(); + }); + + setTimeout(function() { + fs.appendFileSync(foo, 'body\n\tbackground: red\n'); + }, 500); + }); + }); + + describe('node-sass --output directory', function() { + it.skip('should watch whole directory', function(done) { + var destDir = fixture('watching-css-out-01/'); + var srcDir = fixture('watching-dir-01/'); + var srcFile = path.join(srcDir, 'index.scss'); + + fs.writeFileSync(srcFile, ''); + + var bin = spawn(cli, [ + '--output-style', 'compressed', + '--output', destDir, + '--watch', srcDir + ]); + + setTimeout(function() { + fs.appendFileSync(srcFile, 'a {color:green;}\n'); + setTimeout(function() { + bin.kill(); + var files = fs.readdirSync(destDir); + assert.deepStrictEqual(files, ['index.css']); + rimraf(destDir, done); + }, 200); + }, 500); + }); + + it.skip('should compile all changed files in watched directory', function(done) { + var destDir = fixture('watching-css-out-02/'); + var srcDir = fixture('watching-dir-02/'); + var srcFile = path.join(srcDir, 'foo.scss'); + + fs.writeFileSync(srcFile, ''); + + var bin = spawn(cli, [ + '--output-style', 'compressed', + '--output', destDir, + '--watch', srcDir + ]); + + setTimeout(function () { + fs.appendFileSync(srcFile, 'body{background:white}\n'); + setTimeout(function () { + bin.kill(); + var files = fs.readdirSync(destDir); + assert.deepStrictEqual(files, ['foo.css', 'index.css']); + rimraf(destDir, done); + }, 200); + }, 500); + }); + }); + + describe('node-sass in.scss --output out.css', function() { + it('should compile a scss file to build.css', function(done) { + var src = fixture('simple/index.scss'); + var dest = fixture('simple/index.css'); + var bin = spawn(cli, [src, '--output', path.dirname(dest)]); + + bin.once('close', function() { + assert(fs.existsSync(dest)); + fs.unlinkSync(dest); + done(); + }); + }); + + it('should compile with the --source-map option', function(done) { + var src = fixture('source-map/index.scss'); + var destCss = fixture('source-map/index.css'); + var destMap = fixture('source-map/index.map'); + var expectedCss = read(fixture('source-map/expected.css'), 'utf8').trim().replace(/\r\n/g, '\n'); + var expectedMap = read(fixture('source-map/expected.map'), 'utf8').trim().replace(/\r\n/g, '\n'); + var bin = spawn(cli, [src, '--output', path.dirname(destCss), '--source-map', destMap]); + + bin.once('close', function() { + assert.strictEqual(read(destCss, 'utf8').trim(), expectedCss); + assert.strictEqual(read(destMap, 'utf8').trim(), expectedMap); + 
fs.unlinkSync(destCss); + fs.unlinkSync(destMap); + done(); + }); + }); + + it('should omit sourceMappingURL if --omit-source-map-url flag is used', function(done) { + var src = fixture('source-map/index.scss'); + var dest = fixture('source-map/index.css'); + var map = fixture('source-map/index.map'); + var bin = spawn(cli, [ + src, '--output', path.dirname(dest), + '--source-map', map, '--omit-source-map-url' + ]); + + bin.once('close', function() { + assert.strictEqual(read(dest, 'utf8').indexOf('sourceMappingURL'), -1); + assert(fs.existsSync(map)); + fs.unlinkSync(map); + fs.unlinkSync(dest); + done(); + }); + }); + + it('should compile with the --source-root option', function(done) { + var src = fixture('source-map/index.scss'); + var destCss = fixture('source-map/index.css'); + var destMap = fixture('source-map/index.map'); + var expectedCss = read(fixture('source-map/expected.css'), 'utf8').trim().replace(/\r\n/g, '\n'); + var expectedUrl = 'http://test/'; + var bin = spawn(cli, [ + src, '--output', path.dirname(destCss), + '--source-map-root', expectedUrl, + '--source-map', destMap + ]); + + bin.once('close', function() { + assert.strictEqual(read(destCss, 'utf8').trim(), expectedCss); + assert.strictEqual(JSON.parse(read(destMap, 'utf8')).sourceRoot, expectedUrl); + fs.unlinkSync(destCss); + fs.unlinkSync(destMap); + done(); + }); + }); + + it('should compile with the --source-map-embed option and no outfile', function(done) { + var src = fixture('source-map-embed/index.scss'); + var expectedCss = read(fixture('source-map-embed/expected.css'), 'utf8').trim().replace(/\r\n/g, '\n'); + var result = ''; + var bin = spawn(cli, [ + src, + '--source-map-embed', + '--source-map', 'true' + ]); + + bin.stdout.on('data', function(data) { + result += data; + }); + + bin.once('close', function() { + assert.strictEqual(result.trim().replace(/\r\n/g, '\n'), expectedCss); + done(); + }); + }); + }); + + describe('node-sass sass/ --output css/', function() { + it('should create the output directory', function(done) { + var src = fixture('input-directory/sass'); + var dest = fixture('input-directory/css'); + var bin = spawn(cli, [src, '--output', dest]); + + bin.once('close', function() { + assert(fs.existsSync(dest)); + rimraf.sync(dest); + done(); + }); + }); + + it('should compile all files in the folder', function(done) { + var src = fixture('input-directory/sass'); + var dest = fixture('input-directory/css'); + var bin = spawn(cli, [src, '--output', dest]); + + bin.once('close', function() { + var files = fs.readdirSync(dest).sort(); + assert.deepStrictEqual(files, ['one.css', 'two.css', 'nested'].sort()); + var nestedFiles = fs.readdirSync(path.join(dest, 'nested')); + assert.deepStrictEqual(nestedFiles, ['three.css']); + rimraf.sync(dest); + done(); + }); + }); + + it('should compile with --source-map set to directory', function(done) { + var src = fixture('input-directory/sass'); + var dest = fixture('input-directory/css'); + var destMap = fixture('input-directory/map'); + var bin = spawn(cli, [src, '--output', dest, '--source-map', destMap]); + + bin.once('close', function() { + var map = JSON.parse(read(fixture('input-directory/map/nested/three.css.map'), 'utf8')); + + assert.strictEqual(map.file, '../../css/nested/three.css'); + rimraf.sync(dest); + rimraf.sync(destMap); + done(); + }); + }); + + it('should skip files with an underscore', function(done) { + var src = fixture('input-directory/sass'); + var dest = fixture('input-directory/css'); + var bin = spawn(cli, [src, '--output', 
dest]); + + bin.once('close', function() { + var files = fs.readdirSync(dest); + assert.strictEqual(files.indexOf('_skipped.css'), -1); + rimraf.sync(dest); + done(); + }); + }); + + it('should ignore nested files if --recursive false', function(done) { + var src = fixture('input-directory/sass'); + var dest = fixture('input-directory/css'); + var bin = spawn(cli, [ + src, '--output', dest, + '--recursive', false + ]); + + bin.once('close', function() { + var files = fs.readdirSync(dest); + assert.deepStrictEqual(files, ['one.css', 'two.css']); + rimraf.sync(dest); + done(); + }); + }); + + it('should error if no output directory is provided', function(done) { + var src = fixture('input-directory/sass'); + var bin = spawn(cli, [src]); + + bin.once('close', function(code) { + assert.notStrictEqual(code, 0); + assert.strictEqual(glob.sync(fixture('input-directory/**/*.css')).length, 0); + done(); + }); + }); + + it('should error if output directory is not a directory', function(done) { + var src = fixture('input-directory/sass'); + var dest = fixture('input-directory/sass/one.scss'); + var bin = spawn(cli, [src, '--output', dest]); + + bin.once('close', function(code) { + assert.notStrictEqual(code, 0); + assert.strictEqual(glob.sync(fixture('input-directory/**/*.css')).length, 0); + done(); + }); + }); + + it('should not error if output directory is a symlink', function(done) { + var outputDir = fixture('input-directory/css'); + var src = fixture('input-directory/sass'); + var symlink = fixture('symlinked-css'); + fs.mkdirSync(outputDir); + fs.symlinkSync(outputDir, symlink); + var bin = spawn(cli, [src, '--output', symlink]); + + bin.once('close', function() { + var files = fs.readdirSync(outputDir).sort(); + assert.deepStrictEqual(files, ['one.css', 'two.css', 'nested'].sort()); + var nestedFiles = fs.readdirSync(path.join(outputDir, 'nested')); + assert.deepStrictEqual(nestedFiles, ['three.css']); + rimraf.sync(outputDir); + fs.unlinkSync(symlink); + done(); + }); + }); + }); + + describe('node-sass in.scss --output path/to/file/out.css', function() { + it('should create the output directory', function(done) { + var src = fixture('output-directory/index.scss'); + var dest = fixture('output-directory/path/to/file/index.css'); + var bin = spawn(cli, [src, '--output', path.dirname(dest)]); + + bin.once('close', function() { + assert(fs.existsSync(path.dirname(dest))); + fs.unlinkSync(dest); + fs.rmdirSync(path.dirname(dest)); + dest = path.dirname(dest); + fs.rmdirSync(path.dirname(dest)); + dest = path.dirname(dest); + fs.rmdirSync(path.dirname(dest)); + done(); + }); + }); + + }); + + describe('node-sass --follow --output output-dir input-dir', function() { + it('should compile with the --follow option', function(done) { + var src = fixture('follow/input-dir'); + var dest = fixture('follow/output-dir'); + + fs.mkdirSync(src); + fs.symlinkSync(path.join(path.dirname(src), 'foo'), path.join(src, 'foo'), 'dir'); + + var bin = spawn(cli, [src, '--follow', '--output', dest]); + + bin.once('close', function() { + var expected = path.join(dest, 'foo/bar/index.css'); + fs.unlinkSync(path.join(src, 'foo')); + fs.rmdirSync(src); + assert(fs.existsSync(expected)); + fs.unlinkSync(expected); + expected = path.dirname(expected); + fs.rmdirSync(expected); + expected = path.dirname(expected); + fs.rmdirSync(expected); + fs.rmdirSync(dest); + done(); + }); + }); + }); + + describe('importer', function() { + var dest = fixture('include-files/index.css'); + var src = fixture('include-files/index.scss'); + 
var expectedData = read(fixture('include-files/expected-data-importer.css'), 'utf8').trim().replace(/\r\n/g, '\n'); + var expectedFile = read(fixture('include-files/expected-file-importer.css'), 'utf8').trim().replace(/\r\n/g, '\n'); + + it('should override imports and fire callback with file and contents', function(done) { + var bin = spawn(cli, [ + src, '--output', path.dirname(dest), + '--importer', fixture('extras/my_custom_importer_file_and_data_cb.js') + ]); + + bin.once('close', function() { + assert.strictEqual(read(dest, 'utf8').trim(), expectedData); + fs.unlinkSync(dest); + done(); + }); + }); + + it('should override imports and fire callback with file', function(done) { + var bin = spawn(cli, [ + src, '--output', path.dirname(dest), + '--importer', fixture('extras/my_custom_importer_file_cb.js') + ]); + + bin.once('close', function() { + if (fs.existsSync(dest)) { + assert.strictEqual(read(dest, 'utf8').trim(), expectedFile); + fs.unlinkSync(dest); + } + + done(); + }); + }); + + it('should override imports and fire callback with data', function(done) { + var bin = spawn(cli, [ + src, '--output', path.dirname(dest), + '--importer', fixture('extras/my_custom_importer_data_cb.js') + ]); + + bin.once('close', function() { + assert.strictEqual(read(dest, 'utf8').trim(), expectedData); + fs.unlinkSync(dest); + done(); + }); + }); + + it('should override imports and return file and contents', function(done) { + var bin = spawn(cli, [ + src, '--output', path.dirname(dest), + '--importer', fixture('extras/my_custom_importer_file_and_data.js') + ]); + + bin.once('close', function() { + assert.strictEqual(read(dest, 'utf8').trim(), expectedData); + fs.unlinkSync(dest); + done(); + }); + }); + + it('should override imports and return file', function(done) { + var bin = spawn(cli, [ + src, '--output', path.dirname(dest), + '--importer', fixture('extras/my_custom_importer_file.js') + ]); + + bin.once('close', function() { + if (fs.existsSync(dest)) { + assert.strictEqual(read(dest, 'utf8').trim(), expectedFile); + fs.unlinkSync(dest); + } + + done(); + }); + }); + + it('should override imports and return data', function(done) { + var bin = spawn(cli, [ + src, '--output', path.dirname(dest), + '--importer', fixture('extras/my_custom_importer_data.js') + ]); + + bin.once('close', function() { + assert.strictEqual(read(dest, 'utf8').trim(), expectedData); + fs.unlinkSync(dest); + done(); + }); + }); + + it('should accept arrays of importers and return respect the order', function(done) { + var bin = spawn(cli, [ + src, '--output', path.dirname(dest), + '--importer', fixture('extras/my_custom_arrays_of_importers.js') + ]); + + bin.once('close', function() { + assert.strictEqual(read(dest, 'utf8').trim(), expectedData); + fs.unlinkSync(dest); + done(); + }); + }); + + it('should return error for invalid importer file path', function(done) { + var bin = spawn(cli, [ + src, '--output', path.dirname(dest), + '--importer', fixture('non/existing/path') + ]); + + bin.once('close', function(code) { + assert.notStrictEqual(code, 0); + done(); + }); + }); + + it('should reflect user-defined Error', function(done) { + var bin = spawn(cli, [ + src, '--output', path.dirname(dest), + '--importer', fixture('extras/my_custom_importer_error.js') + ]); + + bin.stderr.once('data', function(code) { + assert.strictEqual(JSON.parse(code).message, 'doesn\'t exist!'); + done(); + }); + }); + }); + + describe('functions', function() { + it('should let custom functions call setter methods on wrapped sass values 
(number)', function(done) { + var dest = fixture('custom-functions/setter.css'); + var src = fixture('custom-functions/setter.scss'); + var expected = read(fixture('custom-functions/setter-expected.css'), 'utf8').trim().replace(/\r\n/g, '\n'); + var bin = spawn(cli, [ + src, '--output', path.dirname(dest), + '--functions', fixture('extras/my_custom_functions_setter.js') + ]); + + bin.once('close', function() { + assert.strictEqual(read(dest, 'utf8').trim(), expected); + fs.unlinkSync(dest); + done(); + }); + }); + + it('should properly convert strings when calling custom functions', function(done) { + var dest = fixture('custom-functions/string-conversion.css'); + var src = fixture('custom-functions/string-conversion.scss'); + var expected = read(fixture('custom-functions/string-conversion-expected.css'), 'utf8').trim().replace(/\r\n/g, '\n'); + var bin = spawn(cli, [ + src, '--output', path.dirname(dest), + '--functions', fixture('extras/my_custom_functions_string_conversion.js') + ]); + + bin.once('close', function() { + assert.strictEqual(read(dest, 'utf8').trim(), expected); + fs.unlinkSync(dest); + done(); + }); + }); + }); +}); diff --git a/mybulma/node_modules/node-sass/test/downloadoptions.js b/mybulma/node_modules/node-sass/test/downloadoptions.js new file mode 100644 index 0000000..968213e --- /dev/null +++ b/mybulma/node_modules/node-sass/test/downloadoptions.js @@ -0,0 +1,129 @@ +var assert = require('assert').strict, + ua = require('../scripts/util/useragent'), + opts = require('../scripts/util/downloadoptions'); + + +describe('util', function() { + describe('downloadoptions', function() { + describe('without a proxy', function() { + it('should look as we expect', function() { + var expected = { + strictSSL: true, + timeout: 60000, + headers: { + 'User-Agent': ua(), + }, + }; + + assert.deepStrictEqual(opts(), expected); + }); + }); + + describe('with an npm config proxy', function() { + var proxy = 'http://test.proxy:1234'; + + before(function() { + process.env.npm_config_proxy = proxy; + }); + + after(function() { + delete process.env.npm_config_proxy; + }); + + it('should look as we expect', function() { + var expected = { + strictSSL: true, + proxy: proxy, + timeout: 60000, + headers: { + 'User-Agent': ua(), + }, + }; + + assert.deepStrictEqual(opts(), expected); + }); + }); + + describe('with an env proxy proxy', function() { + var proxy = 'http://test.proxy:1234'; + + before(function() { + process.env.HTTP_PROXY = proxy; + }); + + after(function() { + delete process.env.HTTP_PROXY; + }); + + it('should look as we expect', function() { + var expected = { + strictSSL: true, + timeout: 60000, + headers: { + 'User-Agent': ua(), + }, + }; + + assert.deepStrictEqual(opts(), expected); + }); + }); + + describe('with SASS_REJECT_UNAUTHORIZED set to false', function() { + beforeEach(function() { + process.env.SASS_REJECT_UNAUTHORIZED = '0'; + }); + + it('should look as we expect', function() { + var expected = { + strictSSL: false, + timeout: 60000, + headers: { + 'User-Agent': ua(), + }, + }; + + assert.deepStrictEqual(opts(), expected); + }); + }); + + describe('with SASS_REJECT_UNAUTHORIZED set to true', function() { + beforeEach(function() { + process.env.SASS_REJECT_UNAUTHORIZED = '1'; + }); + + it('should look as we expect', function() { + var expected = { + strictSSL: true, + timeout: 60000, + headers: { + 'User-Agent': ua(), + }, + }; + + assert.deepStrictEqual(opts(), expected); + }); + }); + + describe('with npm_config_sass_reject_unauthorized set to true', function() { 
+ beforeEach(function() { + process.env.npm_config_sass_reject_unauthorized = true; + }); + + it('should look as we expect', function() { + var expected = { + strictSSL: true, + timeout: 60000, + headers: { + 'User-Agent': ua(), + }, + }; + + assert.deepStrictEqual(opts(), expected); + }); + + afterEach(function() { + process.env.npm_config_sass_reject_unauthorized = undefined; + }); + }); + }); +}); diff --git a/mybulma/node_modules/node-sass/test/errors.js b/mybulma/node_modules/node-sass/test/errors.js new file mode 100644 index 0000000..537ebb6 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/errors.js @@ -0,0 +1,53 @@ +var assert = require('assert').strict, + path = require('path'), + errors = require('../lib/errors'); + +describe('binary errors', function() { + + function getCurrentPlatform() { + if (process.platform === 'win32') { + return 'Windows'; + } else if (process.platform === 'darwin') { + return 'OS X'; + } + return ''; + } + + function getCurrentArchitecture() { + if (process.arch === 'x86' || process.arch === 'ia32') { + return '32-bit'; + } else if (process.arch === 'x64') { + return '64-bit'; + } + return ''; + } + + function getCurrentEnvironment() { + return getCurrentPlatform() + ' ' + getCurrentArchitecture(); + } + + describe('for an unsupported environment', function() { + it('identifies the current environment', function() { + var message = errors.unsupportedEnvironment(); + assert.ok(message.indexOf(getCurrentEnvironment()) !== -1); + }); + + it('links to supported environment documentation', function() { + var message = errors.unsupportedEnvironment(); + assert.ok(message.indexOf('https://github.com/sass/node-sass/releases/tag/v') !== -1); + }); + }); + + describe('for an missing binary', function() { + it('identifies the current environment', function() { + var message = errors.missingBinary(); + assert.ok(message.indexOf(getCurrentEnvironment()) !== -1); + }); + + it('documents the expected binary location', function() { + var message = errors.missingBinary(); + assert.ok(message.indexOf(path.sep + 'vendor' + path.sep) !== -1); + }); + }); + +}); diff --git a/mybulma/node_modules/node-sass/test/fixtures/compressed/expected.css b/mybulma/node_modules/node-sass/test/fixtures/compressed/expected.css new file mode 100644 index 0000000..197319f --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/compressed/expected.css @@ -0,0 +1 @@ +#navbar{width:80%;height:23px}#navbar ul{list-style-type:none}#navbar li{float:left}#navbar li a{font-weight:bold} diff --git a/mybulma/node_modules/node-sass/test/fixtures/compressed/index.scss b/mybulma/node_modules/node-sass/test/fixtures/compressed/index.scss new file mode 100644 index 0000000..38a8fe6 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/compressed/index.scss @@ -0,0 +1,16 @@ +#navbar { + width: 80%; + height: 23px; +} + +#navbar ul { + list-style-type: none; +} + +#navbar li { + float: left; + + a { + font-weight: bold; + } +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/custom-functions/setter-expected.css b/mybulma/node_modules/node-sass/test/fixtures/custom-functions/setter-expected.css new file mode 100644 index 0000000..9b67ea9 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/custom-functions/setter-expected.css @@ -0,0 +1,3 @@ +div { + width: 42rem; + height: 84px; } diff --git a/mybulma/node_modules/node-sass/test/fixtures/custom-functions/setter.scss b/mybulma/node_modules/node-sass/test/fixtures/custom-functions/setter.scss new file mode 100644 
index 0000000..fbc6116 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/custom-functions/setter.scss @@ -0,0 +1 @@ +div { width: foo(42px); height: bar(42px); } diff --git a/mybulma/node_modules/node-sass/test/fixtures/custom-functions/string-conversion-expected.css b/mybulma/node_modules/node-sass/test/fixtures/custom-functions/string-conversion-expected.css new file mode 100644 index 0000000..741d2cb --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/custom-functions/string-conversion-expected.css @@ -0,0 +1,2 @@ +div { + color: "barbar"; } diff --git a/mybulma/node_modules/node-sass/test/fixtures/custom-functions/string-conversion.scss b/mybulma/node_modules/node-sass/test/fixtures/custom-functions/string-conversion.scss new file mode 100644 index 0000000..4e6403f --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/custom-functions/string-conversion.scss @@ -0,0 +1 @@ +div { color: foo("bar"); } diff --git a/mybulma/node_modules/node-sass/test/fixtures/cwd-include-path/expected.css b/mybulma/node_modules/node-sass/test/fixtures/cwd-include-path/expected.css new file mode 100644 index 0000000..1cfd35a --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/cwd-include-path/expected.css @@ -0,0 +1,2 @@ +.outside { + color: red; } diff --git a/mybulma/node_modules/node-sass/test/fixtures/cwd-include-path/outside.scss b/mybulma/node_modules/node-sass/test/fixtures/cwd-include-path/outside.scss new file mode 100644 index 0000000..9568623 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/cwd-include-path/outside.scss @@ -0,0 +1,3 @@ +.outside { + color: red; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/cwd-include-path/root/index.scss b/mybulma/node_modules/node-sass/test/fixtures/cwd-include-path/root/index.scss new file mode 100644 index 0000000..0279f78 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/cwd-include-path/root/index.scss @@ -0,0 +1 @@ +@import 'outside'; diff --git a/mybulma/node_modules/node-sass/test/fixtures/depth-first/_common.scss b/mybulma/node_modules/node-sass/test/fixtures/depth-first/_common.scss new file mode 100644 index 0000000..7b30f5e --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/depth-first/_common.scss @@ -0,0 +1,6 @@ +@import "vars"; +@import "struct"; + +.myvars { + content: quote($import_counter); +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/depth-first/_struct.scss b/mybulma/node_modules/node-sass/test/fixtures/depth-first/_struct.scss new file mode 100644 index 0000000..f3152b7 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/depth-first/_struct.scss @@ -0,0 +1,3 @@ +.common-struct { + content: "common-struct"; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/depth-first/_vars.scss b/mybulma/node_modules/node-sass/test/fixtures/depth-first/_vars.scss new file mode 100644 index 0000000..da5a7f2 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/depth-first/_vars.scss @@ -0,0 +1,5 @@ +$import_counter: $import_counter + 1; + +.common-vars { + content: "common-vars"; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/depth-first/a.scss b/mybulma/node_modules/node-sass/test/fixtures/depth-first/a.scss new file mode 100644 index 0000000..6c815de --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/depth-first/a.scss @@ -0,0 +1,7 @@ +@import "_common"; +@import "a1"; + +.a2 { + content: "a2"; +} + diff --git 
a/mybulma/node_modules/node-sass/test/fixtures/depth-first/a1.scss b/mybulma/node_modules/node-sass/test/fixtures/depth-first/a1.scss new file mode 100644 index 0000000..272671b --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/depth-first/a1.scss @@ -0,0 +1,3 @@ +.a1 { + content: "a1"; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/depth-first/b.scss b/mybulma/node_modules/node-sass/test/fixtures/depth-first/b.scss new file mode 100644 index 0000000..2f21d7d --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/depth-first/b.scss @@ -0,0 +1,5 @@ +@import "b1"; + +.b2 { + content: "b2"; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/depth-first/b1.scss b/mybulma/node_modules/node-sass/test/fixtures/depth-first/b1.scss new file mode 100644 index 0000000..ec7b88d --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/depth-first/b1.scss @@ -0,0 +1,3 @@ +.b1 { + content: "b1"; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/depth-first/expected.css b/mybulma/node_modules/node-sass/test/fixtures/depth-first/expected.css new file mode 100644 index 0000000..f83c268 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/depth-first/expected.css @@ -0,0 +1,32 @@ +.common-vars { + content: "common-vars"; } + +.common-struct { + content: "common-struct"; } + +.myvars { + content: "1"; } + +.a1 { + content: "a1"; } + +.a2 { + content: "a2"; } + +.common-vars { + content: "common-vars"; } + +.common-struct { + content: "common-struct"; } + +.myvars { + content: "2"; } + +.b1 { + content: "b1"; } + +.b2 { + content: "b2"; } + +#the-last { + content: "LAST"; } \ No newline at end of file diff --git a/mybulma/node_modules/node-sass/test/fixtures/depth-first/index.scss b/mybulma/node_modules/node-sass/test/fixtures/depth-first/index.scss new file mode 100644 index 0000000..09b0e76 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/depth-first/index.scss @@ -0,0 +1,8 @@ +$import_counter: 0; +@import "a"; +@import "common"; +@import "b"; + +#the-last { + content: "LAST"; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_arrays_of_importers.js b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_arrays_of_importers.js new file mode 100644 index 0000000..38c1c08 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_arrays_of_importers.js @@ -0,0 +1,12 @@ +var sass = require('../../..'); + +module.exports = [ + function() { + return sass.NULL; + }, + function() { + return { + contents: 'div {color: yellow;}' + }; + } +]; diff --git a/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_functions_setter.js b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_functions_setter.js new file mode 100644 index 0000000..9ec8c24 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_functions_setter.js @@ -0,0 +1,10 @@ +module.exports = { + 'foo($a)': function(size) { + size.setUnit('rem'); + return size; + }, + 'bar($a)': function(size) { + size.setValue(size.getValue() * 2); + return size; + } +}; diff --git a/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_functions_string_conversion.js b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_functions_string_conversion.js new file mode 100644 index 0000000..3aa4eb8 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_functions_string_conversion.js @@ -0,0 +1,8 @@ +var sass = require('../../..'); + 
+module.exports = { + 'foo($a)': function(str) { + str = str.getValue().replace(/['"]/g, ''); + return new sass.types.String('"' + str + str + '"'); + } +}; diff --git a/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_data.js b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_data.js new file mode 100644 index 0000000..23f272a --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_data.js @@ -0,0 +1,5 @@ +module.exports = function() { + return { + contents: 'div {color: yellow;}' + }; +}; diff --git a/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_data_cb.js b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_data_cb.js new file mode 100644 index 0000000..d587304 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_data_cb.js @@ -0,0 +1,5 @@ +module.exports = function(file, prev, done) { + done({ + contents: 'div {color: yellow;}' + }); +}; diff --git a/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_error.js b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_error.js new file mode 100644 index 0000000..eb1c959 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_error.js @@ -0,0 +1,3 @@ +module.exports = function() { + return new Error('doesn\'t exist!'); +}; diff --git a/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_file.js b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_file.js new file mode 100644 index 0000000..ad7b17d --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_file.js @@ -0,0 +1,7 @@ +var path = require('path'); + +module.exports = function(file) { + return { + file: path.resolve(path.join(process.cwd(), 'test/fixtures/include-files/', file + (path.extname(file) ? '' : '.scss'))) + }; +}; diff --git a/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_file_and_data.js b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_file_and_data.js new file mode 100644 index 0000000..e29f042 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_file_and_data.js @@ -0,0 +1,6 @@ +module.exports = function() { + return { + file: '/some/random/path/file.scss', + contents: 'div {color: yellow;}' + }; +}; diff --git a/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_file_and_data_cb.js b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_file_and_data_cb.js new file mode 100644 index 0000000..e24f05e --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_file_and_data_cb.js @@ -0,0 +1,6 @@ +module.exports = function(file, prev, done) { + done({ + file: '/some/random/path/file.scss', + contents: 'div {color: yellow;}' + }); +}; diff --git a/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_file_cb.js b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_file_cb.js new file mode 100644 index 0000000..dc5b8bd --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/extras/my_custom_importer_file_cb.js @@ -0,0 +1,7 @@ +var path = require('path'); + +module.exports = function(file, /* jshint unused:false */ prev, done) { + done({ + file: path.resolve(path.join(process.cwd(), 'test/fixtures/include-files/', file + (path.extname(file) ? 
'' : '.scss'))) + }); +}; diff --git a/mybulma/node_modules/node-sass/test/fixtures/follow/foo/bar/index.scss b/mybulma/node_modules/node-sass/test/fixtures/follow/foo/bar/index.scss new file mode 100644 index 0000000..38a8fe6 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/follow/foo/bar/index.scss @@ -0,0 +1,16 @@ +#navbar { + width: 80%; + height: 23px; +} + +#navbar ul { + list-style-type: none; +} + +#navbar li { + float: left; + + a { + font-weight: bold; + } +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/include-files/bar.scss b/mybulma/node_modules/node-sass/test/fixtures/include-files/bar.scss new file mode 100644 index 0000000..f377712 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/include-files/bar.scss @@ -0,0 +1 @@ +/* bar.scss */ diff --git a/mybulma/node_modules/node-sass/test/fixtures/include-files/chained-imports-with-custom-importer.scss b/mybulma/node_modules/node-sass/test/fixtures/include-files/chained-imports-with-custom-importer.scss new file mode 100644 index 0000000..8dbe665 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/include-files/chained-imports-with-custom-importer.scss @@ -0,0 +1 @@ +@import "file-not-processed-by-loader", "file-processed-by-loader"; diff --git a/mybulma/node_modules/node-sass/test/fixtures/include-files/expected-data-importer.css b/mybulma/node_modules/node-sass/test/fixtures/include-files/expected-data-importer.css new file mode 100644 index 0000000..1925a60 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/include-files/expected-data-importer.css @@ -0,0 +1,5 @@ +div { + color: yellow; } + +div { + color: yellow; } diff --git a/mybulma/node_modules/node-sass/test/fixtures/include-files/expected-file-importer.css b/mybulma/node_modules/node-sass/test/fixtures/include-files/expected-file-importer.css new file mode 100644 index 0000000..326f694 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/include-files/expected-file-importer.css @@ -0,0 +1,2 @@ +/* foo.scss */ +/* bar.scss */ diff --git a/mybulma/node_modules/node-sass/test/fixtures/include-files/expected-importer.css b/mybulma/node_modules/node-sass/test/fixtures/include-files/expected-importer.css new file mode 100644 index 0000000..1925a60 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/include-files/expected-importer.css @@ -0,0 +1,5 @@ +div { + color: yellow; } + +div { + color: yellow; } diff --git a/mybulma/node_modules/node-sass/test/fixtures/include-files/file-not-processed-by-loader.scss b/mybulma/node_modules/node-sass/test/fixtures/include-files/file-not-processed-by-loader.scss new file mode 100644 index 0000000..47f8c1d --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/include-files/file-not-processed-by-loader.scss @@ -0,0 +1 @@ +$variable-defined-by-file-not-processed-by-loader: 'red'; diff --git a/mybulma/node_modules/node-sass/test/fixtures/include-files/file-processed-by-loader.scss b/mybulma/node_modules/node-sass/test/fixtures/include-files/file-processed-by-loader.scss new file mode 100644 index 0000000..4c79efe --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/include-files/file-processed-by-loader.scss @@ -0,0 +1,3 @@ +body { + color: $variable-defined-by-file-not-processed-by-loader; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/include-files/foo.scss b/mybulma/node_modules/node-sass/test/fixtures/include-files/foo.scss new file mode 100644 index 0000000..9834f54 --- /dev/null +++ 
b/mybulma/node_modules/node-sass/test/fixtures/include-files/foo.scss @@ -0,0 +1 @@ +/* foo.scss */ diff --git a/mybulma/node_modules/node-sass/test/fixtures/include-files/index.scss b/mybulma/node_modules/node-sass/test/fixtures/include-files/index.scss new file mode 100644 index 0000000..01bdbe2 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/include-files/index.scss @@ -0,0 +1,2 @@ +@import 'foo'; +@import 'bar'; diff --git a/mybulma/node_modules/node-sass/test/fixtures/include-path/expected.css b/mybulma/node_modules/node-sass/test/fixtures/include-path/expected.css new file mode 100644 index 0000000..1f0ae60 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/include-path/expected.css @@ -0,0 +1,3 @@ +body { + background: red; + color: #0000fe; } diff --git a/mybulma/node_modules/node-sass/test/fixtures/include-path/functions/colorBlue.scss b/mybulma/node_modules/node-sass/test/fixtures/include-path/functions/colorBlue.scss new file mode 100644 index 0000000..4fa618c --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/include-path/functions/colorBlue.scss @@ -0,0 +1,3 @@ +@function colorBlue() { + @return #0000fe; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/include-path/index.scss b/mybulma/node_modules/node-sass/test/fixtures/include-path/index.scss new file mode 100644 index 0000000..8b0f5e8 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/include-path/index.scss @@ -0,0 +1,7 @@ +@import 'vars'; +@import 'colorBlue'; + +body { + background: $color; + color: colorBlue(); +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/include-path/lib/vars.scss b/mybulma/node_modules/node-sass/test/fixtures/include-path/lib/vars.scss new file mode 100644 index 0000000..4b04915 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/include-path/lib/vars.scss @@ -0,0 +1 @@ +$color: red; diff --git a/mybulma/node_modules/node-sass/test/fixtures/indent/expected.css b/mybulma/node_modules/node-sass/test/fixtures/indent/expected.css new file mode 100644 index 0000000..a7df077 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/indent/expected.css @@ -0,0 +1,2 @@ +foo + bar { + color: red; } diff --git a/mybulma/node_modules/node-sass/test/fixtures/indent/index.sass b/mybulma/node_modules/node-sass/test/fixtures/indent/index.sass new file mode 100644 index 0000000..3f7dfae --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/indent/index.sass @@ -0,0 +1,3 @@ +foo + + bar + color: red diff --git a/mybulma/node_modules/node-sass/test/fixtures/input-directory/sass/_skipped.scss b/mybulma/node_modules/node-sass/test/fixtures/input-directory/sass/_skipped.scss new file mode 100644 index 0000000..38a8fe6 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/input-directory/sass/_skipped.scss @@ -0,0 +1,16 @@ +#navbar { + width: 80%; + height: 23px; +} + +#navbar ul { + list-style-type: none; +} + +#navbar li { + float: left; + + a { + font-weight: bold; + } +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/input-directory/sass/nested/three.scss b/mybulma/node_modules/node-sass/test/fixtures/input-directory/sass/nested/three.scss new file mode 100644 index 0000000..38a8fe6 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/input-directory/sass/nested/three.scss @@ -0,0 +1,16 @@ +#navbar { + width: 80%; + height: 23px; +} + +#navbar ul { + list-style-type: none; +} + +#navbar li { + float: left; + + a { + font-weight: bold; + } +} diff --git 
a/mybulma/node_modules/node-sass/test/fixtures/input-directory/sass/one.scss b/mybulma/node_modules/node-sass/test/fixtures/input-directory/sass/one.scss new file mode 100644 index 0000000..38a8fe6 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/input-directory/sass/one.scss @@ -0,0 +1,16 @@ +#navbar { + width: 80%; + height: 23px; +} + +#navbar ul { + list-style-type: none; +} + +#navbar li { + float: left; + + a { + font-weight: bold; + } +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/input-directory/sass/two.scss b/mybulma/node_modules/node-sass/test/fixtures/input-directory/sass/two.scss new file mode 100644 index 0000000..38a8fe6 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/input-directory/sass/two.scss @@ -0,0 +1,16 @@ +#navbar { + width: 80%; + height: 23px; +} + +#navbar ul { + list-style-type: none; +} + +#navbar li { + float: left; + + a { + font-weight: bold; + } +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/invalid/index.scss b/mybulma/node_modules/node-sass/test/fixtures/invalid/index.scss new file mode 100644 index 0000000..e04c557 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/invalid/index.scss @@ -0,0 +1,3 @@ +body { + background-color: $green; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/output-directory/index.scss b/mybulma/node_modules/node-sass/test/fixtures/output-directory/index.scss new file mode 100644 index 0000000..38a8fe6 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/output-directory/index.scss @@ -0,0 +1,16 @@ +#navbar { + width: 80%; + height: 23px; +} + +#navbar ul { + list-style-type: none; +} + +#navbar li { + float: left; + + a { + font-weight: bold; + } +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/precision/expected.css b/mybulma/node_modules/node-sass/test/fixtures/precision/expected.css new file mode 100644 index 0000000..c9a0a06 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/precision/expected.css @@ -0,0 +1,2 @@ +.foo { + margin: 1.23456789 px; } diff --git a/mybulma/node_modules/node-sass/test/fixtures/precision/index.scss b/mybulma/node_modules/node-sass/test/fixtures/precision/index.scss new file mode 100644 index 0000000..328b214 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/precision/index.scss @@ -0,0 +1,3 @@ +.foo { + margin: 1.23456789 px; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/sass-path/expected-orange.css b/mybulma/node_modules/node-sass/test/fixtures/sass-path/expected-orange.css new file mode 100644 index 0000000..32b0627 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/sass-path/expected-orange.css @@ -0,0 +1,3 @@ +body { + background: orange; } + diff --git a/mybulma/node_modules/node-sass/test/fixtures/sass-path/expected-red.css b/mybulma/node_modules/node-sass/test/fixtures/sass-path/expected-red.css new file mode 100644 index 0000000..3884d51 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/sass-path/expected-red.css @@ -0,0 +1,3 @@ +body { + background: red; } + diff --git a/mybulma/node_modules/node-sass/test/fixtures/sass-path/index.scss b/mybulma/node_modules/node-sass/test/fixtures/sass-path/index.scss new file mode 100644 index 0000000..867386c --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/sass-path/index.scss @@ -0,0 +1,6 @@ +@import 'colors'; + +body { + background: $color; +} + diff --git a/mybulma/node_modules/node-sass/test/fixtures/sass-path/orange/colors.scss 
b/mybulma/node_modules/node-sass/test/fixtures/sass-path/orange/colors.scss new file mode 100644 index 0000000..ca95971 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/sass-path/orange/colors.scss @@ -0,0 +1 @@ +$color: orange; diff --git a/mybulma/node_modules/node-sass/test/fixtures/sass-path/red/colors.scss b/mybulma/node_modules/node-sass/test/fixtures/sass-path/red/colors.scss new file mode 100644 index 0000000..4b04915 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/sass-path/red/colors.scss @@ -0,0 +1 @@ +$color: red; diff --git a/mybulma/node_modules/node-sass/test/fixtures/simple/expected.css b/mybulma/node_modules/node-sass/test/fixtures/simple/expected.css new file mode 100644 index 0000000..402ae50 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/simple/expected.css @@ -0,0 +1,11 @@ +#navbar { + width: 80%; + height: 23px; } + +#navbar ul { + list-style-type: none; } + +#navbar li { + float: left; } + #navbar li a { + font-weight: bold; } diff --git a/mybulma/node_modules/node-sass/test/fixtures/simple/index.scss b/mybulma/node_modules/node-sass/test/fixtures/simple/index.scss new file mode 100644 index 0000000..38a8fe6 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/simple/index.scss @@ -0,0 +1,16 @@ +#navbar { + width: 80%; + height: 23px; +} + +#navbar ul { + list-style-type: none; +} + +#navbar li { + float: left; + + a { + font-weight: bold; + } +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/source-comments/expected.css b/mybulma/node_modules/node-sass/test/fixtures/source-comments/expected.css new file mode 100644 index 0000000..53b1a38 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/source-comments/expected.css @@ -0,0 +1,15 @@ +/* line 1, stdin */ +#navbar { + width: 80%; + height: 23px; } + +/* line 6, stdin */ +#navbar ul { + list-style-type: none; } + +/* line 10, stdin */ +#navbar li { + float: left; } + /* line 13, stdin */ + #navbar li a { + font-weight: bold; } diff --git a/mybulma/node_modules/node-sass/test/fixtures/source-comments/index.scss b/mybulma/node_modules/node-sass/test/fixtures/source-comments/index.scss new file mode 100644 index 0000000..38a8fe6 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/source-comments/index.scss @@ -0,0 +1,16 @@ +#navbar { + width: 80%; + height: 23px; +} + +#navbar ul { + list-style-type: none; +} + +#navbar li { + float: left; + + a { + font-weight: bold; + } +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/source-map-embed/expected.css b/mybulma/node_modules/node-sass/test/fixtures/source-map-embed/expected.css new file mode 100644 index 0000000..a1e895f --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/source-map-embed/expected.css @@ -0,0 +1,13 @@ +#navbar { + width: 80%; + height: 23px; } + +#navbar ul { + list-style-type: none; } + +#navbar li { + float: left; } + #navbar li a { + font-weight: bold; } + +/*# 
sourceMappingURL=data:application/json;base64,ewoJInZlcnNpb24iOiAzLAoJImZpbGUiOiAidGVzdC9maXh0dXJlcy9zb3VyY2UtbWFwLWVtYmVkL2luZGV4LmNzcyIsCgkic291cmNlcyI6IFsKCQkidGVzdC9maXh0dXJlcy9zb3VyY2UtbWFwLWVtYmVkL2luZGV4LnNjc3MiCgldLAoJIm5hbWVzIjogW10sCgkibWFwcGluZ3MiOiAiQUFBQSxBQUFBLE9BQU8sQ0FBQztFQUNOLEtBQUssRUFBRSxHQUFHO0VBQ1YsTUFBTSxFQUFFLElBQUksR0FDYjs7QUFFRCxBQUFBLE9BQU8sQ0FBQyxFQUFFLENBQUM7RUFDVCxlQUFlLEVBQUUsSUFBSSxHQUN0Qjs7QUFFRCxBQUFBLE9BQU8sQ0FBQyxFQUFFLENBQUM7RUFDVCxLQUFLLEVBQUUsSUFBSSxHQUtaO0VBTkQsQUFHRSxPQUhLLENBQUMsRUFBRSxDQUdSLENBQUMsQ0FBQztJQUNBLFdBQVcsRUFBRSxJQUFJLEdBQ2xCIgp9 */ diff --git a/mybulma/node_modules/node-sass/test/fixtures/source-map-embed/index.scss b/mybulma/node_modules/node-sass/test/fixtures/source-map-embed/index.scss new file mode 100644 index 0000000..38a8fe6 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/source-map-embed/index.scss @@ -0,0 +1,16 @@ +#navbar { + width: 80%; + height: 23px; +} + +#navbar ul { + list-style-type: none; +} + +#navbar li { + float: left; + + a { + font-weight: bold; + } +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/source-map/expected.css b/mybulma/node_modules/node-sass/test/fixtures/source-map/expected.css new file mode 100644 index 0000000..708bc47 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/source-map/expected.css @@ -0,0 +1,13 @@ +#navbar { + width: 80%; + height: 23px; } + +#navbar ul { + list-style-type: none; } + +#navbar li { + float: left; } + #navbar li a { + font-weight: bold; } + +/*# sourceMappingURL=index.map */ diff --git a/mybulma/node_modules/node-sass/test/fixtures/source-map/expected.map b/mybulma/node_modules/node-sass/test/fixtures/source-map/expected.map new file mode 100644 index 0000000..bd43765 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/source-map/expected.map @@ -0,0 +1,9 @@ +{ + "version": 3, + "file": "index.css", + "sources": [ + "index.scss" + ], + "names": [], + "mappings": "AAAA,AAAA,OAAO,CAAC;EACN,KAAK,EAAE,GAAG;EACV,MAAM,EAAE,IAAI,GACb;;AAED,AAAA,OAAO,CAAC,EAAE,CAAC;EACT,eAAe,EAAE,IAAI,GACtB;;AAED,AAAA,OAAO,CAAC,EAAE,CAAC;EACT,KAAK,EAAE,IAAI,GAKZ;EAND,AAGE,OAHK,CAAC,EAAE,CAGR,CAAC,CAAC;IACA,WAAW,EAAE,IAAI,GAClB" +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/source-map/index.scss b/mybulma/node_modules/node-sass/test/fixtures/source-map/index.scss new file mode 100644 index 0000000..38a8fe6 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/source-map/index.scss @@ -0,0 +1,16 @@ +#navbar { + width: 80%; + height: 23px; +} + +#navbar ul { + list-style-type: none; +} + +#navbar li { + float: left; + + a { + font-weight: bold; + } +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/watcher/main/one.scss b/mybulma/node_modules/node-sass/test/fixtures/watcher/main/one.scss new file mode 100644 index 0000000..414af5e --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/watcher/main/one.scss @@ -0,0 +1,5 @@ +@import "partials/one"; + +.one { + color: red; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/watcher/main/partials/_one.scss b/mybulma/node_modules/node-sass/test/fixtures/watcher/main/partials/_one.scss new file mode 100644 index 0000000..379ec65 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/watcher/main/partials/_one.scss @@ -0,0 +1,5 @@ +@import "partials/three"; + +.one { + color: darkred; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/watcher/main/partials/_three.scss 
b/mybulma/node_modules/node-sass/test/fixtures/watcher/main/partials/_three.scss new file mode 100644 index 0000000..1846e9a --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/watcher/main/partials/_three.scss @@ -0,0 +1,3 @@ +.three { + color: darkgreen; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/watcher/main/partials/_two.scss b/mybulma/node_modules/node-sass/test/fixtures/watcher/main/partials/_two.scss new file mode 100644 index 0000000..7a1ace9 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/watcher/main/partials/_two.scss @@ -0,0 +1,5 @@ +@import "partials/three"; + +.two { + color: darkblue; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/watcher/main/three.scss b/mybulma/node_modules/node-sass/test/fixtures/watcher/main/three.scss new file mode 100644 index 0000000..24cab72 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/watcher/main/three.scss @@ -0,0 +1,3 @@ +.three { + color: green; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/watcher/main/two.scss b/mybulma/node_modules/node-sass/test/fixtures/watcher/main/two.scss new file mode 100644 index 0000000..68036db --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/watcher/main/two.scss @@ -0,0 +1,3 @@ +.two { + color: blue; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/watcher/sibling/partials/_three.scss b/mybulma/node_modules/node-sass/test/fixtures/watcher/sibling/partials/_three.scss new file mode 100644 index 0000000..1846e9a --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/watcher/sibling/partials/_three.scss @@ -0,0 +1,3 @@ +.three { + color: darkgreen; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/watcher/sibling/three.scss b/mybulma/node_modules/node-sass/test/fixtures/watcher/sibling/three.scss new file mode 100644 index 0000000..4e9d1a7 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/watcher/sibling/three.scss @@ -0,0 +1,5 @@ +@import "partials/three"; + +.three { + color: green; +} diff --git a/mybulma/node_modules/node-sass/test/fixtures/watching-dir-01/index.scss b/mybulma/node_modules/node-sass/test/fixtures/watching-dir-01/index.scss new file mode 100644 index 0000000..b189432 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/watching-dir-01/index.scss @@ -0,0 +1 @@ +a {color:green;} diff --git a/mybulma/node_modules/node-sass/test/fixtures/watching-dir-02/foo.scss b/mybulma/node_modules/node-sass/test/fixtures/watching-dir-02/foo.scss new file mode 100644 index 0000000..620aa31 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/watching-dir-02/foo.scss @@ -0,0 +1 @@ +body{background:white} diff --git a/mybulma/node_modules/node-sass/test/fixtures/watching-dir-02/index.scss b/mybulma/node_modules/node-sass/test/fixtures/watching-dir-02/index.scss new file mode 100644 index 0000000..dece2c6 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/watching-dir-02/index.scss @@ -0,0 +1 @@ +@import './foo'; diff --git a/mybulma/node_modules/node-sass/test/fixtures/watching/bar.sass b/mybulma/node_modules/node-sass/test/fixtures/watching/bar.sass new file mode 100644 index 0000000..de9fd93 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/watching/bar.sass @@ -0,0 +1,2 @@ +body + background: white diff --git a/mybulma/node_modules/node-sass/test/fixtures/watching/index.sass b/mybulma/node_modules/node-sass/test/fixtures/watching/index.sass new file mode 100644 index 0000000..9f146c8 --- /dev/null +++ 
b/mybulma/node_modules/node-sass/test/fixtures/watching/index.sass @@ -0,0 +1 @@ +@import "bar.sass"; diff --git a/mybulma/node_modules/node-sass/test/fixtures/watching/index.scss b/mybulma/node_modules/node-sass/test/fixtures/watching/index.scss new file mode 100644 index 0000000..7905647 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/watching/index.scss @@ -0,0 +1 @@ +@import './white'; diff --git a/mybulma/node_modules/node-sass/test/fixtures/watching/white.scss b/mybulma/node_modules/node-sass/test/fixtures/watching/white.scss new file mode 100644 index 0000000..620aa31 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/fixtures/watching/white.scss @@ -0,0 +1 @@ +body{background:white} diff --git a/mybulma/node_modules/node-sass/test/lowlevel.js b/mybulma/node_modules/node-sass/test/lowlevel.js new file mode 100644 index 0000000..a7aedf1 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/lowlevel.js @@ -0,0 +1,243 @@ +process.env.NODESASS_COV ? require('../lib-cov') : require('../lib'); + +var assert = require('assert').strict, + sass = require('../lib/extensions'), + binding = require(sass.getBinaryPath()); + +describe('lowlevel', function() { + it('fail with options not an object', function(done) { + var options = 2; + assert.throws(function() { + binding.renderSync(options); + }, /"result" element is not an object/); + done(); + }); + + it('data context with options.data not provided', function(done) { + var options = { + /* data: */ + sourceComments: false, + file: null, + outFile: null, + includePaths: '', + precision: 5, + sourceMap: null, + style: 0, + indentWidth: 2, + indentType: 0, + linefeed: '\n', + result: { stats: {} } }; + + binding.renderSync(options); + assert(/Data context created without a source string/.test(options.result.error), + 'Should fail with error message "Data context created without a source string"'); + done(); + }); + + it('data context with both options.data and options.file not provided', function(done) { + var options = { + /* data: */ + sourceComments: false, + /* file: null, */ + outFile: null, + includePaths: '', + precision: 5, + sourceMap: null, + style: 0, + indentWidth: 2, + indentType: 0, + linefeed: '\n', + result: { stats: {} } }; + + binding.renderSync(options); + assert(/Data context created without a source string/.test(options.result.error), + 'Should fail with error message "Data context created without a source string"'); + done(); + }); + + it('file context with both options.data and options.file not provided', function(done) { + var options = { + /* data: */ + sourceComments: false, + /* file: null, */ + outFile: null, + includePaths: '', + precision: 5, + sourceMap: null, + style: 0, + indentWidth: 2, + indentType: 0, + linefeed: '\n', + result: { stats: {} } }; + + binding.renderFileSync(options); + assert(/File context created without an input path/.test(options.result.error), + 'Should fail with error message "File context created without an input path"'); + done(); + }); + + it('file context with options.file not provided, options.data given', function(done) { + var options = { + data: 'div { width: 10px; } ', + sourceComments: false, + /* file: null, */ + outFile: null, + includePaths: '', + precision: 5, + sourceMap: null, + style: 0, + indentWidth: 2, + indentType: 0, + linefeed: '\n', + result: { stats: {} } }; + + binding.renderFileSync(options); + assert(/File context created without an input path/.test(options.result.error), + 'Should fail with error message "File context created without an 
input path"'); + done(); + }); + + it('fail with options.result not provided', function(done) { + var options = { data: 'div { width: 10px; } ', + sourceComments: false, + file: null, + outFile: null, + includePaths: '', + precision: 5, + sourceMap: null, + style: 0, + indentWidth: 2, + indentType: 0, + linefeed: '\n' }; + + assert.throws(function() { + binding.renderSync(options); + }, /"result" element is not an object/); + done(); + }); + + + it('fail with options.result not an object', function(done) { + var options = { data: 'div { width: 10px; } ', + sourceComments: false, + file: null, + outFile: null, + includePaths: '', + precision: 5, + sourceMap: null, + style: 0, + indentWidth: 2, + indentType: 0, + linefeed: '\n', + result: 2 }; + + assert.throws(function() { + binding.renderSync(options); + }, /"result" element is not an object/); + done(); + }); + + + it('fail with options.result.stats not provided', function(done) { + + var options = { data: 'div { width: 10px; } ', + sourceComments: false, + file: null, + outFile: null, + includePaths: '', + precision: 5, + sourceMap: null, + style: 0, + indentWidth: 2, + indentType: 0, + linefeed: '\n', + result: {} }; + + assert.throws(function() { + binding.renderSync(options); + }, /"result.stats" element is not an object/); + done(); + }); + + it('fail with options.result.stats not an object', function(done) { + + var options = { data: 'div { width: 10px; } ', + sourceComments: false, + file: null, + outFile: null, + includePaths: '', + precision: 5, + sourceMap: null, + style: 0, + indentWidth: 2, + indentType: 0, + linefeed: '\n', + result: { stats: 2 } }; + + assert.throws(function() { + binding.renderSync(options); + }, /"result.stats" element is not an object/); + done(); + }); + + it('options.indentWidth not provided', function(done) { + var options = { data: 'div { width: 10px; }', + sourceComments: false, + file: null, + outFile: null, + includePaths: '', + precision: 5, + sourceMap: null, + style: 0, + /* indentWidth */ + indentType: 0, + linefeed: '\n', + result: { stats: {} } }; + + binding.renderSync(options); + assert(options.result.css); + done(); + }); + + it('empty data string', function(done) { + var options = { data: '', + sourceComments: false, + file: null, + outFile: null, + includePaths: '', + precision: 5, + sourceMap: null, + style: 0, + /* indentWidth */ + indentType: 0, + linefeed: '\n', + result: { stats: {} } }; + + binding.renderSync(options); + assert(/empty source string/.test(options.result.error), + 'Should fail with error message "Data context created with empty source string"'); + done(); + }); + + + it('empty file string', function(done) { + var options = { + sourceComments: false, + file: '', + outFile: null, + includePaths: '', + precision: 5, + sourceMap: null, + style: 0, + /* indentWidth */ + indentType: 0, + linefeed: '\n', + result: { stats: {} } }; + + binding.renderFileSync(options); + assert(/empty input path/.test(options.result.error), + 'Should fail with error message "File context created with empty input path"'); + done(); + }); + +}); // lowlevel diff --git a/mybulma/node_modules/node-sass/test/runtime.js b/mybulma/node_modules/node-sass/test/runtime.js new file mode 100644 index 0000000..de0ee5a --- /dev/null +++ b/mybulma/node_modules/node-sass/test/runtime.js @@ -0,0 +1,185 @@ +var assert = require('assert').strict, + sass = process.env.NODESASS_COV + ? 
require('../lib-cov/extensions') + : require('../lib/extensions'); + +describe('runtime parameters', function() { + var pkg = require('../package'), + // Let's use JSON to fake a deep copy + savedArgv = JSON.stringify(process.argv), + savedEnv = JSON.stringify(process.env); + + afterEach(function() { + process.argv = JSON.parse(savedArgv); + process.env = JSON.parse(savedEnv); + delete pkg.nodeSassConfig; + }); + + describe('configuration precedence should be respected', function() { + + describe('SASS_BINARY_NAME', function() { + beforeEach(function() { + process.argv.push('--sass-binary-name', 'aaa'); + process.env.SASS_BINARY_NAME = 'bbb'; + process.env.npm_config_sass_binary_name = 'ccc'; + pkg.nodeSassConfig = { binaryName: 'ddd' }; + }); + + it('command line argument', function() { + assert.strictEqual(sass.getBinaryName(), 'aaa_binding.node'); + }); + + it('environment variable', function() { + process.argv = []; + assert.strictEqual(sass.getBinaryName(), 'bbb_binding.node'); + }); + + it('npm config variable', function() { + process.argv = []; + process.env.SASS_BINARY_NAME = null; + assert.strictEqual(sass.getBinaryName(), 'ccc_binding.node'); + }); + + it('package.json', function() { + process.argv = []; + process.env.SASS_BINARY_NAME = null; + process.env.npm_config_sass_binary_name = null; + assert.strictEqual(sass.getBinaryName(), 'ddd_binding.node'); + }); + }); + + describe('SASS_BINARY_SITE', function() { + beforeEach(function() { + process.argv.push('--sass-binary-site', 'http://aaa.example.com:9999'); + process.env.SASS_BINARY_SITE = 'http://bbb.example.com:8888'; + process.env.npm_config_sass_binary_site = 'http://ccc.example.com:7777'; + pkg.nodeSassConfig = { binarySite: 'http://ddd.example.com:6666' }; + }); + + it('command line argument', function() { + var URL = 'http://aaa.example.com:9999'; + assert.strictEqual(sass.getBinaryUrl().substr(0, URL.length), URL); + }); + + it('environment variable', function() { + process.argv = []; + var URL = 'http://bbb.example.com:8888'; + assert.strictEqual(sass.getBinaryUrl().substr(0, URL.length), URL); + }); + + it('npm config variable', function() { + process.argv = []; + process.env.SASS_BINARY_SITE = null; + var URL = 'http://ccc.example.com:7777'; + assert.strictEqual(sass.getBinaryUrl().substr(0, URL.length), URL); + }); + + it('package.json', function() { + process.argv = []; + process.env.SASS_BINARY_SITE = null; + process.env.npm_config_sass_binary_site = null; + var URL = 'http://ddd.example.com:6666'; + assert.strictEqual(sass.getBinaryUrl().substr(0, URL.length), URL); + }); + }); + + describe('SASS_BINARY_DIR', function() { + beforeEach(function() { + process.argv.push('--sass-binary-dir', 'aaa'); + process.env.SASS_BINARY_DIR = 'bbb'; + process.env.npm_config_sass_binary_dir = 'ccc'; + pkg.nodeSassConfig = { binaryDir: 'ddd' }; + }); + + it('command line argument', function() { + assert.strictEqual(sass.getBinaryDir(), 'aaa'); + }); + + it('environment variable', function() { + process.argv = []; + assert.strictEqual(sass.getBinaryDir(), 'bbb'); + }); + + it('npm config variable', function() { + process.argv = []; + process.env.SASS_BINARY_DIR = null; + assert.strictEqual(sass.getBinaryDir(), 'ccc'); + }); + + it('package.json', function() { + process.argv = []; + process.env.SASS_BINARY_DIR = null; + process.env.npm_config_sass_binary_dir = null; + assert.strictEqual(sass.getBinaryDir(), 'ddd'); + }); + }); + + describe('SASS_BINARY_PATH', function() { + beforeEach(function() { + 
process.argv.push('--sass-binary-path', 'aaa_binding.node'); + process.env.SASS_BINARY_PATH = 'bbb_binding.node'; + process.env.npm_config_sass_binary_path = 'ccc_binding.node'; + pkg.nodeSassConfig = { binaryPath: 'ddd_binding.node' }; + }); + + it('command line argument', function() { + assert.strictEqual(sass.getBinaryPath(), 'aaa_binding.node'); + }); + + it('environment variable', function() { + process.argv = []; + assert.strictEqual(sass.getBinaryPath(), 'bbb_binding.node'); + }); + + it('npm config variable', function() { + process.argv = []; + process.env.SASS_BINARY_PATH = null; + assert.strictEqual(sass.getBinaryPath(), 'ccc_binding.node'); + }); + + it('package.json', function() { + process.argv = []; + process.env.SASS_BINARY_PATH = null; + process.env.npm_config_sass_binary_path = null; + assert.strictEqual(sass.getBinaryPath(), 'ddd_binding.node'); + }); + }); + + }); + + describe.skip('Sass Binary Cache', function() { + var npmCacheDir; + before(function() { + npmCacheDir = process.env.npm_config_cache; + }); + + beforeEach(function() { + delete process.env.npm_config_sass_binary_cache; + }); + + it('npm config variable', function() { + var overridenCachePath = '/foo/bar/'; + process.env.npm_config_sass_binary_cache = overridenCachePath; + assert.strictEqual(sass.getCachePath(), overridenCachePath); + }); + + it('With no value, falls back to NPM cache', function() { + assert.strictEqual(sass.getCachePath(), npmCacheDir); + }); + }); +}); + +// describe('library detection', function() { +// it('should throw error when libsass binary is missing.', function() { +// var sass = require(extensionsPath), +// originalBin = sass.getBinaryPath(), +// renamedBin = [originalBin, '_moved'].join(''); + +// assert.throws(function() { +// fs.renameSync(originalBin, renamedBin); +// sass.getBinaryPath(true); +// }, /The `libsass` binding was not found/); + +// fs.renameSync(renamedBin, originalBin); +// }); +// }); diff --git a/mybulma/node_modules/node-sass/test/scripts/util/proxy.js b/mybulma/node_modules/node-sass/test/scripts/util/proxy.js new file mode 100644 index 0000000..c01ddc1 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/scripts/util/proxy.js @@ -0,0 +1,76 @@ +var assert = require('assert').strict, + proxy = require('../../../scripts/util/proxy'); + +describe('proxy', function() { + var oldEnvironment; + + beforeEach(function() { + oldEnvironment = process.env; + }); + + afterEach(function() { + process.env = oldEnvironment; + }); + + describe('without an npm proxy config', function() { + delete process.env.npm_config_https_proxy; + delete process.env.npm_config_proxy; + delete process.env.npm_config_http_proxy; + + it('should return an empty string', function() { + assert.strictEqual('', proxy()); + }); + + it('should ignore system proxy environment variables', function() { + process.env.HTTPS_PROXY = 'http://https_proxy.com'; + process.env.PROXY = 'http://proxy.com'; + process.env.HTTP_PROXY = 'http://http_proxy.com'; + + assert.strictEqual('', proxy()); + }); + }); + + describe('with an npm proxy config', function() { + beforeEach(function() { + process.env.npm_config_https_proxy = 'http://https_proxy.com'; + process.env.npm_config_proxy = 'http://proxy.com'; + process.env.npm_config_http_proxy = 'http://http_proxy.com'; + }); + + describe('https_proxy', function() { + it('should have the highest precedence', function() { + assert.strictEqual(process.env.npm_config_https_proxy, proxy()); + }); + }); + + describe('proxy', function() { + it('should have the higher 
precedence than https_proxy', function() { + assert.strictEqual(process.env.npm_config_https_proxy, proxy()); + delete process.env.npm_config_https_proxy; + + assert.strictEqual(process.env.npm_config_proxy, proxy()); + }); + + it('should have the lower precedence than http_proxy', function() { + delete process.env.npm_config_https_proxy; + + assert.strictEqual(process.env.npm_config_proxy, proxy()); + delete process.env.npm_config_proxy; + + assert.strictEqual(process.env.npm_config_http_proxy, proxy()); + }); + }); + + describe('http_proxy', function() { + it('should have the lowest precedence', function() { + assert.strictEqual(process.env.npm_config_https_proxy, proxy()); + delete process.env.npm_config_https_proxy; + + assert.strictEqual(process.env.npm_config_proxy, proxy()); + delete process.env.npm_config_proxy; + + assert.strictEqual(process.env.npm_config_http_proxy, proxy()); + }); + }); + }); +}); diff --git a/mybulma/node_modules/node-sass/test/types.js b/mybulma/node_modules/node-sass/test/types.js new file mode 100644 index 0000000..4593b53 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/types.js @@ -0,0 +1,708 @@ +/*eslint new-cap: ["error", { "capIsNew": false }]*/ +'use strict'; + +var assert = require('assert').strict; +var sass = require('../'); +var semver = require('semver'); + +describe('sass.types', function() { + describe('Boolean', function() { + it('exists', function() { + assert(sass.types.Boolean); + }); + + it('names the constructor correctly', function() { + assert.strictEqual(sass.types.Boolean.name, 'SassBoolean'); + }); + + it('supports call constructor', function() { + if(semver.gt(process.version, 'v14.5.0')) { + // v8 issue tracked in https://github.com/sass/node-sass/issues/2972 + this.skip(); + } + var t = sass.types.Boolean(true); + assert.strictEqual(t.toString(), '[object SassBoolean]'); + + var f = sass.types.Boolean(false); + assert.strictEqual(f.toString(), '[object SassBoolean]'); + }); + + it('has true and false singletons', function() { + assert.strictEqual(sass.types.Boolean(true), sass.types.Boolean(true)); + assert.strictEqual(sass.types.Boolean(false), sass.types.Boolean(false)); + assert.notStrictEqual(sass.types.Boolean(false), sass.types.Boolean(true)); + assert.strictEqual(sass.types.Boolean(true), sass.types.Boolean.TRUE); + assert.strictEqual(sass.types.Boolean(false), sass.types.Boolean.FALSE); + }); + + it('supports DOES NOT support new constructor', function() { + assert.throws(function() { + new sass.types.Boolean(true); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Cannot instantiate SassBoolean'); + return true; + }); + }); + + it('throws with incorrect constructor args', function() { + assert.throws(function() { + sass.types.Boolean(); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Expected one boolean argument'); + return true; + }); + + [1, 2, '', 'hi', {}, []].forEach(function(arg) { + assert.throws(function() { + sass.types.Boolean(arg); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Expected one boolean argument'); + return true; + }); + }); + + assert.throws(function() { + sass.types.Boolean(true, false); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Expected one boolean argument'); + return true; + }); + }); + + it('implements getValue', function() { + var t = sass.types.Boolean(true); + 
assert.strictEqual(typeof t.getValue, 'function'); + assert.strictEqual(t.getValue(), true); + + var f = sass.types.Boolean(false); + assert.strictEqual(typeof f.getValue, 'function'); + assert.strictEqual(f.getValue(), false); + }); + }); + + describe('Color', function() { + it('exists', function() { + assert(sass.types.Color); + }); + + it('names the constructor correctly', function() { + assert.strictEqual(sass.types.Color.name, 'SassColor'); + }); + + it('supports call constructor', function() { + if(semver.gt(process.version, 'v14.5.0')) { + // v8 issue tracked in https://github.com/sass/node-sass/issues/2972 + this.skip(); + } + + var t = sass.types.Color(); + assert.strictEqual(t.toString(), '[object SassColor]'); + }); + + it('supports new constructor', function() { + if(semver.gt(process.version, 'v14.5.0')) { + // v8 issue tracked in https://github.com/sass/node-sass/issues/2972 + this.skip(); + } + + var t = new sass.types.Color(1); + assert.strictEqual(t.toString(), '[object SassColor]'); + }); + + it('supports variadic constructor args', function() { + var a = new sass.types.Color(); + + assert.strictEqual(a.getR(), 0); + assert.strictEqual(a.getG(), 0); + assert.strictEqual(a.getB(), 0); + assert.strictEqual(a.getA(), 1); + + var b = new sass.types.Color(1); + + assert.strictEqual(b.getR(), 0); + assert.strictEqual(b.getG(), 0); + assert.strictEqual(b.getB(), 1); + assert.strictEqual(b.getA(), 0); // why ? + + assert.throws(function() { + new sass.types.Color(1, 2); + }, function(error) { + // assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Constructor should be invoked with either 0, 1, 3 or 4 arguments.'); + return true; + }); + + var c = new sass.types.Color(1, 2, 3); + + assert.strictEqual(c.getR(), 1); + assert.strictEqual(c.getG(), 2); + assert.strictEqual(c.getB(), 3); + assert.strictEqual(c.getA(), 1); + + var d = new sass.types.Color(1, 2, 3, 4); + + assert.strictEqual(d.getR(), 1); + assert.strictEqual(d.getG(), 2); + assert.strictEqual(d.getB(), 3); + assert.strictEqual(d.getA(), 4); + + assert.throws(function() { + new sass.types.Color(1, 2, 3, 4, 5); + }, function(error) { + // assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Constructor should be invoked with either 0, 1, 3 or 4 arguments.'); + return true; + }); + }); + + it('supports get{R,G,B,A} and set{R,G,B,A}', function() { + var c = new sass.types.Color(); + + assert.strictEqual(c.getR(), 0); + assert.strictEqual(c.getG(), 0); + assert.strictEqual(c.getB(), 0); + assert.strictEqual(c.getA(), 1); + + assert.strictEqual(c.setR(1), undefined); + + assert.strictEqual(c.getR(), 1); + assert.strictEqual(c.getG(), 0); + assert.strictEqual(c.getB(), 0); + assert.strictEqual(c.getA(), 1); + + assert.strictEqual(c.setG(1), undefined); + + assert.strictEqual(c.getR(), 1); + assert.strictEqual(c.getG(), 1); + assert.strictEqual(c.getB(), 0); + assert.strictEqual(c.getA(), 1); + + assert.strictEqual(c.setB(1), undefined); + + assert.strictEqual(c.getR(), 1); + assert.strictEqual(c.getG(), 1); + assert.strictEqual(c.getB(), 1); + assert.strictEqual(c.getA(), 1); + + assert.strictEqual(c.setA(0), undefined); + + assert.strictEqual(c.getR(), 1); + assert.strictEqual(c.getG(), 1); + assert.strictEqual(c.getB(), 1); + assert.strictEqual(c.getA(), 0); + }); + + it('throws with incorrect set{R,G,B,A} arguments', function() { + var c = new sass.types.Color(); + + function assertJustOneArgument(cb) { + assert.throws(function() { + cb(); + }, function(error) { + 
assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Expected just one argument'); + + return true; + }); + } + + function assertNumberArgument(arg, cb) { + assert.throws(function() { + cb(); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Supplied value should be a number'); + + return true; + }, 'argument was: ' + arg); + } + + assertJustOneArgument(function() { c.setR(); }); + assertJustOneArgument(function() { c.setG(); }); + assertJustOneArgument(function() { c.setB(); }); + assertJustOneArgument(function() { c.setA(); }); + + assertJustOneArgument(function() { c.setR(1, 2); }); + assertJustOneArgument(function() { c.setG(1, 2); }); + assertJustOneArgument(function() { c.setB(1, 2); }); + assertJustOneArgument(function() { c.setA(1, 2); }); + + [true, false, '0', '1', '', 'omg', {}, []].forEach(function(arg) { + assertNumberArgument(arg, function() { c.setR(arg); }); + assertNumberArgument(arg, function() { c.setG(arg); }); + assertNumberArgument(arg, function() { c.setB(arg); }); + assertNumberArgument(arg, function() { c.setA(arg); }); + }); + }); + }); + + describe('Error', function() { + it('exists', function() { + assert(sass.types.Error); + }); + + it('has a correctly named constructor', function() { + assert.strictEqual(sass.types.Error.name, 'SassError'); + }); + + it('supports call constructor', function() { + if(semver.gt(process.version, 'v14.5.0')) { + // v8 issue tracked in https://github.com/sass/node-sass/issues/2972 + this.skip(); + } + + var e = sass.types.Error('Such Error'); + assert.ok(e instanceof sass.types.Error); + assert.strictEqual(e.toString(), '[object SassError]'); + + // TODO: I'm not sure this object works well, it likely needs to be fleshed out more... + }); + + it('supports new constructor', function() { + if(semver.gt(process.version, 'v14.5.0')) { + // v8 issue tracked in https://github.com/sass/node-sass/issues/2972 + this.skip(); + } + + var e = new sass.types.Error('Such Error'); + assert.ok(e instanceof sass.types.Error); + assert.strictEqual(e.toString(), '[object SassError]'); + // TODO: I'm not sure this object works well, it likely needs to be fleshed out more... 
+ }); + }); + + describe('List', function() { + it('exists', function() { + assert(sass.types.List); + }); + + it('has a correctly named constructor', function() { + assert.strictEqual(sass.types.List.name, 'SassList'); + }); + + it('support call constructor', function() { + if(semver.gt(process.version, 'v14.5.0')) { + // v8 issue tracked in https://github.com/sass/node-sass/issues/2972 + this.skip(); + } + + var list = sass.types.List(); + assert.ok(list instanceof sass.types.List); + assert.strictEqual(list.toString(), '[object SassList]'); + }); + + it('support new constructor', function() { + if(semver.gt(process.version, 'v14.5.0')) { + // v8 issue tracked in https://github.com/sass/node-sass/issues/2972 + this.skip(); + } + + var list = new sass.types.List(); + assert.ok(list instanceof sass.types.List); + assert.strictEqual(list.toString(), '[object SassList]'); + }); + + it('support variadic constructor', function() { + var a = new sass.types.List(); + assert.strictEqual(a.getLength(), 0); + assert.strictEqual(a.getSeparator(), true); + var b = new sass.types.List(1); + assert.strictEqual(b.getSeparator(), true); + assert.strictEqual(b.getLength(), 1); + var c = new sass.types.List(1, true); + assert.strictEqual(b.getLength(), 1); + assert.strictEqual(c.getSeparator(), true); + var d = new sass.types.List(1, false); + assert.strictEqual(b.getLength(), 1); + assert.strictEqual(d.getSeparator(), false); + var e = new sass.types.List(1, true, 2); + assert.strictEqual(b.getLength(), 1); + assert.strictEqual(e.getSeparator(), true); + + assert.throws(function() { + new sass.types.List('not-a-number'); + }, function(error) { + // TODO: TypeError + assert.strictEqual(error.message, 'First argument should be an integer.'); + return true; + }); + + assert.throws(function() { + new sass.types.List(1, 'not-a-boolean'); + }, function(error) { + // TODO: TypeError + assert.strictEqual(error.message, 'Second argument should be a boolean.'); + return true; + }); + }); + + it('supports {get,set}Separator', function() { + var a = new sass.types.List(); + assert.strictEqual(a.getSeparator(), true); + assert.strictEqual(a.setSeparator(true), undefined); + assert.strictEqual(a.getSeparator(), true); + assert.strictEqual(a.setSeparator(false), undefined); + assert.strictEqual(a.getSeparator(), false); + + assert.throws(function() { + a.setSeparator(); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Expected just one argument'); + return true; + }); + + [1, '', [], {}].forEach(function(arg) { + assert.throws(function() { + a.setSeparator(arg); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Supplied value should be a boolean'); + return true; + }, 'setSeparator(' + arg + ')'); + }); + }); + + it('supports setValue and getValue', function() { + var a = new sass.types.List(); + + assert.throws(function() { + a.getValue(); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Expected just one argument'); + + return true; + }); + + ['hi', [], {}].forEach(function(arg) { + assert.throws(function() { + a.getValue(arg); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Supplied index should be an integer'); + + return true; + }, 'getValue(' + arg + ')'); + }); + + assert.throws(function() { + a.getValue(0); + }, function(error) { + assert.ok(error instanceof RangeError); + assert.strictEqual(error.message, 
'Out of bound index'); + + return true; + }); + + assert.throws(function() { + a.getValue(-1); + }, function(error) { + assert.ok(error instanceof RangeError); + assert.strictEqual(error.message, 'Out of bound index'); + + return true; + }); + + assert.throws(function() { + a.setValue(); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Expected two arguments'); + return true; + }); + + assert.throws(function() { + a.setValue(1); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Expected two arguments'); + return true; + }); + + assert.throws(function() { + a.setValue(0, 'no-a-sass-value'); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Supplied value should be a SassValue object'); + return true; + }); + }); + + // TODO: more complex set/get value scenarios + }); + + describe('Map', function() { + it('exists', function() { + assert(sass.types.Map); + }); + + it('has a correctly named constructor', function() { + assert.strictEqual(sass.types.Map.name, 'SassMap'); + }); + + it('supports call constructor', function() { + if(semver.gt(process.version, 'v14.5.0')) { + // v8 issue tracked in https://github.com/sass/node-sass/issues/2972 + this.skip(); + } + + var x = sass.types.Map(); + assert.strictEqual(x.toString(), '[object SassMap]'); + }); + + it('supports new constructor', function() { + if(semver.gt(process.version, 'v14.5.0')) { + // v8 issue tracked in https://github.com/sass/node-sass/issues/2972 + this.skip(); + } + + var x = new sass.types.Map(); + assert.strictEqual(x.toString(), '[object SassMap]'); + }); + + it('supports an optional constructor argument', function() { + var x = new sass.types.Map(); + var y = new sass.types.Map(1); + var z = new sass.types.Map(2, 3); + + assert.throws(function() { + new sass.types.Map('OMG'); + }, function(error) { + assert.strictEqual(error.message, 'First argument should be an integer.'); + // TODO: TypeError + + return true; + }); + + assert.strictEqual(x.getLength(), 0); + assert.strictEqual(y.getLength(), 1); + assert.strictEqual(z.getLength(), 2); + }); + + it('supports length', function() { + var y = new sass.types.Map(1); + var z = new sass.types.Map(2); + + assert.strictEqual(y.getLength(), 1); + assert.strictEqual(z.getLength(), 2); + }); + + it('supports {get,set}Value {get,set}Key', function() { + var y = new sass.types.Map(1); + var omg = new sass.types.String('OMG'); + y.setValue(0, omg); + console.log(y.getValue(0)); + }); + }); + + describe('Null', function() { + it('exists', function() { + assert(sass.types.Null); + }); + + it('has a correctly named constructor', function() { + assert.strictEqual(sass.types.Null.name, 'SassNull'); + }); + + it('does not support new constructor', function() { + assert.throws(function() { + new sass.types.Null(); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Cannot instantiate SassNull'); + return true; + }); + }); + + it('supports call constructor (and is a singleton)', function() { + assert.strictEqual(sass.types.Null(), sass.types.Null()); + assert.strictEqual(sass.types.Null(), sass.types.Null.NULL); + }); + }); + + describe('Number', function() { + it('exists', function() { + assert(sass.types.Number); + }); + + it('has a correctly named constructor', function() { + assert.strictEqual(sass.types.Number.name, 'SassNumber'); + }); + + it('supports new constructor', function() { + 
if(semver.gt(process.version, 'v14.5.0')) { + // v8 issue tracked in https://github.com/sass/node-sass/issues/2972 + this.skip(); + } + + var number = new sass.types.Number(); + assert.strictEqual(number.toString(), '[object SassNumber]'); + }); + + it('supports call constructor', function() { + if(semver.gt(process.version, 'v14.5.0')) { + // v8 issue tracked in https://github.com/sass/node-sass/issues/2972 + this.skip(); + } + + var number = sass.types.Number(); + assert.strictEqual(number.toString(), '[object SassNumber]'); + }); + + it('supports multiple constructor arguments', function() { + var a = new sass.types.Number(); + var b = new sass.types.Number(1); + var c = new sass.types.Number(2, 'px'); + + assert.throws(function() { + new sass.types.Number('OMG'); + }, function(error) { + // TODO: TypeError + assert.strictEqual(error.message, 'First argument should be a number.'); + return true; + }); + + assert.throws(function() { + new sass.types.Number(1, 2); + }, function(error) { + // TODO: TypeError + assert.strictEqual(error.message, 'Second argument should be a string.'); + return true; + }); + + assert.strictEqual(a.getValue(), 0); + assert.strictEqual(a.getUnit(), ''); + assert.strictEqual(b.getValue(), 1); + assert.strictEqual(b.getUnit(), ''); + assert.strictEqual(c.getValue(), 2); + assert.strictEqual(c.getUnit(), 'px'); + }); + + it('supports get{Unit,Value}, set{Unit,Value}', function() { + var number = new sass.types.Number(1, 'px'); + assert.strictEqual(number.getValue(), 1); + assert.strictEqual(number.getUnit(), 'px'); + + number.setValue(2); + assert.strictEqual(number.getValue(), 2); + assert.strictEqual(number.getUnit(), 'px'); + + number.setUnit('em'); + assert.strictEqual(number.getValue(), 2); + assert.strictEqual(number.getUnit(), 'em'); + + assert.throws(function() { + number.setValue('OMG'); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Supplied value should be a number'); + return true; + }); + + assert.throws(function() { + number.setValue(); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Expected just one argument'); + return true; + }); + + assert.throws(function() { + number.setUnit(); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Expected just one argument'); + return true; + }); + + assert.throws(function() { + number.setUnit(1); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Supplied value should be a string'); + return true; + }); + }); + }); + + describe('String', function() { + it('exists', function() { + assert(sass.types.String); + }); + + it('has a properly named constructor', function() { + assert.strictEqual(sass.types.String.name, 'SassString'); + }); + + it('supports call constructor', function() { + if(semver.gt(process.version, 'v14.5.0')) { + // v8 issue tracked in https://github.com/sass/node-sass/issues/2972 + this.skip(); + } + + var x = sass.types.String('OMG'); + + assert.strictEqual(x.toString(), '[object SassString]'); + assert.strictEqual(x.getValue(), 'OMG'); + }); + + it('supports new constructor', function() { + if(semver.gt(process.version, 'v14.5.0')) { + // v8 issue tracked in https://github.com/sass/node-sass/issues/2972 + this.skip(); + } + + var x = new sass.types.String('OMG'); + + assert.strictEqual(x.toString(), '[object SassString]'); + assert.strictEqual(x.getValue(), 'OMG'); + }); + + it('supports 
multiple constructor arg combinations', function() { + new sass.types.String(); + new sass.types.String('OMG'); + new sass.types.String('OMG', 'NOPE'); + + [null, undefined, [], {}, function() { }].forEach(function(arg) { + assert.throws(function() { + new sass.types.String(arg); + }, function(error) { + // TODO: TypeError + assert.strictEqual(error.message, 'Argument should be a string.'); + return true; + }); + }); + }); + + it('supports {get,set}Value', function() { + var x = new sass.types.String(); + + assert.strictEqual(x.getValue(), ''); + assert.strictEqual(x.setValue('hi'), undefined); + assert.strictEqual(x.getValue(), 'hi'); + assert.strictEqual(x.setValue('bye'), undefined); + assert.strictEqual(x.getValue(), 'bye'); + + assert.throws(function() { + x.setValue(); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Expected just one argument'); + return true; + }); + + assert.throws(function() { + x.setValue('hi', 'hi'); + }, function(error) { + assert.ok(error instanceof TypeError); + assert.strictEqual(error.message, 'Expected just one argument'); + return true; + }); + }); + }); +}); diff --git a/mybulma/node_modules/node-sass/test/useragent.js b/mybulma/node_modules/node-sass/test/useragent.js new file mode 100644 index 0000000..5578fd6 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/useragent.js @@ -0,0 +1,15 @@ +var assert = require('assert').strict, + pkg = require('../package.json'), + ua = require('../scripts/util/useragent'); + +describe('util', function() { + describe('useragent', function() { + it('should look as we expect', function() { + var reNode = 'node/' + process.version; + var reSass = 'node-sass-installer/' + pkg.version; + var reUA = new RegExp('^' + reNode + ' ' + reSass + '$'); + + assert.ok(reUA.test(ua())); + }); + }); +}); diff --git a/mybulma/node_modules/node-sass/test/watcher.js b/mybulma/node_modules/node-sass/test/watcher.js new file mode 100644 index 0000000..8c80425 --- /dev/null +++ b/mybulma/node_modules/node-sass/test/watcher.js @@ -0,0 +1,503 @@ +var assert = require('assert').strict, + fs = require('fs-extra'), + path = require('path'), + temp = require('unique-temp-dir'), + watcher = require('../lib/watcher'); + +describe('watcher', function() { + var main, sibling; + var origin = path.join(__dirname, 'fixtures', 'watcher'); + + beforeEach(function() { + var fixture = temp(); + fs.ensureDirSync(fixture); + fs.copySync(origin, fixture); + main = fs.realpathSync(path.join(fixture, 'main')); + sibling = fs.realpathSync(path.join(fixture, 'sibling')); + }); + + describe('with directory', function() { + beforeEach(function() { + watcher.reset({ + directory: main, + includePath: [main] + }); + }); + + describe('when a file is changed', function() { + describe('and it is in the graph', function() { + describe('if it is a partial', function() { + it('should record its ancestors as changed', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files.changed, [ + path.join(main, 'one.scss'), + ]); + }); + + it('should record its descendants as added', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files.added, [ + path.join(main, 'partials', '_three.scss'), + ]); + }); + + it('should record nothing as removed', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.changed(file); + 
assert.deepStrictEqual(files.removed, []); + }); + }); + + describe('if it is not a partial', function() { + it('should record itself as changed', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files.changed, [ + file, + ]); + }); + + it('should record its descendants as added', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files.added, [ + path.join(main, 'partials', '_one.scss'), + path.join(main, 'partials', '_three.scss'), + ]); + }); + + it('should record nothing as removed', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files.removed, []); + }); + }); + }); + + describe('and is not in the graph', function() { + describe('if it is a partial', function() { + it('should not record anything', function() { + var file = path.join(sibling, 'partials', '_three.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files, { + added: [], + changed: [], + removed: [], + }); + }); + }); + + describe('if it is not a partial', function() { + it('should record itself as changed', function() { + var file = path.join(sibling, 'three.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files, { + added: [], + changed: [file], + removed: [], + }); + }); + }); + }); + }); + + describe('when a file is added', function() { + describe('and it is in the graph', function() { + describe('if it is a partial', function() { + it('should record nothing as added', function() { + var file = path.join(main, 'partials', '_three.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.added, []); + }); + + it('should record its descendants as added', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.added, [ + path.join(main, 'partials', '_three.scss') + ]); + }); + + it('should record nothing as changed', function() { + var file = path.join(main, 'partials', '_three.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.changed, []); + }); + + it('should record nothing as removed', function() { + var file = path.join(main, 'partials', '_three.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.removed, []); + }); + }); + + describe('if it is not a partial', function() { + it('should record nothing as added', function() { + var file = path.join(main, 'three.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.added, []); + }); + + it('should record its descendants as added', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.added, [ + path.join(main, 'partials', '_one.scss'), + path.join(main, 'partials', '_three.scss'), + ]); + }); + + it('should record nothing as changed', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.changed, []); + }); + + it('should record nothing as removed', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.removed, []); + }); + }); + }); + }); + + describe('when a file is removed', function() { + describe('and it is in the graph', function() { + describe('if it is a partial', function() { + it('should record nothing as added', function() { + var file = path.join(main, 'partials', 
'_one.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files.added, []); + }); + + it('should record its ancestors as changed', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files.changed, [ + path.join(main, 'one.scss'), + ]); + }); + + it('should record itself as removed', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files.removed, [file]); + }); + }); + + describe('if it is not a partial', function() { + it('should record nothing as added', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files.added, []); + }); + + it('should record nothing as changed', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files.changed, []); + }); + + it('should record itself as removed', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files.removed, [file]); + }); + }); + }); + + describe('and is not in the graph', function() { + describe('if it is a partial', function() { + it('should record nothing', function() { + var file = path.join(sibling, 'partials', '_three.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files, { + added: [], + changed: [], + removed: [], + }); + }); + }); + + describe('if it is not a partial', function() { + it('should record nothing', function() { + var file = path.join(sibling, 'three.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files, { + added: [], + changed: [], + removed: [], + }); + }); + }); + }); + }); + }); + + describe('with file', function() { + beforeEach(function() { + watcher.reset({ + src: path.join(main, 'one.scss'), + includePath: [main] + }); + }); + + describe('when a file is changed', function() { + describe('and it is in the graph', function() { + describe('if it is a partial', function() { + it('should record its descendants as added', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files.added, [ + path.join(main, 'partials', '_three.scss'), + ]); + }); + + it('should record its ancenstors as changed', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files.changed, [ + path.join(main, 'one.scss'), + ]); + }); + + it('should record nothing as removed', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files.removed, []); + }); + }); + + describe('if it is not a partial', function() { + it('should record its descendants as added', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files.added, [ + path.join(main, 'partials', '_one.scss'), + path.join(main, 'partials', '_three.scss'), + ]); + }); + + it('should record itself as changed', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files.changed, [file]); + }); + + it('should record nothing as removed', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files.removed, []); + }); + }); + }); + + describe('and it is not in the graph', 
function() { + describe('if it is a partial', function() { + it('should record nothing', function() { + var file = path.join(sibling, 'partials', '_three.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files, { + added: [], + changed: [], + removed: [], + }); + }); + }); + + describe('if it is not a partial', function() { + it('should record nothing as added', function() { + var file = path.join(sibling, 'three.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files.added, []); + }); + + it('should record itself as changed', function() { + var file = path.join(sibling, 'three.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files.changed, [file]); + }); + + it('should record nothing as removed', function() { + var file = path.join(sibling, 'three.scss'); + var files = watcher.changed(file); + assert.deepStrictEqual(files.removed, []); + }); + }); + }); + }); + + describe('when a file is added', function() { + describe('and it is in the graph', function() { + it('should record nothing as added', function() { + var file = path.join(main, 'partials', '_three.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.added, []); + }); + + it('should record its descendants as added', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.added, [ + path.join(main, 'partials', '_three.scss'), + ]); + }); + + it('should record nothing as changed', function() { + var file = path.join(main, 'partials', '_three.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.changed, []); + }); + + it('should record nothing as removed', function() { + var file = path.join(main, 'partials', '_three.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.removed, []); + }); + }); + + describe('and it is not in the graph', function() { + beforeEach(function() { + watcher.reset({ + src: path.join(main, 'two.scss'), + includePath: [main] + }); + }); + + describe('if it is a partial', function() { + it('should record nothing as added', function() { + var file = path.join(main, 'partials', '_three.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.added, [ + file, + ]); + }); + + it('should not record its descendants as added', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.added, [ + file, + ]); + }); + + it('should record nothing as changed', function() { + var file = path.join(main, 'partials', '_three.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.changed, []); + }); + + it('should record nothing as removed', function() { + var file = path.join(main, 'partials', '_three.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.removed, []); + }); + }); + + describe('if it is not a partial', function() { + it('should record itself as added', function() { + var file = path.join(main, 'three.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.added, [ + file, + ]); + }); + + it('should record nothing as changed', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.changed, []); + }); + + it('should record nothing as removed', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.added(file); + assert.deepStrictEqual(files.removed, []); + }); + }); + }); + }); + + 
describe('when a file is removed', function() { + describe('and it is in the graph', function() { + describe('if it is a partial', function() { + it('should record nothing as added', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files.added, []); + }); + + it('should record its ancestors as changed', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files.changed, [ + path.join(main, 'one.scss'), + ]); + }); + + it('should record itself as removed', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files.removed, [file]); + }); + }); + + describe('if it is not a partial', function() { + it('should record nothing as added', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files.added, []); + }); + + it('should record nothing as changed', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files.changed, []); + }); + + it('should record itself as removed', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files.removed, [file]); + }); + }); + }); + + describe('and is not in the graph', function() { + beforeEach(function() { + watcher.reset({ + src: path.join(main, 'two.scss'), + includePath: [main] + }); + }); + + describe('if it is a partial', function() { + it('should record nothing as added', function() { + var file = path.join(main, 'partials', '_one.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files, { + added: [], + changed: [], + removed: [], + }); + }); + }); + + describe('if it is not a partial', function() { + it('should record nothing', function() { + var file = path.join(main, 'one.scss'); + var files = watcher.removed(file); + assert.deepStrictEqual(files, { + added: [], + changed: [], + removed: [], + }); + }); + }); + }); + }); + }); +}); diff --git a/mybulma/node_modules/node-sass/vendor/darwin-x64-108/darwin-x64-108_binding.node b/mybulma/node_modules/node-sass/vendor/darwin-x64-108/darwin-x64-108_binding.node new file mode 100644 index 0000000..8f91aa6 Binary files /dev/null and b/mybulma/node_modules/node-sass/vendor/darwin-x64-108/darwin-x64-108_binding.node differ diff --git a/mybulma/node_modules/node-sass/vendor/darwin-x64-111/darwin-x64-111_binding.node b/mybulma/node_modules/node-sass/vendor/darwin-x64-111/darwin-x64-111_binding.node new file mode 100644 index 0000000..4844bd7 Binary files /dev/null and b/mybulma/node_modules/node-sass/vendor/darwin-x64-111/darwin-x64-111_binding.node differ diff --git a/mybulma/node_modules/node-sass/vendor/darwin-x64-93/binding.node b/mybulma/node_modules/node-sass/vendor/darwin-x64-93/binding.node new file mode 100644 index 0000000..fd3136e Binary files /dev/null and b/mybulma/node_modules/node-sass/vendor/darwin-x64-93/binding.node differ diff --git a/mybulma/node_modules/node-sass/vendor/linux-x64-111/binding.node b/mybulma/node_modules/node-sass/vendor/linux-x64-111/binding.node new file mode 100644 index 0000000..b1ed682 Binary files /dev/null and b/mybulma/node_modules/node-sass/vendor/linux-x64-111/binding.node differ diff --git a/mybulma/node_modules/node-sass/vendor/linux-x64-83/binding.node b/mybulma/node_modules/node-sass/vendor/linux-x64-83/binding.node new 
file mode 100644 index 0000000..8f61a55 Binary files /dev/null and b/mybulma/node_modules/node-sass/vendor/linux-x64-83/binding.node differ diff --git a/mybulma/node_modules/nopt/CHANGELOG.md b/mybulma/node_modules/nopt/CHANGELOG.md new file mode 100644 index 0000000..82a09fb --- /dev/null +++ b/mybulma/node_modules/nopt/CHANGELOG.md @@ -0,0 +1,58 @@ +### v4.0.1 (2016-12-14) + +#### WHOOPS + +* [`fb9b1ce`](https://github.com/npm/nopt/commit/fb9b1ce57b3c69b4f7819015be87719204f77ef6) + Merged so many patches at once that the code fencing + ([@adius](https://github.com/adius)) added got broken. Sorry, + ([@adius](https://github.com/adius))! + ([@othiym23](https://github.com/othiym23)) + +### v4.0.0 (2016-12-13) + +#### BREAKING CHANGES + +* [`651d447`](https://github.com/npm/nopt/commit/651d4473946096d341a480bbe56793de3fc706aa) + When parsing String-typed arguments, if the next value is `""`, don't simply + swallow it. ([@samjonester](https://github.com/samjonester)) + +#### PERFORMANCE TWEAKS + +* [`3370ce8`](https://github.com/npm/nopt/commit/3370ce87a7618ba228883861db84ddbcdff252a9) + Simplify initialization. ([@elidoran](https://github.com/elidoran)) +* [`356e58e`](https://github.com/npm/nopt/commit/356e58e3b3b431a4b1af7fd7bdee44c2c0526a09) + Store `Array.isArray(types[arg])` for reuse. + ([@elidoran](https://github.com/elidoran)) +* [`0d95e90`](https://github.com/npm/nopt/commit/0d95e90515844f266015b56d2c80b94e5d14a07e) + Interpret single-item type arrays as a single type. + ([@samjonester](https://github.com/samjonester)) +* [`07c69d3`](https://github.com/npm/nopt/commit/07c69d38b5186450941fbb505550becb78a0e925) + Simplify key-value extraction. ([@elidoran](https://github.com/elidoran)) +* [`39b6e5c`](https://github.com/npm/nopt/commit/39b6e5c65ac47f60cd43a1fbeece5cd4c834c254) + Only call `Date.parse(val)` once. ([@elidoran](https://github.com/elidoran)) +* [`934943d`](https://github.com/npm/nopt/commit/934943dffecb55123a2b15959fe2a359319a5dbd) + Use `osenv.home()` to find a user's home directory instead of assuming it's + always `$HOME`. ([@othiym23](https://github.com/othiym23)) + +#### TEST & CI IMPROVEMENTS + +* [`326ffff`](https://github.com/npm/nopt/commit/326ffff7f78a00bcd316adecf69075f8a8093619) + Fix `/tmp` test to work on Windows. + ([@elidoran](https://github.com/elidoran)) +* [`c89d31a`](https://github.com/npm/nopt/commit/c89d31a49d14f2238bc6672db08da697bbc57f1b) + Only run Windows tests on Windows, only run Unix tests on a Unix. + ([@elidoran](https://github.com/elidoran)) +* [`affd3d1`](https://github.com/npm/nopt/commit/affd3d1d0addffa93006397b2013b18447339366) + Refresh Travis to run the tests against the currently-supported batch of npm + versions. ([@helio](https://github.com/helio)-frota) +* [`55f9449`](https://github.com/npm/nopt/commit/55f94497d163ed4d16dd55fd6c4fb95cc440e66d) + `tap@8.0.1` ([@othiym23](https://github.com/othiym23)) + +#### DOC TWEAKS + +* [`5271229`](https://github.com/npm/nopt/commit/5271229ee7c810217dd51616c086f5d9ab224581) + Use JavaScript code block for syntax highlighting. + ([@adius](https://github.com/adius)) +* [`c0d156f`](https://github.com/npm/nopt/commit/c0d156f229f9994c5dfcec4a8886eceff7a07682) + The code sample in the README had `many2: [ oneThing ]`, and now it has + `many2: [ two, things ]`. 
([@silkentrance](https://github.com/silkentrance)) diff --git a/mybulma/node_modules/nopt/LICENSE b/mybulma/node_modules/nopt/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/mybulma/node_modules/nopt/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/mybulma/node_modules/nopt/README.md b/mybulma/node_modules/nopt/README.md new file mode 100644 index 0000000..a99531c --- /dev/null +++ b/mybulma/node_modules/nopt/README.md @@ -0,0 +1,213 @@ +If you want to write an option parser, and have it be good, there are +two ways to do it. The Right Way, and the Wrong Way. + +The Wrong Way is to sit down and write an option parser. We've all done +that. + +The Right Way is to write some complex configurable program with so many +options that you hit the limit of your frustration just trying to +manage them all, and defer it with duct-tape solutions until you see +exactly to the core of the problem, and finally snap and write an +awesome option parser. + +If you want to write an option parser, don't write an option parser. +Write a package manager, or a source control system, or a service +restarter, or an operating system. You probably won't end up with a +good one of those, but if you don't give up, and you are relentless and +diligent enough in your procrastination, you may just end up with a very +nice option parser. + +## USAGE + +```javascript +// my-program.js +var nopt = require("nopt") + , Stream = require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + , "many1" : [String, Array] + , "many2" : [path, Array] + } + , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag"] + } + // everything is optional. + // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) +console.log(parsed) +``` + +This would give you support for any of the following: + +```console +$ node my-program.js --foo "blerp" --no-flag +{ "foo" : "blerp", "flag" : false } + +$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag +{ bar: 7, foo: "Mr. Hand", flag: true } + +$ node my-program.js --foo "blerp" -f -----p +{ foo: "blerp", flag: true, pick: true } + +$ node my-program.js -fp --foofoo +{ foo: "Mr. Foo", flag: true, pick: true } + +$ node my-program.js --foofoo -- -fp # -- stops the flag parsing. +{ foo: "Mr. Foo", argv: { remain: ["-fp"] } } + +$ node my-program.js --blatzk -fp # unknown opts are ok. 
+{ blatzk: true, flag: true, pick: true } + +$ node my-program.js --blatzk=1000 -fp # but you need to use = if they have a value +{ blatzk: 1000, flag: true, pick: true } + +$ node my-program.js --no-blatzk -fp # unless they start with "no-" +{ blatzk: false, flag: true, pick: true } + +$ node my-program.js --baz b/a/z # known paths are resolved. +{ baz: "/Users/isaacs/b/a/z" } + +# if Array is one of the types, then it can take many +# values, and will always be an array. The other types provided +# specify what types are allowed in the list. + +$ node my-program.js --many1 5 --many1 null --many1 foo +{ many1: ["5", "null", "foo"] } + +$ node my-program.js --many2 foo --many2 bar +{ many2: ["/path/to/foo", "path/to/bar"] } +``` + +Read the tests at the bottom of `lib/nopt.js` for more examples of +what this puppy can do. + +## Types + +The following types are supported, and defined on `nopt.typeDefs` + +* String: A normal string. No parsing is done. +* path: A file system path. Gets resolved against cwd if not absolute. +* url: A url. If it doesn't parse, it isn't accepted. +* Number: Must be numeric. +* Date: Must parse as a date. If it does, and `Date` is one of the options, + then it will return a Date object, not a string. +* Boolean: Must be either `true` or `false`. If an option is a boolean, + then it does not need a value, and its presence will imply `true` as + the value. To negate boolean flags, do `--no-whatever` or `--whatever + false` +* NaN: Means that the option is strictly not allowed. Any value will + fail. +* Stream: An object matching the "Stream" class in node. Valuable + for use when validating programmatically. (npm uses this to let you + supply any WriteStream on the `outfd` and `logfd` config options.) +* Array: If `Array` is specified as one of the types, then the value + will be parsed as a list of options. This means that multiple values + can be specified, and that the value will always be an array. + +If a type is an array of values not on this list, then those are +considered valid values. For instance, in the example above, the +`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`, +and any other value will be rejected. + +When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be +interpreted as their JavaScript equivalents. + +You can also mix types and values, or multiple types, in a list. For +instance `{ blah: [Number, null] }` would allow a value to be set to +either a Number or null. When types are ordered, this implies a +preference, and the first type that can be used to properly interpret +the value will be used. + +To define a new type, add it to `nopt.typeDefs`. Each item in that +hash is an object with a `type` member and a `validate` method. The +`type` member is an object that matches what goes in the type list. The +`validate` method is a function that gets called with `validate(data, +key, val)`. Validate methods should assign `data[key]` to the valid +value of `val` if it can be handled properly, or return boolean +`false` if it cannot. + +You can also call `nopt.clean(data, types, typeDefs)` to clean up a +config object and remove its invalid properties. + +## Error Handling + +By default, nopt outputs a warning to standard error when invalid values for +known options are found. You can change this behavior by assigning a method +to `nopt.invalidHandler`. This method will be called with +the offending `nopt.invalidHandler(key, val, types)`. 
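A minimal sketch of what assigning such a handler might look like, assuming the nopt API shown above; the `port` option and the error message are illustrative, not part of nopt itself:

```javascript
// Hypothetical example: handle invalid values ourselves instead of
// letting nopt print its default warning to stderr.
var nopt = require("nopt")

var knownOpts = { "port" : Number }   // illustrative option, not from nopt

nopt.invalidHandler = function (key, val, type) {
  // key  - the option name, e.g. "port"
  // val  - the rejected value
  // type - the declared type(s) for that option
  console.error("Ignoring bad value for --" + key + ": " + JSON.stringify(val))
}

// slice of 0 so the literal array below is parsed as-is
var parsed = nopt(knownOpts, {}, ["--port", "not-a-number"], 0)
console.log(parsed)   // the invalid "port" value is dropped from the result
```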
+ +If no `nopt.invalidHandler` is assigned, then it will console.error +its whining. If it is assigned to boolean `false` then the warning is +suppressed. + +## Abbreviations + +Yes, they are supported. If you define options like this: + +```javascript +{ "foolhardyelephants" : Boolean +, "pileofmonkeys" : Boolean } +``` + +Then this will work: + +```bash +node program.js --foolhar --pil +node program.js --no-f --pileofmon +# etc. +``` + +## Shorthands + +Shorthands are a hash of shorter option names to a snippet of args that +they expand to. + +If multiple one-character shorthands are all combined, and the +combination does not unambiguously match any other option or shorthand, +then they will be broken up into their constituent parts. For example: + +```json +{ "s" : ["--loglevel", "silent"] +, "g" : "--global" +, "f" : "--force" +, "p" : "--parseable" +, "l" : "--long" +} +``` + +```bash +npm ls -sgflp +# just like doing this: +npm ls --loglevel silent --global --force --long --parseable +``` + +## The Rest of the args + +The config object returned by nopt is given a special member called +`argv`, which is an object with the following fields: + +* `remain`: The remaining args after all the parsing has occurred. +* `original`: The args as they originally appeared. +* `cooked`: The args after flags and shorthands are expanded. + +## Slicing + +Node programs are called with more or less the exact argv as it appears +in C land, after the v8 and node-specific options have been plucked off. +As such, `argv[0]` is always `node` and `argv[1]` is always the +JavaScript program being run. + +That's usually not very useful to you. So they're sliced off by +default. If you want them, then you can pass in `0` as the last +argument, or any other number that you'd like to slice off the start of +the list. diff --git a/mybulma/node_modules/nopt/bin/nopt.js b/mybulma/node_modules/nopt/bin/nopt.js new file mode 100644 index 0000000..3232d4c --- /dev/null +++ b/mybulma/node_modules/nopt/bin/nopt.js @@ -0,0 +1,54 @@ +#!/usr/bin/env node +var nopt = require("../lib/nopt") + , path = require("path") + , types = { num: Number + , bool: Boolean + , help: Boolean + , list: Array + , "num-list": [Number, Array] + , "str-list": [String, Array] + , "bool-list": [Boolean, Array] + , str: String + , clear: Boolean + , config: Boolean + , length: Number + , file: path + } + , shorthands = { s: [ "--str", "astring" ] + , b: [ "--bool" ] + , nb: [ "--no-bool" ] + , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ] + , "?": ["--help"] + , h: ["--help"] + , H: ["--help"] + , n: [ "--num", "125" ] + , c: ["--config"] + , l: ["--length"] + , f: ["--file"] + } + , parsed = nopt( types + , shorthands + , process.argv + , 2 ) + +console.log("parsed", parsed) + +if (parsed.help) { + console.log("") + console.log("nopt cli tester") + console.log("") + console.log("types") + console.log(Object.keys(types).map(function M (t) { + var type = types[t] + if (Array.isArray(type)) { + return [t, type.map(function (type) { return type.name })] + } + return [t, type && type.name] + }).reduce(function (s, i) { + s[i[0]] = i[1] + return s + }, {})) + console.log("") + console.log("shorthands") + console.log(shorthands) +} diff --git a/mybulma/node_modules/nopt/lib/nopt.js b/mybulma/node_modules/nopt/lib/nopt.js new file mode 100644 index 0000000..ecfa5da --- /dev/null +++ b/mybulma/node_modules/nopt/lib/nopt.js @@ -0,0 +1,441 @@ +// info about each config option. 
+ +var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG + ? function () { console.error.apply(console, arguments) } + : function () {} + +var url = require("url") + , path = require("path") + , Stream = require("stream").Stream + , abbrev = require("abbrev") + , os = require("os") + +module.exports = exports = nopt +exports.clean = clean + +exports.typeDefs = + { String : { type: String, validate: validateString } + , Boolean : { type: Boolean, validate: validateBoolean } + , url : { type: url, validate: validateUrl } + , Number : { type: Number, validate: validateNumber } + , path : { type: path, validate: validatePath } + , Stream : { type: Stream, validate: validateStream } + , Date : { type: Date, validate: validateDate } + } + +function nopt (types, shorthands, args, slice) { + args = args || process.argv + types = types || {} + shorthands = shorthands || {} + if (typeof slice !== "number") slice = 2 + + debug(types, shorthands, args, slice) + + args = args.slice(slice) + var data = {} + , key + , argv = { + remain: [], + cooked: args, + original: args.slice(0) + } + + parse(args, data, argv.remain, types, shorthands) + // now data is full + clean(data, types, exports.typeDefs) + data.argv = argv + Object.defineProperty(data.argv, 'toString', { value: function () { + return this.original.map(JSON.stringify).join(" ") + }, enumerable: false }) + return data +} + +function clean (data, types, typeDefs) { + typeDefs = typeDefs || exports.typeDefs + var remove = {} + , typeDefault = [false, true, null, String, Array] + + Object.keys(data).forEach(function (k) { + if (k === "argv") return + var val = data[k] + , isArray = Array.isArray(val) + , type = types[k] + if (!isArray) val = [val] + if (!type) type = typeDefault + if (type === Array) type = typeDefault.concat(Array) + if (!Array.isArray(type)) type = [type] + + debug("val=%j", val) + debug("types=", type) + val = val.map(function (val) { + // if it's an unknown value, then parse false/true/null/numbers/dates + if (typeof val === "string") { + debug("string %j", val) + val = val.trim() + if ((val === "null" && ~type.indexOf(null)) + || (val === "true" && + (~type.indexOf(true) || ~type.indexOf(Boolean))) + || (val === "false" && + (~type.indexOf(false) || ~type.indexOf(Boolean)))) { + val = JSON.parse(val) + debug("jsonable %j", val) + } else if (~type.indexOf(Number) && !isNaN(val)) { + debug("convert to number", val) + val = +val + } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) { + debug("convert to date", val) + val = new Date(val) + } + } + + if (!types.hasOwnProperty(k)) { + return val + } + + // allow `--no-blah` to set 'blah' to null if null is allowed + if (val === false && ~type.indexOf(null) && + !(~type.indexOf(false) || ~type.indexOf(Boolean))) { + val = null + } + + var d = {} + d[k] = val + debug("prevalidated val", d, val, types[k]) + if (!validate(d, k, val, types[k], typeDefs)) { + if (exports.invalidHandler) { + exports.invalidHandler(k, val, types[k], data) + } else if (exports.invalidHandler !== false) { + debug("invalid: "+k+"="+val, types[k]) + } + return remove + } + debug("validated val", d, val, types[k]) + return d[k] + }).filter(function (val) { return val !== remove }) + + // if we allow Array specifically, then an empty array is how we + // express 'no value here', not null. Allow it. 
+ if (!val.length && type.indexOf(Array) === -1) { + debug('VAL HAS NO LENGTH, DELETE IT', val, k, type.indexOf(Array)) + delete data[k] + } + else if (isArray) { + debug(isArray, data[k], val) + data[k] = val + } else data[k] = val[0] + + debug("k=%s val=%j", k, val, data[k]) + }) +} + +function validateString (data, k, val) { + data[k] = String(val) +} + +function validatePath (data, k, val) { + if (val === true) return false + if (val === null) return true + + val = String(val) + + var isWin = process.platform === 'win32' + , homePattern = isWin ? /^~(\/|\\)/ : /^~\// + , home = os.homedir() + + if (home && val.match(homePattern)) { + data[k] = path.resolve(home, val.substr(2)) + } else { + data[k] = path.resolve(val) + } + return true +} + +function validateNumber (data, k, val) { + debug("validate Number %j %j %j", k, val, isNaN(val)) + if (isNaN(val)) return false + data[k] = +val +} + +function validateDate (data, k, val) { + var s = Date.parse(val) + debug("validate Date %j %j %j", k, val, s) + if (isNaN(s)) return false + data[k] = new Date(val) +} + +function validateBoolean (data, k, val) { + if (val instanceof Boolean) val = val.valueOf() + else if (typeof val === "string") { + if (!isNaN(val)) val = !!(+val) + else if (val === "null" || val === "false") val = false + else val = true + } else val = !!val + data[k] = val +} + +function validateUrl (data, k, val) { + val = url.parse(String(val)) + if (!val.host) return false + data[k] = val.href +} + +function validateStream (data, k, val) { + if (!(val instanceof Stream)) return false + data[k] = val +} + +function validate (data, k, val, type, typeDefs) { + // arrays are lists of types. + if (Array.isArray(type)) { + for (var i = 0, l = type.length; i < l; i ++) { + if (type[i] === Array) continue + if (validate(data, k, val, type[i], typeDefs)) return true + } + delete data[k] + return false + } + + // an array of anything? + if (type === Array) return true + + // NaN is poisonous. Means that something is not allowed. + if (type !== type) { + debug("Poison NaN", k, val, type) + delete data[k] + return false + } + + // explicit list of values + if (val === type) { + debug("Explicitly allowed %j", val) + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + return true + } + + // now go through the list of typeDefs, validate against each one. + var ok = false + , types = Object.keys(typeDefs) + for (var i = 0, l = types.length; i < l; i ++) { + debug("test type %j %j %j", k, val, types[i]) + var t = typeDefs[types[i]] + if (t && + ((type && type.name && t.type && t.type.name) ? (type.name === t.type.name) : (type === t.type))) { + var d = {} + ok = false !== t.validate(d, k, val) + val = d[k] + if (ok) { + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + break + } + } + } + debug("OK? %j (%j %j %j)", ok, k, val, types[i]) + + if (!ok) delete data[k] + return ok +} + +function parse (args, data, remain, types, shorthands) { + debug("parse", args, data, remain) + + var key = null + , abbrevs = abbrev(Object.keys(types)) + , shortAbbr = abbrev(Object.keys(shorthands)) + + for (var i = 0; i < args.length; i ++) { + var arg = args[i] + debug("arg", arg) + + if (arg.match(/^-{2,}$/)) { + // done with keys. + // the rest are args. 
+ remain.push.apply(remain, args.slice(i + 1)) + args[i] = "--" + break + } + var hadEq = false + if (arg.charAt(0) === "-" && arg.length > 1) { + var at = arg.indexOf('=') + if (at > -1) { + hadEq = true + var v = arg.substr(at + 1) + arg = arg.substr(0, at) + args.splice(i, 1, arg, v) + } + + // see if it's a shorthand + // if so, splice and back up to re-parse it. + var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs) + debug("arg=%j shRes=%j", arg, shRes) + if (shRes) { + debug(arg, shRes) + args.splice.apply(args, [i, 1].concat(shRes)) + if (arg !== shRes[0]) { + i -- + continue + } + } + arg = arg.replace(/^-+/, "") + var no = null + while (arg.toLowerCase().indexOf("no-") === 0) { + no = !no + arg = arg.substr(3) + } + + if (abbrevs[arg]) arg = abbrevs[arg] + + var argType = types[arg] + var isTypeArray = Array.isArray(argType) + if (isTypeArray && argType.length === 1) { + isTypeArray = false + argType = argType[0] + } + + var isArray = argType === Array || + isTypeArray && argType.indexOf(Array) !== -1 + + // allow unknown things to be arrays if specified multiple times. + if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) { + if (!Array.isArray(data[arg])) + data[arg] = [data[arg]] + isArray = true + } + + var val + , la = args[i + 1] + + var isBool = typeof no === 'boolean' || + argType === Boolean || + isTypeArray && argType.indexOf(Boolean) !== -1 || + (typeof argType === 'undefined' && !hadEq) || + (la === "false" && + (argType === null || + isTypeArray && ~argType.indexOf(null))) + + if (isBool) { + // just set and move along + val = !no + // however, also support --bool true or --bool false + if (la === "true" || la === "false") { + val = JSON.parse(la) + la = null + if (no) val = !val + i ++ + } + + // also support "foo":[Boolean, "bar"] and "--foo bar" + if (isTypeArray && la) { + if (~argType.indexOf(la)) { + // an explicit type + val = la + i ++ + } else if ( la === "null" && ~argType.indexOf(null) ) { + // null allowed + val = null + i ++ + } else if ( !la.match(/^-{2,}[^-]/) && + !isNaN(la) && + ~argType.indexOf(Number) ) { + // number + val = +la + i ++ + } else if ( !la.match(/^-[^-]/) && ~argType.indexOf(String) ) { + // string + val = la + i ++ + } + } + + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + continue + } + + if (argType === String) { + if (la === undefined) { + la = "" + } else if (la.match(/^-{1,2}[^-]+/)) { + la = "" + i -- + } + } + + if (la && la.match(/^-{2,}$/)) { + la = undefined + i -- + } + + val = la === undefined ? true : la + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + i ++ + continue + } + remain.push(arg) + } +} + +function resolveShort (arg, shorthands, shortAbbr, abbrevs) { + // handle single-char shorthands glommed together, like + // npm ls -glp, but only if there is one dash, and only if + // all of the chars are single-char shorthands, and it's + // not a match to some other abbrev. 
+ arg = arg.replace(/^-+/, '') + + // if it's an exact known option, then don't go any further + if (abbrevs[arg] === arg) + return null + + // if it's an exact known shortopt, same deal + if (shorthands[arg]) { + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) + + return shorthands[arg] + } + + // first check to see if this arg is a set of single-char shorthands + var singles = shorthands.___singles + if (!singles) { + singles = Object.keys(shorthands).filter(function (s) { + return s.length === 1 + }).reduce(function (l,r) { + l[r] = true + return l + }, {}) + shorthands.___singles = singles + debug('shorthand singles', singles) + } + + var chrs = arg.split("").filter(function (c) { + return singles[c] + }) + + if (chrs.join("") === arg) return chrs.map(function (c) { + return shorthands[c] + }).reduce(function (l, r) { + return l.concat(r) + }, []) + + + // if it's an arg abbrev, and not a literal shorthand, then prefer the arg + if (abbrevs[arg] && !shorthands[arg]) + return null + + // if it's an abbr for a shorthand, then use that + if (shortAbbr[arg]) + arg = shortAbbr[arg] + + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) + + return shorthands[arg] +} diff --git a/mybulma/node_modules/nopt/package.json b/mybulma/node_modules/nopt/package.json new file mode 100644 index 0000000..12ed02d --- /dev/null +++ b/mybulma/node_modules/nopt/package.json @@ -0,0 +1,34 @@ +{ + "name": "nopt", + "version": "5.0.0", + "description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.", + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "main": "lib/nopt.js", + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "test": "tap test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/nopt.git" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "license": "ISC", + "dependencies": { + "abbrev": "1" + }, + "devDependencies": { + "tap": "^14.10.6" + }, + "files": [ + "bin", + "lib" + ], + "engines": { + "node": ">=6" + } +} diff --git a/mybulma/node_modules/normalize-package-data/lib/extract_description.js b/mybulma/node_modules/normalize-package-data/lib/extract_description.js new file mode 100644 index 0000000..bf98968 --- /dev/null +++ b/mybulma/node_modules/normalize-package-data/lib/extract_description.js @@ -0,0 +1,22 @@ +module.exports = extractDescription + +// Extracts description from contents of a readme file in markdown format +function extractDescription (d) { + if (!d) { + return + } + if (d === 'ERROR: No README data found!') { + return + } + // the first block of text before the first heading + // that isn't the first line heading + d = d.trim().split('\n') + for (var s = 0; d[s] && d[s].trim().match(/^(#|$)/); s++) { + ; + } + var l = d.length + for (var e = s + 1; e < l && d[e].trim(); e++) { + ; + } + return d.slice(s, e).join(' ').trim() +} diff --git a/mybulma/node_modules/normalize-package-data/lib/fixer.js b/mybulma/node_modules/normalize-package-data/lib/fixer.js new file mode 100644 index 0000000..97c26b2 --- /dev/null +++ b/mybulma/node_modules/normalize-package-data/lib/fixer.js @@ -0,0 +1,474 @@ +var isValidSemver = require('semver/functions/valid') +var cleanSemver = require('semver/functions/clean') +var validateLicense = 
require('validate-npm-package-license') +var hostedGitInfo = require('hosted-git-info') +var isBuiltinModule = require('is-core-module') +var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies'] +var extractDescription = require('./extract_description') +var url = require('url') +var typos = require('./typos.json') + +module.exports = { + // default warning function + warn: function () {}, + + fixRepositoryField: function (data) { + if (data.repositories) { + this.warn('repositories') + data.repository = data.repositories[0] + } + if (!data.repository) { + return this.warn('missingRepository') + } + if (typeof data.repository === 'string') { + data.repository = { + type: 'git', + url: data.repository, + } + } + var r = data.repository.url || '' + if (r) { + var hosted = hostedGitInfo.fromUrl(r) + if (hosted) { + r = data.repository.url + = hosted.getDefaultRepresentation() === 'shortcut' ? hosted.https() : hosted.toString() + } + } + + if (r.match(/github.com\/[^/]+\/[^/]+\.git\.git$/)) { + this.warn('brokenGitUrl', r) + } + }, + + fixTypos: function (data) { + Object.keys(typos.topLevel).forEach(function (d) { + if (Object.prototype.hasOwnProperty.call(data, d)) { + this.warn('typo', d, typos.topLevel[d]) + } + }, this) + }, + + fixScriptsField: function (data) { + if (!data.scripts) { + return + } + if (typeof data.scripts !== 'object') { + this.warn('nonObjectScripts') + delete data.scripts + return + } + Object.keys(data.scripts).forEach(function (k) { + if (typeof data.scripts[k] !== 'string') { + this.warn('nonStringScript') + delete data.scripts[k] + } else if (typos.script[k] && !data.scripts[typos.script[k]]) { + this.warn('typo', k, typos.script[k], 'scripts') + } + }, this) + }, + + fixFilesField: function (data) { + var files = data.files + if (files && !Array.isArray(files)) { + this.warn('nonArrayFiles') + delete data.files + } else if (data.files) { + data.files = data.files.filter(function (file) { + if (!file || typeof file !== 'string') { + this.warn('invalidFilename', file) + return false + } else { + return true + } + }, this) + } + }, + + fixBinField: function (data) { + if (!data.bin) { + return + } + if (typeof data.bin === 'string') { + var b = {} + var match + if (match = data.name.match(/^@[^/]+[/](.*)$/)) { + b[match[1]] = data.bin + } else { + b[data.name] = data.bin + } + data.bin = b + } + }, + + fixManField: function (data) { + if (!data.man) { + return + } + if (typeof data.man === 'string') { + data.man = [data.man] + } + }, + fixBundleDependenciesField: function (data) { + var bdd = 'bundledDependencies' + var bd = 'bundleDependencies' + if (data[bdd] && !data[bd]) { + data[bd] = data[bdd] + delete data[bdd] + } + if (data[bd] && !Array.isArray(data[bd])) { + this.warn('nonArrayBundleDependencies') + delete data[bd] + } else if (data[bd]) { + data[bd] = data[bd].filter(function (bd) { + if (!bd || typeof bd !== 'string') { + this.warn('nonStringBundleDependency', bd) + return false + } else { + if (!data.dependencies) { + data.dependencies = {} + } + if (Object.prototype.hasOwnProperty.call(data.dependencies, bd)) { + this.warn('nonDependencyBundleDependency', bd) + data.dependencies[bd] = '*' + } + return true + } + }, this) + } + }, + + fixDependencies: function (data, strict) { + objectifyDeps(data, this.warn) + addOptionalDepsToDeps(data, this.warn) + this.fixBundleDependenciesField(data) + + ;['dependencies', 'devDependencies'].forEach(function (deps) { + if (!(deps in data)) { + return + } + if (!data[deps] || typeof data[deps] !== 
'object') { + this.warn('nonObjectDependencies', deps) + delete data[deps] + return + } + Object.keys(data[deps]).forEach(function (d) { + var r = data[deps][d] + if (typeof r !== 'string') { + this.warn('nonStringDependency', d, JSON.stringify(r)) + delete data[deps][d] + } + var hosted = hostedGitInfo.fromUrl(data[deps][d]) + if (hosted) { + data[deps][d] = hosted.toString() + } + }, this) + }, this) + }, + + fixModulesField: function (data) { + if (data.modules) { + this.warn('deprecatedModules') + delete data.modules + } + }, + + fixKeywordsField: function (data) { + if (typeof data.keywords === 'string') { + data.keywords = data.keywords.split(/,\s+/) + } + if (data.keywords && !Array.isArray(data.keywords)) { + delete data.keywords + this.warn('nonArrayKeywords') + } else if (data.keywords) { + data.keywords = data.keywords.filter(function (kw) { + if (typeof kw !== 'string' || !kw) { + this.warn('nonStringKeyword') + return false + } else { + return true + } + }, this) + } + }, + + fixVersionField: function (data, strict) { + // allow "loose" semver 1.0 versions in non-strict mode + // enforce strict semver 2.0 compliance in strict mode + var loose = !strict + if (!data.version) { + data.version = '' + return true + } + if (!isValidSemver(data.version, loose)) { + throw new Error('Invalid version: "' + data.version + '"') + } + data.version = cleanSemver(data.version, loose) + return true + }, + + fixPeople: function (data) { + modifyPeople(data, unParsePerson) + modifyPeople(data, parsePerson) + }, + + fixNameField: function (data, options) { + if (typeof options === 'boolean') { + options = {strict: options} + } else if (typeof options === 'undefined') { + options = {} + } + var strict = options.strict + if (!data.name && !strict) { + data.name = '' + return + } + if (typeof data.name !== 'string') { + throw new Error('name field must be a string.') + } + if (!strict) { + data.name = data.name.trim() + } + ensureValidName(data.name, strict, options.allowLegacyCase) + if (isBuiltinModule(data.name)) { + this.warn('conflictingName', data.name) + } + }, + + fixDescriptionField: function (data) { + if (data.description && typeof data.description !== 'string') { + this.warn('nonStringDescription') + delete data.description + } + if (data.readme && !data.description) { + data.description = extractDescription(data.readme) + } + if (data.description === undefined) { + delete data.description + } + if (!data.description) { + this.warn('missingDescription') + } + }, + + fixReadmeField: function (data) { + if (!data.readme) { + this.warn('missingReadme') + data.readme = 'ERROR: No README data found!' 
+ } + }, + + fixBugsField: function (data) { + if (!data.bugs && data.repository && data.repository.url) { + var hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted && hosted.bugs()) { + data.bugs = {url: hosted.bugs()} + } + } else if (data.bugs) { + var emailRe = /^.+@.*\..+$/ + if (typeof data.bugs === 'string') { + if (emailRe.test(data.bugs)) { + data.bugs = {email: data.bugs} + /* eslint-disable-next-line node/no-deprecated-api */ + } else if (url.parse(data.bugs).protocol) { + data.bugs = {url: data.bugs} + } else { + this.warn('nonEmailUrlBugsString') + } + } else { + bugsTypos(data.bugs, this.warn) + var oldBugs = data.bugs + data.bugs = {} + if (oldBugs.url) { + /* eslint-disable-next-line node/no-deprecated-api */ + if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) { + data.bugs.url = oldBugs.url + } else { + this.warn('nonUrlBugsUrlField') + } + } + if (oldBugs.email) { + if (typeof (oldBugs.email) === 'string' && emailRe.test(oldBugs.email)) { + data.bugs.email = oldBugs.email + } else { + this.warn('nonEmailBugsEmailField') + } + } + } + if (!data.bugs.email && !data.bugs.url) { + delete data.bugs + this.warn('emptyNormalizedBugs') + } + } + }, + + fixHomepageField: function (data) { + if (!data.homepage && data.repository && data.repository.url) { + var hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted && hosted.docs()) { + data.homepage = hosted.docs() + } + } + if (!data.homepage) { + return + } + + if (typeof data.homepage !== 'string') { + this.warn('nonUrlHomepage') + return delete data.homepage + } + /* eslint-disable-next-line node/no-deprecated-api */ + if (!url.parse(data.homepage).protocol) { + data.homepage = 'http://' + data.homepage + } + }, + + fixLicenseField: function (data) { + const license = data.license || data.licence + if (!license) { + return this.warn('missingLicense') + } + if ( + typeof (license) !== 'string' || + license.length < 1 || + license.trim() === '' + ) { + return this.warn('invalidLicense') + } + if (!validateLicense(license).validForNewPackages) { + return this.warn('invalidLicense') + } + }, +} + +function isValidScopedPackageName (spec) { + if (spec.charAt(0) !== '@') { + return false + } + + var rest = spec.slice(1).split('/') + if (rest.length !== 2) { + return false + } + + return rest[0] && rest[1] && + rest[0] === encodeURIComponent(rest[0]) && + rest[1] === encodeURIComponent(rest[1]) +} + +function isCorrectlyEncodedName (spec) { + return !spec.match(/[/@\s+%:]/) && + spec === encodeURIComponent(spec) +} + +function ensureValidName (name, strict, allowLegacyCase) { + if (name.charAt(0) === '.' || + !(isValidScopedPackageName(name) || isCorrectlyEncodedName(name)) || + (strict && (!allowLegacyCase) && name !== name.toLowerCase()) || + name.toLowerCase() === 'node_modules' || + name.toLowerCase() === 'favicon.ico') { + throw new Error('Invalid name: ' + JSON.stringify(name)) + } +} + +function modifyPeople (data, fn) { + if (data.author) { + data.author = fn(data.author) + }['maintainers', 'contributors'].forEach(function (set) { + if (!Array.isArray(data[set])) { + return + } + data[set] = data[set].map(fn) + }) + return data +} + +function unParsePerson (person) { + if (typeof person === 'string') { + return person + } + var name = person.name || '' + var u = person.url || person.web + var url = u ? (' (' + u + ')') : '' + var e = person.email || person.mail + var email = e ? 
(' <' + e + '>') : '' + return name + email + url +} + +function parsePerson (person) { + if (typeof person !== 'string') { + return person + } + var name = person.match(/^([^(<]+)/) + var url = person.match(/\(([^)]+)\)/) + var email = person.match(/<([^>]+)>/) + var obj = {} + if (name && name[0].trim()) { + obj.name = name[0].trim() + } + if (email) { + obj.email = email[1] + } + if (url) { + obj.url = url[1] + } + return obj +} + +function addOptionalDepsToDeps (data, warn) { + var o = data.optionalDependencies + if (!o) { + return + } + var d = data.dependencies || {} + Object.keys(o).forEach(function (k) { + d[k] = o[k] + }) + data.dependencies = d +} + +function depObjectify (deps, type, warn) { + if (!deps) { + return {} + } + if (typeof deps === 'string') { + deps = deps.trim().split(/[\n\r\s\t ,]+/) + } + if (!Array.isArray(deps)) { + return deps + } + warn('deprecatedArrayDependencies', type) + var o = {} + deps.filter(function (d) { + return typeof d === 'string' + }).forEach(function (d) { + d = d.trim().split(/(:?[@\s><=])/) + var dn = d.shift() + var dv = d.join('') + dv = dv.trim() + dv = dv.replace(/^@/, '') + o[dn] = dv + }) + return o +} + +function objectifyDeps (data, warn) { + depTypes.forEach(function (type) { + if (!data[type]) { + return + } + data[type] = depObjectify(data[type], type, warn) + }) +} + +function bugsTypos (bugs, warn) { + if (!bugs) { + return + } + Object.keys(bugs).forEach(function (k) { + if (typos.bugs[k]) { + warn('typo', k, typos.bugs[k], 'bugs') + bugs[typos.bugs[k]] = bugs[k] + delete bugs[k] + } + }) +} diff --git a/mybulma/node_modules/normalize-package-data/lib/make_warning.js b/mybulma/node_modules/normalize-package-data/lib/make_warning.js new file mode 100644 index 0000000..3be9c86 --- /dev/null +++ b/mybulma/node_modules/normalize-package-data/lib/make_warning.js @@ -0,0 +1,22 @@ +var util = require('util') +var messages = require('./warning_messages.json') + +module.exports = function () { + var args = Array.prototype.slice.call(arguments, 0) + var warningName = args.shift() + if (warningName === 'typo') { + return makeTypoWarning.apply(null, args) + } else { + var msgTemplate = messages[warningName] ? 
messages[warningName] : warningName + ": '%s'" + args.unshift(msgTemplate) + return util.format.apply(null, args) + } +} + +function makeTypoWarning (providedName, probableName, field) { + if (field) { + providedName = field + "['" + providedName + "']" + probableName = field + "['" + probableName + "']" + } + return util.format(messages.typo, providedName, probableName) +} diff --git a/mybulma/node_modules/normalize-package-data/lib/normalize.js b/mybulma/node_modules/normalize-package-data/lib/normalize.js new file mode 100644 index 0000000..bf71d2c --- /dev/null +++ b/mybulma/node_modules/normalize-package-data/lib/normalize.js @@ -0,0 +1,48 @@ +module.exports = normalize + +var fixer = require('./fixer') +normalize.fixer = fixer + +var makeWarning = require('./make_warning') + +var fieldsToFix = ['name', 'version', 'description', 'repository', 'modules', 'scripts', + 'files', 'bin', 'man', 'bugs', 'keywords', 'readme', 'homepage', 'license'] +var otherThingsToFix = ['dependencies', 'people', 'typos'] + +var thingsToFix = fieldsToFix.map(function (fieldName) { + return ucFirst(fieldName) + 'Field' +}) +// two ways to do this in CoffeeScript on only one line, sub-70 chars: +// thingsToFix = fieldsToFix.map (name) -> ucFirst(name) + "Field" +// thingsToFix = (ucFirst(name) + "Field" for name in fieldsToFix) +thingsToFix = thingsToFix.concat(otherThingsToFix) + +function normalize (data, warn, strict) { + if (warn === true) { + warn = null + strict = true + } + if (!strict) { + strict = false + } + if (!warn || data.private) { + warn = function (msg) { /* noop */ } + } + + if (data.scripts && + data.scripts.install === 'node-gyp rebuild' && + !data.scripts.preinstall) { + data.gypfile = true + } + fixer.warn = function () { + warn(makeWarning.apply(null, arguments)) + } + thingsToFix.forEach(function (thingName) { + fixer['fix' + ucFirst(thingName)](data, strict) + }) + data._id = data.name + '@' + data.version +} + +function ucFirst (string) { + return string.charAt(0).toUpperCase() + string.slice(1) +} diff --git a/mybulma/node_modules/normalize-package-data/lib/safe_format.js b/mybulma/node_modules/normalize-package-data/lib/safe_format.js new file mode 100644 index 0000000..5fc888e --- /dev/null +++ b/mybulma/node_modules/normalize-package-data/lib/safe_format.js @@ -0,0 +1,11 @@ +var util = require('util') + +module.exports = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.forEach(function (arg) { + if (!arg) { + throw new TypeError('Bad arguments.') + } + }) + return util.format.apply(null, arguments) +} diff --git a/mybulma/node_modules/normalize-package-data/lib/typos.json b/mybulma/node_modules/normalize-package-data/lib/typos.json new file mode 100644 index 0000000..7f9dd28 --- /dev/null +++ b/mybulma/node_modules/normalize-package-data/lib/typos.json @@ -0,0 +1,25 @@ +{ + "topLevel": { + "dependancies": "dependencies" + ,"dependecies": "dependencies" + ,"depdenencies": "dependencies" + ,"devEependencies": "devDependencies" + ,"depends": "dependencies" + ,"dev-dependencies": "devDependencies" + ,"devDependences": "devDependencies" + ,"devDepenencies": "devDependencies" + ,"devdependencies": "devDependencies" + ,"repostitory": "repository" + ,"repo": "repository" + ,"prefereGlobal": "preferGlobal" + ,"hompage": "homepage" + ,"hampage": "homepage" + ,"autohr": "author" + ,"autor": "author" + ,"contributers": "contributors" + ,"publicationConfig": "publishConfig" + ,"script": "scripts" + }, + "bugs": { "web": "url", "name": "url" }, + "script": { "server": 
"start", "tests": "test" } +} diff --git a/mybulma/node_modules/normalize-package-data/lib/warning_messages.json b/mybulma/node_modules/normalize-package-data/lib/warning_messages.json new file mode 100644 index 0000000..4890f50 --- /dev/null +++ b/mybulma/node_modules/normalize-package-data/lib/warning_messages.json @@ -0,0 +1,30 @@ +{ + "repositories": "'repositories' (plural) Not supported. Please pick one as the 'repository' field" + ,"missingRepository": "No repository field." + ,"brokenGitUrl": "Probably broken git url: %s" + ,"nonObjectScripts": "scripts must be an object" + ,"nonStringScript": "script values must be string commands" + ,"nonArrayFiles": "Invalid 'files' member" + ,"invalidFilename": "Invalid filename in 'files' list: %s" + ,"nonArrayBundleDependencies": "Invalid 'bundleDependencies' list. Must be array of package names" + ,"nonStringBundleDependency": "Invalid bundleDependencies member: %s" + ,"nonDependencyBundleDependency": "Non-dependency in bundleDependencies: %s" + ,"nonObjectDependencies": "%s field must be an object" + ,"nonStringDependency": "Invalid dependency: %s %s" + ,"deprecatedArrayDependencies": "specifying %s as array is deprecated" + ,"deprecatedModules": "modules field is deprecated" + ,"nonArrayKeywords": "keywords should be an array of strings" + ,"nonStringKeyword": "keywords should be an array of strings" + ,"conflictingName": "%s is also the name of a node core module." + ,"nonStringDescription": "'description' field should be a string" + ,"missingDescription": "No description" + ,"missingReadme": "No README data" + ,"missingLicense": "No license field." + ,"nonEmailUrlBugsString": "Bug string field must be url, email, or {email,url}" + ,"nonUrlBugsUrlField": "bugs.url field must be a string url. Deleted." + ,"nonEmailBugsEmailField": "bugs.email field must be a string email. Deleted." + ,"emptyNormalizedBugs": "Normalized value of bugs field is an empty object. Deleted." + ,"nonUrlHomepage": "homepage field must be a string url. Deleted." + ,"invalidLicense": "license should be a valid SPDX license expression" + ,"typo": "%s should probably be %s." +} diff --git a/mybulma/node_modules/npmlog/lib/log.js b/mybulma/node_modules/npmlog/lib/log.js new file mode 100644 index 0000000..be650c6 --- /dev/null +++ b/mybulma/node_modules/npmlog/lib/log.js @@ -0,0 +1,404 @@ +'use strict' +var Progress = require('are-we-there-yet') +var Gauge = require('gauge') +var EE = require('events').EventEmitter +var log = exports = module.exports = new EE() +var util = require('util') + +var setBlocking = require('set-blocking') +var consoleControl = require('console-control-strings') + +setBlocking(true) +var stream = process.stderr +Object.defineProperty(log, 'stream', { + set: function (newStream) { + stream = newStream + if (this.gauge) { + this.gauge.setWriteTo(stream, stream) + } + }, + get: function () { + return stream + }, +}) + +// by default, decide based on tty-ness. +var colorEnabled +log.useColor = function () { + return colorEnabled != null ? 
colorEnabled : stream.isTTY +} + +log.enableColor = function () { + colorEnabled = true + this.gauge.setTheme({ hasColor: colorEnabled, hasUnicode: unicodeEnabled }) +} +log.disableColor = function () { + colorEnabled = false + this.gauge.setTheme({ hasColor: colorEnabled, hasUnicode: unicodeEnabled }) +} + +// default level +log.level = 'info' + +log.gauge = new Gauge(stream, { + enabled: false, // no progress bars unless asked + theme: { hasColor: log.useColor() }, + template: [ + { type: 'progressbar', length: 20 }, + { type: 'activityIndicator', kerning: 1, length: 1 }, + { type: 'section', default: '' }, + ':', + { type: 'logline', kerning: 1, default: '' }, + ], +}) + +log.tracker = new Progress.TrackerGroup() + +// we track this separately as we may need to temporarily disable the +// display of the status bar for our own loggy purposes. +log.progressEnabled = log.gauge.isEnabled() + +var unicodeEnabled + +log.enableUnicode = function () { + unicodeEnabled = true + this.gauge.setTheme({ hasColor: this.useColor(), hasUnicode: unicodeEnabled }) +} + +log.disableUnicode = function () { + unicodeEnabled = false + this.gauge.setTheme({ hasColor: this.useColor(), hasUnicode: unicodeEnabled }) +} + +log.setGaugeThemeset = function (themes) { + this.gauge.setThemeset(themes) +} + +log.setGaugeTemplate = function (template) { + this.gauge.setTemplate(template) +} + +log.enableProgress = function () { + if (this.progressEnabled) { + return + } + + this.progressEnabled = true + this.tracker.on('change', this.showProgress) + if (this._paused) { + return + } + + this.gauge.enable() +} + +log.disableProgress = function () { + if (!this.progressEnabled) { + return + } + this.progressEnabled = false + this.tracker.removeListener('change', this.showProgress) + this.gauge.disable() +} + +var trackerConstructors = ['newGroup', 'newItem', 'newStream'] + +var mixinLog = function (tracker) { + // mixin the public methods from log into the tracker + // (except: conflicts and one's we handle specially) + Object.keys(log).forEach(function (P) { + if (P[0] === '_') { + return + } + + if (trackerConstructors.filter(function (C) { + return C === P + }).length) { + return + } + + if (tracker[P]) { + return + } + + if (typeof log[P] !== 'function') { + return + } + + var func = log[P] + tracker[P] = function () { + return func.apply(log, arguments) + } + }) + // if the new tracker is a group, make sure any subtrackers get + // mixed in too + if (tracker instanceof Progress.TrackerGroup) { + trackerConstructors.forEach(function (C) { + var func = tracker[C] + tracker[C] = function () { + return mixinLog(func.apply(tracker, arguments)) + } + }) + } + return tracker +} + +// Add tracker constructors to the top level log object +trackerConstructors.forEach(function (C) { + log[C] = function () { + return mixinLog(this.tracker[C].apply(this.tracker, arguments)) + } +}) + +log.clearProgress = function (cb) { + if (!this.progressEnabled) { + return cb && process.nextTick(cb) + } + + this.gauge.hide(cb) +} + +log.showProgress = function (name, completed) { + if (!this.progressEnabled) { + return + } + + var values = {} + if (name) { + values.section = name + } + + var last = log.record[log.record.length - 1] + if (last) { + values.subsection = last.prefix + var disp = log.disp[last.level] || last.level + var logline = this._format(disp, log.style[last.level]) + if (last.prefix) { + logline += ' ' + this._format(last.prefix, this.prefixStyle) + } + + logline += ' ' + last.message.split(/\r?\n/)[0] + values.logline = 
logline + } + values.completed = completed || this.tracker.completed() + this.gauge.show(values) +}.bind(log) // bind for use in tracker's on-change listener + +// temporarily stop emitting, but don't drop +log.pause = function () { + this._paused = true + if (this.progressEnabled) { + this.gauge.disable() + } +} + +log.resume = function () { + if (!this._paused) { + return + } + + this._paused = false + + var b = this._buffer + this._buffer = [] + b.forEach(function (m) { + this.emitLog(m) + }, this) + if (this.progressEnabled) { + this.gauge.enable() + } +} + +log._buffer = [] + +var id = 0 +log.record = [] +log.maxRecordSize = 10000 +log.log = function (lvl, prefix, message) { + var l = this.levels[lvl] + if (l === undefined) { + return this.emit('error', new Error(util.format( + 'Undefined log level: %j', lvl))) + } + + var a = new Array(arguments.length - 2) + var stack = null + for (var i = 2; i < arguments.length; i++) { + var arg = a[i - 2] = arguments[i] + + // resolve stack traces to a plain string. + if (typeof arg === 'object' && arg instanceof Error && arg.stack) { + Object.defineProperty(arg, 'stack', { + value: stack = arg.stack + '', + enumerable: true, + writable: true, + }) + } + } + if (stack) { + a.unshift(stack + '\n') + } + message = util.format.apply(util, a) + + var m = { + id: id++, + level: lvl, + prefix: String(prefix || ''), + message: message, + messageRaw: a, + } + + this.emit('log', m) + this.emit('log.' + lvl, m) + if (m.prefix) { + this.emit(m.prefix, m) + } + + this.record.push(m) + var mrs = this.maxRecordSize + var n = this.record.length - mrs + if (n > mrs / 10) { + var newSize = Math.floor(mrs * 0.9) + this.record = this.record.slice(-1 * newSize) + } + + this.emitLog(m) +}.bind(log) + +log.emitLog = function (m) { + if (this._paused) { + this._buffer.push(m) + return + } + if (this.progressEnabled) { + this.gauge.pulse(m.prefix) + } + + var l = this.levels[m.level] + if (l === undefined) { + return + } + + if (l < this.levels[this.level]) { + return + } + + if (l > 0 && !isFinite(l)) { + return + } + + // If 'disp' is null or undefined, use the lvl as a default + // Allows: '', 0 as valid disp + var disp = log.disp[m.level] != null ? 
log.disp[m.level] : m.level + this.clearProgress() + m.message.split(/\r?\n/).forEach(function (line) { + var heading = this.heading + if (heading) { + this.write(heading, this.headingStyle) + this.write(' ') + } + this.write(disp, log.style[m.level]) + var p = m.prefix || '' + if (p) { + this.write(' ') + } + + this.write(p, this.prefixStyle) + this.write(' ' + line + '\n') + }, this) + this.showProgress() +} + +log._format = function (msg, style) { + if (!stream) { + return + } + + var output = '' + if (this.useColor()) { + style = style || {} + var settings = [] + if (style.fg) { + settings.push(style.fg) + } + + if (style.bg) { + settings.push('bg' + style.bg[0].toUpperCase() + style.bg.slice(1)) + } + + if (style.bold) { + settings.push('bold') + } + + if (style.underline) { + settings.push('underline') + } + + if (style.inverse) { + settings.push('inverse') + } + + if (settings.length) { + output += consoleControl.color(settings) + } + + if (style.beep) { + output += consoleControl.beep() + } + } + output += msg + if (this.useColor()) { + output += consoleControl.color('reset') + } + + return output +} + +log.write = function (msg, style) { + if (!stream) { + return + } + + stream.write(this._format(msg, style)) +} + +log.addLevel = function (lvl, n, style, disp) { + // If 'disp' is null or undefined, use the lvl as a default + if (disp == null) { + disp = lvl + } + + this.levels[lvl] = n + this.style[lvl] = style + if (!this[lvl]) { + this[lvl] = function () { + var a = new Array(arguments.length + 1) + a[0] = lvl + for (var i = 0; i < arguments.length; i++) { + a[i + 1] = arguments[i] + } + + return this.log.apply(this, a) + }.bind(this) + } + this.disp[lvl] = disp +} + +log.prefixStyle = { fg: 'magenta' } +log.headingStyle = { fg: 'white', bg: 'black' } + +log.style = {} +log.levels = {} +log.disp = {} +log.addLevel('silly', -Infinity, { inverse: true }, 'sill') +log.addLevel('verbose', 1000, { fg: 'cyan', bg: 'black' }, 'verb') +log.addLevel('info', 2000, { fg: 'green' }) +log.addLevel('timing', 2500, { fg: 'green', bg: 'black' }) +log.addLevel('http', 3000, { fg: 'green', bg: 'black' }) +log.addLevel('notice', 3500, { fg: 'cyan', bg: 'black' }) +log.addLevel('warn', 4000, { fg: 'black', bg: 'yellow' }, 'WARN') +log.addLevel('error', 5000, { fg: 'red', bg: 'black' }, 'ERR!') +log.addLevel('silent', Infinity) + +// allow 'error' prefix +log.on('error', function () {}) diff --git a/mybulma/node_modules/p-locate/index.d.ts b/mybulma/node_modules/p-locate/index.d.ts new file mode 100644 index 0000000..14115e1 --- /dev/null +++ b/mybulma/node_modules/p-locate/index.d.ts @@ -0,0 +1,64 @@ +declare namespace pLocate { + interface Options { + /** + Number of concurrently pending promises returned by `tester`. Minimum: `1`. + + @default Infinity + */ + readonly concurrency?: number; + + /** + Preserve `input` order when searching. + + Disable this to improve performance if you don't care about the order. + + @default true + */ + readonly preserveOrder?: boolean; + } +} + +declare const pLocate: { + /** + Get the first fulfilled promise that satisfies the provided testing function. + + @param input - An iterable of promises/values to test. + @param tester - This function will receive resolved values from `input` and is expected to return a `Promise` or `boolean`. + @returns A `Promise` that is fulfilled when `tester` resolves to `true` or the iterable is done, or rejects if any of the promises reject. 
The fulfilled value is the current iterable value or `undefined` if `tester` never resolved to `true`. + + @example + ``` + import pathExists = require('path-exists'); + import pLocate = require('p-locate'); + + const files = [ + 'unicorn.png', + 'rainbow.png', // Only this one actually exists on disk + 'pony.png' + ]; + + (async () => { + const foundPath = await pLocate(files, file => pathExists(file)); + + console.log(foundPath); + //=> 'rainbow' + })(); + ``` + */ + ( + input: Iterable | ValueType>, + tester: (element: ValueType) => PromiseLike | boolean, + options?: pLocate.Options + ): Promise; + + // TODO: Remove this for the next major release, refactor the whole definition to: + // declare function pLocate( + // input: Iterable | ValueType>, + // tester: (element: ValueType) => PromiseLike | boolean, + // options?: pLocate.Options + // ): Promise; + // export = pLocate; + default: typeof pLocate; +}; + +export = pLocate; diff --git a/mybulma/node_modules/p-locate/index.js b/mybulma/node_modules/p-locate/index.js new file mode 100644 index 0000000..e13ce15 --- /dev/null +++ b/mybulma/node_modules/p-locate/index.js @@ -0,0 +1,52 @@ +'use strict'; +const pLimit = require('p-limit'); + +class EndError extends Error { + constructor(value) { + super(); + this.value = value; + } +} + +// The input can also be a promise, so we await it +const testElement = async (element, tester) => tester(await element); + +// The input can also be a promise, so we `Promise.all()` them both +const finder = async element => { + const values = await Promise.all(element); + if (values[1] === true) { + throw new EndError(values[0]); + } + + return false; +}; + +const pLocate = async (iterable, tester, options) => { + options = { + concurrency: Infinity, + preserveOrder: true, + ...options + }; + + const limit = pLimit(options.concurrency); + + // Start all the promises concurrently with optional limit + const items = [...iterable].map(element => [element, limit(testElement, element, tester)]); + + // Check the promises either serially or concurrently + const checkLimit = pLimit(options.preserveOrder ? 1 : Infinity); + + try { + await Promise.all(items.map(element => checkLimit(finder, element))); + } catch (error) { + if (error instanceof EndError) { + return error.value; + } + + throw error; + } +}; + +module.exports = pLocate; +// TODO: Remove this for the next major release +module.exports.default = pLocate; diff --git a/mybulma/node_modules/p-locate/license b/mybulma/node_modules/p-locate/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/mybulma/node_modules/p-locate/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/mybulma/node_modules/p-locate/package.json b/mybulma/node_modules/p-locate/package.json new file mode 100644 index 0000000..e3de275 --- /dev/null +++ b/mybulma/node_modules/p-locate/package.json @@ -0,0 +1,53 @@ +{ + "name": "p-locate", + "version": "4.1.0", + "description": "Get the first fulfilled promise that satisfies the provided testing function", + "license": "MIT", + "repository": "sindresorhus/p-locate", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "promise", + "locate", + "find", + "finder", + "search", + "searcher", + "test", + "array", + "collection", + "iterable", + "iterator", + "race", + "fulfilled", + "fastest", + "async", + "await", + "promises", + "bluebird" + ], + "dependencies": { + "p-limit": "^2.2.0" + }, + "devDependencies": { + "ava": "^1.4.1", + "delay": "^4.1.0", + "in-range": "^1.0.0", + "time-span": "^3.0.0", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/mybulma/node_modules/p-locate/readme.md b/mybulma/node_modules/p-locate/readme.md new file mode 100644 index 0000000..f8e2c2e --- /dev/null +++ b/mybulma/node_modules/p-locate/readme.md @@ -0,0 +1,90 @@ +# p-locate [![Build Status](https://travis-ci.org/sindresorhus/p-locate.svg?branch=master)](https://travis-ci.org/sindresorhus/p-locate) + +> Get the first fulfilled promise that satisfies the provided testing function + +Think of it like an async version of [`Array#find`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Array/find). + + +## Install + +``` +$ npm install p-locate +``` + + +## Usage + +Here we find the first file that exists on disk, in array order. + +```js +const pathExists = require('path-exists'); +const pLocate = require('p-locate'); + +const files = [ + 'unicorn.png', + 'rainbow.png', // Only this one actually exists on disk + 'pony.png' +]; + +(async () => { + const foundPath = await pLocate(files, file => pathExists(file)); + + console.log(foundPath); + //=> 'rainbow' +})(); +``` + +*The above is just an example. Use [`locate-path`](https://github.com/sindresorhus/locate-path) if you need this.* + + +## API + +### pLocate(input, tester, [options]) + +Returns a `Promise` that is fulfilled when `tester` resolves to `true` or the iterable is done, or rejects if any of the promises reject. The fulfilled value is the current iterable value or `undefined` if `tester` never resolved to `true`. + +#### input + +Type: `Iterable` + +An iterable of promises/values to test. + +#### tester(element) + +Type: `Function` + +This function will receive resolved values from `input` and is expected to return a `Promise` or `boolean`. + +#### options + +Type: `Object` + +##### concurrency + +Type: `number`
+Default: `Infinity`
+Minimum: `1` + +Number of concurrently pending promises returned by `tester`. + +##### preserveOrder + +Type: `boolean`
+Default: `true` + +Preserve `input` order when searching. + +Disable this to improve performance if you don't care about the order. + + +## Related + +- [p-map](https://github.com/sindresorhus/p-map) - Map over promises concurrently +- [p-filter](https://github.com/sindresorhus/p-filter) - Filter promises concurrently +- [p-any](https://github.com/sindresorhus/p-any) - Wait for any promise to be fulfilled +- [More…](https://github.com/sindresorhus/promise-fun) + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/mybulma/node_modules/p-try/index.d.ts b/mybulma/node_modules/p-try/index.d.ts new file mode 100644 index 0000000..2a7319e --- /dev/null +++ b/mybulma/node_modules/p-try/index.d.ts @@ -0,0 +1,39 @@ +declare const pTry: { + /** + Start a promise chain. + + @param fn - The function to run to start the promise chain. + @param arguments - Arguments to pass to `fn`. + @returns The value of calling `fn(...arguments)`. If the function throws an error, the returned `Promise` will be rejected with that error. + + @example + ``` + import pTry = require('p-try'); + + (async () => { + try { + const value = await pTry(() => { + return synchronousFunctionThatMightThrow(); + }); + console.log(value); + } catch (error) { + console.error(error); + } + })(); + ``` + */ + ( + fn: (...arguments: ArgumentsType) => PromiseLike | ValueType, + ...arguments: ArgumentsType + ): Promise; + + // TODO: remove this in the next major version, refactor the whole definition to: + // declare function pTry( + // fn: (...arguments: ArgumentsType) => PromiseLike | ValueType, + // ...arguments: ArgumentsType + // ): Promise; + // export = pTry; + default: typeof pTry; +}; + +export = pTry; diff --git a/mybulma/node_modules/p-try/index.js b/mybulma/node_modules/p-try/index.js new file mode 100644 index 0000000..db858da --- /dev/null +++ b/mybulma/node_modules/p-try/index.js @@ -0,0 +1,9 @@ +'use strict'; + +const pTry = (fn, ...arguments_) => new Promise(resolve => { + resolve(fn(...arguments_)); +}); + +module.exports = pTry; +// TODO: remove this in the next major version +module.exports.default = pTry; diff --git a/mybulma/node_modules/p-try/license b/mybulma/node_modules/p-try/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/mybulma/node_modules/p-try/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
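Not part of the vendored files — a minimal sketch of how the one-line `p-try` implementation above behaves: a synchronous throw inside `fn` is caught by the `Promise` executor and surfaces as a rejection, and extra arguments are forwarded to `fn` without needing a wrapper closure.

```js
const pTry = require('p-try');

// A synchronous throw inside `fn` becomes a rejection of the returned promise,
// because the throw happens inside the `new Promise` executor.
pTry(() => {
	throw new Error('boom');
}).catch(error => {
	console.error(error.message);
	//=> 'boom'
});

// Extra arguments are passed straight through to `fn`.
pTry((a, b) => a + b, 1, 2).then(sum => {
	console.log(sum);
	//=> 3
});
```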
diff --git a/mybulma/node_modules/p-try/package.json b/mybulma/node_modules/p-try/package.json new file mode 100644 index 0000000..81c4d32 --- /dev/null +++ b/mybulma/node_modules/p-try/package.json @@ -0,0 +1,42 @@ +{ + "name": "p-try", + "version": "2.2.0", + "description": "`Start a promise chain", + "license": "MIT", + "repository": "sindresorhus/p-try", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "promise", + "try", + "resolve", + "function", + "catch", + "async", + "await", + "promises", + "settled", + "ponyfill", + "polyfill", + "shim", + "bluebird" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.1", + "xo": "^0.24.0" + } +} diff --git a/mybulma/node_modules/p-try/readme.md b/mybulma/node_modules/p-try/readme.md new file mode 100644 index 0000000..4d7bd64 --- /dev/null +++ b/mybulma/node_modules/p-try/readme.md @@ -0,0 +1,58 @@ +# p-try [![Build Status](https://travis-ci.org/sindresorhus/p-try.svg?branch=master)](https://travis-ci.org/sindresorhus/p-try) + +> Start a promise chain + +[How is it useful?](http://cryto.net/~joepie91/blog/2016/05/11/what-is-promise-try-and-why-does-it-matter/) + + +## Install + +``` +$ npm install p-try +``` + + +## Usage + +```js +const pTry = require('p-try'); + +(async () => { + try { + const value = await pTry(() => { + return synchronousFunctionThatMightThrow(); + }); + console.log(value); + } catch (error) { + console.error(error); + } +})(); +``` + + +## API + +### pTry(fn, ...arguments) + +Returns a `Promise` resolved with the value of calling `fn(...arguments)`. If the function throws an error, the returned `Promise` will be rejected with that error. + +Support for passing arguments on to the `fn` is provided in order to be able to avoid creating unnecessary closures. You probably don't need this optimization unless you're pushing a *lot* of functions. + +#### fn + +The function to run to start the promise chain. + +#### arguments + +Arguments to pass to `fn`. + + +## Related + +- [p-finally](https://github.com/sindresorhus/p-finally) - `Promise#finally()` ponyfill - Invoked when the promise is settled regardless of outcome +- [More…](https://github.com/sindresorhus/promise-fun) + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/mybulma/node_modules/path-parse/LICENSE b/mybulma/node_modules/path-parse/LICENSE new file mode 100644 index 0000000..810f3db --- /dev/null +++ b/mybulma/node_modules/path-parse/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Javier Blanco + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/mybulma/node_modules/path-parse/README.md b/mybulma/node_modules/path-parse/README.md new file mode 100644 index 0000000..05097f8 --- /dev/null +++ b/mybulma/node_modules/path-parse/README.md @@ -0,0 +1,42 @@ +# path-parse [![Build Status](https://travis-ci.org/jbgutierrez/path-parse.svg?branch=master)](https://travis-ci.org/jbgutierrez/path-parse) + +> Node.js [`path.parse(pathString)`](https://nodejs.org/api/path.html#path_path_parse_pathstring) [ponyfill](https://ponyfill.com). + +## Install + +``` +$ npm install --save path-parse +``` + +## Usage + +```js +var pathParse = require('path-parse'); + +pathParse('/home/user/dir/file.txt'); +//=> { +// root : "/", +// dir : "/home/user/dir", +// base : "file.txt", +// ext : ".txt", +// name : "file" +// } +``` + +## API + +See [`path.parse(pathString)`](https://nodejs.org/api/path.html#path_path_parse_pathstring) docs. + +### pathParse(path) + +### pathParse.posix(path) + +The Posix specific version. + +### pathParse.win32(path) + +The Windows specific version. + +## License + +MIT © [Javier Blanco](http://jbgutierrez.info) diff --git a/mybulma/node_modules/path-parse/index.js b/mybulma/node_modules/path-parse/index.js new file mode 100644 index 0000000..f062d0a --- /dev/null +++ b/mybulma/node_modules/path-parse/index.js @@ -0,0 +1,75 @@ +'use strict'; + +var isWindows = process.platform === 'win32'; + +// Regex to split a windows path into into [dir, root, basename, name, ext] +var splitWindowsRe = + /^(((?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?[\\\/]?)(?:[^\\\/]*[\\\/])*)((\.{1,2}|[^\\\/]+?|)(\.[^.\/\\]*|))[\\\/]*$/; + +var win32 = {}; + +function win32SplitPath(filename) { + return splitWindowsRe.exec(filename).slice(1); +} + +win32.parse = function(pathString) { + if (typeof pathString !== 'string') { + throw new TypeError( + "Parameter 'pathString' must be a string, not " + typeof pathString + ); + } + var allParts = win32SplitPath(pathString); + if (!allParts || allParts.length !== 5) { + throw new TypeError("Invalid path '" + pathString + "'"); + } + return { + root: allParts[1], + dir: allParts[0] === allParts[1] ? allParts[0] : allParts[0].slice(0, -1), + base: allParts[2], + ext: allParts[4], + name: allParts[3] + }; +}; + + + +// Split a filename into [dir, root, basename, name, ext], unix version +// 'root' is just a slash, or nothing. 
+var splitPathRe = + /^((\/?)(?:[^\/]*\/)*)((\.{1,2}|[^\/]+?|)(\.[^.\/]*|))[\/]*$/; +var posix = {}; + + +function posixSplitPath(filename) { + return splitPathRe.exec(filename).slice(1); +} + + +posix.parse = function(pathString) { + if (typeof pathString !== 'string') { + throw new TypeError( + "Parameter 'pathString' must be a string, not " + typeof pathString + ); + } + var allParts = posixSplitPath(pathString); + if (!allParts || allParts.length !== 5) { + throw new TypeError("Invalid path '" + pathString + "'"); + } + + return { + root: allParts[1], + dir: allParts[0].slice(0, -1), + base: allParts[2], + ext: allParts[4], + name: allParts[3], + }; +}; + + +if (isWindows) + module.exports = win32.parse; +else /* posix */ + module.exports = posix.parse; + +module.exports.posix = posix.parse; +module.exports.win32 = win32.parse; diff --git a/mybulma/node_modules/path-parse/package.json b/mybulma/node_modules/path-parse/package.json new file mode 100644 index 0000000..36c23f8 --- /dev/null +++ b/mybulma/node_modules/path-parse/package.json @@ -0,0 +1,33 @@ +{ + "name": "path-parse", + "version": "1.0.7", + "description": "Node.js path.parse() ponyfill", + "main": "index.js", + "scripts": { + "test": "node test.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/jbgutierrez/path-parse.git" + }, + "keywords": [ + "path", + "paths", + "file", + "dir", + "parse", + "built-in", + "util", + "utils", + "core", + "ponyfill", + "polyfill", + "shim" + ], + "author": "Javier Blanco ", + "license": "MIT", + "bugs": { + "url": "https://github.com/jbgutierrez/path-parse/issues" + }, + "homepage": "https://github.com/jbgutierrez/path-parse#readme" +} diff --git a/mybulma/node_modules/process-nextick-args/index.js b/mybulma/node_modules/process-nextick-args/index.js new file mode 100644 index 0000000..3eecf11 --- /dev/null +++ b/mybulma/node_modules/process-nextick-args/index.js @@ -0,0 +1,45 @@ +'use strict'; + +if (typeof process === 'undefined' || + !process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = { nextTick: nextTick }; +} else { + module.exports = process +} + +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); + } + var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = arguments[i]; + } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); + } +} + diff --git a/mybulma/node_modules/process-nextick-args/license.md b/mybulma/node_modules/process-nextick-args/license.md new file mode 100644 index 0000000..c67e353 --- /dev/null +++ b/mybulma/node_modules/process-nextick-args/license.md @@ -0,0 +1,19 @@ +# Copyright (c) 2015 Calvin Metcalf + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, 
sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.** diff --git a/mybulma/node_modules/process-nextick-args/package.json b/mybulma/node_modules/process-nextick-args/package.json new file mode 100644 index 0000000..6070b72 --- /dev/null +++ b/mybulma/node_modules/process-nextick-args/package.json @@ -0,0 +1,25 @@ +{ + "name": "process-nextick-args", + "version": "2.0.1", + "description": "process.nextTick but always with args", + "main": "index.js", + "files": [ + "index.js" + ], + "scripts": { + "test": "node test.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/calvinmetcalf/process-nextick-args.git" + }, + "author": "", + "license": "MIT", + "bugs": { + "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" + }, + "homepage": "https://github.com/calvinmetcalf/process-nextick-args", + "devDependencies": { + "tap": "~0.2.6" + } +} diff --git a/mybulma/node_modules/process-nextick-args/readme.md b/mybulma/node_modules/process-nextick-args/readme.md new file mode 100644 index 0000000..ecb432c --- /dev/null +++ b/mybulma/node_modules/process-nextick-args/readme.md @@ -0,0 +1,18 @@ +process-nextick-args +===== + +[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) + +```bash +npm install --save process-nextick-args +``` + +Always be able to pass arguments to process.nextTick, no matter the platform + +```js +var pna = require('process-nextick-args'); + +pna.nextTick(function (a, b, c) { + console.log(a, b, c); +}, 'step', 3, 'profit'); +``` diff --git a/mybulma/node_modules/promise-retry/test/test.js b/mybulma/node_modules/promise-retry/test/test.js new file mode 100644 index 0000000..466b099 --- /dev/null +++ b/mybulma/node_modules/promise-retry/test/test.js @@ -0,0 +1,263 @@ +'use strict'; + +var expect = require('expect.js'); +var promiseRetry = require('../'); +var promiseDelay = require('sleep-promise'); + +describe('promise-retry', function () { + it('should call fn again if retry was called', function () { + var count = 0; + + return promiseRetry(function (retry) { + count += 1; + + return promiseDelay(10) + .then(function () { + if (count <= 2) { + retry(new Error('foo')); + } + + return 'final'; + }); + }, { factor: 1 }) + .then(function (value) { + expect(value).to.be('final'); + expect(count).to.be(3); + }, function () { + throw new Error('should not fail'); + }); + }); + + it('should call fn with the attempt number', function () { + var count = 0; + + return promiseRetry(function (retry, number) { + count += 1; + expect(count).to.equal(number); + + return promiseDelay(10) + .then(function () { + if (count <= 2) { + retry(new Error('foo')); + } + + return 'final'; + }); + }, { factor: 1 }) + .then(function (value) { + expect(value).to.be('final'); + 
expect(count).to.be(3); + }, function () { + throw new Error('should not fail'); + }); + }); + + it('should not retry on fulfillment if retry was not called', function () { + var count = 0; + + return promiseRetry(function () { + count += 1; + + return promiseDelay(10) + .then(function () { + return 'final'; + }); + }) + .then(function (value) { + expect(value).to.be('final'); + expect(count).to.be(1); + }, function () { + throw new Error('should not fail'); + }); + }); + + it('should not retry on rejection if retry was not called', function () { + var count = 0; + + return promiseRetry(function () { + count += 1; + + return promiseDelay(10) + .then(function () { + throw new Error('foo'); + }); + }) + .then(function () { + throw new Error('should not succeed'); + }, function (err) { + expect(err.message).to.be('foo'); + expect(count).to.be(1); + }); + }); + + it('should not retry on rejection if nr of retries is 0', function () { + var count = 0; + + return promiseRetry(function (retry) { + count += 1; + + return promiseDelay(10) + .then(function () { + throw new Error('foo'); + }) + .catch(retry); + }, { retries : 0 }) + .then(function () { + throw new Error('should not succeed'); + }, function (err) { + expect(err.message).to.be('foo'); + expect(count).to.be(1); + }); + }); + + it('should reject the promise if the retries were exceeded', function () { + var count = 0; + + return promiseRetry(function (retry) { + count += 1; + + return promiseDelay(10) + .then(function () { + throw new Error('foo'); + }) + .catch(retry); + }, { retries: 2, factor: 1 }) + .then(function () { + throw new Error('should not succeed'); + }, function (err) { + expect(err.message).to.be('foo'); + expect(count).to.be(3); + }); + }); + + it('should pass options to the underlying retry module', function () { + var count = 0; + + return promiseRetry(function (retry) { + return promiseDelay(10) + .then(function () { + if (count < 2) { + count += 1; + retry(new Error('foo')); + } + + return 'final'; + }); + }, { retries: 1, factor: 1 }) + .then(function () { + throw new Error('should not succeed'); + }, function (err) { + expect(err.message).to.be('foo'); + }); + }); + + it('should convert direct fulfillments into promises', function () { + return promiseRetry(function () { + return 'final'; + }, { factor: 1 }) + .then(function (value) { + expect(value).to.be('final'); + }, function () { + throw new Error('should not fail'); + }); + }); + + it('should convert direct rejections into promises', function () { + promiseRetry(function () { + throw new Error('foo'); + }, { retries: 1, factor: 1 }) + .then(function () { + throw new Error('should not succeed'); + }, function (err) { + expect(err.message).to.be('foo'); + }); + }); + + it('should not crash on undefined rejections', function () { + return promiseRetry(function () { + throw undefined; + }, { retries: 1, factor: 1 }) + .then(function () { + throw new Error('should not succeed'); + }, function (err) { + expect(err).to.be(undefined); + }) + .then(function () { + return promiseRetry(function (retry) { + retry(); + }, { retries: 1, factor: 1 }); + }) + .then(function () { + throw new Error('should not succeed'); + }, function (err) { + expect(err).to.be(undefined); + }); + }); + + it('should retry if retry() was called with undefined', function () { + var count = 0; + + return promiseRetry(function (retry) { + count += 1; + + return promiseDelay(10) + .then(function () { + if (count <= 2) { + retry(); + } + + return 'final'; + }); + }, { factor: 1 }) + .then(function 
(value) { + expect(value).to.be('final'); + expect(count).to.be(3); + }, function () { + throw new Error('should not fail'); + }); + }); + + it('should work with several retries in the same chain', function () { + var count = 0; + + return promiseRetry(function (retry) { + count += 1; + + return promiseDelay(10) + .then(function () { + retry(new Error('foo')); + }) + .catch(function (err) { + retry(err); + }); + }, { retries: 1, factor: 1 }) + .then(function () { + throw new Error('should not succeed'); + }, function (err) { + expect(err.message).to.be('foo'); + expect(count).to.be(2); + }); + }); + + it('should allow options to be passed first', function () { + var count = 0; + + return promiseRetry({ factor: 1 }, function (retry) { + count += 1; + + return promiseDelay(10) + .then(function () { + if (count <= 2) { + retry(new Error('foo')); + } + + return 'final'; + }); + }).then(function (value) { + expect(value).to.be('final'); + expect(count).to.be(3); + }, function () { + throw new Error('should not fail'); + }); + }); +}); diff --git a/mybulma/node_modules/quick-lru/index.d.ts b/mybulma/node_modules/quick-lru/index.d.ts new file mode 100644 index 0000000..c382e51 --- /dev/null +++ b/mybulma/node_modules/quick-lru/index.d.ts @@ -0,0 +1,90 @@ +declare namespace QuickLRU { + interface Options { + /** + The maximum number of items before evicting the least recently used items. + */ + readonly maxSize: number; + } +} + +declare class QuickLRU + implements Iterable<[KeyType, ValueType]> { + /** + The stored item count. + */ + readonly size: number; + + /** + Simple ["Least Recently Used" (LRU) cache](https://en.m.wikipedia.org/wiki/Cache_replacement_policies#Least_Recently_Used_.28LRU.29). + + The instance is [`iterable`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Iteration_protocols) so you can use it directly in a [`for…of`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Statements/for...of) loop. + + @example + ``` + import QuickLRU = require('quick-lru'); + + const lru = new QuickLRU({maxSize: 1000}); + + lru.set('🦄', '🌈'); + + lru.has('🦄'); + //=> true + + lru.get('🦄'); + //=> '🌈' + ``` + */ + constructor(options: QuickLRU.Options); + + [Symbol.iterator](): IterableIterator<[KeyType, ValueType]>; + + /** + Set an item. + + @returns The list instance. + */ + set(key: KeyType, value: ValueType): this; + + /** + Get an item. + + @returns The stored item or `undefined`. + */ + get(key: KeyType): ValueType | undefined; + + /** + Check if an item exists. + */ + has(key: KeyType): boolean; + + /** + Get an item without marking it as recently used. + + @returns The stored item or `undefined`. + */ + peek(key: KeyType): ValueType | undefined; + + /** + Delete an item. + + @returns `true` if the item is removed or `false` if the item doesn't exist. + */ + delete(key: KeyType): boolean; + + /** + Delete all items. + */ + clear(): void; + + /** + Iterable for all the keys. + */ + keys(): IterableIterator; + + /** + Iterable for all the values. 
+ */ + values(): IterableIterator; +} + +export = QuickLRU; diff --git a/mybulma/node_modules/quick-lru/index.js b/mybulma/node_modules/quick-lru/index.js new file mode 100644 index 0000000..b7b1760 --- /dev/null +++ b/mybulma/node_modules/quick-lru/index.js @@ -0,0 +1,115 @@ +'use strict'; + +class QuickLRU { + constructor(options = {}) { + if (!(options.maxSize && options.maxSize > 0)) { + throw new TypeError('`maxSize` must be a number greater than 0'); + } + + this.maxSize = options.maxSize; + this.cache = new Map(); + this.oldCache = new Map(); + this._size = 0; + } + + _set(key, value) { + this.cache.set(key, value); + this._size++; + + if (this._size >= this.maxSize) { + this._size = 0; + this.oldCache = this.cache; + this.cache = new Map(); + } + } + + get(key) { + if (this.cache.has(key)) { + return this.cache.get(key); + } + + if (this.oldCache.has(key)) { + const value = this.oldCache.get(key); + this.oldCache.delete(key); + this._set(key, value); + return value; + } + } + + set(key, value) { + if (this.cache.has(key)) { + this.cache.set(key, value); + } else { + this._set(key, value); + } + + return this; + } + + has(key) { + return this.cache.has(key) || this.oldCache.has(key); + } + + peek(key) { + if (this.cache.has(key)) { + return this.cache.get(key); + } + + if (this.oldCache.has(key)) { + return this.oldCache.get(key); + } + } + + delete(key) { + const deleted = this.cache.delete(key); + if (deleted) { + this._size--; + } + + return this.oldCache.delete(key) || deleted; + } + + clear() { + this.cache.clear(); + this.oldCache.clear(); + this._size = 0; + } + + * keys() { + for (const [key] of this) { + yield key; + } + } + + * values() { + for (const [, value] of this) { + yield value; + } + } + + * [Symbol.iterator]() { + for (const item of this.cache) { + yield item; + } + + for (const item of this.oldCache) { + const [key] = item; + if (!this.cache.has(key)) { + yield item; + } + } + } + + get size() { + let oldCacheSize = 0; + for (const key of this.oldCache.keys()) { + if (!this.cache.has(key)) { + oldCacheSize++; + } + } + + return this._size + oldCacheSize; + } +} + +module.exports = QuickLRU; diff --git a/mybulma/node_modules/quick-lru/license b/mybulma/node_modules/quick-lru/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/mybulma/node_modules/quick-lru/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
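Not part of the vendored files — a small sketch of the two-`Map` scheme in `quick-lru`'s `index.js` above: once `maxSize` writes land in the current `Map`, it becomes `oldCache` and a fresh `Map` takes over, so any entry that is neither re-read nor re-written before the next rotation is dropped.

```js
const QuickLRU = require('quick-lru');

const lru = new QuickLRU({maxSize: 2});

lru.set('a', 1);
lru.set('b', 2); // _size hits maxSize: {a, b} become the old generation

lru.get('a');    // 1 — also copies 'a' back into the new generation
lru.set('c', 3); // hits maxSize again: {a, c} become the old generation, 'b' is discarded

lru.has('b');
//=> false (evicted as least recently used)

lru.has('a');
//=> true
```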
diff --git a/mybulma/node_modules/quick-lru/package.json b/mybulma/node_modules/quick-lru/package.json new file mode 100644 index 0000000..46c175b --- /dev/null +++ b/mybulma/node_modules/quick-lru/package.json @@ -0,0 +1,42 @@ +{ + "name": "quick-lru", + "version": "4.0.1", + "description": "Simple \"Least Recently Used\" (LRU) cache", + "license": "MIT", + "repository": "sindresorhus/quick-lru", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && nyc ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "lru", + "quick", + "cache", + "caching", + "least", + "recently", + "used", + "fast", + "map", + "hash", + "buffer" + ], + "devDependencies": { + "ava": "^1.4.1", + "coveralls": "^3.0.3", + "nyc": "^14.1.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/mybulma/node_modules/quick-lru/readme.md b/mybulma/node_modules/quick-lru/readme.md new file mode 100644 index 0000000..2b87001 --- /dev/null +++ b/mybulma/node_modules/quick-lru/readme.md @@ -0,0 +1,93 @@ +# quick-lru [![Build Status](https://travis-ci.org/sindresorhus/quick-lru.svg?branch=master)](https://travis-ci.org/sindresorhus/quick-lru) [![Coverage Status](https://coveralls.io/repos/github/sindresorhus/quick-lru/badge.svg?branch=master)](https://coveralls.io/github/sindresorhus/quick-lru?branch=master) + +> Simple ["Least Recently Used" (LRU) cache](https://en.m.wikipedia.org/wiki/Cache_replacement_policies#Least_Recently_Used_.28LRU.29) + +Useful when you need to cache something and limit memory usage. + +Inspired by the [`hashlru` algorithm](https://github.com/dominictarr/hashlru#algorithm), but instead uses [`Map`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Map) to support keys of any type, not just strings, and values can be `undefined`. + + +## Install + +``` +$ npm install quick-lru +``` + + +## Usage + +```js +const QuickLRU = require('quick-lru'); + +const lru = new QuickLRU({maxSize: 1000}); + +lru.set('🦄', '🌈'); + +lru.has('🦄'); +//=> true + +lru.get('🦄'); +//=> '🌈' +``` + + +## API + +### new QuickLRU(options?) + +Returns a new instance. + +### options + +Type: `object` + +#### maxSize + +*Required*
+Type: `number` + +The maximum number of items before evicting the least recently used items. + +### Instance + +The instance is [`iterable`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Iteration_protocols) so you can use it directly in a [`for…of`](https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Statements/for...of) loop. + +Both `key` and `value` can be of any type. + +#### .set(key, value) + +Set an item. Returns the instance. + +#### .get(key) + +Get an item. + +#### .has(key) + +Check if an item exists. + +#### .peek(key) + +Get an item without marking it as recently used. + +#### .delete(key) + +Delete an item. + +Returns `true` if the item is removed or `false` if the item doesn't exist. + +#### .clear() + +Delete all items. + +#### .keys() + +Iterable for all the keys. + +#### .values() + +Iterable for all the values. + +#### .size + +The stored item count. diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/index.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/index.d.ts new file mode 100644 index 0000000..520df22 --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/index.d.ts @@ -0,0 +1,20 @@ +// Basic +export * from './source/basic'; + +// Utilities +export {Except} from './source/except'; +export {Mutable} from './source/mutable'; +export {Merge} from './source/merge'; +export {MergeExclusive} from './source/merge-exclusive'; +export {RequireAtLeastOne} from './source/require-at-least-one'; +export {RequireExactlyOne} from './source/require-exactly-one'; +export {PartialDeep} from './source/partial-deep'; +export {ReadonlyDeep} from './source/readonly-deep'; +export {LiteralUnion} from './source/literal-union'; +export {Promisable} from './source/promisable'; +export {Opaque} from './source/opaque'; +export {SetOptional} from './source/set-optional'; +export {SetRequired} from './source/set-required'; + +// Miscellaneous +export {PackageJson} from './source/package-json'; diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/license b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
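Not part of the vendored files — an illustrative sketch (the type names are made up) of two of the utilities exported by the `type-fest` typings above, `Merge` and `RequireAtLeastOne`, following the one-line descriptions in the readme that follows.

```ts
import {Merge, RequireAtLeastOne} from 'type-fest';

type Defaults = {host: string; port: number};
type Overrides = {port: string};

// Keys of the second type override keys of the first:
// `Config` is {host: string; port: string}.
type Config = Merge<Defaults, Overrides>;

type ContactInfo = {name: string; email?: string; phone?: string};

// A contact must provide `name` plus at least one of `email` or `phone`.
type Contact = RequireAtLeastOne<ContactInfo, 'email' | 'phone'>;

const ok: Contact = {name: 'Ada', email: 'ada@example.com'};

// TypeScript error: at least one of 'email' or 'phone' is required
const bad: Contact = {name: 'Ada'};
```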
diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/package.json b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/package.json new file mode 100644 index 0000000..ea66211 --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/package.json @@ -0,0 +1,51 @@ +{ + "name": "type-fest", + "version": "0.8.1", + "description": "A collection of essential TypeScript types", + "license": "(MIT OR CC0-1.0)", + "repository": "sindresorhus/type-fest", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && tsd" + }, + "files": [ + "index.d.ts", + "source" + ], + "keywords": [ + "typescript", + "ts", + "types", + "utility", + "util", + "utilities", + "omit", + "merge", + "json" + ], + "devDependencies": { + "@sindresorhus/tsconfig": "^0.4.0", + "@typescript-eslint/eslint-plugin": "^2.2.0", + "@typescript-eslint/parser": "^2.2.0", + "eslint-config-xo-typescript": "^0.18.0", + "tsd": "^0.7.3", + "xo": "^0.24.0" + }, + "xo": { + "extends": "xo-typescript", + "extensions": [ + "ts" + ], + "rules": { + "import/no-unresolved": "off", + "@typescript-eslint/indent": "off" + } + } +} diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/readme.md b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/readme.md new file mode 100644 index 0000000..1824bda --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/readme.md @@ -0,0 +1,635 @@ +
+
+
+ type-fest +
+
+ A collection of essential TypeScript types +
+
+
+
+
+ +[![Build Status](https://travis-ci.com/sindresorhus/type-fest.svg?branch=master)](https://travis-ci.com/sindresorhus/type-fest) +[![](https://img.shields.io/badge/unicorn-approved-ff69b4.svg)](https://www.youtube.com/watch?v=9auOCbH5Ns4) + + +Many of the types here should have been built-in. You can help by suggesting some of them to the [TypeScript project](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). + +Either add this package as a dependency or copy-paste the needed types. No credit required. 👌 + +PR welcome for additional commonly needed types and docs improvements. Read the [contributing guidelines](.github/contributing.md) first. + + +## Install + +``` +$ npm install type-fest +``` + +*Requires TypeScript >=3.2* + + +## Usage + +```ts +import {Except} from 'type-fest'; + +type Foo = { + unicorn: string; + rainbow: boolean; +}; + +type FooWithoutRainbow = Except; +//=> {unicorn: string} +``` + + +## API + +Click the type names for complete docs. + +### Basic + +- [`Primitive`](source/basic.d.ts) - Matches any [primitive value](https://developer.mozilla.org/en-US/docs/Glossary/Primitive). +- [`Class`](source/basic.d.ts) - Matches a [`class` constructor](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes). +- [`TypedArray`](source/basic.d.ts) - Matches any [typed array](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray), like `Uint8Array` or `Float64Array`. +- [`JsonObject`](source/basic.d.ts) - Matches a JSON object. +- [`JsonArray`](source/basic.d.ts) - Matches a JSON array. +- [`JsonValue`](source/basic.d.ts) - Matches any valid JSON value. +- [`ObservableLike`](source/basic.d.ts) - Matches a value that is like an [Observable](https://github.com/tc39/proposal-observable). + +### Utilities + +- [`Except`](source/except.d.ts) - Create a type from an object type without certain keys. This is a stricter version of [`Omit`](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-3-5.html#the-omit-helper-type). +- [`Mutable`](source/mutable.d.ts) - Convert an object with `readonly` keys into a mutable object. The inverse of `Readonly`. +- [`Merge`](source/merge.d.ts) - Merge two types into a new type. Keys of the second type overrides keys of the first type. +- [`MergeExclusive`](source/merge-exclusive.d.ts) - Create a type that has mutually exclusive keys. +- [`RequireAtLeastOne`](source/require-at-least-one.d.ts) - Create a type that requires at least one of the given keys. +- [`RequireExactlyOne`](source/require-one.d.ts) - Create a type that requires exactly a single key of the given keys and disallows more. +- [`PartialDeep`](source/partial-deep.d.ts) - Create a deeply optional version of another type. Use [`Partial`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1401-L1406) if you only need one level deep. +- [`ReadonlyDeep`](source/readonly-deep.d.ts) - Create a deeply immutable version of an `object`/`Map`/`Set`/`Array` type. Use [`Readonly`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1415-L1420) if you only need one level deep. +- [`LiteralUnion`](source/literal-union.d.ts) - Create a union type by combining primitive types and literal types without sacrificing auto-completion in IDEs for the literal type part of the union. Workaround for [Microsoft/TypeScript#29729](https://github.com/Microsoft/TypeScript/issues/29729). 
+- [`Promisable`](source/promisable.d.ts) - Create a type that represents either the value or the value wrapped in `PromiseLike`. +- [`Opaque`](source/opaque.d.ts) - Create an [opaque type](https://codemix.com/opaque-types-in-javascript/). +- [`SetOptional`](source/set-optional.d.ts) - Create a type that makes the given keys optional. +- [`SetRequired`](source/set-required.d.ts) - Create a type that makes the given keys required. + +### Miscellaneous + +- [`PackageJson`](source/package-json.d.ts) - Type for [npm's `package.json` file](https://docs.npmjs.com/creating-a-package-json-file). + + +## Declined types + +*If we decline a type addition, we will make sure to document the better solution here.* + +- [`Diff` and `Spread`](https://github.com/sindresorhus/type-fest/pull/7) - The PR author didn't provide any real-world use-cases and the PR went stale. If you think this type is useful, provide some real-world use-cases and we might reconsider. +- [`Dictionary`](https://github.com/sindresorhus/type-fest/issues/33) - You only save a few characters (`Dictionary` vs `Record`) from [`Record`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1429-L1434), which is more flexible and well-known. Also, you shouldn't use an object as a dictionary. We have `Map` in JavaScript now. + + +## Tips + +### Built-in types + +There are many advanced types most users don't know about. + +- [`Partial`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1401-L1406) - Make all properties in `T` optional. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/KYOwrgtgBAMg9gcxsAbsANlA3gKClAeQDMiAaPKAEWACMwFz8BRAJxbhcagDEBDAF17ocAXxw4AliH7AWRXgGNgUAHJwAJsADCcEEQkJsFXgAcTK3hGAAuKAGd+LKQgDcFEx363wEGrLf46IjIaOi28EioGG5iOArovHZ2qhrAAIJmAEJgEuiaLEb4Jk4oAsoKuvoIYCwCErq2apo6egZQALyF+FCm5pY2UABETelmg1xFnrYAzAAM8xNQQZGh4cFR6AB0xEQUIm4UFa0IABRHVbYACrws-BJCADwjLVUAfACUXfhEHFBnug4oABrYAATygcCIhBoACtgAp+JsQaC7P9ju9Prhut0joCwCZ1GUAGpCMDKTrnAwAbWRPWSyMhKWalQMAF0Dtj8BIoSd8YSZCT0GSOu1OmAQJp9CBgOpPkc7uBgBzOfwABYSOybSnVWp3XQ0sF04FgxnPFkIVkdKB84mkpUUfCxbEsYD8GogKBqjUBKBiWIAen9UGut3u6CeqReBlePXQQQA7skwMl+HAoMU4CgJJoISB0ODeOmbvwIVC1cAcIGmdpzVApDI5IpgJscNL49WMiZsrl8id3lrzScsD0zBYrLZBgAVOCUOCdwa+95uIA) + + ```ts + interface NodeConfig { + appName: string; + port: number; + } + + class NodeAppBuilder { + private configuration: NodeConfig = { + appName: 'NodeApp', + port: 3000 + }; + + config(config: Partial) { + type NodeConfigKey = keyof NodeConfig; + + for (const key of Object.keys(config) as NodeConfigKey[]) { + const updateValue = config[key]; + + if (updateValue === undefined) { + continue; + } + + this.configuration[key] = updateValue; + } + + return this; + } + } + + // `Partial`` allows us to provide only a part of the + // NodeConfig interface. + new NodeAppBuilder().config({appName: 'ToDoApp'}); + ``` +
+ +- [`Required`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1408-L1413) - Make all properties in `T` required. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/AQ4SwOwFwUwJwGYEMDGNgGED21VQGJZwC2wA3gFCjXAzFJgA2A-AFzADOUckA5gNxUaIYjA4ckvGG07c+g6gF8KQkAgCuEFFDA5O6gEbEwUbLm2ESwABQIixACJIoSdgCUYAR3Vg4MACYAPGYuFvYAfACU5Ko0APRxwADKMBD+wFAAFuh2Vv7OSBlYGdmc8ABu8LHKsRyGxqY4oQT21pTCIHQMjOwA5DAAHgACxAAOjDAAdChYxL0ANLHUouKSMH0AEmAAhJhY6ozpAJ77GTCMjMCiV0ToSAb7UJPPC9WRgrEJwAAqR6MwSRQPFGUFocDgRHYxnEfGAowh-zgUCOwF6KwkUl6tXqJhCeEsxDaS1AXSYfUGI3GUxmc0WSneQA) + + ```ts + interface ContactForm { + email?: string; + message?: string; + } + + function submitContactForm(formData: Required) { + // Send the form data to the server. + } + + submitContactForm({ + email: 'ex@mple.com', + message: 'Hi! Could you tell me more about…', + }); + + // TypeScript error: missing property 'message' + submitContactForm({ + email: 'ex@mple.com', + }); + ``` +
+ +- [`Readonly`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1415-L1420) - Make all properties in `T` readonly. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/AQ4UwOwVwW2AZA9gc3mAbmANsA3gKFCOAHkAzMgGkOJABEwAjKZa2kAUQCcvEu32AMQCGAF2FYBIAL4BufDRABLCKLBcywgMZgEKZOoDCiCGSXI8i4hGEwwALmABnUVxXJ57YFgzZHSVF8sT1BpBSItLGEnJz1kAy5LLy0TM2RHACUwYQATEywATwAeAITjU3MAPnkrCJMXLigtUT4AClxgGztKbyDgaX99I1TzAEokr1BRAAslJwA6FIqLAF48TtswHp9MHDla9hJGACswZvmyLjAwAC8wVpm5xZHkUZDaMKIwqyWXYCW0oN4sNlsA1h0ug5gAByACyBQAggAHJHQ7ZBIFoXbzBjMCz7OoQP5YIaJNYQMAAdziCVaALGNSIAHomcAACoFJFgADKWjcSNEwG4vC4ji0wggEEQguiTnMEGALWAV1yAFp8gVgEjeFyuKICvMrCTgVxnst5jtsGC4ljsPNhXxGaAWcAAOq6YRXYDCRg+RWIcA5JSC+kWdCepQ+v3RYCU3RInzRMCGwlpC19NYBW1Ye08R1AA) + + ```ts + enum LogLevel { + Off, + Debug, + Error, + Fatal + }; + + interface LoggerConfig { + name: string; + level: LogLevel; + } + + class Logger { + config: Readonly; + + constructor({name, level}: LoggerConfig) { + this.config = {name, level}; + Object.freeze(this.config); + } + } + + const config: LoggerConfig = { + name: 'MyApp', + level: LogLevel.Debug + }; + + const logger = new Logger(config); + + // TypeScript Error: cannot assign to read-only property. + logger.config.level = LogLevel.Error; + + // We are able to edit config variable as we please. + config.level = LogLevel.Error; + ``` +
+ +- [`Pick`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1422-L1427) - From `T`, pick a set of properties whose keys are in the union `K`. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/AQ4SwOwFwUwJwGYEMDGNgEE5TCgNugN4BQoZwOUBAXMAM5RyQDmA3KeSFABYCuAtgCMISMHloMmENh04oA9tBjQJjFuzIBfYrOAB6PcADCcGElh1gEGAHcKATwAO6ebyjB5CTNlwFwSxFR0BX5HeToYABNgBDh5fm8cfBg6AHIKG3ldA2BHOOcfFNpUygJ0pAhokr4hETFUgDpswywkggAFUwA3MFtgAF5gQgowKhhVKTYKGuFRcXo1aVZgbTIoJ3RW3xhOmB6+wfbcAGsAHi3kgBpgEtGy4AAfG54BWfqAPnZm4AAlZUj4MAkMA8GAGB4vEgfMlLLw6CwPBA8PYRmMgZVgAC6CgmI4cIommQELwICh8RBgKZKvALh1ur0bHQABR5PYMui0Wk7em2ADaAF0AJS0AASABUALIAGQAogR+Mp3CROCAFBBwVC2ikBpj5CgBIqGjizLA5TAFdAmalImAuqlBRoVQh5HBgEy1eDWfs7J5cjzGYKhroVfpDEhHM4MV6GRR5NN0JrtnRg6BVirTFBeHAKYmYY6QNpdB73LmCJZBlSAXAubtvczeSmQMNSuMbmKNgBlHFgPEUNwusBIPAAQlS1xetTmxT0SDoESgdD0C4aACtHMwxytLrohawgA) + + ```ts + interface Article { + title: string; + thumbnail: string; + content: string; + } + + // Creates new type out of the `Article` interface composed + // from the Articles' two properties: `title` and `thumbnail`. + // `ArticlePreview = {title: string; thumbnail: string}` + type ArticlePreview = Pick; + + // Render a list of articles using only title and description. + function renderArticlePreviews(previews: ArticlePreview[]): HTMLElement { + const articles = document.createElement('div'); + + for (const preview of previews) { + // Append preview to the articles. + } + + return articles; + } + + const articles = renderArticlePreviews([ + { + title: 'TypeScript tutorial!', + thumbnail: '/assets/ts.jpg' + } + ]); + ``` +
+ +- [`Record`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1429-L1434) - Construct a type with a set of properties `K` of type `T`. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/AQ4ejYAUHsGcCWAXBMB2dgwGbAKYC2ADgDYwCeeemCaWArgE7ADGMxAhmuQHQBQoYEnJE8wALKEARnkaxEKdMAC8wAOS0kstGuAAfdQBM8ANzxlRjXQbVaWACwC0JPB0NqA3HwGgIwAJJoWozYHCxixnAsjAhStADmwESMMJYo1Fi4HMCIaPEu+MRklHj8gpqyoeHAAKJFFFTAAN4+giDYCIxwSAByHAR4AFw5SDF5Xm2gJBzdfQPD3WPxE5PAlBxdAPLYNQAelgh4aOHDaPQEMowrIAC+3oJ+AMKMrlrAXFhSAFZ4LEhC9g4-0BmA4JBISXgiCkBQABpILrJ5MhUGhYcATGD6Bk4Hh-jNgABrPDkOBlXyQAAq9ngYmJpOAAHcEOCRjAXqwYODfoo6DhakUSph+Uh7GI4P0xER4Cj0OSQGwMP8tP1hgAlX7swwAHgRl2RvIANALSA08ABtAC6AD4VM1Wm0Kow0MMrYaHYJjGYLLJXZb3at1HYnC43Go-QHQDcvA6-JsmEJXARgCDgMYWAhjIYhDAU+YiMAAFIwex0ZmilMITCGF79TLAGRsAgJYAAZRwSEZGzEABFTOZUrJ5Yn+jwnWgeER6HB7AAKJrADpdXqS4ZqYultTG6azVfqHswPBbtauLY7fayQ7HIbAAAMwBuAEoYw9IBq2Ixs9h2eFMOQYPQObALQKJgggABeYhghCIpikkKRpOQRIknAsZUiIeCttECBEP8NSMCkjDDAARMGziuIYxHwYOjDCMBmDNnAuTxA6irdCOBB1Lh5Dqpqn66tISIykawBnOCtqqC0gbjqc9DgpGkxegOliyfJDrRkAA) + + ```ts + // Positions of employees in our company. + type MemberPosition = 'intern' | 'developer' | 'tech-lead'; + + // Interface describing properties of a single employee. + interface Employee { + firstName: string; + lastName: string; + yearsOfExperience: number; + } + + // Create an object that has all possible `MemberPosition` values set as keys. + // Those keys will store a collection of Employees of the same position. + const team: Record = { + intern: [], + developer: [], + 'tech-lead': [], + }; + + // Our team has decided to help John with his dream of becoming Software Developer. + team.intern.push({ + firstName: 'John', + lastName: 'Doe', + yearsOfExperience: 0 + }); + + // `Record` forces you to initialize all of the property keys. + // TypeScript Error: "tech-lead" property is missing + const teamEmpty: Record = { + intern: null, + developer: null, + }; + ``` +
+ +- [`Exclude`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1436-L1439) - Exclude from `T` those types that are assignable to `U`. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/JYOwLgpgTgZghgYwgAgMrQG7QMIHsQzADmyA3gFDLIAOuUYAXMiAK4A2byAPsgM5hRQJHqwC2AI2gBucgF9y5MAE9qKAEoQAjiwj8AEnBAATNtGQBeZAAooWphu26wAGmS3e93bRC8IASgsAPmRDJRlyAHoI5ABRAA8ENhYjFFYOZGVVZBgoXFFkAAM0zh5+QRBhZhYJaAKAOkjogEkQZAQ4X2QAdwALCFbaemRgXmQtFjhOMFwq9K6ULuB0lk6U+HYwZAxJnQaYFhAEMGB8ZCIIMAAFOjAANR2IK0HGWISklIAedCgsKDwCYgAbQA5M9gQBdVzFQJ+JhiSRQMiUYYwayZCC4VHPCzmSzAspCYEBWxgFhQAZwKC+FpgJ43VwARgADH4ZFQSWSBjcZPJyPtDsdTvxKWBvr8rD1DCZoJ5HPopaYoK4EPhCEQmGKcKriLCtrhgEYkVQVT5Nr4fmZLLZtMBbFZgT0wGBqES6ghbHBIJqoBKFdBWQpjfh+DQbhY2tqiHVsbjLMVkAB+ZAAZiZaeQTHOVxu9ySjxNaujNwDVHNvzqbBGkBAdPoAfkQA) + + ```ts + interface ServerConfig { + port: null | string | number; + } + + type RequestHandler = (request: Request, response: Response) => void; + + // Exclude `null` type from `null | string | number`. + // In case the port is equal to `null`, we will use default value. + function getPortValue(port: Exclude): number { + if (typeof port === 'string') { + return parseInt(port, 10); + } + + return port; + } + + function startServer(handler: RequestHandler, config: ServerConfig): void { + const server = require('http').createServer(handler); + + const port = config.port === null ? 3000 : getPortValue(config.port); + server.listen(port); + } + ``` +
+ +- [`Extract`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1441-L1444) - Extract from `T` those types that are assignable to `U`. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/CYUwxgNghgTiAEAzArgOzAFwJYHtXzSwEdkQBJYACgEoAueVZAWwCMQYBuAKDDwGcM8MgBF4AXngBlAJ6scESgHIRi6ty5ZUGdoihgEABXZ888AN5d48ANoiAuvUat23K6ihMQ9ATE0BzV3goPy8GZjZOLgBfLi4Aejj4AEEICBwAdz54MAALKFQQ+BxEeAAHY1NgKAwoIKy0grr4DByEUpgccpgMaXgAaxBerCzi+B9-ZulygDouFHRsU1z8kKMYE1RhaqgAHkt4AHkWACt4EAAPbVRgLLWNgBp9gGlBs8uQa6yAUUuYPQwdgNpKM7nh7mMML4CgA+R5WABqUAgpDeVxuhxO1he0jsXGh8EoOBO9COx3BQPo2PBADckaR6IjkSA6PBqTgsMBzPsicdrEC7OJWXSQNwYvFEgAVTS9JLXODpeDpKBZFg4GCoWa8VACIJykAKiQWKy2YQOAioYikCg0OEMDyhRSy4DyxS24KhAAMjyi6gS8AAwjh5OD0iBFHAkJoEOksC1mnkMJq8gUQKDNttKPlnfrwYp3J5XfBHXqoKpfYkAOI4ansTxaeDADmoRSCCBYAbxhC6TDx6rwYHIRX5bScjA4bLJwoDmDwDkfbA9JMrVMVdM1TN69LgkTgwgkchUahqIA) + + ```ts + declare function uniqueId(): number; + + const ID = Symbol('ID'); + + interface Person { + [ID]: number; + name: string; + age: number; + } + + // Allows changing the person data as long as the property key is of string type. + function changePersonData< + Obj extends Person, + Key extends Extract, + Value extends Obj[Key] + > (obj: Obj, key: Key, value: Value): void { + obj[key] = value; + } + + // Tiny Andrew was born. + const andrew = { + [ID]: uniqueId(), + name: 'Andrew', + age: 0, + }; + + // Cool, we're fine with that. + changePersonData(andrew, 'name', 'Pony'); + + // Goverment didn't like the fact that you wanted to change your identity. + changePersonData(andrew, ID, uniqueId()); + ``` +
+ +- [`NonNullable`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1446-L1449) - Exclude `null` and `undefined` from `T`. +
+ + Example + + Works with strictNullChecks set to true. (Read more here) + + [Playground](https://typescript-play.js.org/?target=6#code/C4TwDgpgBACg9gJ2AOQK4FsBGEFQLxQDOwCAlgHYDmUAPlORtrnQwDasDcAUFwPQBU-WAEMkUOADMowqAGNWwwoSgATCBIqlgpOOSjAAFsOBRSy1IQgr9cKJlSlW1mZYQA3HFH68u8xcoBlHA8EACEHJ08Aby4oKDBUTFZSWXjEFEYcAEIALihkXTR2YSSIAB54JDQsHAA+blj4xOTUsHSACkMzPKD3HHDHNQQAGjSkPMqMmoQASh7g-oihqBi4uNIpdraxPAI2VhmVxrX9AzMAOm2ppnwoAA4ABifuE4BfKAhWSyOTuK7CS7pao3AhXF5rV48E4ICDAVAIPT-cGQyG+XTEIgLMJLTx7CAAdygvRCA0iCHaMwarhJOIQjUBSHaACJHk8mYdeLwxtdcVAAOSsh58+lXdr7Dlcq7A3n3J4PEUdADMcspUE53OluAIUGVTx46oAKuAIAFZGQwCYAKIIBCILjUxaDHAMnla+iodjcIA) + + ```ts + type PortNumber = string | number | null; + + /** Part of a class definition that is used to build a server */ + class ServerBuilder { + portNumber!: NonNullable; + + port(this: ServerBuilder, port: PortNumber): ServerBuilder { + if (port == null) { + this.portNumber = 8000; + } else { + this.portNumber = port; + } + + return this; + } + } + + const serverBuilder = new ServerBuilder(); + + serverBuilder + .port('8000') // portNumber = '8000' + .port(null) // portNumber = 8000 + .port(3000); // portNumber = 3000 + + // TypeScript error + serverBuilder.portNumber = null; + ``` +
+ +- [`Parameters`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1451-L1454) - Obtain the parameters of a function type in a tuple. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/GYVwdgxgLglg9mABAZwBYmMANgUwBQxgAOIUAXIgIZgCeA2gLoCUFAbnDACaIDeAUIkQB6IYgCypSlBxUATrMo1ECsJzgBbLEoipqAc0J7EMKMgDkiHLnU4wp46pwAPHMgB0fAL58+oSLARECEosLAA5ABUYG2QAHgAxJGdpVWREPDdMylk9ZApqemZEAF4APipacrw-CApEgBogkKwAYThwckQwEHUAIxxZJl4BYVEImiIZKF0oZRwiWVdbeygJmThgOYgcGFYcbhqApCJsyhtpWXcR1cnEePBoeDAABVPzgbTixFeFd8uEsClADcIxGiygIFkSEOT3SmTc2VydQeRx+ZxwF2QQ34gkEwDgsnSuFmMBKiAADEDjIhYk1Qm0OlSYABqZnYka4xA1DJZHJYkGc7yCbyeRA+CAIZCzNAYbA4CIAdxg2zJwVCkWirjwMswuEaACYmCCgA) + + ```ts + function shuffle(input: any[]): void { + // Mutate array randomly changing its' elements indexes. + } + + function callNTimes any> (func: Fn, callCount: number) { + // Type that represents the type of the received function parameters. + type FunctionParameters = Parameters; + + return function (...args: FunctionParameters) { + for (let i = 0; i < callCount; i++) { + func(...args); + } + } + } + + const shuffleTwice = callNTimes(shuffle, 2); + ``` +
+ +- [`ConstructorParameters`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1456-L1459) - Obtain the parameters of a constructor function type in a tuple. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/MYGwhgzhAECCBOAXAlqApgWQPYBM0mgG8AoaaFRENALmgkXmQDsBzAblOmCycTV4D8teo1YdO3JiICuwRFngAKClWENmLAJRFOZRAAtkEAHQq00ALzlklNBzIBfYk+KhIMAJJTEYJsDQAwmDA+mgAPAAq0GgAHnxMODCKTGgA7tCKxllg8CwQtL4AngDaALraFgB80EWa1SRkAA6MAG5gfNAB4FABPDJyCrQR9tDNyG0dwMGhtBhgjWEiGgA00F70vv4RhY3hEZXVVinpc42KmuJkkv3y8Bly8EPaDWTkhiZd7r3e8LK3llwGCMXGQWGhEOsfH5zJlsrl8p0+gw-goAAo5MAAW3BaHgEEilU0tEhmzQ212BJ0ry4SOg+kg+gBBiMximIGA0nAfAQLGk2N4EAAEgzYcYcnkLsRdDTvNEYkYUKwSdCme9WdM0MYwYhFPSIPpJdTkAAzDKxBUaZX+aAAQgsVmkCTQxuYaBw2ng4Ok8CYcotSu8pMur09iG9vuObxZnx6SN+AyUWTF8MN0CcZE4Ywm5jZHK5aB5fP4iCFIqT4oRRTKRLo6lYVNeAHpG50wOzOe1zHr9NLQ+HoABybsD4HOKXXRA1JCoKhBELmI5pNaB6Fz0KKBAodDYPAgSUTmqYsAALx4m5nC6nW9nGq14KtaEUA9gR9PvuNCjQ9BgACNvcwNBtAcLiAA) + + ```ts + class ArticleModel { + title: string; + content?: string; + + constructor(title: string) { + this.title = title; + } + } + + class InstanceCache any)> { + private ClassConstructor: T; + private cache: Map> = new Map(); + + constructor (ctr: T) { + this.ClassConstructor = ctr; + } + + getInstance (...args: ConstructorParameters): InstanceType { + const hash = this.calculateArgumentsHash(...args); + + const existingInstance = this.cache.get(hash); + if (existingInstance !== undefined) { + return existingInstance; + } + + return new this.ClassConstructor(...args); + } + + private calculateArgumentsHash(...args: any[]): string { + // Calculate hash. + return 'hash'; + } + } + + const articleCache = new InstanceCache(ArticleModel); + const amazonArticle = articleCache.getInstance('Amazon forests burining!'); + ``` +
+ +- [`ReturnType`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1461-L1464) – Obtain the return type of a function type. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/MYGwhgzhAECSAmICmBlJAnAbgS2E6A3gFDTTwD2AcuQC4AW2AdgOYAUAlAFzSbnbyEAvkWFFQkGJSQB3GMVI1sNZNwg10TZgG4S0YOUY0kh1es07d+xmvQBXYDXLpWi5UlMaWAGj0GjJ6BtNdkJdBQYIADpXZGgAXmgYpB1ScOwoq38aeN9DYxoU6GFRKzVoJjUwRjwAYXJbPPRuAFkwAAcAHgAxBodsAx9GWwBbACMMAD4cxhloVraOCyYjdAAzMDxoOut1e0d0UNIZ6WhWSPOwdGYIbiqATwBtAF0uaHudUQB6ACpv6ABpJBINqJdAbADW0Do5BOw3u5R2VTwMHIq2gAANtjZ0bkbHsnFCwJh8ONjHp0EgwEZ4JFoN9PkRVr1FAZoMwkDRYIjqkgOrosepoEgAB7+eAwAV2BxOLy6ACCVxgIrFEoMeOl6AACpcwMMORgIB1JRMiBNWKVdhruJKfOdIpdrtwFddXlzKjyACp3Nq842HaDIbL6BrZBIVGhIpB1EMYSLsmjmtWW-YhAA+qegAAYLKQLQj3ZsEsdccmnGcLor2Dn8xGedHGpEIBzEzspfsfMHDNAANTQACMVaIljV5GQkRA5DYmIpVKQAgAJARO9le33BDXIyi0YuLW2nJFGLqkOvxFB0YPdBSaLZ0IwNzyPkO8-xkGgsLh8Al427a3hWAhXwwHA8EHT5PmgAB1bAQBAANJ24adKWpft72RaBUTgRBUCAj89HAM8xCTaBjggABRQx0DuHJv25P9dCkWRZVIAAiBjoFImpmjlFBgA0NpsjadByDacgIDAEAIAAQmYpjoGYgAZSBsmGPw6DtZiiFA8CoJguDmAQmoZ2QvtUKQLdoAYmBTwgdEiCAA) + + ```ts + /** Provides every element of the iterable `iter` into the `callback` function and stores the results in an array. */ + function mapIter< + Elem, + Func extends (elem: Elem) => any, + Ret extends ReturnType + >(iter: Iterable, callback: Func): Ret[] { + const mapped: Ret[] = []; + + for (const elem of iter) { + mapped.push(callback(elem)); + } + + return mapped; + } + + const setObject: Set = new Set(); + const mapObject: Map = new Map(); + + mapIter(setObject, (value: string) => value.indexOf('Foo')); // number[] + + mapIter(mapObject, ([key, value]: [number, string]) => { + return key % 2 === 0 ? value : 'Odd'; + }); // string[] + ``` +
+ +- [`InstanceType`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1466-L1469) – Obtain the instance type of a constructor function type. +
+ + Example + + + [Playground](https://typescript-play.js.org/?target=6#code/MYGwhgzhAECSAmICmBlJAnAbgS2E6A3gFDTTwD2AcuQC4AW2AdgOYAUAlAFzSbnbyEAvkWFFQkGJSQB3GMVI1sNZNwg10TZgG4S0YOUY0kh1es07d+xmvQBXYDXLpWi5UlMaWAGj0GjJ6BtNdkJdBQYIADpXZGgAXmgYpB1ScOwoq38aeN9DYxoU6GFRKzVoJjUwRjwAYXJbPPRuAFkwAAcAHgAxBodsAx9GWwBbACMMAD4cxhloVraOCyYjdAAzMDxoOut1e0d0UNIZ6WhWSPOwdGYIbiqATwBtAF0uaHudUQB6ACpv6ABpJBINqJdAbADW0Do5BOw3u5R2VTwMHIq2gAANtjZ0bkbHsnFCwJh8ONjHp0EgwEZ4JFoN9PkRVr1FAZoMwkDRYIjqkgOrosepoEgAB7+eAwAV2BxOLy6ACCVxgIrFEoMeOl6AACpcwMMORgIB1JRMiBNWKVdhruJKfOdIpdrtwFddXlzKjyACp3Nq842HaDIbL6BrZBIVGhIpB1EMYSLsmjmtWW-YhAA+qegAAYLKQLQj3ZsEsdccmnGcLor2Dn8xGedHGpEIBzEzspfsfMHDNAANTQACMVaIljV5GQkRA5DYmIpVKQAgAJARO9le33BDXIyi0YuLW2nJFGLqkOvxFB0YPdBSaLZ0IwNzyPkO8-xkGgsLh8Al427a3hWAhXwwHA8EHT5PmgAB1bAQBAANJ24adKWpft72RaBUTgRBUCAj89HAM8xCTaBjggABRQx0DuHJv25P9dCkWRZVIAAiBjoFImpmjlFBgA0NpsjadByDacgIDAEAIAAQmYpjoGYgAZSBsmGPw6DtZiiFA8CoJguDmAQmoZ2QvtUKQLdoAYmBTwgdEiCAA) + + ```ts + class IdleService { + doNothing (): void {} + } + + class News { + title: string; + content: string; + + constructor(title: string, content: string) { + this.title = title; + this.content = content; + } + } + + const instanceCounter: Map = new Map(); + + interface Constructor { + new(...args: any[]): any; + } + + // Keep track how many instances of `Constr` constructor have been created. + function getInstance< + Constr extends Constructor, + Args extends ConstructorParameters + >(constructor: Constr, ...args: Args): InstanceType { + let count = instanceCounter.get(constructor) || 0; + + const instance = new constructor(...args); + + instanceCounter.set(constructor, count + 1); + + console.log(`Created ${count + 1} instances of ${Constr.name} class`); + + return instance; + } + + + const idleService = getInstance(IdleService); + // Will log: `Created 1 instances of IdleService class` + const newsEntry = getInstance(News, 'New ECMAScript proposals!', 'Last month...'); + // Will log: `Created 1 instances of News class` + ``` +
- [`Omit<T, K>`](https://github.com/microsoft/TypeScript/blob/71af02f7459dc812e85ac31365bfe23daf14b4e4/src/lib/es5.d.ts#L1446) – Constructs a type by picking all properties from `T` and then removing `K`.
Example

[Playground](https://typescript-play.js.org/?target=6#code/JYOwLgpgTgZghgYwgAgIImAWzgG2QbwChlks4BzCAVShwC5kBnMKUcgbmKYAcIFgIjBs1YgOXMpSFMWbANoBdTiW5woFddwAW0kfKWEAvoUIB6U8gDCUCHEiNkICAHdkYAJ69kz4GC3JcPG4oAHteKDABBxCYNAxsPFBIWEQUCAAPJG4wZABySUFcgJAAEzMLXNV1ck0dIuCw6EjBADpy5AB1FAQ4EGQAV0YUP2AHDy8wEOQbUugmBLwtEIA3OcmQnEjuZBgQqE7gAGtgZAhwKHdkHFGwNvGUdDIcAGUliIBJEF3kAF5kAHlML4ADyPBIAGjyBUYRQAPnkqho4NoYQA+TiEGD9EAISIhPozErQMG4AASK2gn2+AApek9pCSXm8wFSQooAJQMUkAFQAsgAZACiOAgmDOOSIJAQ+OYyGl4DgoDmf2QJRCCH6YvALQQNjsEGFovF1NyJWAy1y7OUyHMyE+yRAuFImG4Iq1YDswHxbRINjA-SgfXlHqVUE4xiAA)

```ts
interface Animal {
  imageUrl: string;
  species: string;
  images: string[];
  paragraphs: string[];
}

// Creates new type with all properties of the `Animal` interface
// except 'images' and 'paragraphs' properties. We can use this
// type to render small hover tooltip for a wiki entry list.
type AnimalShortInfo = Omit<Animal, 'images' | 'paragraphs'>;

function renderAnimalHoverInfo (animals: AnimalShortInfo[]): HTMLElement {
  const container = document.createElement('div');
  // Internal implementation.
  return container;
}
```
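For intuition, `Omit` behaves like a `Pick` over the keys that remain. A minimal hand-rolled equivalent, named `MyOmit` here only to avoid clashing with the built-in, looks roughly like this:

```ts
// Rough equivalent of the built-in `Omit`, for illustration only.
type MyOmit<T, K extends keyof any> = Pick<T, Exclude<keyof T, K>>;

interface Animal {
  imageUrl: string;
  species: string;
  images: string[];
  paragraphs: string[];
}

// Produces the same shape as `Omit<Animal, 'images' | 'paragraphs'>` above.
type AnimalShortInfo = MyOmit<Animal, 'images' | 'paragraphs'>;
//=> {imageUrl: string; species: string}
```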
You can find some examples in the [TypeScript docs](https://www.typescriptlang.org/docs/handbook/advanced-types.html#predefined-conditional-types).


## Maintainers

- [Sindre Sorhus](https://github.com/sindresorhus)
- [Jarek Radosz](https://github.com/CvX)
- [Dimitri Benin](https://github.com/BendingBender)


## License

(MIT OR CC0-1.0)


---
Get professional support for this package with a Tidelift subscription

Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/basic.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/basic.d.ts new file mode 100644 index 0000000..5969ce5 --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/basic.d.ts @@ -0,0 +1,67 @@ +/// + +// TODO: This can just be `export type Primitive = not object` when the `not` keyword is out. +/** +Matches any [primitive value](https://developer.mozilla.org/en-US/docs/Glossary/Primitive). +*/ +export type Primitive = + | null + | undefined + | string + | number + | boolean + | symbol + | bigint; + +// TODO: Remove the `= unknown` sometime in the future when most users are on TS 3.5 as it's now the default +/** +Matches a [`class` constructor](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes). +*/ +export type Class = new(...arguments_: Arguments) => T; + +/** +Matches any [typed array](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray), like `Uint8Array` or `Float64Array`. +*/ +export type TypedArray = + | Int8Array + | Uint8Array + | Uint8ClampedArray + | Int16Array + | Uint16Array + | Int32Array + | Uint32Array + | Float32Array + | Float64Array + | BigInt64Array + | BigUint64Array; + +/** +Matches a JSON object. + +This type can be useful to enforce some input to be JSON-compatible or as a super-type to be extended from. Don't use this as a direct return type as the user would have to double-cast it: `jsonObject as unknown as CustomResponse`. Instead, you could extend your CustomResponse type from it to ensure your type only uses JSON-compatible types: `interface CustomResponse extends JsonObject { … }`. +*/ +export type JsonObject = {[key: string]: JsonValue}; + +/** +Matches a JSON array. +*/ +export interface JsonArray extends Array {} + +/** +Matches any valid JSON value. +*/ +export type JsonValue = string | number | boolean | null | JsonObject | JsonArray; + +declare global { + interface SymbolConstructor { + readonly observable: symbol; + } +} + +/** +Matches a value that is like an [Observable](https://github.com/tc39/proposal-observable). +*/ +export interface ObservableLike { + subscribe(observer: (value: unknown) => void): void; + [Symbol.observable](): ObservableLike; +} diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/except.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/except.d.ts new file mode 100644 index 0000000..7dedbaa --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/except.d.ts @@ -0,0 +1,22 @@ +/** +Create a type from an object type without certain keys. + +This type is a stricter version of [`Omit`](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-3-5.html#the-omit-helper-type). The `Omit` type does not restrict the omitted keys to be keys present on the given type, while `Except` does. The benefits of a stricter type are avoiding typos and allowing the compiler to pick up on rename refactors automatically. + +Please upvote [this issue](https://github.com/microsoft/TypeScript/issues/30825) if you want to have the stricter version as a built-in in TypeScript. 
+ +@example +``` +import {Except} from 'type-fest'; + +type Foo = { + a: number; + b: string; + c: boolean; +}; + +type FooWithoutA = Except; +//=> {b: string}; +``` +*/ +export type Except = Pick>; diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/literal-union.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/literal-union.d.ts new file mode 100644 index 0000000..52e8de6 --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/literal-union.d.ts @@ -0,0 +1,33 @@ +import {Primitive} from './basic'; + +/** +Allows creating a union type by combining primitive types and literal types without sacrificing auto-completion in IDEs for the literal type part of the union. + +Currently, when a union type of a primitive type is combined with literal types, TypeScript loses all information about the combined literals. Thus, when such type is used in an IDE with autocompletion, no suggestions are made for the declared literals. + +This type is a workaround for [Microsoft/TypeScript#29729](https://github.com/Microsoft/TypeScript/issues/29729). It will be removed as soon as it's not needed anymore. + +@example +``` +import {LiteralUnion} from 'type-fest'; + +// Before + +type Pet = 'dog' | 'cat' | string; + +const pet: Pet = ''; +// Start typing in your TypeScript-enabled IDE. +// You **will not** get auto-completion for `dog` and `cat` literals. + +// After + +type Pet2 = LiteralUnion<'dog' | 'cat', string>; + +const pet: Pet2 = ''; +// You **will** get auto-completion for `dog` and `cat` literals. +``` + */ +export type LiteralUnion< + LiteralType extends BaseType, + BaseType extends Primitive +> = LiteralType | (BaseType & {_?: never}); diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/merge-exclusive.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/merge-exclusive.d.ts new file mode 100644 index 0000000..059bd2c --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/merge-exclusive.d.ts @@ -0,0 +1,39 @@ +// Helper type. Not useful on its own. +type Without = {[KeyType in Exclude]?: never}; + +/** +Create a type that has mutually exclusive keys. + +This type was inspired by [this comment](https://github.com/Microsoft/TypeScript/issues/14094#issuecomment-373782604). + +This type works with a helper type, called `Without`. `Without` produces a type that has only keys from `FirstType` which are not present on `SecondType` and sets the value type for these keys to `never`. This helper type is then used in `MergeExclusive` to remove keys from either `FirstType` or `SecondType`. + +@example +``` +import {MergeExclusive} from 'type-fest'; + +interface ExclusiveVariation1 { + exclusive1: boolean; +} + +interface ExclusiveVariation2 { + exclusive2: string; +} + +type ExclusiveOptions = MergeExclusive; + +let exclusiveOptions: ExclusiveOptions; + +exclusiveOptions = {exclusive1: true}; +//=> Works +exclusiveOptions = {exclusive2: 'hi'}; +//=> Works +exclusiveOptions = {exclusive1: true, exclusive2: 'hi'}; +//=> Error +``` +*/ +export type MergeExclusive = + (FirstType | SecondType) extends object ? 
+ (Without & SecondType) | (Without & FirstType) : + FirstType | SecondType; + diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/merge.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/merge.d.ts new file mode 100644 index 0000000..4b3920b --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/merge.d.ts @@ -0,0 +1,22 @@ +import {Except} from './except'; + +/** +Merge two types into a new type. Keys of the second type overrides keys of the first type. + +@example +``` +import {Merge} from 'type-fest'; + +type Foo = { + a: number; + b: string; +}; + +type Bar = { + b: number; +}; + +const ab: Merge = {a: 1, b: 2}; +``` +*/ +export type Merge = Except> & SecondType; diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/mutable.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/mutable.d.ts new file mode 100644 index 0000000..03d0dda --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/mutable.d.ts @@ -0,0 +1,22 @@ +/** +Convert an object with `readonly` keys into a mutable object. Inverse of `Readonly`. + +This can be used to [store and mutate options within a class](https://github.com/sindresorhus/pageres/blob/4a5d05fca19a5fbd2f53842cbf3eb7b1b63bddd2/source/index.ts#L72), [edit `readonly` objects within tests](https://stackoverflow.com/questions/50703834), and [construct a `readonly` object within a function](https://github.com/Microsoft/TypeScript/issues/24509). + +@example +``` +import {Mutable} from 'type-fest'; + +type Foo = { + readonly a: number; + readonly b: string; +}; + +const mutableFoo: Mutable = {a: 1, b: '2'}; +mutableFoo.a = 3; +``` +*/ +export type Mutable = { + // For each `Key` in the keys of `ObjectType`, make a mapped type by removing the `readonly` modifier from the key. + -readonly [KeyType in keyof ObjectType]: ObjectType[KeyType]; +}; diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/opaque.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/opaque.d.ts new file mode 100644 index 0000000..5311c1b --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/opaque.d.ts @@ -0,0 +1,40 @@ +/** +Create an opaque type, which hides its internal details from the public, and can only be created by being used explicitly. + +The generic type parameter can be anything. It doesn't have to be an object. + +[Read more about opaque types.](https://codemix.com/opaque-types-in-javascript/) + +There have been several discussions about adding this feature to TypeScript via the `opaque type` operator, similar to how Flow does it. Unfortunately, nothing has (yet) moved forward: + - [Microsoft/TypeScript#15408](https://github.com/Microsoft/TypeScript/issues/15408) + - [Microsoft/TypeScript#15807](https://github.com/Microsoft/TypeScript/issues/15807) + +@example +``` +import {Opaque} from 'type-fest'; + +type AccountNumber = Opaque; +type AccountBalance = Opaque; + +function createAccountNumber(): AccountNumber { + return 2 as AccountNumber; +} + +function getMoneyForAccount(accountNumber: AccountNumber): AccountBalance { + return 4 as AccountBalance; +} + +// This will compile successfully. +getMoneyForAccount(createAccountNumber()); + +// But this won't, because it has to be explicitly passed as an `AccountNumber` type. +getMoneyForAccount(2); + +// You can use opaque values like they aren't opaque too. 
+const accountNumber = createAccountNumber(); + +// This will compile successfully. +accountNumber + 2; +``` +*/ +export type Opaque = Type & {readonly __opaque__: unique symbol}; diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/package-json.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/package-json.d.ts new file mode 100644 index 0000000..3179e58 --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/package-json.d.ts @@ -0,0 +1,501 @@ +import {LiteralUnion} from '..'; + +declare namespace PackageJson { + /** + A person who has been involved in creating or maintaining the package. + */ + export type Person = + | string + | { + name: string; + url?: string; + email?: string; + }; + + export type BugsLocation = + | string + | { + /** + The URL to the package's issue tracker. + */ + url?: string; + + /** + The email address to which issues should be reported. + */ + email?: string; + }; + + export interface DirectoryLocations { + /** + Location for executable scripts. Sugar to generate entries in the `bin` property by walking the folder. + */ + bin?: string; + + /** + Location for Markdown files. + */ + doc?: string; + + /** + Location for example scripts. + */ + example?: string; + + /** + Location for the bulk of the library. + */ + lib?: string; + + /** + Location for man pages. Sugar to generate a `man` array by walking the folder. + */ + man?: string; + + /** + Location for test files. + */ + test?: string; + + [directoryType: string]: unknown; + } + + export type Scripts = { + /** + Run **before** the package is published (Also run on local `npm install` without any arguments). + */ + prepublish?: string; + + /** + Run both **before** the package is packed and published, and on local `npm install` without any arguments. This is run **after** `prepublish`, but **before** `prepublishOnly`. + */ + prepare?: string; + + /** + Run **before** the package is prepared and packed, **only** on `npm publish`. + */ + prepublishOnly?: string; + + /** + Run **before** a tarball is packed (on `npm pack`, `npm publish`, and when installing git dependencies). + */ + prepack?: string; + + /** + Run **after** the tarball has been generated and moved to its final destination. + */ + postpack?: string; + + /** + Run **after** the package is published. + */ + publish?: string; + + /** + Run **after** the package is published. + */ + postpublish?: string; + + /** + Run **before** the package is installed. + */ + preinstall?: string; + + /** + Run **after** the package is installed. + */ + install?: string; + + /** + Run **after** the package is installed and after `install`. + */ + postinstall?: string; + + /** + Run **before** the package is uninstalled and before `uninstall`. + */ + preuninstall?: string; + + /** + Run **before** the package is uninstalled. + */ + uninstall?: string; + + /** + Run **after** the package is uninstalled. + */ + postuninstall?: string; + + /** + Run **before** bump the package version and before `version`. + */ + preversion?: string; + + /** + Run **before** bump the package version. + */ + version?: string; + + /** + Run **after** bump the package version. + */ + postversion?: string; + + /** + Run with the `npm test` command, before `test`. + */ + pretest?: string; + + /** + Run with the `npm test` command. + */ + test?: string; + + /** + Run with the `npm test` command, after `test`. + */ + posttest?: string; + + /** + Run with the `npm stop` command, before `stop`. 
+ */ + prestop?: string; + + /** + Run with the `npm stop` command. + */ + stop?: string; + + /** + Run with the `npm stop` command, after `stop`. + */ + poststop?: string; + + /** + Run with the `npm start` command, before `start`. + */ + prestart?: string; + + /** + Run with the `npm start` command. + */ + start?: string; + + /** + Run with the `npm start` command, after `start`. + */ + poststart?: string; + + /** + Run with the `npm restart` command, before `restart`. Note: `npm restart` will run the `stop` and `start` scripts if no `restart` script is provided. + */ + prerestart?: string; + + /** + Run with the `npm restart` command. Note: `npm restart` will run the `stop` and `start` scripts if no `restart` script is provided. + */ + restart?: string; + + /** + Run with the `npm restart` command, after `restart`. Note: `npm restart` will run the `stop` and `start` scripts if no `restart` script is provided. + */ + postrestart?: string; + } & { + [scriptName: string]: string; + }; + + /** + Dependencies of the package. The version range is a string which has one or more space-separated descriptors. Dependencies can also be identified with a tarball or Git URL. + */ + export interface Dependency { + [packageName: string]: string; + } + + export interface NonStandardEntryPoints { + /** + An ECMAScript module ID that is the primary entry point to the program. + */ + module?: string; + + /** + A module ID with untranspiled code that is the primary entry point to the program. + */ + esnext?: + | string + | { + main?: string; + browser?: string; + [moduleName: string]: string | undefined; + }; + + /** + A hint to JavaScript bundlers or component tools when packaging modules for client side use. + */ + browser?: + | string + | { + [moduleName: string]: string | false; + }; + } + + export interface TypeScriptConfiguration { + /** + Location of the bundled TypeScript declaration file. + */ + types?: string; + + /** + Location of the bundled TypeScript declaration file. Alias of `types`. + */ + typings?: string; + } + + export interface YarnConfiguration { + /** + If your package only allows one version of a given dependency, and you’d like to enforce the same behavior as `yarn install --flat` on the command line, set this to `true`. + + Note that if your `package.json` contains `"flat": true` and other packages depend on yours (e.g. you are building a library rather than an application), those other packages will also need `"flat": true` in their `package.json` or be installed with `yarn install --flat` on the command-line. + */ + flat?: boolean; + + /** + Selective version resolutions. Allows the definition of custom package versions inside dependencies without manual edits in the `yarn.lock` file. + */ + resolutions?: Dependency; + } + + export interface JSPMConfiguration { + /** + JSPM configuration. + */ + jspm?: PackageJson; + } +} + +/** +Type for [npm's `package.json` file](https://docs.npmjs.com/creating-a-package-json-file). Also includes types for fields used by other popular projects, like TypeScript and Yarn. +*/ +export type PackageJson = { + /** + The name of the package. + */ + name?: string; + + /** + Package version, parseable by [`node-semver`](https://github.com/npm/node-semver). + */ + version?: string; + + /** + Package description, listed in `npm search`. + */ + description?: string; + + /** + Keywords associated with package, listed in `npm search`. + */ + keywords?: string[]; + + /** + The URL to the package's homepage. 
+ */ + homepage?: LiteralUnion<'.', string>; + + /** + The URL to the package's issue tracker and/or the email address to which issues should be reported. + */ + bugs?: PackageJson.BugsLocation; + + /** + The license for the package. + */ + license?: string; + + /** + The licenses for the package. + */ + licenses?: Array<{ + type?: string; + url?: string; + }>; + + author?: PackageJson.Person; + + /** + A list of people who contributed to the package. + */ + contributors?: PackageJson.Person[]; + + /** + A list of people who maintain the package. + */ + maintainers?: PackageJson.Person[]; + + /** + The files included in the package. + */ + files?: string[]; + + /** + The module ID that is the primary entry point to the program. + */ + main?: string; + + /** + The executable files that should be installed into the `PATH`. + */ + bin?: + | string + | { + [binary: string]: string; + }; + + /** + Filenames to put in place for the `man` program to find. + */ + man?: string | string[]; + + /** + Indicates the structure of the package. + */ + directories?: PackageJson.DirectoryLocations; + + /** + Location for the code repository. + */ + repository?: + | string + | { + type: string; + url: string; + }; + + /** + Script commands that are run at various times in the lifecycle of the package. The key is the lifecycle event, and the value is the command to run at that point. + */ + scripts?: PackageJson.Scripts; + + /** + Is used to set configuration parameters used in package scripts that persist across upgrades. + */ + config?: { + [configKey: string]: unknown; + }; + + /** + The dependencies of the package. + */ + dependencies?: PackageJson.Dependency; + + /** + Additional tooling dependencies that are not required for the package to work. Usually test, build, or documentation tooling. + */ + devDependencies?: PackageJson.Dependency; + + /** + Dependencies that are skipped if they fail to install. + */ + optionalDependencies?: PackageJson.Dependency; + + /** + Dependencies that will usually be required by the package user directly or via another dependency. + */ + peerDependencies?: PackageJson.Dependency; + + /** + Package names that are bundled when the package is published. + */ + bundledDependencies?: string[]; + + /** + Alias of `bundledDependencies`. + */ + bundleDependencies?: string[]; + + /** + Engines that this package runs on. + */ + engines?: { + [EngineName in 'npm' | 'node' | string]: string; + }; + + /** + @deprecated + */ + engineStrict?: boolean; + + /** + Operating systems the module runs on. + */ + os?: Array>; + + /** + CPU architectures the module runs on. + */ + cpu?: Array>; + + /** + If set to `true`, a warning will be shown if package is installed locally. Useful if the package is primarily a command-line application that should be installed globally. + + @deprecated + */ + preferGlobal?: boolean; + + /** + If set to `true`, then npm will refuse to publish it. + */ + private?: boolean; + + /** + * A set of config values that will be used at publish-time. It's especially handy to set the tag, registry or access, to ensure that a given package is not tagged with 'latest', published to the global public registry or that a scoped module is private by default. 
+ */ + publishConfig?: { + [config: string]: unknown; + }; +} & +PackageJson.NonStandardEntryPoints & +PackageJson.TypeScriptConfiguration & +PackageJson.YarnConfiguration & +PackageJson.JSPMConfiguration & { + [key: string]: unknown; +}; diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/partial-deep.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/partial-deep.d.ts new file mode 100644 index 0000000..b962b84 --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/partial-deep.d.ts @@ -0,0 +1,72 @@ +import {Primitive} from './basic'; + +/** +Create a type from another type with all keys and nested keys set to optional. + +Use-cases: +- Merging a default settings/config object with another object, the second object would be a deep partial of the default object. +- Mocking and testing complex entities, where populating an entire object with its keys would be redundant in terms of the mock or test. + +@example +``` +import {PartialDeep} from 'type-fest'; + +const settings: Settings = { + textEditor: { + fontSize: 14; + fontColor: '#000000'; + fontWeight: 400; + } + autocomplete: false; + autosave: true; +}; + +const applySavedSettings = (savedSettings: PartialDeep) => { + return {...settings, ...savedSettings}; +} + +settings = applySavedSettings({textEditor: {fontWeight: 500}}); +``` +*/ +export type PartialDeep = T extends Primitive + ? Partial + : T extends Map + ? PartialMapDeep + : T extends Set + ? PartialSetDeep + : T extends ReadonlyMap + ? PartialReadonlyMapDeep + : T extends ReadonlySet + ? PartialReadonlySetDeep + : T extends ((...arguments: any[]) => unknown) + ? T | undefined + : T extends object + ? PartialObjectDeep + : unknown; + +/** +Same as `PartialDeep`, but accepts only `Map`s and as inputs. Internal helper for `PartialDeep`. +*/ +interface PartialMapDeep extends Map, PartialDeep> {} + +/** +Same as `PartialDeep`, but accepts only `Set`s as inputs. Internal helper for `PartialDeep`. +*/ +interface PartialSetDeep extends Set> {} + +/** +Same as `PartialDeep`, but accepts only `ReadonlyMap`s as inputs. Internal helper for `PartialDeep`. +*/ +interface PartialReadonlyMapDeep extends ReadonlyMap, PartialDeep> {} + +/** +Same as `PartialDeep`, but accepts only `ReadonlySet`s as inputs. Internal helper for `PartialDeep`. +*/ +interface PartialReadonlySetDeep extends ReadonlySet> {} + +/** +Same as `PartialDeep`, but accepts only `object`s as inputs. Internal helper for `PartialDeep`. +*/ +type PartialObjectDeep = { + [KeyType in keyof ObjectType]?: PartialDeep +}; diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/promisable.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/promisable.d.ts new file mode 100644 index 0000000..71242a5 --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/promisable.d.ts @@ -0,0 +1,23 @@ +/** +Create a type that represents either the value or the value wrapped in `PromiseLike`. + +Use-cases: +- A function accepts a callback that may either return a value synchronously or may return a promised value. +- This type could be the return type of `Promise#then()`, `Promise#catch()`, and `Promise#finally()` callbacks. + +Please upvote [this issue](https://github.com/microsoft/TypeScript/issues/31394) if you want to have this type as a built-in in TypeScript. 
+ +@example +``` +import {Promisable} from 'type-fest'; + +async function logger(getLogEntry: () => Promisable): Promise { + const entry = await getLogEntry(); + console.log(entry); +} + +logger(() => 'foo'); +logger(() => Promise.resolve('bar')); +``` +*/ +export type Promisable = T | PromiseLike; diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/readonly-deep.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/readonly-deep.d.ts new file mode 100644 index 0000000..b8c04de --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/readonly-deep.d.ts @@ -0,0 +1,59 @@ +import {Primitive} from './basic'; + +/** +Convert `object`s, `Map`s, `Set`s, and `Array`s and all of their keys/elements into immutable structures recursively. + +This is useful when a deeply nested structure needs to be exposed as completely immutable, for example, an imported JSON module or when receiving an API response that is passed around. + +Please upvote [this issue](https://github.com/microsoft/TypeScript/issues/13923) if you want to have this type as a built-in in TypeScript. + +@example +``` +// data.json +{ + "foo": ["bar"] +} + +// main.ts +import {ReadonlyDeep} from 'type-fest'; +import dataJson = require('./data.json'); + +const data: ReadonlyDeep = dataJson; + +export default data; + +// test.ts +import data from './main'; + +data.foo.push('bar'); +//=> error TS2339: Property 'push' does not exist on type 'readonly string[]' +``` +*/ +export type ReadonlyDeep = T extends Primitive | ((...arguments: any[]) => unknown) + ? T + : T extends ReadonlyMap + ? ReadonlyMapDeep + : T extends ReadonlySet + ? ReadonlySetDeep + : T extends object + ? ReadonlyObjectDeep + : unknown; + +/** +Same as `ReadonlyDeep`, but accepts only `ReadonlyMap`s as inputs. Internal helper for `ReadonlyDeep`. +*/ +interface ReadonlyMapDeep + extends ReadonlyMap, ReadonlyDeep> {} + +/** +Same as `ReadonlyDeep`, but accepts only `ReadonlySet`s as inputs. Internal helper for `ReadonlyDeep`. +*/ +interface ReadonlySetDeep + extends ReadonlySet> {} + +/** +Same as `ReadonlyDeep`, but accepts only `object`s as inputs. Internal helper for `ReadonlyDeep`. +*/ +type ReadonlyObjectDeep = { + readonly [KeyType in keyof ObjectType]: ReadonlyDeep +}; diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/require-at-least-one.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/require-at-least-one.d.ts new file mode 100644 index 0000000..337379f --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/require-at-least-one.d.ts @@ -0,0 +1,32 @@ +import {Except} from './except'; + +/** +Create a type that requires at least one of the given keys. The remaining keys are kept as is. + +@example +``` +import {RequireAtLeastOne} from 'type-fest'; + +type Responder = { + text?: () => string; + json?: () => string; + + secure?: boolean; +}; + +const responder: RequireAtLeastOne = { + json: () => '{"message": "ok"}', + secure: true +}; +``` +*/ +export type RequireAtLeastOne = + { + // For each Key in KeysType make a mapped type + [Key in KeysType]: ( + // …by picking that Key's type and making it required + Required> + ) + }[KeysType] + // …then, make intersection types by adding the remaining keys to each mapped type. 
+ & Except; diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/require-exactly-one.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/require-exactly-one.d.ts new file mode 100644 index 0000000..d8c71b7 --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/require-exactly-one.d.ts @@ -0,0 +1,36 @@ +// TODO: Remove this when we target TypeScript >=3.5. +// eslint-disable-next-line @typescript-eslint/generic-type-naming +type _Omit = Pick>; + +/** +Create a type that requires exactly one of the given keys and disallows more. The remaining keys are kept as is. + +Use-cases: +- Creating interfaces for components that only need one of the keys to display properly. +- Declaring generic keys in a single place for a single use-case that gets narrowed down via `RequireExactlyOne`. + +The caveat with `RequireExactlyOne` is that TypeScript doesn't always know at compile time every key that will exist at runtime. Therefore `RequireExactlyOne` can't do anything to prevent extra keys it doesn't know about. + +@example +``` +import {RequireExactlyOne} from 'type-fest'; + +type Responder = { + text: () => string; + json: () => string; + secure: boolean; +}; + +const responder: RequireExactlyOne = { + // Adding a `text` key here would cause a compile error. + + json: () => '{"message": "ok"}', + secure: true +}; +``` +*/ +export type RequireExactlyOne = + {[Key in KeysType]: ( + Required> & + Partial, never>> + )}[KeysType] & _Omit; diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/set-optional.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/set-optional.d.ts new file mode 100644 index 0000000..a9a256a --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/set-optional.d.ts @@ -0,0 +1,32 @@ +/** +Create a type that makes the given keys optional. The remaining keys are kept as is. The sister of the `SetRequired` type. + +Use-case: You want to define a single model where the only thing that changes is whether or not some of the keys are optional. + +@example +``` +import {SetOptional} from 'type-fest'; + +type Foo = { + a: number; + b?: string; + c: boolean; +} + +type SomeOptional = SetOptional; +// type SomeOptional = { +// a: number; +// b?: string; // Was already optional and still is. +// c?: boolean; // Is now optional. +// } +``` +*/ +export type SetOptional = + // Pick just the keys that are not optional from the base type. + Pick> & + // Pick the keys that should be optional from the base type and make them optional. + Partial> extends + // If `InferredType` extends the previous, then for each key, use the inferred type key. + infer InferredType + ? {[KeyType in keyof InferredType]: InferredType[KeyType]} + : never; diff --git a/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/set-required.d.ts b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/set-required.d.ts new file mode 100644 index 0000000..2572bc1 --- /dev/null +++ b/mybulma/node_modules/read-pkg-up/node_modules/type-fest/source/set-required.d.ts @@ -0,0 +1,32 @@ +/** +Create a type that makes the given keys required. The remaining keys are kept as is. The sister of the `SetOptional` type. + +Use-case: You want to define a single model where the only thing that changes is whether or not some of the keys are required. 
+ +@example +``` +import {SetRequired} from 'type-fest'; + +type Foo = { + a?: number; + b: string; + c?: boolean; +} + +type SomeRequired = SetRequired; +// type SomeRequired = { +// a?: number; +// b: string; // Was already required and still is. +// c: boolean; // Is now required. +// } +``` +*/ +export type SetRequired = + // Pick just the keys that are not required from the base type. + Pick> & + // Pick the keys that should be required from the base type and make them required. + Required> extends + // If `InferredType` extends the previous, then for each key, use the inferred type key. + infer InferredType + ? {[KeyType in keyof InferredType]: InferredType[KeyType]} + : never; diff --git a/mybulma/node_modules/read-pkg/index.d.ts b/mybulma/node_modules/read-pkg/index.d.ts new file mode 100644 index 0000000..a937926 --- /dev/null +++ b/mybulma/node_modules/read-pkg/index.d.ts @@ -0,0 +1,67 @@ +import * as typeFest from 'type-fest'; +import normalize = require('normalize-package-data'); + +declare namespace readPkg { + interface Options { + /** + [Normalize](https://github.com/npm/normalize-package-data#what-normalization-currently-entails) the package data. + + @default true + */ + readonly normalize?: boolean; + + /** + Current working directory. + + @default process.cwd() + */ + readonly cwd?: string; + } + + interface NormalizeOptions extends Options { + readonly normalize?: true; + } + + type NormalizedPackageJson = PackageJson & normalize.Package; + type PackageJson = typeFest.PackageJson; +} + +declare const readPkg: { + /** + @returns The parsed JSON. + + @example + ``` + import readPkg = require('read-pkg'); + + (async () => { + console.log(await readPkg()); + //=> {name: 'read-pkg', …} + + console.log(await readPkg({cwd: 'some-other-directory'}); + //=> {name: 'unicorn', …} + })(); + ``` + */ + (options?: readPkg.NormalizeOptions): Promise; + (options: readPkg.Options): Promise; + + /** + @returns The parsed JSON. 
+ + @example + ``` + import readPkg = require('read-pkg'); + + console.log(readPkg.sync()); + //=> {name: 'read-pkg', …} + + console.log(readPkg.sync({cwd: 'some-other-directory'}); + //=> {name: 'unicorn', …} + ``` + */ + sync(options?: readPkg.NormalizeOptions): readPkg.NormalizedPackageJson; + sync(options: readPkg.Options): readPkg.PackageJson; +}; + +export = readPkg; diff --git a/mybulma/node_modules/read-pkg/index.js b/mybulma/node_modules/read-pkg/index.js new file mode 100644 index 0000000..c1243a8 --- /dev/null +++ b/mybulma/node_modules/read-pkg/index.js @@ -0,0 +1,41 @@ +'use strict'; +const {promisify} = require('util'); +const fs = require('fs'); +const path = require('path'); +const parseJson = require('parse-json'); + +const readFileAsync = promisify(fs.readFile); + +module.exports = async options => { + options = { + cwd: process.cwd(), + normalize: true, + ...options + }; + + const filePath = path.resolve(options.cwd, 'package.json'); + const json = parseJson(await readFileAsync(filePath, 'utf8')); + + if (options.normalize) { + require('normalize-package-data')(json); + } + + return json; +}; + +module.exports.sync = options => { + options = { + cwd: process.cwd(), + normalize: true, + ...options + }; + + const filePath = path.resolve(options.cwd, 'package.json'); + const json = parseJson(fs.readFileSync(filePath, 'utf8')); + + if (options.normalize) { + require('normalize-package-data')(json); + } + + return json; +}; diff --git a/mybulma/node_modules/read-pkg/license b/mybulma/node_modules/read-pkg/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/mybulma/node_modules/read-pkg/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/mybulma/node_modules/read-pkg/node_modules/.bin/semver b/mybulma/node_modules/read-pkg/node_modules/.bin/semver new file mode 100644 index 0000000..801e77f --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/.bin/semver @@ -0,0 +1,160 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. 
+ +var argv = process.argv.slice(2) + +var versions = [] + +var range = [] + +var inc = null + +var version = require('../package.json').version + +var loose = false + +var includePrerelease = false + +var coerce = false + +var identifier + +var semver = require('../semver') + +var reverse = false + +var options = {} + +main() + +function main () { + if (!argv.length) return help() + while (argv.length) { + var a = argv.shift() + var indexOfEqualSign = a.indexOf('=') + if (indexOfEqualSign !== -1) { + a = a.slice(0, indexOfEqualSign) + argv.unshift(a.slice(indexOfEqualSign + 1)) + } + switch (a) { + case '-rv': case '-rev': case '--rev': case '--reverse': + reverse = true + break + case '-l': case '--loose': + loose = true + break + case '-p': case '--include-prerelease': + includePrerelease = true + break + case '-v': case '--version': + versions.push(argv.shift()) + break + case '-i': case '--inc': case '--increment': + switch (argv[0]) { + case 'major': case 'minor': case 'patch': case 'prerelease': + case 'premajor': case 'preminor': case 'prepatch': + inc = argv.shift() + break + default: + inc = 'patch' + break + } + break + case '--preid': + identifier = argv.shift() + break + case '-r': case '--range': + range.push(argv.shift()) + break + case '-c': case '--coerce': + coerce = true + break + case '-h': case '--help': case '-?': + return help() + default: + versions.push(a) + break + } + } + + var options = { loose: loose, includePrerelease: includePrerelease } + + versions = versions.map(function (v) { + return coerce ? (semver.coerce(v) || { version: v }).version : v + }).filter(function (v) { + return semver.valid(v) + }) + if (!versions.length) return fail() + if (inc && (versions.length !== 1 || range.length)) { return failInc() } + + for (var i = 0, l = range.length; i < l; i++) { + versions = versions.filter(function (v) { + return semver.satisfies(v, range[i], options) + }) + if (!versions.length) return fail() + } + return success(versions) +} + +function failInc () { + console.error('--inc can only be used on a single version with no range') + fail() +} + +function fail () { process.exit(1) } + +function success () { + var compare = reverse ? 'rcompare' : 'compare' + versions.sort(function (a, b) { + return semver[compare](a, b, options) + }).map(function (v) { + return semver.clean(v, options) + }).map(function (v) { + return inc ? semver.inc(v, inc, options, identifier) : v + }).forEach(function (v, i, _) { console.log(v) }) +} + +function help () { + console.log(['SemVer ' + version, + '', + 'A JavaScript implementation of the https://semver.org/ specification', + 'Copyright Isaac Z. Schlueter', + '', + 'Usage: semver [options] [ [...]]', + 'Prints valid versions sorted by SemVer precedence', + '', + 'Options:', + '-r --range ', + ' Print versions that match the specified range.', + '', + '-i --increment []', + ' Increment a version by the specified level. Level can', + ' be one of: major, minor, patch, premajor, preminor,', + " prepatch, or prerelease. 
Default level is 'patch'.", + ' Only one version may be specified.', + '', + '--preid ', + ' Identifier to be used to prefix premajor, preminor,', + ' prepatch or prerelease version increments.', + '', + '-l --loose', + ' Interpret versions and ranges loosely', + '', + '-p --include-prerelease', + ' Always include prerelease versions in range matching', + '', + '-c --coerce', + ' Coerce a string into SemVer if possible', + ' (does not imply --loose)', + '', + 'Program exits successfully if any valid version satisfies', + 'all supplied ranges, and prints all satisfying versions.', + '', + 'If no satisfying versions are found, then exits failure.', + '', + 'Versions are printed in ascending order, so supplying', + 'multiple versions to the utility will just sort them.' + ].join('\n')) +} diff --git a/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/CHANGELOG.md b/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/CHANGELOG.md new file mode 100644 index 0000000..6987fb4 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/CHANGELOG.md @@ -0,0 +1,151 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + + +## [2.8.9](https://github.com/npm/hosted-git-info/compare/v2.8.8...v2.8.9) (2021-04-07) + + +### Bug Fixes + +* backport regex fix from [#76](https://github.com/npm/hosted-git-info/issues/76) ([29adfe5](https://github.com/npm/hosted-git-info/commit/29adfe5)), closes [#84](https://github.com/npm/hosted-git-info/issues/84) + + + + +## [2.8.8](https://github.com/npm/hosted-git-info/compare/v2.8.7...v2.8.8) (2020-02-29) + + +### Bug Fixes + +* [#61](https://github.com/npm/hosted-git-info/issues/61) & [#65](https://github.com/npm/hosted-git-info/issues/65) addressing issues w/ url.URL implmentation which regressed node 6 support ([5038b18](https://github.com/npm/hosted-git-info/commit/5038b18)), closes [#66](https://github.com/npm/hosted-git-info/issues/66) + + + + +## [2.8.7](https://github.com/npm/hosted-git-info/compare/v2.8.6...v2.8.7) (2020-02-26) + + +### Bug Fixes + +* Do not attempt to use url.URL when unavailable ([2d0bb66](https://github.com/npm/hosted-git-info/commit/2d0bb66)), closes [#61](https://github.com/npm/hosted-git-info/issues/61) [#62](https://github.com/npm/hosted-git-info/issues/62) +* Do not pass scp-style URLs to the WhatWG url.URL ([f2cdfcf](https://github.com/npm/hosted-git-info/commit/f2cdfcf)), closes [#60](https://github.com/npm/hosted-git-info/issues/60) + + + + +## [2.8.6](https://github.com/npm/hosted-git-info/compare/v2.8.5...v2.8.6) (2020-02-25) + + + + +## [2.8.5](https://github.com/npm/hosted-git-info/compare/v2.8.4...v2.8.5) (2019-10-07) + + +### Bug Fixes + +* updated pathmatch for gitlab ([e8325b5](https://github.com/npm/hosted-git-info/commit/e8325b5)), closes [#51](https://github.com/npm/hosted-git-info/issues/51) +* updated pathmatch for gitlab ([ffe056f](https://github.com/npm/hosted-git-info/commit/ffe056f)) + + + + +## [2.8.4](https://github.com/npm/hosted-git-info/compare/v2.8.3...v2.8.4) (2019-08-12) + + + + +## [2.8.3](https://github.com/npm/hosted-git-info/compare/v2.8.2...v2.8.3) (2019-08-12) + + + + +## [2.8.2](https://github.com/npm/hosted-git-info/compare/v2.8.1...v2.8.2) (2019-08-05) + + +### Bug Fixes + +* http protocol use sshurl by default ([3b1d629](https://github.com/npm/hosted-git-info/commit/3b1d629)), closes 
[#48](https://github.com/npm/hosted-git-info/issues/48) + + + + +## [2.8.1](https://github.com/npm/hosted-git-info/compare/v2.8.0...v2.8.1) (2019-08-05) + + +### Bug Fixes + +* ignore noCommittish on tarball url generation ([5d4a8d7](https://github.com/npm/hosted-git-info/commit/5d4a8d7)) +* use gist tarball url that works for anonymous gists ([1692435](https://github.com/npm/hosted-git-info/commit/1692435)) + + + + +# [2.8.0](https://github.com/npm/hosted-git-info/compare/v2.7.1...v2.8.0) (2019-08-05) + + +### Bug Fixes + +* Allow slashes in gitlab project section ([bbcf7b2](https://github.com/npm/hosted-git-info/commit/bbcf7b2)), closes [#46](https://github.com/npm/hosted-git-info/issues/46) [#43](https://github.com/npm/hosted-git-info/issues/43) +* **git-host:** disallow URI-encoded slash (%2F) in `path` ([3776fa5](https://github.com/npm/hosted-git-info/commit/3776fa5)), closes [#44](https://github.com/npm/hosted-git-info/issues/44) +* **gitlab:** Do not URL encode slashes in project name for GitLab https URL ([cbf04f9](https://github.com/npm/hosted-git-info/commit/cbf04f9)), closes [#47](https://github.com/npm/hosted-git-info/issues/47) +* do not allow invalid gist urls ([d5cf830](https://github.com/npm/hosted-git-info/commit/d5cf830)) +* **cache:** Switch to lru-cache to save ourselves from unlimited memory consumption ([e518222](https://github.com/npm/hosted-git-info/commit/e518222)), closes [#38](https://github.com/npm/hosted-git-info/issues/38) + + +### Features + +* give these objects a name ([60abaea](https://github.com/npm/hosted-git-info/commit/60abaea)) + + + + +## [2.7.1](https://github.com/npm/hosted-git-info/compare/v2.7.0...v2.7.1) (2018-07-07) + + +### Bug Fixes + +* **index:** Guard against non-string types ([5bc580d](https://github.com/npm/hosted-git-info/commit/5bc580d)) +* **parse:** Crash on strings that parse to having no host ([c931482](https://github.com/npm/hosted-git-info/commit/c931482)), closes [#35](https://github.com/npm/hosted-git-info/issues/35) + + + + +# [2.7.0](https://github.com/npm/hosted-git-info/compare/v2.6.1...v2.7.0) (2018-07-06) + + +### Bug Fixes + +* **github tarball:** update github tarballtemplate ([6efd582](https://github.com/npm/hosted-git-info/commit/6efd582)), closes [#34](https://github.com/npm/hosted-git-info/issues/34) +* **gitlab docs:** switched to lowercase anchors for readmes ([701bcd1](https://github.com/npm/hosted-git-info/commit/701bcd1)) + + +### Features + +* **all:** Support www. 
prefixes on hostnames ([3349575](https://github.com/npm/hosted-git-info/commit/3349575)), closes [#32](https://github.com/npm/hosted-git-info/issues/32) + + + + +## [2.6.1](https://github.com/npm/hosted-git-info/compare/v2.6.0...v2.6.1) (2018-06-25) + +### Bug Fixes + +* **Revert:** "compat: remove Object.assign fallback ([#25](https://github.com/npm/hosted-git-info/issues/25))" ([cce5a62](https://github.com/npm/hosted-git-info/commit/cce5a62)) +* **Revert:** "git-host: fix forgotten extend()" ([a815ec9](https://github.com/npm/hosted-git-info/commit/a815ec9)) + + + + +# [2.6.0](https://github.com/npm/hosted-git-info/compare/v2.5.0...v2.6.0) (2018-03-07) + + +### Bug Fixes + +* **compat:** remove Object.assign fallback ([#25](https://github.com/npm/hosted-git-info/issues/25)) ([627ab55](https://github.com/npm/hosted-git-info/commit/627ab55)) +* **git-host:** fix forgotten extend() ([eba1f7b](https://github.com/npm/hosted-git-info/commit/eba1f7b)) + + +### Features + +* **browse:** fragment support for browse() ([#28](https://github.com/npm/hosted-git-info/issues/28)) ([cd5e5bb](https://github.com/npm/hosted-git-info/commit/cd5e5bb)) diff --git a/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/LICENSE b/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/LICENSE new file mode 100644 index 0000000..4505576 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. diff --git a/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/README.md b/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/README.md new file mode 100644 index 0000000..7b723f6 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/README.md @@ -0,0 +1,133 @@ +# hosted-git-info + +This will let you identify and transform various git hosts URLs between +protocols. It also can tell you what the URL is for the raw path for +particular file for direct access without git. + +## Example + +```javascript +var hostedGitInfo = require("hosted-git-info") +var info = hostedGitInfo.fromUrl("git@github.com:npm/hosted-git-info.git", opts) +/* info looks like: +{ + type: "github", + domain: "github.com", + user: "npm", + project: "hosted-git-info" +} +*/ +``` + +If the URL can't be matched with a git host, `null` will be returned. We +can match git, ssh and https urls. Additionally, we can match ssh connect +strings (`git@github.com:npm/hosted-git-info`) and shortcuts (eg, +`github:npm/hosted-git-info`). Github specifically, is detected in the case +of a third, unprefixed, form: `npm/hosted-git-info`. 
+ +If it does match, the returned object has properties of: + +* info.type -- The short name of the service +* info.domain -- The domain for git protocol use +* info.user -- The name of the user/org on the git host +* info.project -- The name of the project on the git host + +## Version Contract + +The major version will be bumped any time… + +* The constructor stops accepting URLs that it previously accepted. +* A method is removed. +* A method can no longer accept the number and type of arguments it previously accepted. +* A method can return a different type than it currently returns. + +Implications: + +* I do not consider the specific format of the urls returned from, say + `.https()` to be a part of the contract. The contract is that it will + return a string that can be used to fetch the repo via HTTPS. But what + that string looks like, specifically, can change. +* Dropping support for a hosted git provider would constitute a breaking + change. + +## Usage + +### var info = hostedGitInfo.fromUrl(gitSpecifier[, options]) + +* *gitSpecifer* is a URL of a git repository or a SCP-style specifier of one. +* *options* is an optional object. It can have the following properties: + * *noCommittish* — If true then committishes won't be included in generated URLs. + * *noGitPlus* — If true then `git+` won't be prefixed on URLs. + +## Methods + +All of the methods take the same options as the `fromUrl` factory. Options +provided to a method override those provided to the constructor. + +* info.file(path, opts) + +Given the path of a file relative to the repository, returns a URL for +directly fetching it from the githost. If no committish was set then +`master` will be used as the default. + +For example `hostedGitInfo.fromUrl("git@github.com:npm/hosted-git-info.git#v1.0.0").file("package.json")` +would return `https://raw.githubusercontent.com/npm/hosted-git-info/v1.0.0/package.json` + +* info.shortcut(opts) + +eg, `github:npm/hosted-git-info` + +* info.browse(path, fragment, opts) + +eg, `https://github.com/npm/hosted-git-info/tree/v1.2.0`, +`https://github.com/npm/hosted-git-info/tree/v1.2.0/package.json`, +`https://github.com/npm/hosted-git-info/tree/v1.2.0/REAMDE.md#supported-hosts` + +* info.bugs(opts) + +eg, `https://github.com/npm/hosted-git-info/issues` + +* info.docs(opts) + +eg, `https://github.com/npm/hosted-git-info/tree/v1.2.0#readme` + +* info.https(opts) + +eg, `git+https://github.com/npm/hosted-git-info.git` + +* info.sshurl(opts) + +eg, `git+ssh://git@github.com/npm/hosted-git-info.git` + +* info.ssh(opts) + +eg, `git@github.com:npm/hosted-git-info.git` + +* info.path(opts) + +eg, `npm/hosted-git-info` + +* info.tarball(opts) + +eg, `https://github.com/npm/hosted-git-info/archive/v1.2.0.tar.gz` + +* info.getDefaultRepresentation() + +Returns the default output type. The default output type is based on the +string you passed in to be parsed + +* info.toString(opts) + +Uses the getDefaultRepresentation to call one of the other methods to get a URL for +this resource. As such `hostedGitInfo.fromUrl(url).toString()` will give +you a normalized version of the URL that still uses the same protocol. + +Shortcuts will still be returned as shortcuts, but the special case github +form of `org/project` will be normalized to `github:org/project`. + +SSH connect strings will be normalized into `git+ssh` URLs. + +## Supported hosts + +Currently this supports Github, Bitbucket and Gitlab. Pull requests for +additional hosts welcome. 
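To tie the methods above together, here is a minimal usage sketch (TypeScript/CommonJS import style assumed; the exact output strings follow the `eg` values above and the templates in `git-host-info.js` added later in this diff, and may differ between versions):

```ts
import hostedGitInfo = require('hosted-git-info');

// Parse a shortcut-style specifier; the committish after `#` is carried along.
const info = hostedGitInfo.fromUrl('github:npm/hosted-git-info#v1.0.0');

if (info) {
  info.shortcut();           //=> 'github:npm/hosted-git-info#v1.0.0'
  info.https();              //=> 'git+https://github.com/npm/hosted-git-info.git#v1.0.0'
  info.file('package.json'); //=> 'https://raw.githubusercontent.com/npm/hosted-git-info/v1.0.0/package.json'
}
```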
diff --git a/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/git-host-info.js b/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/git-host-info.js new file mode 100644 index 0000000..8147e33 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/git-host-info.js @@ -0,0 +1,79 @@ +'use strict' + +var gitHosts = module.exports = { + github: { + // First two are insecure and generally shouldn't be used any more, but + // they are still supported. + 'protocols': [ 'git', 'http', 'git+ssh', 'git+https', 'ssh', 'https' ], + 'domain': 'github.com', + 'treepath': 'tree', + 'filetemplate': 'https://{auth@}raw.githubusercontent.com/{user}/{project}/{committish}/{path}', + 'bugstemplate': 'https://{domain}/{user}/{project}/issues', + 'gittemplate': 'git://{auth@}{domain}/{user}/{project}.git{#committish}', + 'tarballtemplate': 'https://codeload.{domain}/{user}/{project}/tar.gz/{committish}' + }, + bitbucket: { + 'protocols': [ 'git+ssh', 'git+https', 'ssh', 'https' ], + 'domain': 'bitbucket.org', + 'treepath': 'src', + 'tarballtemplate': 'https://{domain}/{user}/{project}/get/{committish}.tar.gz' + }, + gitlab: { + 'protocols': [ 'git+ssh', 'git+https', 'ssh', 'https' ], + 'domain': 'gitlab.com', + 'treepath': 'tree', + 'bugstemplate': 'https://{domain}/{user}/{project}/issues', + 'httpstemplate': 'git+https://{auth@}{domain}/{user}/{projectPath}.git{#committish}', + 'tarballtemplate': 'https://{domain}/{user}/{project}/repository/archive.tar.gz?ref={committish}', + 'pathmatch': /^[/]([^/]+)[/]((?!.*(\/-\/|\/repository\/archive\.tar\.gz\?=.*|\/repository\/[^/]+\/archive.tar.gz$)).*?)(?:[.]git|[/])?$/ + }, + gist: { + 'protocols': [ 'git', 'git+ssh', 'git+https', 'ssh', 'https' ], + 'domain': 'gist.github.com', + 'pathmatch': /^[/](?:([^/]+)[/])?([a-z0-9]{32,})(?:[.]git)?$/, + 'filetemplate': 'https://gist.githubusercontent.com/{user}/{project}/raw{/committish}/{path}', + 'bugstemplate': 'https://{domain}/{project}', + 'gittemplate': 'git://{domain}/{project}.git{#committish}', + 'sshtemplate': 'git@{domain}:/{project}.git{#committish}', + 'sshurltemplate': 'git+ssh://git@{domain}/{project}.git{#committish}', + 'browsetemplate': 'https://{domain}/{project}{/committish}', + 'browsefiletemplate': 'https://{domain}/{project}{/committish}{#path}', + 'docstemplate': 'https://{domain}/{project}{/committish}', + 'httpstemplate': 'git+https://{domain}/{project}.git{#committish}', + 'shortcuttemplate': '{type}:{project}{#committish}', + 'pathtemplate': '{project}{#committish}', + 'tarballtemplate': 'https://codeload.github.com/gist/{project}/tar.gz/{committish}', + 'hashformat': function (fragment) { + return 'file-' + formatHashFragment(fragment) + } + } +} + +var gitHostDefaults = { + 'sshtemplate': 'git@{domain}:{user}/{project}.git{#committish}', + 'sshurltemplate': 'git+ssh://git@{domain}/{user}/{project}.git{#committish}', + 'browsetemplate': 'https://{domain}/{user}/{project}{/tree/committish}', + 'browsefiletemplate': 'https://{domain}/{user}/{project}/{treepath}/{committish}/{path}{#fragment}', + 'docstemplate': 'https://{domain}/{user}/{project}{/tree/committish}#readme', + 'httpstemplate': 'git+https://{auth@}{domain}/{user}/{project}.git{#committish}', + 'filetemplate': 'https://{domain}/{user}/{project}/raw/{committish}/{path}', + 'shortcuttemplate': '{type}:{user}/{project}{#committish}', + 'pathtemplate': '{user}/{project}{#committish}', + 'pathmatch': /^[/]([^/]+)[/]([^/]+?)(?:[.]git|[/])?$/, + 'hashformat': formatHashFragment +} + 
+Object.keys(gitHosts).forEach(function (name) { + Object.keys(gitHostDefaults).forEach(function (key) { + if (gitHosts[name][key]) return + gitHosts[name][key] = gitHostDefaults[key] + }) + gitHosts[name].protocols_re = RegExp('^(' + + gitHosts[name].protocols.map(function (protocol) { + return protocol.replace(/([\\+*{}()[\]$^|])/g, '\\$1') + }).join('|') + '):$') +}) + +function formatHashFragment (fragment) { + return fragment.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-') +} diff --git a/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/git-host.js b/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/git-host.js new file mode 100644 index 0000000..9616fba --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/git-host.js @@ -0,0 +1,156 @@ +'use strict' +var gitHosts = require('./git-host-info.js') +/* eslint-disable node/no-deprecated-api */ + +// copy-pasta util._extend from node's source, to avoid pulling +// the whole util module into peoples' webpack bundles. +/* istanbul ignore next */ +var extend = Object.assign || function _extend (target, source) { + // Don't do anything if source isn't an object + if (source === null || typeof source !== 'object') return target + + var keys = Object.keys(source) + var i = keys.length + while (i--) { + target[keys[i]] = source[keys[i]] + } + return target +} + +module.exports = GitHost +function GitHost (type, user, auth, project, committish, defaultRepresentation, opts) { + var gitHostInfo = this + gitHostInfo.type = type + Object.keys(gitHosts[type]).forEach(function (key) { + gitHostInfo[key] = gitHosts[type][key] + }) + gitHostInfo.user = user + gitHostInfo.auth = auth + gitHostInfo.project = project + gitHostInfo.committish = committish + gitHostInfo.default = defaultRepresentation + gitHostInfo.opts = opts || {} +} + +GitHost.prototype.hash = function () { + return this.committish ? '#' + this.committish : '' +} + +GitHost.prototype._fill = function (template, opts) { + if (!template) return + var vars = extend({}, opts) + vars.path = vars.path ? vars.path.replace(/^[/]+/g, '') : '' + opts = extend(extend({}, this.opts), opts) + var self = this + Object.keys(this).forEach(function (key) { + if (self[key] != null && vars[key] == null) vars[key] = self[key] + }) + var rawAuth = vars.auth + var rawcommittish = vars.committish + var rawFragment = vars.fragment + var rawPath = vars.path + var rawProject = vars.project + Object.keys(vars).forEach(function (key) { + var value = vars[key] + if ((key === 'path' || key === 'project') && typeof value === 'string') { + vars[key] = value.split('/').map(function (pathComponent) { + return encodeURIComponent(pathComponent) + }).join('/') + } else { + vars[key] = encodeURIComponent(value) + } + }) + vars['auth@'] = rawAuth ? rawAuth + '@' : '' + vars['#fragment'] = rawFragment ? '#' + this.hashformat(rawFragment) : '' + vars.fragment = vars.fragment ? vars.fragment : '' + vars['#path'] = rawPath ? '#' + this.hashformat(rawPath) : '' + vars['/path'] = vars.path ? '/' + vars.path : '' + vars.projectPath = rawProject.split('/').map(encodeURIComponent).join('/') + if (opts.noCommittish) { + vars['#committish'] = '' + vars['/tree/committish'] = '' + vars['/committish'] = '' + vars.committish = '' + } else { + vars['#committish'] = rawcommittish ? '#' + rawcommittish : '' + vars['/tree/committish'] = vars.committish + ? '/' + vars.treepath + '/' + vars.committish + : '' + vars['/committish'] = vars.committish ? 
'/' + vars.committish : '' + vars.committish = vars.committish || 'master' + } + var res = template + Object.keys(vars).forEach(function (key) { + res = res.replace(new RegExp('[{]' + key + '[}]', 'g'), vars[key]) + }) + if (opts.noGitPlus) { + return res.replace(/^git[+]/, '') + } else { + return res + } +} + +GitHost.prototype.ssh = function (opts) { + return this._fill(this.sshtemplate, opts) +} + +GitHost.prototype.sshurl = function (opts) { + return this._fill(this.sshurltemplate, opts) +} + +GitHost.prototype.browse = function (P, F, opts) { + if (typeof P === 'string') { + if (typeof F !== 'string') { + opts = F + F = null + } + return this._fill(this.browsefiletemplate, extend({ + fragment: F, + path: P + }, opts)) + } else { + return this._fill(this.browsetemplate, P) + } +} + +GitHost.prototype.docs = function (opts) { + return this._fill(this.docstemplate, opts) +} + +GitHost.prototype.bugs = function (opts) { + return this._fill(this.bugstemplate, opts) +} + +GitHost.prototype.https = function (opts) { + return this._fill(this.httpstemplate, opts) +} + +GitHost.prototype.git = function (opts) { + return this._fill(this.gittemplate, opts) +} + +GitHost.prototype.shortcut = function (opts) { + return this._fill(this.shortcuttemplate, opts) +} + +GitHost.prototype.path = function (opts) { + return this._fill(this.pathtemplate, opts) +} + +GitHost.prototype.tarball = function (opts_) { + var opts = extend({}, opts_, { noCommittish: false }) + return this._fill(this.tarballtemplate, opts) +} + +GitHost.prototype.file = function (P, opts) { + return this._fill(this.filetemplate, extend({ path: P }, opts)) +} + +GitHost.prototype.getDefaultRepresentation = function () { + return this.default +} + +GitHost.prototype.toString = function (opts) { + if (this.default && typeof this[this.default] === 'function') return this[this.default](opts) + return this.sshurl(opts) +} diff --git a/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/index.js b/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/index.js new file mode 100644 index 0000000..0885772 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/index.js @@ -0,0 +1,148 @@ +'use strict' +var url = require('url') +var gitHosts = require('./git-host-info.js') +var GitHost = module.exports = require('./git-host.js') + +var protocolToRepresentationMap = { + 'git+ssh:': 'sshurl', + 'git+https:': 'https', + 'ssh:': 'sshurl', + 'git:': 'git' +} + +function protocolToRepresentation (protocol) { + return protocolToRepresentationMap[protocol] || protocol.slice(0, -1) +} + +var authProtocols = { + 'git:': true, + 'https:': true, + 'git+https:': true, + 'http:': true, + 'git+http:': true +} + +var cache = {} + +module.exports.fromUrl = function (giturl, opts) { + if (typeof giturl !== 'string') return + var key = giturl + JSON.stringify(opts || {}) + + if (!(key in cache)) { + cache[key] = fromUrl(giturl, opts) + } + + return cache[key] +} + +function fromUrl (giturl, opts) { + if (giturl == null || giturl === '') return + var url = fixupUnqualifiedGist( + isGitHubShorthand(giturl) ? 'github:' + giturl : giturl + ) + var parsed = parseGitUrl(url) + var shortcutMatch = url.match(/^([^:]+):(?:[^@]+@)?(?:([^/]*)\/)?([^#]+)/) + var matches = Object.keys(gitHosts).map(function (gitHostName) { + try { + var gitHostInfo = gitHosts[gitHostName] + var auth = null + if (parsed.auth && authProtocols[parsed.protocol]) { + auth = parsed.auth + } + var committish = parsed.hash ? 
decodeURIComponent(parsed.hash.substr(1)) : null + var user = null + var project = null + var defaultRepresentation = null + if (shortcutMatch && shortcutMatch[1] === gitHostName) { + user = shortcutMatch[2] && decodeURIComponent(shortcutMatch[2]) + project = decodeURIComponent(shortcutMatch[3].replace(/\.git$/, '')) + defaultRepresentation = 'shortcut' + } else { + if (parsed.host && parsed.host !== gitHostInfo.domain && parsed.host.replace(/^www[.]/, '') !== gitHostInfo.domain) return + if (!gitHostInfo.protocols_re.test(parsed.protocol)) return + if (!parsed.path) return + var pathmatch = gitHostInfo.pathmatch + var matched = parsed.path.match(pathmatch) + if (!matched) return + /* istanbul ignore else */ + if (matched[1] !== null && matched[1] !== undefined) { + user = decodeURIComponent(matched[1].replace(/^:/, '')) + } + project = decodeURIComponent(matched[2]) + defaultRepresentation = protocolToRepresentation(parsed.protocol) + } + return new GitHost(gitHostName, user, auth, project, committish, defaultRepresentation, opts) + } catch (ex) { + /* istanbul ignore else */ + if (ex instanceof URIError) { + } else throw ex + } + }).filter(function (gitHostInfo) { return gitHostInfo }) + if (matches.length !== 1) return + return matches[0] +} + +function isGitHubShorthand (arg) { + // Note: This does not fully test the git ref format. + // See https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html + // + // The only way to do this properly would be to shell out to + // git-check-ref-format, and as this is a fast sync function, + // we don't want to do that. Just let git fail if it turns + // out that the commit-ish is invalid. + // GH usernames cannot start with . or - + return /^[^:@%/\s.-][^:@%/\s]*[/][^:@\s/%]+(?:#.*)?$/.test(arg) +} + +function fixupUnqualifiedGist (giturl) { + // necessary for round-tripping gists + var parsed = url.parse(giturl) + if (parsed.protocol === 'gist:' && parsed.host && !parsed.path) { + return parsed.protocol + '/' + parsed.host + } else { + return giturl + } +} + +function parseGitUrl (giturl) { + var matched = giturl.match(/^([^@]+)@([^:/]+):[/]?((?:[^/]+[/])?[^/]+?)(?:[.]git)?(#.*)?$/) + if (!matched) { + var legacy = url.parse(giturl) + // If we don't have url.URL, then sorry, this is just not fixable. + // This affects Node <= 6.12. + if (legacy.auth && typeof url.URL === 'function') { + // git urls can be in the form of scp-style/ssh-connect strings, like + // git+ssh://user@host.com:some/path, which the legacy url parser + // supports, but WhatWG url.URL class does not. However, the legacy + // parser de-urlencodes the username and password, so something like + // https://user%3An%40me:p%40ss%3Aword@x.com/ becomes + // https://user:n@me:p@ss:word@x.com/ which is all kinds of wrong. + // Pull off just the auth and host, so we dont' get the confusing + // scp-style URL, then pass that to the WhatWG parser to get the + // auth properly escaped. 
+ var authmatch = giturl.match(/[^@]+@[^:/]+/) + /* istanbul ignore else - this should be impossible */ + if (authmatch) { + var whatwg = new url.URL(authmatch[0]) + legacy.auth = whatwg.username || '' + if (whatwg.password) legacy.auth += ':' + whatwg.password + } + } + return legacy + } + return { + protocol: 'git+ssh:', + slashes: true, + auth: matched[1], + host: matched[2], + port: null, + hostname: matched[2], + hash: matched[4], + search: null, + query: null, + pathname: '/' + matched[3], + path: '/' + matched[3], + href: 'git+ssh://' + matched[1] + '@' + matched[2] + + '/' + matched[3] + (matched[4] || '') + } +} diff --git a/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/package.json b/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/package.json new file mode 100644 index 0000000..8cc554c --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/hosted-git-info/package.json @@ -0,0 +1,40 @@ +{ + "name": "hosted-git-info", + "version": "2.8.9", + "description": "Provides metadata and conversions from repository urls for Github, Bitbucket and Gitlab", + "main": "index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/hosted-git-info.git" + }, + "keywords": [ + "git", + "github", + "bitbucket", + "gitlab" + ], + "author": "Rebecca Turner (http://re-becca.org)", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/hosted-git-info/issues" + }, + "homepage": "https://github.com/npm/hosted-git-info", + "scripts": { + "prerelease": "npm t", + "postrelease": "npm publish --tag=ancient-legacy-fixes && git push --follow-tags", + "posttest": "standard", + "release": "standard-version -s", + "test:coverage": "tap --coverage-report=html -J --coverage=90 --no-esm test/*.js", + "test": "tap -J --coverage=90 --no-esm test/*.js" + }, + "devDependencies": { + "standard": "^11.0.1", + "standard-version": "^4.4.0", + "tap": "^12.7.0" + }, + "files": [ + "index.js", + "git-host.js", + "git-host-info.js" + ] +} diff --git a/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/AUTHORS b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/AUTHORS new file mode 100644 index 0000000..66282ba --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/AUTHORS @@ -0,0 +1,4 @@ +# Names sorted by how much code was originally theirs. +Isaac Z. Schlueter +Meryn Stol +Robert Kowalski diff --git a/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/LICENSE b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/LICENSE new file mode 100644 index 0000000..6ed662c --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/LICENSE @@ -0,0 +1,30 @@ +This package contains code originally written by Isaac Z. Schlueter. +Used with permission. + +Copyright (c) Meryn Stol ("Author") +All rights reserved. + +The BSD License + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/README.md b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/README.md
new file mode 100644
index 0000000..d2bd7bc
--- /dev/null
+++ b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/README.md
@@ -0,0 +1,106 @@
+# normalize-package-data [![Build Status](https://travis-ci.org/npm/normalize-package-data.png?branch=master)](https://travis-ci.org/npm/normalize-package-data)
+
+normalize-package-data exports a function that normalizes package metadata. This data is typically found in a package.json file, but in principle could come from any source - for example the npm registry.
+
+normalize-package-data is used by [read-package-json](https://npmjs.org/package/read-package-json) to normalize the data it reads from a package.json file. In turn, read-package-json is used by [npm](https://npmjs.org/package/npm) and various npm-related tools.
+
+## Installation
+
+```
+npm install normalize-package-data
+```
+
+## Usage
+
+Basic usage is really simple. You call the function that normalize-package-data exports. Let's call it `normalizeData`.
+
+```javascript
+normalizeData = require('normalize-package-data')
+packageData = require("./package.json")
+normalizeData(packageData)
+// packageData is now normalized
+```
+
+#### Strict mode
+
+You may activate strict validation by passing true as the second argument.
+
+```javascript
+normalizeData = require('normalize-package-data')
+packageData = require("./package.json")
+normalizeData(packageData, true)
+// packageData is now normalized
+```
+
+If strict mode is activated, only Semver 2.0 version strings are accepted. Otherwise, Semver 1.0 strings are accepted as well. Packages must have a name, and the name field must not contain leading or trailing whitespace.
+
+#### Warnings
+
+Optionally, you may pass a "warning" function. It gets called whenever the `normalizeData` function encounters something that doesn't look right. It indicates less than perfect input data.
+
+```javascript
+normalizeData = require('normalize-package-data')
+packageData = require("./package.json")
+warnFn = function(msg) { console.error(msg) }
+normalizeData(packageData, warnFn)
+// packageData is now normalized. Any number of warnings may have been logged.
+```
+
+You may combine strict validation with warnings by passing `true` as the second argument, and `warnFn` as third.
+
+When `private` field is set to `true`, warnings will be suppressed.
+
+### Potential exceptions
+
+If the supplied data has an invalid name or version field, `normalizeData` will throw an error. Depending on where you call `normalizeData`, you may want to catch these errors so you can pass them to a callback.
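A minimal sketch of that pattern, assuming the caller exposes a Node-style callback (the `loadPackage` helper below is hypothetical):

```js
var normalizeData = require('normalize-package-data')

// Hypothetical wrapper: surface normalization errors through a callback
// instead of letting them propagate as exceptions.
function loadPackage (packageData, cb) {
  try {
    normalizeData(packageData, true) // strict mode: Semver 2.0 only
  } catch (err) {
    return cb(err) // e.g. Error: Invalid version: "1.0"
  }
  cb(null, packageData)
}

loadPackage({ name: 'demo', version: '1.0' }, function (err) {
  if (err) console.error(err.message)
})
```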
+
+## What normalization (currently) entails
+
+* The value of `name` field gets trimmed (unless in strict mode).
+* The value of the `version` field gets cleaned by `semver.clean`. See [documentation for the semver module](https://github.com/isaacs/node-semver).
+* If `name` and/or `version` fields are missing, they are set to empty strings.
+* If `files` field is not an array, it will be removed.
+* If `bin` field is a string, then `bin` field will become an object with `name` set to the value of the `name` field, and `bin` set to the original string value.
+* If `man` field is a string, it will become an array with the original string as its sole member.
+* If `keywords` field is a string, it is considered to be a list of keywords separated by one or more white-space characters. It gets converted to an array by splitting on `\s+`.
+* All people fields (`author`, `maintainers`, `contributors`) get converted into objects with name, email and url properties.
+* If `bundledDependencies` field (a typo) exists and `bundleDependencies` field does not, `bundledDependencies` will get renamed to `bundleDependencies`.
+* If the value of any of the dependencies fields (`dependencies`, `devDependencies`, `optionalDependencies`) is a string, it gets converted into an object with familiar `name=>value` pairs.
+* The values in `optionalDependencies` get added to `dependencies`. The `optionalDependencies` array is left untouched.
+* As of v2: Dependencies that point at known hosted git providers (currently: github, bitbucket, gitlab) will have their URLs canonicalized, but protocols will be preserved.
+* As of v2: Dependencies that use shortcuts for hosted git providers (`org/proj`, `github:org/proj`, `bitbucket:org/proj`, `gitlab:org/proj`, `gist:docid`) will have the shortcut left in place. (In the case of github, the `org/proj` form will be expanded to `github:org/proj`.) THIS MARKS A BREAKING CHANGE FROM V1, where the shortcut was previously expanded to a URL.
+* If `description` field does not exist, but `readme` field does, then (more or less) the first paragraph of text that's found in the readme is taken as value for `description`.
+* If `repository` field is a string, it will become an object with `url` set to the original string value, and `type` set to `"git"`.
+* If `repository.url` is not a valid url, but in the style of "[owner-name]/[repo-name]", `repository.url` will be set to git+https://github.com/[owner-name]/[repo-name].git
+* If `bugs` field is a string, the value of `bugs` field is changed into an object with `url` set to the original string value.
+* If `bugs` field does not exist, but `repository` field points to a repository hosted on GitHub, the value of the `bugs` field gets set to an url in the form of https://github.com/[owner-name]/[repo-name]/issues . If the repository field points to a GitHub Gist repo url, the associated http url is chosen.
+* If `bugs` field is an object, the resulting value only has email and url properties. If email and url properties are not strings, they are ignored. If no valid values for either email or url are found, bugs field will be removed.
+* If `homepage` field is not a string, it will be removed.
+* If the url in the `homepage` field does not specify a protocol, then http is assumed. For example, `myproject.org` will be changed to `http://myproject.org`.
+* If `homepage` field does not exist, but `repository` field points to a repository hosted on GitHub, the value of the `homepage` field gets set to an url in the form of https://github.com/[owner-name]/[repo-name]#readme . If the repository field points to a GitHub Gist repo url, the associated http url is chosen. + +### Rules for name field + +If `name` field is given, the value of the name field must be a string. The string may not: + +* start with a period. +* contain the following characters: `/@\s+%` +* contain any characters that would need to be encoded for use in urls. +* resemble the word `node_modules` or `favicon.ico` (case doesn't matter). + +### Rules for version field + +If `version` field is given, the value of the version field must be a valid *semver* string, as determined by the `semver.valid` method. See [documentation for the semver module](https://github.com/isaacs/node-semver). + +### Rules for license field + +The `license` field should be a valid *SPDX license expression* or one of the special values allowed by [validate-npm-package-license](https://npmjs.com/package/validate-npm-package-license). See [documentation for the license field in package.json](https://docs.npmjs.com/files/package.json#license). + +## Credits + +This package contains code based on read-package-json written by Isaac Z. Schlueter. Used with permisson. + +## License + +normalize-package-data is released under the [BSD 2-Clause License](http://opensource.org/licenses/MIT). +Copyright (c) 2013 Meryn Stol diff --git a/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/extract_description.js b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/extract_description.js new file mode 100644 index 0000000..83f10aa --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/extract_description.js @@ -0,0 +1,14 @@ +module.exports = extractDescription + +// Extracts description from contents of a readme file in markdown format +function extractDescription (d) { + if (!d) return; + if (d === "ERROR: No README data found!") return; + // the first block of text before the first heading + // that isn't the first line heading + d = d.trim().split('\n') + for (var s = 0; d[s] && d[s].trim().match(/^(#|$)/); s ++); + var l = d.length + for (var e = s + 1; e < l && d[e].trim(); e ++); + return d.slice(s, e).join(' ').trim() +} diff --git a/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/fixer.js b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/fixer.js new file mode 100644 index 0000000..27682e9 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/fixer.js @@ -0,0 +1,418 @@ +var semver = require("semver") +var validateLicense = require('validate-npm-package-license'); +var hostedGitInfo = require("hosted-git-info") +var isBuiltinModule = require("resolve").isCore +var depTypes = ["dependencies","devDependencies","optionalDependencies"] +var extractDescription = require("./extract_description") +var url = require("url") +var typos = require("./typos.json") + +var fixer = module.exports = { + // default warning function + warn: function() {}, + + fixRepositoryField: function(data) { + if (data.repositories) { + this.warn("repositories"); + data.repository = data.repositories[0] + } + if (!data.repository) return this.warn("missingRepository") + if (typeof data.repository === "string") { + data.repository = { + type: "git", + url: data.repository + } + } + var r = 
data.repository.url || "" + if (r) { + var hosted = hostedGitInfo.fromUrl(r) + if (hosted) { + r = data.repository.url + = hosted.getDefaultRepresentation() == "shortcut" ? hosted.https() : hosted.toString() + } + } + + if (r.match(/github.com\/[^\/]+\/[^\/]+\.git\.git$/)) { + this.warn("brokenGitUrl", r) + } + } + +, fixTypos: function(data) { + Object.keys(typos.topLevel).forEach(function (d) { + if (data.hasOwnProperty(d)) { + this.warn("typo", d, typos.topLevel[d]) + } + }, this) + } + +, fixScriptsField: function(data) { + if (!data.scripts) return + if (typeof data.scripts !== "object") { + this.warn("nonObjectScripts") + delete data.scripts + return + } + Object.keys(data.scripts).forEach(function (k) { + if (typeof data.scripts[k] !== "string") { + this.warn("nonStringScript") + delete data.scripts[k] + } else if (typos.script[k] && !data.scripts[typos.script[k]]) { + this.warn("typo", k, typos.script[k], "scripts") + } + }, this) + } + +, fixFilesField: function(data) { + var files = data.files + if (files && !Array.isArray(files)) { + this.warn("nonArrayFiles") + delete data.files + } else if (data.files) { + data.files = data.files.filter(function(file) { + if (!file || typeof file !== "string") { + this.warn("invalidFilename", file) + return false + } else { + return true + } + }, this) + } + } + +, fixBinField: function(data) { + if (!data.bin) return; + if (typeof data.bin === "string") { + var b = {} + var match + if (match = data.name.match(/^@[^/]+[/](.*)$/)) { + b[match[1]] = data.bin + } else { + b[data.name] = data.bin + } + data.bin = b + } + } + +, fixManField: function(data) { + if (!data.man) return; + if (typeof data.man === "string") { + data.man = [ data.man ] + } + } +, fixBundleDependenciesField: function(data) { + var bdd = "bundledDependencies" + var bd = "bundleDependencies" + if (data[bdd] && !data[bd]) { + data[bd] = data[bdd] + delete data[bdd] + } + if (data[bd] && !Array.isArray(data[bd])) { + this.warn("nonArrayBundleDependencies") + delete data[bd] + } else if (data[bd]) { + data[bd] = data[bd].filter(function(bd) { + if (!bd || typeof bd !== 'string') { + this.warn("nonStringBundleDependency", bd) + return false + } else { + if (!data.dependencies) { + data.dependencies = {} + } + if (!data.dependencies.hasOwnProperty(bd)) { + this.warn("nonDependencyBundleDependency", bd) + data.dependencies[bd] = "*" + } + return true + } + }, this) + } + } + +, fixDependencies: function(data, strict) { + var loose = !strict + objectifyDeps(data, this.warn) + addOptionalDepsToDeps(data, this.warn) + this.fixBundleDependenciesField(data) + + ;['dependencies','devDependencies'].forEach(function(deps) { + if (!(deps in data)) return + if (!data[deps] || typeof data[deps] !== "object") { + this.warn("nonObjectDependencies", deps) + delete data[deps] + return + } + Object.keys(data[deps]).forEach(function (d) { + var r = data[deps][d] + if (typeof r !== 'string') { + this.warn("nonStringDependency", d, JSON.stringify(r)) + delete data[deps][d] + } + var hosted = hostedGitInfo.fromUrl(data[deps][d]) + if (hosted) data[deps][d] = hosted.toString() + }, this) + }, this) + } + +, fixModulesField: function (data) { + if (data.modules) { + this.warn("deprecatedModules") + delete data.modules + } + } + +, fixKeywordsField: function (data) { + if (typeof data.keywords === "string") { + data.keywords = data.keywords.split(/,\s+/) + } + if (data.keywords && !Array.isArray(data.keywords)) { + delete data.keywords + this.warn("nonArrayKeywords") + } else if (data.keywords) { + 
data.keywords = data.keywords.filter(function(kw) { + if (typeof kw !== "string" || !kw) { + this.warn("nonStringKeyword"); + return false + } else { + return true + } + }, this) + } + } + +, fixVersionField: function(data, strict) { + // allow "loose" semver 1.0 versions in non-strict mode + // enforce strict semver 2.0 compliance in strict mode + var loose = !strict + if (!data.version) { + data.version = "" + return true + } + if (!semver.valid(data.version, loose)) { + throw new Error('Invalid version: "'+ data.version + '"') + } + data.version = semver.clean(data.version, loose) + return true + } + +, fixPeople: function(data) { + modifyPeople(data, unParsePerson) + modifyPeople(data, parsePerson) + } + +, fixNameField: function(data, options) { + if (typeof options === "boolean") options = {strict: options} + else if (typeof options === "undefined") options = {} + var strict = options.strict + if (!data.name && !strict) { + data.name = "" + return + } + if (typeof data.name !== "string") { + throw new Error("name field must be a string.") + } + if (!strict) + data.name = data.name.trim() + ensureValidName(data.name, strict, options.allowLegacyCase) + if (isBuiltinModule(data.name)) + this.warn("conflictingName", data.name) + } + + +, fixDescriptionField: function (data) { + if (data.description && typeof data.description !== 'string') { + this.warn("nonStringDescription") + delete data.description + } + if (data.readme && !data.description) + data.description = extractDescription(data.readme) + if(data.description === undefined) delete data.description; + if (!data.description) this.warn("missingDescription") + } + +, fixReadmeField: function (data) { + if (!data.readme) { + this.warn("missingReadme") + data.readme = "ERROR: No README data found!" 
+ } + } + +, fixBugsField: function(data) { + if (!data.bugs && data.repository && data.repository.url) { + var hosted = hostedGitInfo.fromUrl(data.repository.url) + if(hosted && hosted.bugs()) { + data.bugs = {url: hosted.bugs()} + } + } + else if(data.bugs) { + var emailRe = /^.+@.*\..+$/ + if(typeof data.bugs == "string") { + if(emailRe.test(data.bugs)) + data.bugs = {email:data.bugs} + else if(url.parse(data.bugs).protocol) + data.bugs = {url: data.bugs} + else + this.warn("nonEmailUrlBugsString") + } + else { + bugsTypos(data.bugs, this.warn) + var oldBugs = data.bugs + data.bugs = {} + if(oldBugs.url) { + if(typeof(oldBugs.url) == "string" && url.parse(oldBugs.url).protocol) + data.bugs.url = oldBugs.url + else + this.warn("nonUrlBugsUrlField") + } + if(oldBugs.email) { + if(typeof(oldBugs.email) == "string" && emailRe.test(oldBugs.email)) + data.bugs.email = oldBugs.email + else + this.warn("nonEmailBugsEmailField") + } + } + if(!data.bugs.email && !data.bugs.url) { + delete data.bugs + this.warn("emptyNormalizedBugs") + } + } + } + +, fixHomepageField: function(data) { + if (!data.homepage && data.repository && data.repository.url) { + var hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted && hosted.docs()) data.homepage = hosted.docs() + } + if (!data.homepage) return + + if(typeof data.homepage !== "string") { + this.warn("nonUrlHomepage") + return delete data.homepage + } + if(!url.parse(data.homepage).protocol) { + data.homepage = "http://" + data.homepage + } + } + +, fixLicenseField: function(data) { + if (!data.license) { + return this.warn("missingLicense") + } else{ + if ( + typeof(data.license) !== 'string' || + data.license.length < 1 || + data.license.trim() === '' + ) { + this.warn("invalidLicense") + } else { + if (!validateLicense(data.license).validForNewPackages) + this.warn("invalidLicense") + } + } + } +} + +function isValidScopedPackageName(spec) { + if (spec.charAt(0) !== '@') return false + + var rest = spec.slice(1).split('/') + if (rest.length !== 2) return false + + return rest[0] && rest[1] && + rest[0] === encodeURIComponent(rest[0]) && + rest[1] === encodeURIComponent(rest[1]) +} + +function isCorrectlyEncodedName(spec) { + return !spec.match(/[\/@\s\+%:]/) && + spec === encodeURIComponent(spec) +} + +function ensureValidName (name, strict, allowLegacyCase) { + if (name.charAt(0) === "." || + !(isValidScopedPackageName(name) || isCorrectlyEncodedName(name)) || + (strict && (!allowLegacyCase) && name !== name.toLowerCase()) || + name.toLowerCase() === "node_modules" || + name.toLowerCase() === "favicon.ico") { + throw new Error("Invalid name: " + JSON.stringify(name)) + } +} + +function modifyPeople (data, fn) { + if (data.author) data.author = fn(data.author) + ;["maintainers", "contributors"].forEach(function (set) { + if (!Array.isArray(data[set])) return; + data[set] = data[set].map(fn) + }) + return data +} + +function unParsePerson (person) { + if (typeof person === "string") return person + var name = person.name || "" + var u = person.url || person.web + var url = u ? (" ("+u+")") : "" + var e = person.email || person.mail + var email = e ? 
(" <"+e+">") : "" + return name+email+url +} + +function parsePerson (person) { + if (typeof person !== "string") return person + var name = person.match(/^([^\(<]+)/) + var url = person.match(/\(([^\)]+)\)/) + var email = person.match(/<([^>]+)>/) + var obj = {} + if (name && name[0].trim()) obj.name = name[0].trim() + if (email) obj.email = email[1]; + if (url) obj.url = url[1]; + return obj +} + +function addOptionalDepsToDeps (data, warn) { + var o = data.optionalDependencies + if (!o) return; + var d = data.dependencies || {} + Object.keys(o).forEach(function (k) { + d[k] = o[k] + }) + data.dependencies = d +} + +function depObjectify (deps, type, warn) { + if (!deps) return {} + if (typeof deps === "string") { + deps = deps.trim().split(/[\n\r\s\t ,]+/) + } + if (!Array.isArray(deps)) return deps + warn("deprecatedArrayDependencies", type) + var o = {} + deps.filter(function (d) { + return typeof d === "string" + }).forEach(function(d) { + d = d.trim().split(/(:?[@\s><=])/) + var dn = d.shift() + var dv = d.join("") + dv = dv.trim() + dv = dv.replace(/^@/, "") + o[dn] = dv + }) + return o +} + +function objectifyDeps (data, warn) { + depTypes.forEach(function (type) { + if (!data[type]) return; + data[type] = depObjectify(data[type], type, warn) + }) +} + +function bugsTypos(bugs, warn) { + if (!bugs) return + Object.keys(bugs).forEach(function (k) { + if (typos.bugs[k]) { + warn("typo", k, typos.bugs[k], "bugs") + bugs[typos.bugs[k]] = bugs[k] + delete bugs[k] + } + }) +} diff --git a/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/make_warning.js b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/make_warning.js new file mode 100644 index 0000000..4ac74ad --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/make_warning.js @@ -0,0 +1,23 @@ +var util = require("util") +var messages = require("./warning_messages.json") + +module.exports = function() { + var args = Array.prototype.slice.call(arguments, 0) + var warningName = args.shift() + if (warningName == "typo") { + return makeTypoWarning.apply(null,args) + } + else { + var msgTemplate = messages[warningName] ? 
messages[warningName] : warningName + ": '%s'" + args.unshift(msgTemplate) + return util.format.apply(null, args) + } +} + +function makeTypoWarning (providedName, probableName, field) { + if (field) { + providedName = field + "['" + providedName + "']" + probableName = field + "['" + probableName + "']" + } + return util.format(messages.typo, providedName, probableName) +} diff --git a/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/normalize.js b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/normalize.js new file mode 100644 index 0000000..bd1bfef --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/normalize.js @@ -0,0 +1,39 @@ +module.exports = normalize + +var fixer = require("./fixer") +normalize.fixer = fixer + +var makeWarning = require("./make_warning") + +var fieldsToFix = ['name','version','description','repository','modules','scripts' + ,'files','bin','man','bugs','keywords','readme','homepage','license'] +var otherThingsToFix = ['dependencies','people', 'typos'] + +var thingsToFix = fieldsToFix.map(function(fieldName) { + return ucFirst(fieldName) + "Field" +}) +// two ways to do this in CoffeeScript on only one line, sub-70 chars: +// thingsToFix = fieldsToFix.map (name) -> ucFirst(name) + "Field" +// thingsToFix = (ucFirst(name) + "Field" for name in fieldsToFix) +thingsToFix = thingsToFix.concat(otherThingsToFix) + +function normalize (data, warn, strict) { + if(warn === true) warn = null, strict = true + if(!strict) strict = false + if(!warn || data.private) warn = function(msg) { /* noop */ } + + if (data.scripts && + data.scripts.install === "node-gyp rebuild" && + !data.scripts.preinstall) { + data.gypfile = true + } + fixer.warn = function() { warn(makeWarning.apply(null, arguments)) } + thingsToFix.forEach(function(thingName) { + fixer["fix" + ucFirst(thingName)](data, strict) + }) + data._id = data.name + "@" + data.version +} + +function ucFirst (string) { + return string.charAt(0).toUpperCase() + string.slice(1); +} diff --git a/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/safe_format.js b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/safe_format.js new file mode 100644 index 0000000..b07f100 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/safe_format.js @@ -0,0 +1,9 @@ +var util = require('util') + +module.exports = function() { + var args = Array.prototype.slice.call(arguments, 0) + args.forEach(function(arg) { + if (!arg) throw new TypeError('Bad arguments.') + }) + return util.format.apply(null, arguments) +} diff --git a/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/typos.json b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/typos.json new file mode 100644 index 0000000..7f9dd28 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/typos.json @@ -0,0 +1,25 @@ +{ + "topLevel": { + "dependancies": "dependencies" + ,"dependecies": "dependencies" + ,"depdenencies": "dependencies" + ,"devEependencies": "devDependencies" + ,"depends": "dependencies" + ,"dev-dependencies": "devDependencies" + ,"devDependences": "devDependencies" + ,"devDepenencies": "devDependencies" + ,"devdependencies": "devDependencies" + ,"repostitory": "repository" + ,"repo": "repository" + ,"prefereGlobal": "preferGlobal" + ,"hompage": "homepage" + ,"hampage": "homepage" + ,"autohr": "author" + ,"autor": "author" + ,"contributers": 
"contributors" + ,"publicationConfig": "publishConfig" + ,"script": "scripts" + }, + "bugs": { "web": "url", "name": "url" }, + "script": { "server": "start", "tests": "test" } +} diff --git a/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/warning_messages.json b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/warning_messages.json new file mode 100644 index 0000000..4890f50 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/lib/warning_messages.json @@ -0,0 +1,30 @@ +{ + "repositories": "'repositories' (plural) Not supported. Please pick one as the 'repository' field" + ,"missingRepository": "No repository field." + ,"brokenGitUrl": "Probably broken git url: %s" + ,"nonObjectScripts": "scripts must be an object" + ,"nonStringScript": "script values must be string commands" + ,"nonArrayFiles": "Invalid 'files' member" + ,"invalidFilename": "Invalid filename in 'files' list: %s" + ,"nonArrayBundleDependencies": "Invalid 'bundleDependencies' list. Must be array of package names" + ,"nonStringBundleDependency": "Invalid bundleDependencies member: %s" + ,"nonDependencyBundleDependency": "Non-dependency in bundleDependencies: %s" + ,"nonObjectDependencies": "%s field must be an object" + ,"nonStringDependency": "Invalid dependency: %s %s" + ,"deprecatedArrayDependencies": "specifying %s as array is deprecated" + ,"deprecatedModules": "modules field is deprecated" + ,"nonArrayKeywords": "keywords should be an array of strings" + ,"nonStringKeyword": "keywords should be an array of strings" + ,"conflictingName": "%s is also the name of a node core module." + ,"nonStringDescription": "'description' field should be a string" + ,"missingDescription": "No description" + ,"missingReadme": "No README data" + ,"missingLicense": "No license field." + ,"nonEmailUrlBugsString": "Bug string field must be url, email, or {email,url}" + ,"nonUrlBugsUrlField": "bugs.url field must be a string url. Deleted." + ,"nonEmailBugsEmailField": "bugs.email field must be a string email. Deleted." + ,"emptyNormalizedBugs": "Normalized value of bugs field is an empty object. Deleted." + ,"nonUrlHomepage": "homepage field must be a string url. Deleted." + ,"invalidLicense": "license should be a valid SPDX license expression" + ,"typo": "%s should probably be %s." 
+} diff --git a/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/package.json b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/package.json new file mode 100644 index 0000000..dea34bb --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/normalize-package-data/package.json @@ -0,0 +1,31 @@ +{ + "name": "normalize-package-data", + "version": "2.5.0", + "author": "Meryn Stol ", + "description": "Normalizes data that can be found in package.json files.", + "license": "BSD-2-Clause", + "repository": { + "type": "git", + "url": "git://github.com/npm/normalize-package-data.git" + }, + "main": "lib/normalize.js", + "scripts": { + "test": "tap test/*.js" + }, + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + }, + "devDependencies": { + "async": "^2.6.1", + "tap": "^12.4.0", + "underscore": "^1.8.3" + }, + "files": [ + "lib/*.js", + "lib/*.json", + "AUTHORS" + ] +} diff --git a/mybulma/node_modules/read-pkg/node_modules/semver/CHANGELOG.md b/mybulma/node_modules/read-pkg/node_modules/semver/CHANGELOG.md new file mode 100644 index 0000000..66304fd --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/semver/CHANGELOG.md @@ -0,0 +1,39 @@ +# changes log + +## 5.7 + +* Add `minVersion` method + +## 5.6 + +* Move boolean `loose` param to an options object, with + backwards-compatibility protection. +* Add ability to opt out of special prerelease version handling with + the `includePrerelease` option flag. + +## 5.5 + +* Add version coercion capabilities + +## 5.4 + +* Add intersection checking + +## 5.3 + +* Add `minSatisfying` method + +## 5.2 + +* Add `prerelease(v)` that returns prerelease components + +## 5.1 + +* Add Backus-Naur for ranges +* Remove excessively cute inspection methods + +## 5.0 + +* Remove AMD/Browserified build artifacts +* Fix ltr and gtr when using the `*` range +* Fix for range `*` with a prerelease identifier diff --git a/mybulma/node_modules/read-pkg/node_modules/semver/LICENSE b/mybulma/node_modules/read-pkg/node_modules/semver/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/semver/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/mybulma/node_modules/read-pkg/node_modules/semver/README.md b/mybulma/node_modules/read-pkg/node_modules/semver/README.md new file mode 100644 index 0000000..f8dfa5a --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/semver/README.md @@ -0,0 +1,412 @@ +semver(1) -- The semantic versioner for npm +=========================================== + +## Install + +```bash +npm install --save semver +```` + +## Usage + +As a node module: + +```js +const semver = require('semver') + +semver.valid('1.2.3') // '1.2.3' +semver.valid('a.b.c') // null +semver.clean(' =v1.2.3 ') // '1.2.3' +semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true +semver.gt('1.2.3', '9.8.7') // false +semver.lt('1.2.3', '9.8.7') // true +semver.minVersion('>=1.0.0') // '1.0.0' +semver.valid(semver.coerce('v2')) // '2.0.0' +semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7' +``` + +As a command-line utility: + +``` +$ semver -h + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] [ [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range + Print versions that match the specified range. + +-i --increment [] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. + +--preid + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. + +-l --loose + Interpret versions and ranges loosely + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them. +``` + +## Versions + +A "version" is described by the `v2.0.0` specification found at +. + +A leading `"="` or `"v"` character is stripped off and ignored. + +## Ranges + +A `version range` is a set of `comparators` which specify versions +that satisfy the range. + +A `comparator` is composed of an `operator` and a `version`. The set +of primitive `operators` is: + +* `<` Less than +* `<=` Less than or equal to +* `>` Greater than +* `>=` Greater than or equal to +* `=` Equal. If no operator is specified, then equality is assumed, + so this operator is optional, but MAY be included. + +For example, the comparator `>=1.2.7` would match the versions +`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` +or `1.1.0`. + +Comparators can be joined by whitespace to form a `comparator set`, +which is satisfied by the **intersection** of all of the comparators +it includes. + +A range is composed of one or more comparator sets, joined by `||`. A +version matches a range if and only if every comparator in at least +one of the `||`-separated comparator sets is satisfied by the version. + +For example, the range `>=1.2.7 <1.3.0` would match the versions +`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, +or `1.1.0`. + +The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, +`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. 
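The comparator examples above can be checked directly with `satisfies()` (documented under Functions, below); the expected results follow from the ranges just described:

```js
const semver = require('semver')

semver.satisfies('1.2.8', '>=1.2.7 <1.3.0')          // true
semver.satisfies('1.3.0', '>=1.2.7 <1.3.0')          // false
semver.satisfies('1.4.6', '1.2.7 || >=1.2.9 <2.0.0') // true
semver.satisfies('1.2.8', '1.2.7 || >=1.2.9 <2.0.0') // false
```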
+ +### Prerelease Tags + +If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then +it will only be allowed to satisfy comparator sets if at least one +comparator with the same `[major, minor, patch]` tuple also has a +prerelease tag. + +For example, the range `>1.2.3-alpha.3` would be allowed to match the +version `1.2.3-alpha.7`, but it would *not* be satisfied by +`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater +than" `1.2.3-alpha.3` according to the SemVer sort rules. The version +range only accepts prerelease tags on the `1.2.3` version. The +version `3.4.5` *would* satisfy the range, because it does not have a +prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. + +The purpose for this behavior is twofold. First, prerelease versions +frequently are updated very quickly, and contain many breaking changes +that are (by the author's design) not yet fit for public consumption. +Therefore, by default, they are excluded from range matching +semantics. + +Second, a user who has opted into using a prerelease version has +clearly indicated the intent to use *that specific* set of +alpha/beta/rc versions. By including a prerelease tag in the range, +the user is indicating that they are aware of the risk. However, it +is still not appropriate to assume that they have opted into taking a +similar risk on the *next* set of prerelease versions. + +Note that this behavior can be suppressed (treating all prerelease +versions as if they were normal versions, for the purpose of range +matching) by setting the `includePrerelease` flag on the options +object to any +[functions](https://github.com/npm/node-semver#functions) that do +range matching. + +#### Prerelease Identifiers + +The method `.inc` takes an additional `identifier` string argument that +will append the value of the string as a prerelease identifier: + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta') +// '1.2.4-beta.0' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta +1.2.4-beta.0 +``` + +Which then can be used to increment further: + +```bash +$ semver 1.2.4-beta.0 -i prerelease +1.2.4-beta.1 +``` + +### Advanced Range Syntax + +Advanced range syntax desugars to primitive comparators in +deterministic ways. + +Advanced ranges may be combined in the same way as primitive +comparators using white space or `||`. + +#### Hyphen Ranges `X.Y.Z - A.B.C` + +Specifies an inclusive set. + +* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` + +If a partial version is provided as the first version in the inclusive +range, then the missing pieces are replaced with zeroes. + +* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` + +If a partial version is provided as the second version in the +inclusive range, then all versions that start with the supplied parts +of the tuple are accepted, but nothing that would be greater than the +provided tuple parts. + +* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0` +* `1.2.3 - 2` := `>=1.2.3 <3.0.0` + +#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` + +Any of `X`, `x`, or `*` may be used to "stand in" for one of the +numeric values in the `[major, minor, patch]` tuple. + +* `*` := `>=0.0.0` (Any version satisfies) +* `1.x` := `>=1.0.0 <2.0.0` (Matching major version) +* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions) + +A partial version range is treated as an X-Range, so the special +character is in fact optional. 
+ +* `""` (empty string) := `*` := `>=0.0.0` +* `1` := `1.x.x` := `>=1.0.0 <2.0.0` +* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0` + +#### Tilde Ranges `~1.2.3` `~1.2` `~1` + +Allows patch-level changes if a minor version is specified on the +comparator. Allows minor-level changes if not. + +* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0` +* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`) +* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`) +* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0` +* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`) +* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`) +* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. + +#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` + +Allows changes that do not modify the left-most non-zero digit in the +`[major, minor, patch]` tuple. In other words, this allows patch and +minor updates for versions `1.0.0` and above, patch updates for +versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. + +Many authors treat a `0.x` version as if the `x` were the major +"breaking-change" indicator. + +Caret ranges are ideal when an author may make breaking changes +between `0.2.4` and `0.3.0` releases, which is a common practice. +However, it presumes that there will *not* be breaking changes between +`0.2.4` and `0.2.5`. It allows for changes that are presumed to be +additive (but non-breaking), according to commonly observed practices. + +* `^1.2.3` := `>=1.2.3 <2.0.0` +* `^0.2.3` := `>=0.2.3 <0.3.0` +* `^0.0.3` := `>=0.0.3 <0.0.4` +* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. +* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the + `0.0.3` version *only* will be allowed, if they are greater than or + equal to `beta`. So, `0.0.3-pr.2` would be allowed. + +When parsing caret ranges, a missing `patch` value desugars to the +number `0`, but will allow flexibility within that value, even if the +major and minor versions are both `0`. + +* `^1.2.x` := `>=1.2.0 <2.0.0` +* `^0.0.x` := `>=0.0.0 <0.1.0` +* `^0.0` := `>=0.0.0 <0.1.0` + +A missing `minor` and `patch` values will desugar to zero, but also +allow flexibility within those values, even if the major version is +zero. + +* `^1.x` := `>=1.0.0 <2.0.0` +* `^0.x` := `>=0.0.0 <1.0.0` + +### Range Grammar + +Putting all this together, here is a Backus-Naur grammar for ranges, +for the benefit of parser authors: + +```bnf +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' 
part ) * +part ::= nr | [-0-9A-Za-z]+ +``` + +## Functions + +All methods and classes take a final `options` object argument. All +options in this object are `false` by default. The options supported +are: + +- `loose` Be more forgiving about not-quite-valid semver strings. + (Any resulting output will always be 100% strict compliant, of + course.) For backwards compatibility reasons, if the `options` + argument is a boolean value instead of an object, it is interpreted + to be the `loose` param. +- `includePrerelease` Set to suppress the [default + behavior](https://github.com/npm/node-semver#prerelease-tags) of + excluding prerelease tagged versions from ranges unless they are + explicitly opted into. + +Strict-mode Comparators and Ranges will be strict about the SemVer +strings that they parse. + +* `valid(v)`: Return the parsed version, or null if it's not valid. +* `inc(v, release)`: Return the version incremented by the release + type (`major`, `premajor`, `minor`, `preminor`, `patch`, + `prepatch`, or `prerelease`), or null if it's not valid + * `premajor` in one call will bump the version up to the next major + version and down to a prerelease of that major version. + `preminor`, and `prepatch` work the same way. + * If called from a non-prerelease version, the `prerelease` will work the + same as `prepatch`. It increments the patch version, then makes a + prerelease. If the input version is already a prerelease it simply + increments it. +* `prerelease(v)`: Returns an array of prerelease components, or null + if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` +* `major(v)`: Return the major version number. +* `minor(v)`: Return the minor version number. +* `patch(v)`: Return the patch version number. +* `intersects(r1, r2, loose)`: Return true if the two supplied ranges + or comparators intersect. +* `parse(v)`: Attempt to parse a string as a semantic version, returning either + a `SemVer` object or `null`. + +### Comparison + +* `gt(v1, v2)`: `v1 > v2` +* `gte(v1, v2)`: `v1 >= v2` +* `lt(v1, v2)`: `v1 < v2` +* `lte(v1, v2)`: `v1 <= v2` +* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, + even if they're not the exact same string. You already know how to + compare strings. +* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. +* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call + the corresponding function above. `"==="` and `"!=="` do simple + string comparison, but are included for completeness. Throws if an + invalid comparison string is provided. +* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions + in descending order when passed to `Array.sort()`. +* `diff(v1, v2)`: Returns difference between two versions by the release type + (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), + or null if the versions are the same. + +### Comparators + +* `intersects(comparator)`: Return true if the comparators intersect + +### Ranges + +* `validRange(range)`: Return the valid range or null if it's not valid +* `satisfies(version, range)`: Return true if the version satisfies the + range. +* `maxSatisfying(versions, range)`: Return the highest version in the list + that satisfies the range, or `null` if none of them do. 
+* `minSatisfying(versions, range)`: Return the lowest version in the list + that satisfies the range, or `null` if none of them do. +* `minVersion(range)`: Return the lowest version that can possibly match + the given range. +* `gtr(version, range)`: Return `true` if version is greater than all the + versions possible in the range. +* `ltr(version, range)`: Return `true` if version is less than all the + versions possible in the range. +* `outside(version, range, hilo)`: Return true if the version is outside + the bounds of the range in either the high or low direction. The + `hilo` argument must be either the string `'>'` or `'<'`. (This is + the function called by `gtr` and `ltr`.) +* `intersects(range)`: Return true if any of the ranges comparators intersect + +Note that, since ranges may be non-contiguous, a version might not be +greater than a range, less than a range, *or* satisfy a range! For +example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` +until `2.0.0`, so the version `1.2.10` would not be greater than the +range (because `2.0.1` satisfies, which is higher), nor less than the +range (since `1.2.8` satisfies, which is lower), and it also does not +satisfy the range. + +If you want to know if a version satisfies or does not satisfy a +range, use the `satisfies(version, range)` function. + +### Coercion + +* `coerce(version)`: Coerces a string to semver if possible + +This aims to provide a very forgiving translation of a non-semver string to +semver. It looks for the first digit in a string, and consumes all +remaining characters which satisfy at least a partial semver (e.g., `1`, +`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer +versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All +surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes +`3.4.0`). Only text which lacks digits will fail coercion (`version one` +is not valid). The maximum length for any semver component considered for +coercion is 16 characters; longer components will be ignored +(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any +semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value +components are invalid (`9999999999999999.4.7.4` is likely invalid). diff --git a/mybulma/node_modules/read-pkg/node_modules/semver/bin/semver b/mybulma/node_modules/read-pkg/node_modules/semver/bin/semver new file mode 100644 index 0000000..801e77f --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/semver/bin/semver @@ -0,0 +1,160 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. 
+ +var argv = process.argv.slice(2) + +var versions = [] + +var range = [] + +var inc = null + +var version = require('../package.json').version + +var loose = false + +var includePrerelease = false + +var coerce = false + +var identifier + +var semver = require('../semver') + +var reverse = false + +var options = {} + +main() + +function main () { + if (!argv.length) return help() + while (argv.length) { + var a = argv.shift() + var indexOfEqualSign = a.indexOf('=') + if (indexOfEqualSign !== -1) { + a = a.slice(0, indexOfEqualSign) + argv.unshift(a.slice(indexOfEqualSign + 1)) + } + switch (a) { + case '-rv': case '-rev': case '--rev': case '--reverse': + reverse = true + break + case '-l': case '--loose': + loose = true + break + case '-p': case '--include-prerelease': + includePrerelease = true + break + case '-v': case '--version': + versions.push(argv.shift()) + break + case '-i': case '--inc': case '--increment': + switch (argv[0]) { + case 'major': case 'minor': case 'patch': case 'prerelease': + case 'premajor': case 'preminor': case 'prepatch': + inc = argv.shift() + break + default: + inc = 'patch' + break + } + break + case '--preid': + identifier = argv.shift() + break + case '-r': case '--range': + range.push(argv.shift()) + break + case '-c': case '--coerce': + coerce = true + break + case '-h': case '--help': case '-?': + return help() + default: + versions.push(a) + break + } + } + + var options = { loose: loose, includePrerelease: includePrerelease } + + versions = versions.map(function (v) { + return coerce ? (semver.coerce(v) || { version: v }).version : v + }).filter(function (v) { + return semver.valid(v) + }) + if (!versions.length) return fail() + if (inc && (versions.length !== 1 || range.length)) { return failInc() } + + for (var i = 0, l = range.length; i < l; i++) { + versions = versions.filter(function (v) { + return semver.satisfies(v, range[i], options) + }) + if (!versions.length) return fail() + } + return success(versions) +} + +function failInc () { + console.error('--inc can only be used on a single version with no range') + fail() +} + +function fail () { process.exit(1) } + +function success () { + var compare = reverse ? 'rcompare' : 'compare' + versions.sort(function (a, b) { + return semver[compare](a, b, options) + }).map(function (v) { + return semver.clean(v, options) + }).map(function (v) { + return inc ? semver.inc(v, inc, options, identifier) : v + }).forEach(function (v, i, _) { console.log(v) }) +} + +function help () { + console.log(['SemVer ' + version, + '', + 'A JavaScript implementation of the https://semver.org/ specification', + 'Copyright Isaac Z. Schlueter', + '', + 'Usage: semver [options] [ [...]]', + 'Prints valid versions sorted by SemVer precedence', + '', + 'Options:', + '-r --range ', + ' Print versions that match the specified range.', + '', + '-i --increment []', + ' Increment a version by the specified level. Level can', + ' be one of: major, minor, patch, premajor, preminor,', + " prepatch, or prerelease. 
Default level is 'patch'.", + ' Only one version may be specified.', + '', + '--preid ', + ' Identifier to be used to prefix premajor, preminor,', + ' prepatch or prerelease version increments.', + '', + '-l --loose', + ' Interpret versions and ranges loosely', + '', + '-p --include-prerelease', + ' Always include prerelease versions in range matching', + '', + '-c --coerce', + ' Coerce a string into SemVer if possible', + ' (does not imply --loose)', + '', + 'Program exits successfully if any valid version satisfies', + 'all supplied ranges, and prints all satisfying versions.', + '', + 'If no satisfying versions are found, then exits failure.', + '', + 'Versions are printed in ascending order, so supplying', + 'multiple versions to the utility will just sort them.' + ].join('\n')) +} diff --git a/mybulma/node_modules/read-pkg/node_modules/semver/package.json b/mybulma/node_modules/read-pkg/node_modules/semver/package.json new file mode 100644 index 0000000..69d2db1 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/semver/package.json @@ -0,0 +1,28 @@ +{ + "name": "semver", + "version": "5.7.1", + "description": "The semantic version parser used by npm.", + "main": "semver.js", + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "devDependencies": { + "tap": "^13.0.0-rc.18" + }, + "license": "ISC", + "repository": "https://github.com/npm/node-semver", + "bin": { + "semver": "./bin/semver" + }, + "files": [ + "bin", + "range.bnf", + "semver.js" + ], + "tap": { + "check-coverage": true + } +} diff --git a/mybulma/node_modules/read-pkg/node_modules/semver/range.bnf b/mybulma/node_modules/read-pkg/node_modules/semver/range.bnf new file mode 100644 index 0000000..d4c6ae0 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/semver/range.bnf @@ -0,0 +1,16 @@ +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | [1-9] ( [0-9] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ diff --git a/mybulma/node_modules/read-pkg/node_modules/semver/semver.js b/mybulma/node_modules/read-pkg/node_modules/semver/semver.js new file mode 100644 index 0000000..d315d5d --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/semver/semver.js @@ -0,0 +1,1483 @@ +exports = module.exports = SemVer + +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} +} + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' + +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. 
+var MAX_SAFE_COMPONENT_LENGTH = 16 + +// The actual regexps go on exports.re +var re = exports.re = [] +var src = exports.src = [] +var R = 0 + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +var NUMERICIDENTIFIER = R++ +src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' +var NUMERICIDENTIFIERLOOSE = R++ +src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +var NONNUMERICIDENTIFIER = R++ +src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' + +// ## Main Version +// Three dot-separated numeric identifiers. + +var MAINVERSION = R++ +src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')' + +var MAINVERSIONLOOSE = R++ +src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')' + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +var PRERELEASEIDENTIFIER = R++ +src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + + '|' + src[NONNUMERICIDENTIFIER] + ')' + +var PRERELEASEIDENTIFIERLOOSE = R++ +src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + + '|' + src[NONNUMERICIDENTIFIER] + ')' + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +var PRERELEASE = R++ +src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + + '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' + +var PRERELEASELOOSE = R++ +src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +var BUILDIDENTIFIER = R++ +src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +var BUILD = R++ +src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + + '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +var FULL = R++ +var FULLPLAIN = 'v?' + src[MAINVERSION] + + src[PRERELEASE] + '?' + + src[BUILD] + '?' + +src[FULL] = '^' + FULLPLAIN + '$' + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + + src[PRERELEASELOOSE] + '?' + + src[BUILD] + '?' + +var LOOSE = R++ +src[LOOSE] = '^' + LOOSEPLAIN + '$' + +var GTLT = R++ +src[GTLT] = '((?:<|>)?=?)' + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. 
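+// Editor's note (illustrative, not in the upstream source): once desugared by
+// replaceXRanges() further down, "1" or "1.x" becomes ">=1.0.0 <2.0.0",
+// "1.2.x" becomes ">=1.2.0 <1.3.0", and a bare "*" (or "") matches any version.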
+var XRANGEIDENTIFIERLOOSE = R++ +src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +var XRANGEIDENTIFIER = R++ +src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' + +var XRANGEPLAIN = R++ +src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:' + src[PRERELEASE] + ')?' + + src[BUILD] + '?' + + ')?)?' + +var XRANGEPLAINLOOSE = R++ +src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[PRERELEASELOOSE] + ')?' + + src[BUILD] + '?' + + ')?)?' + +var XRANGE = R++ +src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' +var XRANGELOOSE = R++ +src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +var COERCE = R++ +src[COERCE] = '(?:^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +var LONETILDE = R++ +src[LONETILDE] = '(?:~>?)' + +var TILDETRIM = R++ +src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' +re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') +var tildeTrimReplace = '$1~' + +var TILDE = R++ +src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' +var TILDELOOSE = R++ +src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +var LONECARET = R++ +src[LONECARET] = '(?:\\^)' + +var CARETTRIM = R++ +src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' +re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') +var caretTrimReplace = '$1^' + +var CARET = R++ +src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' +var CARETLOOSE = R++ +src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +var COMPARATORLOOSE = R++ +src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' +var COMPARATOR = R++ +src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +var COMPARATORTRIM = R++ +src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' + +// this one has to use the /g flag +re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') +var comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +var HYPHENRANGE = R++ +src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAIN] + ')' + + '\\s*$' + +var HYPHENRANGELOOSE = R++ +src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s*$' + +// Star ranges basically just allow anything at all. +var STAR = R++ +src[STAR] = '(<|>)?=?\\s*\\*' + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. 
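+// Editor's note: the exceptions are re[TILDETRIM], re[CARETTRIM] and
+// re[COMPARATORTRIM], which were compiled above with the /g flag; the loop
+// below only fills in the entries that are still undefined.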
+for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + } +} + +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + if (version.length > MAX_LENGTH) { + return null + } + + var r = options.loose ? re[LOOSE] : re[FULL] + if (!r.test(version)) { + return null + } + + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} + +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} + +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? s.version : null +} + +exports.SemVer = SemVer + +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } + + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + + var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL]) + + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() +} + +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' 
+ this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version +} + +SemVer.prototype.toString = function () { + return this.version +} + +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return this.compareMain(other) || this.comparePre(other) +} + +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} + +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. 
+ // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. + case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break + + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this +} + +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } + + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} + +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } + } + return defaultResult // may be undefined + } +} + +exports.compareIdentifiers = compareIdentifiers + +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} + +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) +} + +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major +} + +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} + +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} + +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} + +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} + +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} + +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compare(a, b, loose) + }) +} + +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.rcompare(a, b, loose) + }) +} + +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 +} + +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 +} + +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} + +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} + +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 +} + +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} + +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b + + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError('Invalid operator: ' + op) + } +} + +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } + + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) +} + +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var m = comp.match(r) + + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } + + this.operator = m[1] + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
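+  // Editor's note: ANY is the shared sentinel object defined above; a comparator
+  // whose semver is ANY (produced by the empty comparator '') matches every
+  // version in test().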
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} + +Comparator.prototype.toString = function () { + return this.value +} + +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY) { + return true + } + + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } + + return cmp(version, this.operator, this.semver, this.options) +} + +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + var rangeTmp + + if (this.operator === '') { + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) + } + + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) + + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} + +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + return new Range(range.value, options) + } + + if (!(this instanceof Range)) { + return new Range(range, options) + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First, split based on boolean or || + this.raw = range + this.set = range.split(/\s*\|\|\s*/).map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range) + } + + this.format() +} + +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range +} + +Range.prototype.toString = function () { + return this.range +} + +Range.prototype.parseRange = function (range) { + var loose = this.options.loose + range = range.trim() + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose 
? re[HYPHENRANGELOOSE] : re[HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, re[COMPARATORTRIM]) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[TILDETRIM], tildeTrimReplace) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[CARETTRIM], caretTrimReplace) + + // normalize spaces + range = range.split(/\s+/).join(' ') + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) + } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) + + return set +} + +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some(function (thisComparators) { + return thisComparators.every(function (thisComparator) { + return range.set.some(function (rangeComparators) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + }) + }) +} + +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. +function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') +} + +function replaceTilde (comp, options) { + var r = options.loose ? re[TILDELOOSE] : re[TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' 
+ (+m + 1) + '.0' + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} + +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? re[CARETLOOSE] : re[CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } + } + + debug('caret return', ret) + return ret + }) +} + +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} + +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? re[XRANGELOOSE] : re[XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + ret = gtlt + M + '.' + m + '.' + p + } else if (xm) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (xp) { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! 
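+  // Editor's note: e.g. replaceStars('*') yields '', while replaceStars('>=1.2.3')
+  // is returned unchanged, since re[STAR] only matches an (optionally prefixed) '*'.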
+ return comp.trim().replace(re[STAR], '') +} + +// This function is passed to string.replace(re[HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to + } + + return (from + ' ' + to).trim() +} + +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false +} + +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. 
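+    // Editor's note: e.g. '1.2.4-alpha.1' is rejected for the range '^1.2.3-pr.1'
+    // unless options.includePrerelease is set, even though 1.2.4 itself is in range.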
+ return false + } + + return true +} + +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} + +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} + +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} + +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) + + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. + var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} + +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} + +// Determine if version is greater than all the versions possible in the range. +exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} + +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) + + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. 
+ // but note that everything is flipped for the "ltr" function. + + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + var high = null + var low = null + + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} + +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} + +exports.coerce = coerce +function coerce (version) { + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + var match = version.match(re[COERCE]) + + if (match == null) { + return null + } + + return parse(match[1] + + '.' + (match[2] || '0') + + '.' + (match[3] || '0')) +} diff --git a/mybulma/node_modules/read-pkg/node_modules/type-fest/index.d.ts b/mybulma/node_modules/read-pkg/node_modules/type-fest/index.d.ts new file mode 100644 index 0000000..4ef4b18 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/type-fest/index.d.ts @@ -0,0 +1,15 @@ +// Basic +export * from './source/basic'; + +// Utilities +export {Except} from './source/except'; +export {Mutable} from './source/mutable'; +export {Merge} from './source/merge'; +export {MergeExclusive} from './source/merge-exclusive'; +export {RequireAtLeastOne} from './source/require-at-least-one'; +export {ReadonlyDeep} from './source/readonly-deep'; +export {LiteralUnion} from './source/literal-union'; +export {Promisable} from './source/promisable'; + +// Miscellaneous +export {PackageJson} from './source/package-json'; diff --git a/mybulma/node_modules/read-pkg/node_modules/type-fest/license b/mybulma/node_modules/read-pkg/node_modules/type-fest/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/type-fest/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/mybulma/node_modules/read-pkg/node_modules/type-fest/package.json b/mybulma/node_modules/read-pkg/node_modules/type-fest/package.json new file mode 100644 index 0000000..2345809 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/type-fest/package.json @@ -0,0 +1,51 @@ +{ + "name": "type-fest", + "version": "0.6.0", + "description": "A collection of essential TypeScript types", + "license": "(MIT OR CC0-1.0)", + "repository": "sindresorhus/type-fest", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && tsd" + }, + "files": [ + "index.d.ts", + "source" + ], + "keywords": [ + "typescript", + "ts", + "types", + "utility", + "util", + "utilities", + "omit", + "merge", + "json" + ], + "devDependencies": { + "@sindresorhus/tsconfig": "^0.4.0", + "@typescript-eslint/eslint-plugin": "^1.9.0", + "@typescript-eslint/parser": "^1.10.2", + "eslint-config-xo-typescript": "^0.14.0", + "tsd": "^0.7.3", + "xo": "^0.24.0" + }, + "xo": { + "extends": "xo-typescript", + "extensions": [ + "ts" + ], + "rules": { + "import/no-unresolved": "off", + "@typescript-eslint/indent": "off" + } + } +} diff --git a/mybulma/node_modules/read-pkg/node_modules/type-fest/readme.md b/mybulma/node_modules/read-pkg/node_modules/type-fest/readme.md new file mode 100644 index 0000000..ff3840c --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/type-fest/readme.md @@ -0,0 +1,119 @@ +
+<!-- centered logo header (HTML markup lost in extraction) -->
+type-fest
+
+A collection of essential TypeScript types
+
+ +[![Build Status](https://travis-ci.com/sindresorhus/type-fest.svg?branch=master)](https://travis-ci.com/sindresorhus/type-fest) +[![](https://img.shields.io/badge/unicorn-approved-ff69b4.svg)](https://www.youtube.com/watch?v=9auOCbH5Ns4) + + +Many of the types here should have been built-in. You can help by suggesting some of them to the [TypeScript project](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). + +Either add this package as a dependency or copy-paste the needed types. No credit required. 👌 + +PR welcome for additional commonly needed types and docs improvements. Read the [contributing guidelines](.github/contributing.md) first. + + +## Install + +``` +$ npm install type-fest +``` + +*Requires TypeScript >=3.2* + + +## Usage + +```ts +import {Except} from 'type-fest'; + +type Foo = { + unicorn: string; + rainbow: boolean; +}; + +type FooWithoutRainbow = Except; +//=> {unicorn: string} +``` + + +## API + +Click the type names for complete docs. + +### Basic + +- [`Primitive`](source/basic.d.ts) - Matches any [primitive value](https://developer.mozilla.org/en-US/docs/Glossary/Primitive). +- [`Class`](source/basic.d.ts) - Matches a [`class` constructor](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes). +- [`TypedArray`](source/basic.d.ts) - Matches any [typed array](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray), like `Uint8Array` or `Float64Array`. +- [`JsonObject`](source/basic.d.ts) - Matches a JSON object. +- [`JsonArray`](source/basic.d.ts) - Matches a JSON array. +- [`JsonValue`](source/basic.d.ts) - Matches any valid JSON value. +- [`ObservableLike`](source/basic.d.ts) - Matches a value that is like an [Observable](https://github.com/tc39/proposal-observable). + +### Utilities + +- [`Except`](source/except.d.ts) - Create a type from an object type without certain keys. This is a stricter version of [`Omit`](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-3-5.html#the-omit-helper-type). +- [`Mutable`](source/mutable.d.ts) - Convert an object with `readonly` properties into a mutable object. Inverse of `Readonly`. +- [`Merge`](source/merge.d.ts) - Merge two types into a new type. Keys of the second type overrides keys of the first type. +- [`MergeExclusive`](source/merge-exclusive.d.ts) - Create a type that has mutually exclusive properties. +- [`RequireAtLeastOne`](source/require-at-least-one.d.ts) - Create a type that requires at least one of the given properties. +- [`ReadonlyDeep`](source/readonly-deep.d.ts) - Create a deeply immutable version of a `object`/`Map`/`Set`/`Array` type. +- [`LiteralUnion`](source/literal-union.d.ts) - Create a union type by combining primitive types and literal types without sacrificing auto-completion in IDEs for the literal type part of the union. Workaround for [Microsoft/TypeScript#29729](https://github.com/Microsoft/TypeScript/issues/29729). +- [`Promisable`](source/promisable.d.ts) - Create a type that represents either the value or the value wrapped in `PromiseLike`. + +### Miscellaneous + +- [`PackageJson`](source/package-json.d.ts) - Type for [npm's `package.json` file](https://docs.npmjs.com/creating-a-package-json-file). + + +## Declined types + +*If we decline a type addition, we will make sure to document the better solution here.* + +- [`Diff` and `Spread`](https://github.com/sindresorhus/type-fest/pull/7) - The PR author didn't provide any real-world use-cases and the PR went stale. 
If you think this type is useful, provide some real-world use-cases and we might reconsider. + + +## Tips + +### Built-in types + +There are many advanced types most users don't know about. + +- [`Partial`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1401-L1406) - Make all properties in `T` optional. +- [`Required`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1408-L1413) - Make all properties in `T` required. +- [`Readonly`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1415-L1420) - Make all properties in `T` readonly. +- [`Pick`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1422-L1427) - From `T`, pick a set of properties whose keys are in the union `K`. +- [`Record`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1429-L1434) - Construct a type with a set of properties `K` of type `T`. +- [`Exclude`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1436-L1439) - Exclude from `T` those types that are assignable to `U`. +- [`Extract`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1441-L1444) - Extract from `T` those types that are assignable to `U`. +- [`NonNullable`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1446-L1449) - Exclude `null` and `undefined` from `T`. +- [`Parameters`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1451-L1454) - Obtain the parameters of a function type in a tuple. +- [`ConstructorParameters`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1456-L1459) - Obtain the parameters of a constructor function type in a tuple. +- [`ReturnType`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1461-L1464) – Obtain the return type of a function type. +- [`InstanceType`](https://github.com/Microsoft/TypeScript/blob/2961bc3fc0ea1117d4e53bc8e97fa76119bc33e3/src/lib/es5.d.ts#L1466-L1469) – Obtain the instance type of a constructor function type. + +You can find some examples in the [TypeScript docs](https://www.typescriptlang.org/docs/handbook/advanced-types.html#predefined-conditional-types). + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Jarek Radosz](https://github.com/CvX) +- [Dimitri Benin](https://github.com/BendingBender) + + +## License + +(MIT OR CC0-1.0) diff --git a/mybulma/node_modules/read-pkg/node_modules/type-fest/source/basic.d.ts b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/basic.d.ts new file mode 100644 index 0000000..f308c5f --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/basic.d.ts @@ -0,0 +1,67 @@ +/// + +// TODO: This can just be `export type Primitive = not object` when the `not` keyword is out. +/** +Matches any [primitive value](https://developer.mozilla.org/en-US/docs/Glossary/Primitive). 
+*/ +export type Primitive = + | null + | undefined + | string + | number + | boolean + | symbol + | bigint; + +// TODO: Remove the `= unknown` sometime in the future when most users are on TS 3.5 as it's now the default +/** +Matches a [`class` constructor](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes). +*/ +export type Class = new(...arguments_: any[]) => T; + +/** +Matches any [typed array](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray), like `Uint8Array` or `Float64Array`. +*/ +export type TypedArray = + | Int8Array + | Uint8Array + | Uint8ClampedArray + | Int16Array + | Uint16Array + | Int32Array + | Uint32Array + | Float32Array + | Float64Array + | BigInt64Array + | BigUint64Array; + +/** +Matches a JSON object. + +This type can be useful to enforce some input to be JSON-compatible or as a super-type to be extended from. Don't use this as a direct return type as the user would have to double-cast it: `jsonObject as unknown as CustomResponse`. Instead, you could extend your CustomResponse type from it to ensure your type only uses JSON-compatible types: `interface CustomResponse extends JsonObject { … }`. +*/ +export type JsonObject = {[key: string]: JsonValue}; + +/** +Matches a JSON array. +*/ +export interface JsonArray extends Array {} + +/** +Matches any valid JSON value. +*/ +export type JsonValue = string | number | boolean | null | JsonObject | JsonArray; + +declare global { + interface SymbolConstructor { + readonly observable: symbol; + } +} + +/** +Matches a value that is like an [Observable](https://github.com/tc39/proposal-observable). +*/ +export interface ObservableLike { + subscribe(observer: (value: unknown) => void): void; + [Symbol.observable](): ObservableLike; +} diff --git a/mybulma/node_modules/read-pkg/node_modules/type-fest/source/except.d.ts b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/except.d.ts new file mode 100644 index 0000000..7dedbaa --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/except.d.ts @@ -0,0 +1,22 @@ +/** +Create a type from an object type without certain keys. + +This type is a stricter version of [`Omit`](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-3-5.html#the-omit-helper-type). The `Omit` type does not restrict the omitted keys to be keys present on the given type, while `Except` does. The benefits of a stricter type are avoiding typos and allowing the compiler to pick up on rename refactors automatically. + +Please upvote [this issue](https://github.com/microsoft/TypeScript/issues/30825) if you want to have the stricter version as a built-in in TypeScript. + +@example +``` +import {Except} from 'type-fest'; + +type Foo = { + a: number; + b: string; + c: boolean; +}; + +type FooWithoutA = Except; +//=> {b: string}; +``` +*/ +export type Except = Pick>; diff --git a/mybulma/node_modules/read-pkg/node_modules/type-fest/source/literal-union.d.ts b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/literal-union.d.ts new file mode 100644 index 0000000..52e8de6 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/literal-union.d.ts @@ -0,0 +1,33 @@ +import {Primitive} from './basic'; + +/** +Allows creating a union type by combining primitive types and literal types without sacrificing auto-completion in IDEs for the literal type part of the union. 
+ +Currently, when a union type of a primitive type is combined with literal types, TypeScript loses all information about the combined literals. Thus, when such type is used in an IDE with autocompletion, no suggestions are made for the declared literals. + +This type is a workaround for [Microsoft/TypeScript#29729](https://github.com/Microsoft/TypeScript/issues/29729). It will be removed as soon as it's not needed anymore. + +@example +``` +import {LiteralUnion} from 'type-fest'; + +// Before + +type Pet = 'dog' | 'cat' | string; + +const pet: Pet = ''; +// Start typing in your TypeScript-enabled IDE. +// You **will not** get auto-completion for `dog` and `cat` literals. + +// After + +type Pet2 = LiteralUnion<'dog' | 'cat', string>; + +const pet: Pet2 = ''; +// You **will** get auto-completion for `dog` and `cat` literals. +``` + */ +export type LiteralUnion< + LiteralType extends BaseType, + BaseType extends Primitive +> = LiteralType | (BaseType & {_?: never}); diff --git a/mybulma/node_modules/read-pkg/node_modules/type-fest/source/merge-exclusive.d.ts b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/merge-exclusive.d.ts new file mode 100644 index 0000000..6290f42 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/merge-exclusive.d.ts @@ -0,0 +1,39 @@ +// Helper type. Not useful on its own. +type Without = {[KeyType in Exclude]?: never}; + +/** +Create a type that has mutually exclusive properties. + +This type was inspired by [this comment](https://github.com/Microsoft/TypeScript/issues/14094#issuecomment-373782604). + +This type works with a helper type, called `Without`. `Without` produces a type that has only keys from `FirstType` which are not present on `SecondType` and sets the value type for these keys to `never`. This helper type is then used in `MergeExclusive` to remove keys from either `FirstType` or `SecondType`. + +@example +``` +import {MergeExclusive} from 'type-fest'; + +interface ExclusiveVariation1 { + exclusive1: boolean; +} + +interface ExclusiveVariation2 { + exclusive2: string; +} + +type ExclusiveOptions = MergeExclusive; + +let exclusiveOptions: ExclusiveOptions; + +exclusiveOptions = {exclusive1: true}; +//=> Works +exclusiveOptions = {exclusive2: 'hi'}; +//=> Works +exclusiveOptions = {exclusive1: true, exclusive2: 'hi'}; +//=> Error +``` +*/ +export type MergeExclusive = + (FirstType | SecondType) extends object ? + (Without & SecondType) | (Without & FirstType) : + FirstType | SecondType; + diff --git a/mybulma/node_modules/read-pkg/node_modules/type-fest/source/merge.d.ts b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/merge.d.ts new file mode 100644 index 0000000..4b3920b --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/merge.d.ts @@ -0,0 +1,22 @@ +import {Except} from './except'; + +/** +Merge two types into a new type. Keys of the second type overrides keys of the first type. 
+ +@example +``` +import {Merge} from 'type-fest'; + +type Foo = { + a: number; + b: string; +}; + +type Bar = { + b: number; +}; + +const ab: Merge = {a: 1, b: 2}; +``` +*/ +export type Merge = Except> & SecondType; diff --git a/mybulma/node_modules/read-pkg/node_modules/type-fest/source/mutable.d.ts b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/mutable.d.ts new file mode 100644 index 0000000..5c98039 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/mutable.d.ts @@ -0,0 +1,22 @@ +/** +Convert an object with `readonly` properties into a mutable object. Inverse of `Readonly`. + +This can be used to [store and mutate options within a class](https://github.com/sindresorhus/pageres/blob/4a5d05fca19a5fbd2f53842cbf3eb7b1b63bddd2/source/index.ts#L72), [edit `readonly` objects within tests](https://stackoverflow.com/questions/50703834), and [construct a `readonly` object within a function](https://github.com/Microsoft/TypeScript/issues/24509). + +@example +``` +import {Mutable} from 'type-fest'; + +type Foo = { + readonly a: number; + readonly b: string; +}; + +const mutableFoo: Mutable = {a: 1, b: '2'}; +mutableFoo.a = 3; +``` +*/ +export type Mutable = { + // For each `Key` in the keys of `ObjectType`, make a mapped type by removing the `readonly` modifier from the property. + -readonly [KeyType in keyof ObjectType]: ObjectType[KeyType]; +}; diff --git a/mybulma/node_modules/read-pkg/node_modules/type-fest/source/package-json.d.ts b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/package-json.d.ts new file mode 100644 index 0000000..3179e58 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/package-json.d.ts @@ -0,0 +1,501 @@ +import {LiteralUnion} from '..'; + +declare namespace PackageJson { + /** + A person who has been involved in creating or maintaining the package. + */ + export type Person = + | string + | { + name: string; + url?: string; + email?: string; + }; + + export type BugsLocation = + | string + | { + /** + The URL to the package's issue tracker. + */ + url?: string; + + /** + The email address to which issues should be reported. + */ + email?: string; + }; + + export interface DirectoryLocations { + /** + Location for executable scripts. Sugar to generate entries in the `bin` property by walking the folder. + */ + bin?: string; + + /** + Location for Markdown files. + */ + doc?: string; + + /** + Location for example scripts. + */ + example?: string; + + /** + Location for the bulk of the library. + */ + lib?: string; + + /** + Location for man pages. Sugar to generate a `man` array by walking the folder. + */ + man?: string; + + /** + Location for test files. + */ + test?: string; + + [directoryType: string]: unknown; + } + + export type Scripts = { + /** + Run **before** the package is published (Also run on local `npm install` without any arguments). + */ + prepublish?: string; + + /** + Run both **before** the package is packed and published, and on local `npm install` without any arguments. This is run **after** `prepublish`, but **before** `prepublishOnly`. + */ + prepare?: string; + + /** + Run **before** the package is prepared and packed, **only** on `npm publish`. + */ + prepublishOnly?: string; + + /** + Run **before** a tarball is packed (on `npm pack`, `npm publish`, and when installing git dependencies). + */ + prepack?: string; + + /** + Run **after** the tarball has been generated and moved to its final destination. 
+ */ + postpack?: string; + + /** + Run **after** the package is published. + */ + publish?: string; + + /** + Run **after** the package is published. + */ + postpublish?: string; + + /** + Run **before** the package is installed. + */ + preinstall?: string; + + /** + Run **after** the package is installed. + */ + install?: string; + + /** + Run **after** the package is installed and after `install`. + */ + postinstall?: string; + + /** + Run **before** the package is uninstalled and before `uninstall`. + */ + preuninstall?: string; + + /** + Run **before** the package is uninstalled. + */ + uninstall?: string; + + /** + Run **after** the package is uninstalled. + */ + postuninstall?: string; + + /** + Run **before** bump the package version and before `version`. + */ + preversion?: string; + + /** + Run **before** bump the package version. + */ + version?: string; + + /** + Run **after** bump the package version. + */ + postversion?: string; + + /** + Run with the `npm test` command, before `test`. + */ + pretest?: string; + + /** + Run with the `npm test` command. + */ + test?: string; + + /** + Run with the `npm test` command, after `test`. + */ + posttest?: string; + + /** + Run with the `npm stop` command, before `stop`. + */ + prestop?: string; + + /** + Run with the `npm stop` command. + */ + stop?: string; + + /** + Run with the `npm stop` command, after `stop`. + */ + poststop?: string; + + /** + Run with the `npm start` command, before `start`. + */ + prestart?: string; + + /** + Run with the `npm start` command. + */ + start?: string; + + /** + Run with the `npm start` command, after `start`. + */ + poststart?: string; + + /** + Run with the `npm restart` command, before `restart`. Note: `npm restart` will run the `stop` and `start` scripts if no `restart` script is provided. + */ + prerestart?: string; + + /** + Run with the `npm restart` command. Note: `npm restart` will run the `stop` and `start` scripts if no `restart` script is provided. + */ + restart?: string; + + /** + Run with the `npm restart` command, after `restart`. Note: `npm restart` will run the `stop` and `start` scripts if no `restart` script is provided. + */ + postrestart?: string; + } & { + [scriptName: string]: string; + }; + + /** + Dependencies of the package. The version range is a string which has one or more space-separated descriptors. Dependencies can also be identified with a tarball or Git URL. + */ + export interface Dependency { + [packageName: string]: string; + } + + export interface NonStandardEntryPoints { + /** + An ECMAScript module ID that is the primary entry point to the program. + */ + module?: string; + + /** + A module ID with untranspiled code that is the primary entry point to the program. + */ + esnext?: + | string + | { + main?: string; + browser?: string; + [moduleName: string]: string | undefined; + }; + + /** + A hint to JavaScript bundlers or component tools when packaging modules for client side use. + */ + browser?: + | string + | { + [moduleName: string]: string | false; + }; + } + + export interface TypeScriptConfiguration { + /** + Location of the bundled TypeScript declaration file. + */ + types?: string; + + /** + Location of the bundled TypeScript declaration file. Alias of `types`. + */ + typings?: string; + } + + export interface YarnConfiguration { + /** + If your package only allows one version of a given dependency, and you’d like to enforce the same behavior as `yarn install --flat` on the command line, set this to `true`. 
+ + Note that if your `package.json` contains `"flat": true` and other packages depend on yours (e.g. you are building a library rather than an application), those other packages will also need `"flat": true` in their `package.json` or be installed with `yarn install --flat` on the command-line. + */ + flat?: boolean; + + /** + Selective version resolutions. Allows the definition of custom package versions inside dependencies without manual edits in the `yarn.lock` file. + */ + resolutions?: Dependency; + } + + export interface JSPMConfiguration { + /** + JSPM configuration. + */ + jspm?: PackageJson; + } +} + +/** +Type for [npm's `package.json` file](https://docs.npmjs.com/creating-a-package-json-file). Also includes types for fields used by other popular projects, like TypeScript and Yarn. +*/ +export type PackageJson = { + /** + The name of the package. + */ + name?: string; + + /** + Package version, parseable by [`node-semver`](https://github.com/npm/node-semver). + */ + version?: string; + + /** + Package description, listed in `npm search`. + */ + description?: string; + + /** + Keywords associated with package, listed in `npm search`. + */ + keywords?: string[]; + + /** + The URL to the package's homepage. + */ + homepage?: LiteralUnion<'.', string>; + + /** + The URL to the package's issue tracker and/or the email address to which issues should be reported. + */ + bugs?: PackageJson.BugsLocation; + + /** + The license for the package. + */ + license?: string; + + /** + The licenses for the package. + */ + licenses?: Array<{ + type?: string; + url?: string; + }>; + + author?: PackageJson.Person; + + /** + A list of people who contributed to the package. + */ + contributors?: PackageJson.Person[]; + + /** + A list of people who maintain the package. + */ + maintainers?: PackageJson.Person[]; + + /** + The files included in the package. + */ + files?: string[]; + + /** + The module ID that is the primary entry point to the program. + */ + main?: string; + + /** + The executable files that should be installed into the `PATH`. + */ + bin?: + | string + | { + [binary: string]: string; + }; + + /** + Filenames to put in place for the `man` program to find. + */ + man?: string | string[]; + + /** + Indicates the structure of the package. + */ + directories?: PackageJson.DirectoryLocations; + + /** + Location for the code repository. + */ + repository?: + | string + | { + type: string; + url: string; + }; + + /** + Script commands that are run at various times in the lifecycle of the package. The key is the lifecycle event, and the value is the command to run at that point. + */ + scripts?: PackageJson.Scripts; + + /** + Is used to set configuration parameters used in package scripts that persist across upgrades. + */ + config?: { + [configKey: string]: unknown; + }; + + /** + The dependencies of the package. + */ + dependencies?: PackageJson.Dependency; + + /** + Additional tooling dependencies that are not required for the package to work. Usually test, build, or documentation tooling. + */ + devDependencies?: PackageJson.Dependency; + + /** + Dependencies that are skipped if they fail to install. + */ + optionalDependencies?: PackageJson.Dependency; + + /** + Dependencies that will usually be required by the package user directly or via another dependency. + */ + peerDependencies?: PackageJson.Dependency; + + /** + Package names that are bundled when the package is published. + */ + bundledDependencies?: string[]; + + /** + Alias of `bundledDependencies`. 
+ */ + bundleDependencies?: string[]; + + /** + Engines that this package runs on. + */ + engines?: { + [EngineName in 'npm' | 'node' | string]: string; + }; + + /** + @deprecated + */ + engineStrict?: boolean; + + /** + Operating systems the module runs on. + */ + os?: Array>; + + /** + CPU architectures the module runs on. + */ + cpu?: Array>; + + /** + If set to `true`, a warning will be shown if package is installed locally. Useful if the package is primarily a command-line application that should be installed globally. + + @deprecated + */ + preferGlobal?: boolean; + + /** + If set to `true`, then npm will refuse to publish it. + */ + private?: boolean; + + /** + * A set of config values that will be used at publish-time. It's especially handy to set the tag, registry or access, to ensure that a given package is not tagged with 'latest', published to the global public registry or that a scoped module is private by default. + */ + publishConfig?: { + [config: string]: unknown; + }; +} & +PackageJson.NonStandardEntryPoints & +PackageJson.TypeScriptConfiguration & +PackageJson.YarnConfiguration & +PackageJson.JSPMConfiguration & { + [key: string]: unknown; +}; diff --git a/mybulma/node_modules/read-pkg/node_modules/type-fest/source/promisable.d.ts b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/promisable.d.ts new file mode 100644 index 0000000..71242a5 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/promisable.d.ts @@ -0,0 +1,23 @@ +/** +Create a type that represents either the value or the value wrapped in `PromiseLike`. + +Use-cases: +- A function accepts a callback that may either return a value synchronously or may return a promised value. +- This type could be the return type of `Promise#then()`, `Promise#catch()`, and `Promise#finally()` callbacks. + +Please upvote [this issue](https://github.com/microsoft/TypeScript/issues/31394) if you want to have this type as a built-in in TypeScript. + +@example +``` +import {Promisable} from 'type-fest'; + +async function logger(getLogEntry: () => Promisable): Promise { + const entry = await getLogEntry(); + console.log(entry); +} + +logger(() => 'foo'); +logger(() => Promise.resolve('bar')); +``` +*/ +export type Promisable = T | PromiseLike; diff --git a/mybulma/node_modules/read-pkg/node_modules/type-fest/source/readonly-deep.d.ts b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/readonly-deep.d.ts new file mode 100644 index 0000000..8de4677 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/readonly-deep.d.ts @@ -0,0 +1,59 @@ +import {Primitive} from './basic'; + +/** +Convert `object`s, `Map`s, `Set`s, and `Array`s and all of their properties/elements into immutable structures recursively. + +This is useful when a deeply nested structure needs to be exposed as completely immutable, for example, an imported JSON module or when receiving an API response that is passed around. + +Please upvote [this issue](https://github.com/microsoft/TypeScript/issues/13923) if you want to have this type as a built-in in TypeScript. 
+ +@example +``` +// data.json +{ + "foo": ["bar"] +} + +// main.ts +import {ReadonlyDeep} from 'type-fest'; +import dataJson = require('./data.json'); + +const data: ReadonlyDeep = dataJson; + +export default data; + +// test.ts +import data from './main'; + +data.foo.push('bar'); +//=> error TS2339: Property 'push' does not exist on type 'readonly string[]' +``` +*/ +export type ReadonlyDeep = T extends Primitive | ((...arguments: any[]) => unknown) + ? T + : T extends ReadonlyMap + ? ReadonlyMapDeep + : T extends ReadonlySet + ? ReadonlySetDeep + : T extends object + ? ReadonlyObjectDeep + : unknown; + +/** +Same as `ReadonlyDeep`, but accepts only `ReadonlyMap`s as inputs. Internal helper for `ReadonlyDeep`. +*/ +interface ReadonlyMapDeep + extends ReadonlyMap, ReadonlyDeep> {} + +/** +Same as `ReadonlyDeep`, but accepts only `ReadonlySet`s as inputs. Internal helper for `ReadonlyDeep`. +*/ +interface ReadonlySetDeep + extends ReadonlySet> {} + +/** +Same as `ReadonlyDeep`, but accepts only `object`s as inputs. Internal helper for `ReadonlyDeep`. +*/ +type ReadonlyObjectDeep = { + readonly [PropertyType in keyof ObjectType]: ReadonlyDeep +}; diff --git a/mybulma/node_modules/read-pkg/node_modules/type-fest/source/require-at-least-one.d.ts b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/require-at-least-one.d.ts new file mode 100644 index 0000000..2200d49 --- /dev/null +++ b/mybulma/node_modules/read-pkg/node_modules/type-fest/source/require-at-least-one.d.ts @@ -0,0 +1,32 @@ +import {Except} from './except'; + +/** +Create a type that requires at least one of the given properties. The remaining properties are kept as is. + +@example +``` +import {RequireAtLeastOne} from 'type-fest'; + +type Responder = { + text?: () => string; + json?: () => string; + + secure?: boolean; +}; + +const responder: RequireAtLeastOne = { + json: () => '{"message": "ok"}', + secure: true +}; +``` +*/ +export type RequireAtLeastOne = + { + // For each Key in KeysType make a mapped type + [Key in KeysType]: ( + // …by picking that Key's type and making it required + Required> + ) + }[KeysType] + // …then, make intersection types by adding the remaining properties to each mapped type. 
+ & Except; diff --git a/mybulma/node_modules/read-pkg/package.json b/mybulma/node_modules/read-pkg/package.json new file mode 100644 index 0000000..05dc532 --- /dev/null +++ b/mybulma/node_modules/read-pkg/package.json @@ -0,0 +1,49 @@ +{ + "name": "read-pkg", + "version": "5.2.0", + "description": "Read a package.json file", + "license": "MIT", + "repository": "sindresorhus/read-pkg", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "json", + "read", + "parse", + "file", + "fs", + "graceful", + "load", + "package", + "normalize" + ], + "dependencies": { + "@types/normalize-package-data": "^2.4.0", + "normalize-package-data": "^2.5.0", + "parse-json": "^5.0.0", + "type-fest": "^0.6.0" + }, + "devDependencies": { + "ava": "^2.2.0", + "tsd": "^0.7.2", + "xo": "^0.24.0" + }, + "xo": { + "ignores": [ + "test/test.js" + ] + } +} diff --git a/mybulma/node_modules/read-pkg/readme.md b/mybulma/node_modules/read-pkg/readme.md new file mode 100644 index 0000000..74afd10 --- /dev/null +++ b/mybulma/node_modules/read-pkg/readme.md @@ -0,0 +1,81 @@ +# read-pkg [![Build Status](https://travis-ci.org/sindresorhus/read-pkg.svg?branch=master)](https://travis-ci.org/sindresorhus/read-pkg) + +> Read a package.json file + + +## Why + +- [Gracefully handles filesystem issues](https://github.com/isaacs/node-graceful-fs) +- [Throws more helpful JSON errors](https://github.com/sindresorhus/parse-json) +- [Normalizes the data](https://github.com/npm/normalize-package-data#what-normalization-currently-entails) + + +## Install + +``` +$ npm install read-pkg +``` + + +## Usage + +```js +const readPkg = require('read-pkg'); + +(async () => { + console.log(await readPkg()); + //=> {name: 'read-pkg', …} + + console.log(await readPkg({cwd: 'some-other-directory'})); + //=> {name: 'unicorn', …} +})(); +``` + + +## API + +### readPkg(options?) + +Returns a `Promise` with the parsed JSON. + +### readPkg.sync(options?) + +Returns the parsed JSON. + +#### options + +Type: `object` + +##### cwd + +Type: `string`
+Default: `process.cwd()` + +Current working directory. + +##### normalize + +Type: `boolean`
+Default: `true` + +[Normalize](https://github.com/npm/normalize-package-data#what-normalization-currently-entails) the package data. + + +## Related + +- [read-pkg-up](https://github.com/sindresorhus/read-pkg-up) - Read the closest package.json file +- [write-pkg](https://github.com/sindresorhus/write-pkg) - Write a `package.json` file +- [load-json-file](https://github.com/sindresorhus/load-json-file) - Read and parse a JSON file + + +--- + +
+ + Get professional support for this package with a Tidelift subscription + +
+ + Tidelift helps make open source sustainable for maintainers while giving companies
assurances about security, maintenance, and licensing for their dependencies. +
+
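For completeness, here is a minimal sketch of the synchronous API described in the readme above, combining the `cwd` and `normalize` options. The directory name is only illustrative and not part of the package docs:

```js
const readPkg = require('read-pkg');

// Parse ./package.json synchronously (normalized by default).
const pkg = readPkg.sync();
console.log(pkg.name);

// Read a package.json from another directory and skip normalization.
// 'some-other-directory' is an illustrative path, not a real one.
const rawPkg = readPkg.sync({cwd: 'some-other-directory', normalize: false});
console.log(rawPkg.version);
```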
diff --git a/mybulma/node_modules/readable-stream/CONTRIBUTING.md b/mybulma/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000..f478d58 --- /dev/null +++ b/mybulma/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/mybulma/node_modules/readable-stream/GOVERNANCE.md b/mybulma/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000..16ffb93 --- /dev/null +++ b/mybulma/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. 
In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. 
+ +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/mybulma/node_modules/readable-stream/LICENSE b/mybulma/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000..2873b3b --- /dev/null +++ b/mybulma/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+""" diff --git a/mybulma/node_modules/readable-stream/README.md b/mybulma/node_modules/readable-stream/README.md new file mode 100644 index 0000000..6f035ab --- /dev/null +++ b/mybulma/node_modules/readable-stream/README.md @@ -0,0 +1,106 @@ +# readable-stream + +***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) + +```bash +npm install --save readable-stream +``` + +This package is a mirror of the streams implementations in Node.js. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.19.0/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +## Version 3.x.x + +v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows: + +1. Error codes: https://github.com/nodejs/node/pull/13310, + https://github.com/nodejs/node/pull/13291, + https://github.com/nodejs/node/pull/16589, + https://github.com/nodejs/node/pull/15042, + https://github.com/nodejs/node/pull/15665, + https://github.com/nodejs/readable-stream/pull/344 +2. 'readable' have precedence over flowing + https://github.com/nodejs/node/pull/18994 +3. make virtual methods errors consistent + https://github.com/nodejs/node/pull/18813 +4. updated streams error handling + https://github.com/nodejs/node/pull/18438 +5. writable.end should return this. + https://github.com/nodejs/node/pull/18780 +6. readable continues to read when push('') + https://github.com/nodejs/node/pull/18211 +7. add custom inspect to BufferList + https://github.com/nodejs/node/pull/17907 +8. always defer 'readable' with nextTick + https://github.com/nodejs/node/pull/17979 + +## Version 2.x.x +v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. + +### Big Thanks + +Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce] + +# Usage + +You can swap your `require('stream')` with `require('readable-stream')` +without any changes, if you are just using one of the main classes and +functions. + +```js +const { + Readable, + Writable, + Transform, + Duplex, + pipeline, + finished +} = require('readable-stream') +```` + +Note that `require('stream')` will return `Stream`, while +`require('readable-stream')` will return `Readable`. 
We discourage using +whatever is exported directly, but rather use one of the properties as +shown in the example above. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + + +## Team Members + +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> +* **Yoshua Wyuts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com> + +[sauce]: https://saucelabs.com diff --git a/mybulma/node_modules/readable-stream/errors-browser.js b/mybulma/node_modules/readable-stream/errors-browser.js new file mode 100644 index 0000000..fb8e73e --- /dev/null +++ b/mybulma/node_modules/readable-stream/errors-browser.js @@ -0,0 +1,127 @@ +'use strict'; + +function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } + +var codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error; + } + + function getMessage(arg1, arg2, arg3) { + if (typeof message === 'string') { + return message; + } else { + return message(arg1, arg2, arg3); + } + } + + var NodeError = + /*#__PURE__*/ + function (_Base) { + _inheritsLoose(NodeError, _Base); + + function NodeError(arg1, arg2, arg3) { + return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; + } + + return NodeError; + }(Base); + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + codes[code] = NodeError; +} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js + + +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + var len = expected.length; + expected = expected.map(function (i) { + return String(i); + }); + + if (len > 2) { + return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; + } else if (len === 2) { + return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); + } else { + return "of ".concat(thing, " ").concat(expected[0]); + } + } else { + return "of ".concat(thing, " ").concat(String(expected)); + } +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith + + +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith + + +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + + return str.substring(this_len - search.length, this_len) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes + + +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"'; +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + var determiner; + + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + var msg; + + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } else { + var type = includes(name, '.') ? 'property' : 'argument'; + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } + + msg += ". Received type ".concat(typeof actual); + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented'; +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg; +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); +module.exports.codes = codes; diff --git a/mybulma/node_modules/readable-stream/errors.js b/mybulma/node_modules/readable-stream/errors.js new file mode 100644 index 0000000..8471526 --- /dev/null +++ b/mybulma/node_modules/readable-stream/errors.js @@ -0,0 +1,116 @@ +'use strict'; + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} 
${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.codes = codes; diff --git a/mybulma/node_modules/readable-stream/experimentalWarning.js b/mybulma/node_modules/readable-stream/experimentalWarning.js new file mode 100644 index 0000000..78e8414 --- /dev/null +++ b/mybulma/node_modules/readable-stream/experimentalWarning.js @@ -0,0 +1,17 @@ +'use strict' + +var experimentalWarnings = new Set(); + +function emitExperimentalWarning(feature) { + if (experimentalWarnings.has(feature)) return; + var msg = feature + ' is an experimental feature. 
This feature could ' + + 'change at any time'; + experimentalWarnings.add(feature); + process.emitWarning(msg, 'ExperimentalWarning'); +} + +function noop() {} + +module.exports.emitExperimentalWarning = process.emitWarning + ? emitExperimentalWarning + : noop; diff --git a/mybulma/node_modules/readable-stream/lib/_stream_duplex.js b/mybulma/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000..6752519 --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,139 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. +'use strict'; +/**/ + +var objectKeys = Object.keys || function (obj) { + var keys = []; + + for (var key in obj) { + keys.push(key); + } + + return keys; +}; +/**/ + + +module.exports = Duplex; + +var Readable = require('./_stream_readable'); + +var Writable = require('./_stream_writable'); + +require('inherits')(Duplex, Readable); + +{ + // Allow the keys array to be GC'ed. 
+ var keys = objectKeys(Writable.prototype); + + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); // the no-half-open enforcer + +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; // no more data can be written. + // But allow more writes to happen in this tick. + + process.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); \ No newline at end of file diff --git a/mybulma/node_modules/readable-stream/lib/_stream_passthrough.js b/mybulma/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000..32e7414 --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,39 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +require('inherits')(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/mybulma/node_modules/readable-stream/lib/_stream_readable.js b/mybulma/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000..192d451 --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1124 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+'use strict'; + +module.exports = Readable; +/**/ + +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; +/**/ + +var EE = require('events').EventEmitter; + +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ + + +var Stream = require('./internal/streams/stream'); +/**/ + + +var Buffer = require('buffer').Buffer; + +var OurUint8Array = global.Uint8Array || function () {}; + +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} + +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +/**/ + + +var debugUtil = require('util'); + +var debug; + +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + + +var BufferList = require('./internal/streams/buffer_list'); + +var destroyImpl = require('./internal/streams/destroy'); + +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; + +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. + + +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; + +require('inherits')(Readable, Stream); + +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. 
Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + + this.sync = true; // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; // Should close be emitted on destroy. Defaults to true. + + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish') + + this.autoDestroy = !!options.autoDestroy; // has it been destroyed + + this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ + this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s + + this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled + + this.readingMore = false; + this.decoder = null; + this.encoding = null; + + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); // legacy + + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; + +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; // Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+ + +Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; // Unshift should *always* be something directly out of read() + + +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + + + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + + return er; +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; // backwards compatibility. 
+ + +Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8 + + this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers: + + var p = this._readableState.buffer.head; + var content = ''; + + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + + this._readableState.buffer.clear(); + + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; // Don't raise the hwm > 1GB + + +var MAX_HWM = 0x40000000; + +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + + return n; +} // This function is designed to be inlinable, so please take care when making +// changes to the function body. + + +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } // If we're asking for more than the current hwm, then raise the hwm. + + + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; // Don't have enough + + if (!state.ended) { + state.needReadable = true; + return 0; + } + + return state.length; +} // you can override either this method, or the async _read(n) below. + + +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. + + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. 
Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + // if we need a readable event, then we need to do some reading. + + + var doRead = state.needReadable; + debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some + + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + + + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; // if the length is currently zero, then we *need* a readable event. + + if (state.length === 0) state.needReadable = true; // call internal read method + + this._read(state.highWaterMark); + + state.sync = false; // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. + + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + return ret; +}; + +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + + if (state.decoder) { + var chunk = state.decoder.end(); + + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + + state.ended = true; + + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} // Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. 
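
// A small standalone sketch of the rounding that computeNewHighWaterMark()
// above applies when read(n) asks for more than the current highWaterMark:
// the mark is bumped to the next power of two (capped at MAX_HWM) rather
// than creeping upward in tiny increments.

function nextPowerOfTwo(n) {
  n--;
  n |= n >>> 1;
  n |= n >>> 2;
  n |= n >>> 4;
  n |= n >>> 8;
  n |= n >>> 16;
  return n + 1;
}

console.log(nextPowerOfTwo(100));   // 128
console.log(nextPowerOfTwo(16384)); // 16384, already a power of two
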
+ + +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} + +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. It is below the highWaterMark, so we can schedule + // another readable later. + + + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} // at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. + + +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) // didn't get any data, stop spinning. + break; + } + + state.readingMore = false; +} // abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. 
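
// A usage sketch of the paused-mode pattern that the 'readable' bookkeeping
// above exists to support: wait for 'readable', then drain the buffer with
// read() until it returns null. Assumes an ordinary fs read stream.

var fs = require('fs');

var src = fs.createReadStream(__filename);
src.on('readable', function () {
  var chunk;
  while ((chunk = src.read()) !== null) {
    console.log('got %d bytes', chunk.length);
  }
});
src.on('end', function () {
  console.log('no more data');
});
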
+ + +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + + case 1: + state.pipes = [state.pipes, dest]; + break; + + default: + state.pipes.push(dest); + break; + } + + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + + + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + + function cleanup() { + debug('cleanup'); // cleanup event handlers once the pipe is broken + + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + src.on('data', ondata); + + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + + src.pause(); + } + } // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + + + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } // Make sure our error handler is attached before userland ones. + + + prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once. 
+ + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + + dest.once('close', onclose); + + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } // tell the dest that it's being piped to + + + dest.emit('pipe', src); // start the flow if it hasn't been started already. + + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; // if we're not piping anywhere, then do nothing. + + if (state.pipesCount === 0) return this; // just one destination. most common case. + + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; // got a match. + + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } // slow case. multiple pipe destinations. + + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + } + + return this; + } // try to find the right one. + + + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; // set up data events if they are asked for +// Ensure readable listeners eventually get something + + +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused + + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + + return res; +}; + +Readable.prototype.addListener = Readable.prototype.on; + +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. 
This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + + return res; +}; + +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + + return res; +}; + +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} // pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. + + +Readable.prototype.resume = function () { + var state = this._readableState; + + if (!state.flowing) { + debug('resume'); // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + + state.flowing = !state.readableListening; + resume(this, state); + } + + state.paused = false; + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + debug('resume', state.reading); + + if (!state.reading) { + stream.read(0); + } + + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + + this._readableState.paused = true; + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + + while (state.flowing && stream.read() !== null) { + ; + } +} // wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. + + +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode + + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + + if (!ret) { + paused = true; + stream.pause(); + } + }); // proxy all the other methods. 
+ // important when wrapping filters and duplexes. + + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } // proxy certain important events. + + + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } // when we try to consume some more bytes, simply unpause the + // underlying stream. + + + this._read = function (n) { + debug('wrapped _read', n); + + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); + } + + return createReadableStreamAsyncIterator(this); + }; +} + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); // exposed for testing purposes only. + +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); // Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. + +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift. 
+ + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} + +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = require('./internal/streams/from'); + } + + return from(Readable, iterable, opts); + }; +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + + return -1; +} \ No newline at end of file diff --git a/mybulma/node_modules/readable-stream/lib/_stream_transform.js b/mybulma/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000..41a738c --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,201 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. 
If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. +'use strict'; + +module.exports = Transform; + +var _require$codes = require('../errors').codes, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; + +var Duplex = require('./_stream_duplex'); + +require('inherits')(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + + ts.writechunk = null; + ts.writecb = null; + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; // start out asking for a readable event once data is transformed. + + this._readableState.needReadable = true; // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } // When the writable side finishes, then flush out anything remaining. 
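
// A minimal sketch of a Transform built from the options handled just above
// (options.transform becomes _transform(), options.flush becomes _flush()),
// assuming the userland 'readable-stream' export.

var Transform = require('readable-stream').Transform;

var upper = new Transform({
  transform: function (chunk, encoding, cb) {
    // push zero or more chunks, then call cb() exactly once;
    // passing data as the second argument is equivalent to this.push(data)
    cb(null, chunk.toString().toUpperCase());
  },
  flush: function (cb) {
    // runs when the writable side ends, before 'end' is emitted downstream
    this.push('\n-- done --\n');
    cb();
  }
});

process.stdin.pipe(upper).pipe(process.stdout);
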
+ + + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; // This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. + + +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; // Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. + + +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} \ No newline at end of file diff --git a/mybulma/node_modules/readable-stream/lib/_stream_writable.js b/mybulma/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000..a2634d7 --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,697 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. +'use strict'; + +module.exports = Writable; +/* */ + +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} // It seems a linked list but it is not +// there will be only 2 of these for each stream + + +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ + + +var Duplex; +/**/ + +Writable.WritableState = WritableState; +/**/ + +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ + +var Stream = require('./internal/streams/stream'); +/**/ + + +var Buffer = require('buffer').Buffer; + +var OurUint8Array = global.Uint8Array || function () {}; + +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} + +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +var destroyImpl = require('./internal/streams/destroy'); + +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; + +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; + +var errorOrDestroy = destroyImpl.errorOrDestroy; + +require('inherits')(Writable, Stream); + +function nop() {} + +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. 
+ + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream + // contains buffers or objects. + + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called + + this.finalCalled = false; // drain event flag. + + this.needDrain = false; // at the start of calling end() + + this.ending = false; // when end() has been called, and returned + + this.ended = false; // when 'finish' is emitted + + this.finished = false; // has it been destroyed + + this.destroyed = false; // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + + this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + + this.length = 0; // a flag to see when we're in the middle of a write. + + this.writing = false; // when true all writes will be buffered until .uncork() call + + this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + + this.sync = true; // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + + this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb) + + this.onwrite = function (er) { + onwrite(stream, er); + }; // the callback that the user supplies to write(chunk,encoding,cb) + + + this.writecb = null; // the amount that is being written when _write is called. + + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + + this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + + this.prefinished = false; // True if the error was already emitted and should not be thrown again + + this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true. 
+ + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end') + + this.autoDestroy = !!options.autoDestroy; // count buffered requests + + this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + + while (current) { + out.push(current); + current = current.next; + } + + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); // Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. + + +var realHasInstance; + +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); // legacy. + + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} // Otherwise people can pipe Writable streams, which is just wrong. + + +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; + +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb + + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} // Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
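
// A usage sketch of the constructor path above: passing options.write is
// equivalent to subclassing Writable and defining _write(); buffering and
// 'drain' emission are handled by the machinery in this file. Assumes the
// userland 'readable-stream' export.

var Writable = require('readable-stream').Writable;

var sink = new Writable({
  write: function (chunk, encoding, cb) {
    // simulate a slow asynchronous destination
    setTimeout(function () {
      console.log('stored %d bytes', chunk.length);
      cb(); // signals this chunk is done and releases buffered writes
    }, 10);
  }
});

sink.write('hello ');
sink.end('world', function () {
  console.log('all chunks flushed');
});
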
+ + +function validChunk(stream, state, chunk, cb) { + var er; + + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + + return true; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; + +Writable.prototype.cork = function () { + this._writableState.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); // if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. + +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false. 
+ + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); // this can emit finish, and it will always happen + // after error + + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); // this can emit finish, but finish must + // always follow error + + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} // Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
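
// A sketch of the producer-side contract that the 'drain' logic above
// serves: when write() returns false, stop writing and resume once 'drain'
// fires. Assumes an ordinary fs write stream and a throwaway output path.

var fs = require('fs');

function writeMany(dest, count, done) {
  var i = 0;
  (function writeSome() {
    var ok = true;
    while (i < count && ok) {
      ok = dest.write('row ' + i++ + '\n'); // false once buffered data passes highWaterMark
    }
    if (i < count) {
      dest.once('drain', writeSome); // wait for the buffer to flush, then continue
    } else {
      dest.end(done); // done is invoked on 'finish'
    }
  })();
}

writeMany(fs.createWriteStream('example-output.txt'), 100000, function () {
  console.log('finished writing');
});
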
+ + +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} // if there's something in the buffer waiting, then process it + + +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + + state.pendingcb++; + state.lastBufferedRequest = null; + + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks + + if (state.corked) { + state.corked = 1; + this.uncork(); + } // ignore unnecessary end() calls. 
+ + + if (!state.ending) endWritable(this, state, cb); + return this; +}; + +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} + +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + + if (err) { + errorOrDestroy(stream, err); + } + + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} + +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + + if (need) { + prefinish(stream, state); + + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } // reuse the free corkReq. 
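
// A sketch of the _final hook driven by prefinish()/callFinal() above:
// options.final runs after end() is called but before 'finish' is emitted,
// giving the stream a chance to flush external resources. Assumes the
// userland 'readable-stream' export.

var Writable = require('readable-stream').Writable;

var rows = [];
var batcher = new Writable({
  objectMode: true,
  write: function (row, encoding, cb) {
    rows.push(row); // a real sink might batch these to a database
    cb();
  },
  final: function (cb) {
    console.log('flushing %d buffered rows before finish', rows.length);
    cb(); // 'finish' is emitted only after this callback fires
  }
});

batcher.write({ id: 1 });
batcher.end({ id: 2 }, function () {
  console.log('finish');
});
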
+ + + state.corkedRequestsFree.next = corkReq; +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; + +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; \ No newline at end of file diff --git a/mybulma/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/mybulma/node_modules/readable-stream/lib/internal/streams/async_iterator.js new file mode 100644 index 0000000..9fb615a --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/internal/streams/async_iterator.js @@ -0,0 +1,207 @@ +'use strict'; + +var _Object$setPrototypeO; + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var finished = require('./end-of-stream'); + +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); + +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} + +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + + if (resolve !== null) { + var data = iter[kStream].read(); // we defer if data is null + // we can be expecting either 'end' or + // 'error' + + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} + +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} + +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} + +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + + next: function next() { + var _this = this; + + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + + if (error !== null) { + return Promise.reject(error); + } + + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + + + var lastPromise = this[kLastPromise]; + var promise; + + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + + promise = new Promise(this[kHandlePromise]); + } + + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); + +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise + // returned by next() and store the error + + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + + iterator[kError] = err; + return; + } + + var resolve = iterator[kLastResolve]; + + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; + +module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/mybulma/node_modules/readable-stream/lib/internal/streams/buffer_list.js 
b/mybulma/node_modules/readable-stream/lib/internal/streams/buffer_list.js new file mode 100644 index 0000000..cdea425 --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -0,0 +1,210 @@ +'use strict'; + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } + +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } + +var _require = require('buffer'), + Buffer = _require.Buffer; + +var _require2 = require('util'), + inspect = _require2.inspect; + +var custom = inspect && inspect.custom || 'inspect'; + +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} + +module.exports = +/*#__PURE__*/ +function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + + while (p = p.next) { + ret += s + p.data; + } + + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) 
return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + + return ret; + } // Consumes a specified amount of bytes or characters from the buffered data. + + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } // Consumes a specified amount of characters from the buffered data. + + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + + break; + } + + ++c; + } + + this.length -= c; + return ret; + } // Consumes a specified amount of bytes from the buffered data. + + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + + break; + } + + ++c; + } + + this.length -= c; + return ret; + } // Make sure the linked list only shows the minimal necessary information. + + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread({}, options, { + // Only inspect one level. + depth: 0, + // It should not recurse. 
+ customInspect: false + })); + } + }]); + + return BufferList; +}(); \ No newline at end of file diff --git a/mybulma/node_modules/readable-stream/lib/internal/streams/destroy.js b/mybulma/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000..3268a16 --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,105 @@ +'use strict'; // undocumented cb() API, needed for core, not for public API + +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + + return this; + } // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + + if (this._readableState) { + this._readableState.destroyed = true; + } // if this is a duplex stream mark the writable part as destroyed as well + + + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + + return this; +} + +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} + +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. 
+ var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; \ No newline at end of file diff --git a/mybulma/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/mybulma/node_modules/readable-stream/lib/internal/streams/end-of-stream.js new file mode 100644 index 0000000..831f286 --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -0,0 +1,104 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). +'use strict'; + +var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; + +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + callback.apply(this, args); + }; +} + +function noop() {} + +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} + +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + + var writableEnded = stream._writableState && stream._writableState.finished; + + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + + var readableEnded = stream._readableState && stream._readableState.endEmitted; + + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + + var onerror = function onerror(err) { + callback.call(stream, err); + }; + + var onclose = function onclose() { + var err; + + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + 
stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} + +module.exports = eos; \ No newline at end of file diff --git a/mybulma/node_modules/readable-stream/lib/internal/streams/from-browser.js b/mybulma/node_modules/readable-stream/lib/internal/streams/from-browser.js new file mode 100644 index 0000000..a4ce56f --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/internal/streams/from-browser.js @@ -0,0 +1,3 @@ +module.exports = function () { + throw new Error('Readable.from is not available in the browser') +}; diff --git a/mybulma/node_modules/readable-stream/lib/internal/streams/from.js b/mybulma/node_modules/readable-stream/lib/internal/streams/from.js new file mode 100644 index 0000000..6c41284 --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/internal/streams/from.js @@ -0,0 +1,64 @@ +'use strict'; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } + +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; + +function from(Readable, iterable, opts) { + var iterator; + + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); // Reading boolean to protect against _read + // being called before last iteration completion. 
+ + var reading = false; + + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + + function next() { + return _next2.apply(this, arguments); + } + + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _ref = yield iterator.next(), + value = _ref.value, + done = _ref.done; + + if (done) { + readable.push(null); + } else if (readable.push((yield value))) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + + return readable; +} + +module.exports = from; \ No newline at end of file diff --git a/mybulma/node_modules/readable-stream/lib/internal/streams/pipeline.js b/mybulma/node_modules/readable-stream/lib/internal/streams/pipeline.js new file mode 100644 index 0000000..6589909 --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -0,0 +1,97 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). +'use strict'; + +var eos; + +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} + +var _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; + +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} + +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} + +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = require('./end-of-stream'); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; // request.destroy just do .end - .abort is what we want + + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} + +function call(fn) { + fn(); +} + +function pipe(from, to) { + return from.pipe(to); +} + +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} + +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} + +module.exports = pipeline; \ No newline at end of file diff --git a/mybulma/node_modules/readable-stream/lib/internal/streams/state.js b/mybulma/node_modules/readable-stream/lib/internal/streams/state.js new file mode 100644 index 
0000000..19887eb --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/internal/streams/state.js @@ -0,0 +1,27 @@ +'use strict'; + +var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; + +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} + +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + + return Math.floor(hwm); + } // Default value + + + return state.objectMode ? 16 : 16 * 1024; +} + +module.exports = { + getHighWaterMark: getHighWaterMark +}; \ No newline at end of file diff --git a/mybulma/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/mybulma/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 0000000..9332a3f --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/mybulma/node_modules/readable-stream/lib/internal/streams/stream.js b/mybulma/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000..ce2ad5b --- /dev/null +++ b/mybulma/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/mybulma/node_modules/readable-stream/package.json b/mybulma/node_modules/readable-stream/package.json new file mode 100644 index 0000000..0b0c4bd --- /dev/null +++ b/mybulma/node_modules/readable-stream/package.json @@ -0,0 +1,68 @@ +{ + "name": "readable-stream", + "version": "3.6.0", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "engines": { + "node": ">= 6" + }, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "devDependencies": { + "@babel/cli": "^7.2.0", + "@babel/core": "^7.2.0", + "@babel/polyfill": "^7.0.0", + "@babel/preset-env": "^7.2.0", + "airtap": "0.0.9", + "assert": "^1.4.0", + "bl": "^2.0.0", + "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", + "glob": "^7.1.2", + "gunzip-maybe": "^1.4.1", + "hyperquest": "^2.1.3", + "lolex": "^2.6.0", + "nyc": "^11.0.0", + "pump": "^3.0.0", + "rimraf": "^2.6.2", + "tap": "^12.0.0", + "tape": "^4.9.0", + "tar-fs": "^1.16.2", + "util-promisify": "^2.1.0" + }, + "scripts": { + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", + "test-browser-local": "airtap --open --local -- test/browser.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "update-browser-errors": "babel -o errors-browser.js errors.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "worker_threads": false, + "./errors": "./errors-browser.js", + "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", + "./lib/internal/streams/stream.js": 
"./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/mybulma/node_modules/readable-stream/readable-browser.js b/mybulma/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000..adbf60d --- /dev/null +++ b/mybulma/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,9 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/mybulma/node_modules/readable-stream/readable.js b/mybulma/node_modules/readable-stream/readable.js new file mode 100644 index 0000000..9e0ca12 --- /dev/null +++ b/mybulma/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); + exports.finished = require('./lib/internal/streams/end-of-stream.js'); + exports.pipeline = require('./lib/internal/streams/pipeline.js'); +} diff --git a/mybulma/node_modules/require-directory/.jshintrc b/mybulma/node_modules/require-directory/.jshintrc new file mode 100644 index 0000000..e14e4dc --- /dev/null +++ b/mybulma/node_modules/require-directory/.jshintrc @@ -0,0 +1,67 @@ +{ + "maxerr" : 50, + "bitwise" : true, + "camelcase" : true, + "curly" : true, + "eqeqeq" : true, + "forin" : true, + "immed" : true, + "indent" : 2, + "latedef" : true, + "newcap" : true, + "noarg" : true, + "noempty" : true, + "nonew" : true, + "plusplus" : true, + "quotmark" : true, + "undef" : true, + "unused" : true, + "strict" : true, + "trailing" : true, + "maxparams" : false, + "maxdepth" : false, + "maxstatements" : false, + "maxcomplexity" : false, + "maxlen" : false, + "asi" : false, + "boss" : false, + "debug" : false, + "eqnull" : true, + "es5" : false, + "esnext" : false, + "moz" : false, + "evil" : false, + "expr" : true, + "funcscope" : true, + "globalstrict" : true, + "iterator" : true, + "lastsemic" : false, + "laxbreak" : false, + "laxcomma" : false, + "loopfunc" : false, + "multistr" : false, + "proto" : false, + "scripturl" : false, + "smarttabs" : false, + "shadow" : false, + "sub" : false, + "supernew" : false, + "validthis" : false, + "browser" : true, + "couch" : false, + "devel" : true, + "dojo" : false, + "jquery" : false, + "mootools" : false, + "node" : true, + "nonstandard" : false, + "prototypejs" : false, + "rhino" : false, + "worker" : false, + "wsh" : false, + "yui" : false, + "nomen" : true, + "onevar" : true, + "passfail" : false, + "white" : true +} diff --git a/mybulma/node_modules/require-directory/.npmignore b/mybulma/node_modules/require-directory/.npmignore new file mode 100644 index 
0000000..47cf365 --- /dev/null +++ b/mybulma/node_modules/require-directory/.npmignore @@ -0,0 +1 @@ +test/** diff --git a/mybulma/node_modules/require-directory/.travis.yml b/mybulma/node_modules/require-directory/.travis.yml new file mode 100644 index 0000000..20fd86b --- /dev/null +++ b/mybulma/node_modules/require-directory/.travis.yml @@ -0,0 +1,3 @@ +language: node_js +node_js: + - 0.10 diff --git a/mybulma/node_modules/require-directory/LICENSE b/mybulma/node_modules/require-directory/LICENSE new file mode 100644 index 0000000..a70f253 --- /dev/null +++ b/mybulma/node_modules/require-directory/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2011 Troy Goode + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/mybulma/node_modules/require-directory/README.markdown b/mybulma/node_modules/require-directory/README.markdown new file mode 100644 index 0000000..926a063 --- /dev/null +++ b/mybulma/node_modules/require-directory/README.markdown @@ -0,0 +1,184 @@ +# require-directory + +Recursively iterates over specified directory, `require()`'ing each file, and returning a nested hash structure containing those modules. + +**[Follow me (@troygoode) on Twitter!](https://twitter.com/intent/user?screen_name=troygoode)** + +[![NPM](https://nodei.co/npm/require-directory.png?downloads=true&stars=true)](https://nodei.co/npm/require-directory/) + +[![build status](https://secure.travis-ci.org/troygoode/node-require-directory.png)](http://travis-ci.org/troygoode/node-require-directory) + +## How To Use + +### Installation (via [npm](https://npmjs.org/package/require-directory)) + +```bash +$ npm install require-directory +``` + +### Usage + +A common pattern in node.js is to include an index file which creates a hash of the files in its current directory. 
Given a directory structure like so: + +* app.js +* routes/ + * index.js + * home.js + * auth/ + * login.js + * logout.js + * register.js + +`routes/index.js` uses `require-directory` to build the hash (rather than doing so manually) like so: + +```javascript +var requireDirectory = require('require-directory'); +module.exports = requireDirectory(module); +``` + +`app.js` references `routes/index.js` like any other module, but it now has a hash/tree of the exports from the `./routes/` directory: + +```javascript +var routes = require('./routes'); + +// snip + +app.get('/', routes.home); +app.get('/register', routes.auth.register); +app.get('/login', routes.auth.login); +app.get('/logout', routes.auth.logout); +``` + +The `routes` variable above is the equivalent of this: + +```javascript +var routes = { + home: require('routes/home.js'), + auth: { + login: require('routes/auth/login.js'), + logout: require('routes/auth/logout.js'), + register: require('routes/auth/register.js') + } +}; +``` + +*Note that `routes.index` will be `undefined` as you would hope.* + +### Specifying Another Directory + +You can specify which directory you want to build a tree of (if it isn't the current directory for whatever reason) by passing it as the second parameter. Not specifying the path (`requireDirectory(module)`) is the equivalent of `requireDirectory(module, __dirname)`: + +```javascript +var requireDirectory = require('require-directory'); +module.exports = requireDirectory(module, './some/subdirectory'); +``` + +For example, in the [example in the Usage section](#usage) we could have avoided creating `routes/index.js` and instead changed the first lines of `app.js` to: + +```javascript +var requireDirectory = require('require-directory'); +var routes = requireDirectory(module, './routes'); +``` + +## Options + +You can pass an options hash to `require-directory` as the 2nd parameter (or 3rd if you're passing the path to another directory as the 2nd parameter already). Here are the available options: + +### Whitelisting + +Whitelisting (either via RegExp or function) allows you to specify that only certain files be loaded. + +```javascript +var requireDirectory = require('require-directory'), + whitelist = /onlyinclude.js$/, + hash = requireDirectory(module, {include: whitelist}); +``` + +```javascript +var requireDirectory = require('require-directory'), + check = function(path){ + if(/onlyinclude.js$/.test(path)){ + return true; // go ahead and include + }else{ + return false; // don't include + } + }, + hash = requireDirectory(module, {include: check}); +``` + +### Blacklisting + +Blacklisting (either via RegExp or function) allows you to specify that all but certain files should be loaded. + +```javascript +var requireDirectory = require('require-directory'), + blacklist = /dontinclude\.js$/, + hash = requireDirectory(module, {exclude: blacklist}); +``` + +```javascript +var requireDirectory = require('require-directory'), + check = function(path){ + if(/dontinclude\.js$/.test(path)){ + return true; // don't include + }else{ + return false; // go ahead and include + } + }, + hash = requireDirectory(module, {exclude: check}); +``` + +### Visiting Objects As They're Loaded + +`require-directory` takes a function as the `visit` option that will be called for each module that is added to module.exports.
+ +```javascript +var requireDirectory = require('require-directory'), + visitor = function(obj) { + console.log(obj); // will be called for every module that is loaded + }, + hash = requireDirectory(module, {visit: visitor}); +``` + +The visitor can also transform the objects by returning a value: + +```javascript +var requireDirectory = require('require-directory'), + visitor = function(obj) { + return obj(new Date()); + }, + hash = requireDirectory(module, {visit: visitor}); +``` + +### Renaming Keys + +```javascript +var requireDirectory = require('require-directory'), + renamer = function(name) { + return name.toUpperCase(); + }, + hash = requireDirectory(module, {rename: renamer}); +``` + +### No Recursion + +```javascript +var requireDirectory = require('require-directory'), + hash = requireDirectory(module, {recurse: false}); +``` + +## Run Unit Tests + +```bash +$ npm run lint +$ npm test +``` + +## License + +[MIT License](http://www.opensource.org/licenses/mit-license.php) + +## Author + +[Troy Goode](https://github.com/TroyGoode) ([troygoode@gmail.com](mailto:troygoode@gmail.com)) + diff --git a/mybulma/node_modules/require-directory/index.js b/mybulma/node_modules/require-directory/index.js new file mode 100644 index 0000000..cd37da7 --- /dev/null +++ b/mybulma/node_modules/require-directory/index.js @@ -0,0 +1,86 @@ +'use strict'; + +var fs = require('fs'), + join = require('path').join, + resolve = require('path').resolve, + dirname = require('path').dirname, + defaultOptions = { + extensions: ['js', 'json', 'coffee'], + recurse: true, + rename: function (name) { + return name; + }, + visit: function (obj) { + return obj; + } + }; + +function checkFileInclusion(path, filename, options) { + return ( + // verify file has valid extension + (new RegExp('\\.(' + options.extensions.join('|') + ')$', 'i').test(filename)) && + + // if options.include is a RegExp, evaluate it and make sure the path passes + !(options.include && options.include instanceof RegExp && !options.include.test(path)) && + + // if options.include is a function, evaluate it and make sure the path passes + !(options.include && typeof options.include === 'function' && !options.include(path, filename)) && + + // if options.exclude is a RegExp, evaluate it and make sure the path doesn't pass + !(options.exclude && options.exclude instanceof RegExp && options.exclude.test(path)) && + + // if options.exclude is a function, evaluate it and make sure the path doesn't pass + !(options.exclude && typeof options.exclude === 'function' && options.exclude(path, filename)) + ); +} + +function requireDirectory(m, path, options) { + var retval = {}; + + // path is optional + if (path && !options && typeof path !== 'string') { + options = path; + path = null; + } + + // default options + options = options || {}; + for (var prop in defaultOptions) { + if (typeof options[prop] === 'undefined') { + options[prop] = defaultOptions[prop]; + } + } + + // if no path was passed in, assume the equivalent of __dirname from caller + // otherwise, resolve path relative to the equivalent of __dirname + path = !path ?
dirname(m.filename) : resolve(dirname(m.filename), path); + + // get the path of each file in specified directory, append to current tree node, recurse + fs.readdirSync(path).forEach(function (filename) { + var joined = join(path, filename), + files, + key, + obj; + + if (fs.statSync(joined).isDirectory() && options.recurse) { + // this node is a directory; recurse + files = requireDirectory(m, joined, options); + // exclude empty directories + if (Object.keys(files).length) { + retval[options.rename(filename, joined, filename)] = files; + } + } else { + if (joined !== m.filename && checkFileInclusion(joined, filename, options)) { + // hash node key shouldn't include file extension + key = filename.substring(0, filename.lastIndexOf('.')); + obj = m.require(joined); + retval[options.rename(key, joined, filename)] = options.visit(obj, joined, filename) || obj; + } + } + }); + + return retval; +} + +module.exports = requireDirectory; +module.exports.defaults = defaultOptions; diff --git a/mybulma/node_modules/require-directory/package.json b/mybulma/node_modules/require-directory/package.json new file mode 100644 index 0000000..25ece4b --- /dev/null +++ b/mybulma/node_modules/require-directory/package.json @@ -0,0 +1,40 @@ +{ + "author": "Troy Goode (http://github.com/troygoode/)", + "name": "require-directory", + "version": "2.1.1", + "description": "Recursively iterates over specified directory, require()'ing each file, and returning a nested hash structure containing those modules.", + "keywords": [ + "require", + "directory", + "library", + "recursive" + ], + "homepage": "https://github.com/troygoode/node-require-directory/", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/troygoode/node-require-directory.git" + }, + "contributors": [ + { + "name": "Troy Goode", + "email": "troygoode@gmail.com", + "web": "http://github.com/troygoode/" + } + ], + "license": "MIT", + "bugs": { + "url": "http://github.com/troygoode/node-require-directory/issues/" + }, + "engines": { + "node": ">=0.10.0" + }, + "devDependencies": { + "jshint": "^2.6.0", + "mocha": "^2.1.0" + }, + "scripts": { + "test": "mocha", + "lint": "jshint index.js test/test.js" + } +} diff --git a/mybulma/node_modules/resolve/.editorconfig b/mybulma/node_modules/resolve/.editorconfig new file mode 100644 index 0000000..d63f0bb --- /dev/null +++ b/mybulma/node_modules/resolve/.editorconfig @@ -0,0 +1,37 @@ +root = true + +[*] +indent_style = space +indent_size = 2 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true +max_line_length = 200 + +[*.js] +block_comment_start = /* +block_comment = * +block_comment_end = */ + +[*.yml] +indent_size = 1 + +[package.json] +indent_style = tab + +[lib/core.json] +indent_style = tab + +[CHANGELOG.md] +indent_style = space +indent_size = 2 + +[{*.json,Makefile}] +max_line_length = off + +[test/{dotdot,resolver,module_dir,multirepo,node_path,pathfilter,precedence}/**/*] +indent_style = off +indent_size = off +max_line_length = off +insert_final_newline = off diff --git a/mybulma/node_modules/resolve/.eslintrc b/mybulma/node_modules/resolve/.eslintrc new file mode 100644 index 0000000..ce1be6e --- /dev/null +++ b/mybulma/node_modules/resolve/.eslintrc @@ -0,0 +1,65 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "indent": [2, 4], + "strict": 0, + "complexity": 0, + "consistent-return": 0, + "curly": 0, + "dot-notation": [2, { "allowKeywords": true }], + "func-name-matching": 0, + "func-style": 0, + 
"global-require": 1, + "id-length": [2, { "min": 1, "max": 30 }], + "max-lines": [2, 350], + "max-lines-per-function": 0, + "max-nested-callbacks": 0, + "max-params": 0, + "max-statements-per-line": [2, { "max": 2 }], + "max-statements": 0, + "no-magic-numbers": 0, + "no-shadow": 0, + "no-use-before-define": 0, + "sort-keys": 0, + }, + "overrides": [ + { + "files": "bin/**", + "rules": { + "no-process-exit": "off", + }, + }, + { + "files": "example/**", + "rules": { + "no-console": 0, + }, + }, + { + "files": "test/resolver/nested_symlinks/mylib/*.js", + "rules": { + "no-throw-literal": 0, + }, + }, + { + "files": "test/**", + "parserOptions": { + "ecmaVersion": 5, + "allowReserved": false, + }, + "rules": { + "dot-notation": [2, { "allowPattern": "throws" }], + "max-lines": 0, + "max-lines-per-function": 0, + "no-unused-vars": [2, { "vars": "all", "args": "none" }], + }, + }, + ], + + "ignorePatterns": [ + "./test/resolver/malformed_package_json/package.json", + ], +} diff --git a/mybulma/node_modules/resolve/.github/FUNDING.yml b/mybulma/node_modules/resolve/.github/FUNDING.yml new file mode 100644 index 0000000..d9c0595 --- /dev/null +++ b/mybulma/node_modules/resolve/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/resolve +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/mybulma/node_modules/resolve/LICENSE b/mybulma/node_modules/resolve/LICENSE new file mode 100644 index 0000000..ff4fce2 --- /dev/null +++ b/mybulma/node_modules/resolve/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2012 James Halliday + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/mybulma/node_modules/resolve/SECURITY.md b/mybulma/node_modules/resolve/SECURITY.md new file mode 100644 index 0000000..82e4285 --- /dev/null +++ b/mybulma/node_modules/resolve/SECURITY.md @@ -0,0 +1,3 @@ +# Security + +Please email [@ljharb](https://github.com/ljharb) or see https://tidelift.com/security if you have a potential security vulnerability to report. 
diff --git a/mybulma/node_modules/resolve/async.js b/mybulma/node_modules/resolve/async.js new file mode 100644 index 0000000..f38c581 --- /dev/null +++ b/mybulma/node_modules/resolve/async.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = require('./lib/async'); diff --git a/mybulma/node_modules/resolve/bin/resolve b/mybulma/node_modules/resolve/bin/resolve new file mode 100644 index 0000000..5ee329a --- /dev/null +++ b/mybulma/node_modules/resolve/bin/resolve @@ -0,0 +1,50 @@ +#!/usr/bin/env node + +'use strict'; + +var path = require('path'); +var fs = require('fs'); + +if ( + String(process.env.npm_lifecycle_script).slice(0, 8) !== 'resolve ' + && ( + !process.argv + || process.argv.length < 2 + || (process.argv[1] !== __filename && fs.statSync(process.argv[1]).ino !== fs.statSync(__filename).ino) + || (process.env._ && path.resolve(process.env._) !== __filename) + ) +) { + console.error('Error: `resolve` must be run directly as an executable'); + process.exit(1); +} + +var supportsPreserveSymlinkFlag = require('supports-preserve-symlinks-flag'); + +var preserveSymlinks = false; +for (var i = 2; i < process.argv.length; i += 1) { + if (process.argv[i].slice(0, 2) === '--') { + if (supportsPreserveSymlinkFlag && process.argv[i] === '--preserve-symlinks') { + preserveSymlinks = true; + } else if (process.argv[i].length > 2) { + console.error('Unknown argument ' + process.argv[i].replace(/[=].*$/, '')); + process.exit(2); + } + process.argv.splice(i, 1); + i -= 1; + if (process.argv[i] === '--') { break; } // eslint-disable-line no-restricted-syntax + } +} + +if (process.argv.length < 3) { + console.error('Error: `resolve` expects a specifier'); + process.exit(2); +} + +var resolve = require('../'); + +var result = resolve.sync(process.argv[2], { + basedir: process.cwd(), + preserveSymlinks: preserveSymlinks +}); + +console.log(result); diff --git a/mybulma/node_modules/resolve/example/async.js b/mybulma/node_modules/resolve/example/async.js new file mode 100644 index 0000000..20e65dc --- /dev/null +++ b/mybulma/node_modules/resolve/example/async.js @@ -0,0 +1,5 @@ +var resolve = require('../'); +resolve('tap', { basedir: __dirname }, function (err, res) { + if (err) console.error(err); + else console.log(res); +}); diff --git a/mybulma/node_modules/resolve/example/sync.js b/mybulma/node_modules/resolve/example/sync.js new file mode 100644 index 0000000..54b2cc1 --- /dev/null +++ b/mybulma/node_modules/resolve/example/sync.js @@ -0,0 +1,3 @@ +var resolve = require('../'); +var res = resolve.sync('tap', { basedir: __dirname }); +console.log(res); diff --git a/mybulma/node_modules/resolve/index.js b/mybulma/node_modules/resolve/index.js new file mode 100644 index 0000000..125d814 --- /dev/null +++ b/mybulma/node_modules/resolve/index.js @@ -0,0 +1,6 @@ +var async = require('./lib/async'); +async.core = require('./lib/core'); +async.isCore = require('./lib/is-core'); +async.sync = require('./lib/sync'); + +module.exports = async; diff --git a/mybulma/node_modules/resolve/lib/async.js b/mybulma/node_modules/resolve/lib/async.js new file mode 100644 index 0000000..60d2555 --- /dev/null +++ b/mybulma/node_modules/resolve/lib/async.js @@ -0,0 +1,329 @@ +var fs = require('fs'); +var getHomedir = require('./homedir'); +var path = require('path'); +var caller = require('./caller'); +var nodeModulesPaths = require('./node-modules-paths'); +var normalizeOptions = require('./normalize-options'); +var isCore = require('is-core-module'); + +var realpathFS = process.platform !== 'win32' && 
fs.realpath && typeof fs.realpath.native === 'function' ? fs.realpath.native : fs.realpath; + +var homedir = getHomedir(); +var defaultPaths = function () { + return [ + path.join(homedir, '.node_modules'), + path.join(homedir, '.node_libraries') + ]; +}; + +var defaultIsFile = function isFile(file, cb) { + fs.stat(file, function (err, stat) { + if (!err) { + return cb(null, stat.isFile() || stat.isFIFO()); + } + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') return cb(null, false); + return cb(err); + }); +}; + +var defaultIsDir = function isDirectory(dir, cb) { + fs.stat(dir, function (err, stat) { + if (!err) { + return cb(null, stat.isDirectory()); + } + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') return cb(null, false); + return cb(err); + }); +}; + +var defaultRealpath = function realpath(x, cb) { + realpathFS(x, function (realpathErr, realPath) { + if (realpathErr && realpathErr.code !== 'ENOENT') cb(realpathErr); + else cb(null, realpathErr ? x : realPath); + }); +}; + +var maybeRealpath = function maybeRealpath(realpath, x, opts, cb) { + if (opts && opts.preserveSymlinks === false) { + realpath(x, cb); + } else { + cb(null, x); + } +}; + +var defaultReadPackage = function defaultReadPackage(readFile, pkgfile, cb) { + readFile(pkgfile, function (readFileErr, body) { + if (readFileErr) cb(readFileErr); + else { + try { + var pkg = JSON.parse(body); + cb(null, pkg); + } catch (jsonErr) { + cb(null); + } + } + }); +}; + +var getPackageCandidates = function getPackageCandidates(x, start, opts) { + var dirs = nodeModulesPaths(start, opts, x); + for (var i = 0; i < dirs.length; i++) { + dirs[i] = path.join(dirs[i], x); + } + return dirs; +}; + +module.exports = function resolve(x, options, callback) { + var cb = callback; + var opts = options; + if (typeof options === 'function') { + cb = opts; + opts = {}; + } + if (typeof x !== 'string') { + var err = new TypeError('Path must be a string.'); + return process.nextTick(function () { + cb(err); + }); + } + + opts = normalizeOptions(x, opts); + + var isFile = opts.isFile || defaultIsFile; + var isDirectory = opts.isDirectory || defaultIsDir; + var readFile = opts.readFile || fs.readFile; + var realpath = opts.realpath || defaultRealpath; + var readPackage = opts.readPackage || defaultReadPackage; + if (opts.readFile && opts.readPackage) { + var conflictErr = new TypeError('`readFile` and `readPackage` are mutually exclusive.'); + return process.nextTick(function () { + cb(conflictErr); + }); + } + var packageIterator = opts.packageIterator; + + var extensions = opts.extensions || ['.js']; + var includeCoreModules = opts.includeCoreModules !== false; + var basedir = opts.basedir || path.dirname(caller()); + var parent = opts.filename || basedir; + + opts.paths = opts.paths || defaultPaths(); + + // ensure that `basedir` is an absolute path at this point, resolving against the process' current working directory + var absoluteStart = path.resolve(basedir); + + maybeRealpath( + realpath, + absoluteStart, + opts, + function (err, realStart) { + if (err) cb(err); + else init(realStart); + } + ); + + var res; + function init(basedir) { + if ((/^(?:\.\.?(?:\/|$)|\/|([A-Za-z]:)?[/\\])/).test(x)) { + res = path.resolve(basedir, x); + if (x === '.' || x === '..' 
|| x.slice(-1) === '/') res += '/'; + if ((/\/$/).test(x) && res === basedir) { + loadAsDirectory(res, opts.package, onfile); + } else loadAsFile(res, opts.package, onfile); + } else if (includeCoreModules && isCore(x)) { + return cb(null, x); + } else loadNodeModules(x, basedir, function (err, n, pkg) { + if (err) cb(err); + else if (n) { + return maybeRealpath(realpath, n, opts, function (err, realN) { + if (err) { + cb(err); + } else { + cb(null, realN, pkg); + } + }); + } else { + var moduleError = new Error("Cannot find module '" + x + "' from '" + parent + "'"); + moduleError.code = 'MODULE_NOT_FOUND'; + cb(moduleError); + } + }); + } + + function onfile(err, m, pkg) { + if (err) cb(err); + else if (m) cb(null, m, pkg); + else loadAsDirectory(res, function (err, d, pkg) { + if (err) cb(err); + else if (d) { + maybeRealpath(realpath, d, opts, function (err, realD) { + if (err) { + cb(err); + } else { + cb(null, realD, pkg); + } + }); + } else { + var moduleError = new Error("Cannot find module '" + x + "' from '" + parent + "'"); + moduleError.code = 'MODULE_NOT_FOUND'; + cb(moduleError); + } + }); + } + + function loadAsFile(x, thePackage, callback) { + var loadAsFilePackage = thePackage; + var cb = callback; + if (typeof loadAsFilePackage === 'function') { + cb = loadAsFilePackage; + loadAsFilePackage = undefined; + } + + var exts = [''].concat(extensions); + load(exts, x, loadAsFilePackage); + + function load(exts, x, loadPackage) { + if (exts.length === 0) return cb(null, undefined, loadPackage); + var file = x + exts[0]; + + var pkg = loadPackage; + if (pkg) onpkg(null, pkg); + else loadpkg(path.dirname(file), onpkg); + + function onpkg(err, pkg_, dir) { + pkg = pkg_; + if (err) return cb(err); + if (dir && pkg && opts.pathFilter) { + var rfile = path.relative(dir, file); + var rel = rfile.slice(0, rfile.length - exts[0].length); + var r = opts.pathFilter(pkg, x, rel); + if (r) return load( + [''].concat(extensions.slice()), + path.resolve(dir, r), + pkg + ); + } + isFile(file, onex); + } + function onex(err, ex) { + if (err) return cb(err); + if (ex) return cb(null, file, pkg); + load(exts.slice(1), x, pkg); + } + } + } + + function loadpkg(dir, cb) { + if (dir === '' || dir === '/') return cb(null); + if (process.platform === 'win32' && (/^\w:[/\\]*$/).test(dir)) { + return cb(null); + } + if ((/[/\\]node_modules[/\\]*$/).test(dir)) return cb(null); + + maybeRealpath(realpath, dir, opts, function (unwrapErr, pkgdir) { + if (unwrapErr) return loadpkg(path.dirname(dir), cb); + var pkgfile = path.join(pkgdir, 'package.json'); + isFile(pkgfile, function (err, ex) { + // on err, ex is false + if (!ex) return loadpkg(path.dirname(dir), cb); + + readPackage(readFile, pkgfile, function (err, pkgParam) { + if (err) cb(err); + + var pkg = pkgParam; + + if (pkg && opts.packageFilter) { + pkg = opts.packageFilter(pkg, pkgfile); + } + cb(null, pkg, dir); + }); + }); + }); + } + + function loadAsDirectory(x, loadAsDirectoryPackage, callback) { + var cb = callback; + var fpkg = loadAsDirectoryPackage; + if (typeof fpkg === 'function') { + cb = fpkg; + fpkg = opts.package; + } + + maybeRealpath(realpath, x, opts, function (unwrapErr, pkgdir) { + if (unwrapErr) return cb(unwrapErr); + var pkgfile = path.join(pkgdir, 'package.json'); + isFile(pkgfile, function (err, ex) { + if (err) return cb(err); + if (!ex) return loadAsFile(path.join(x, 'index'), fpkg, cb); + + readPackage(readFile, pkgfile, function (err, pkgParam) { + if (err) return cb(err); + + var pkg = pkgParam; + + if (pkg && 
opts.packageFilter) { + pkg = opts.packageFilter(pkg, pkgfile); + } + + if (pkg && pkg.main) { + if (typeof pkg.main !== 'string') { + var mainError = new TypeError('package “' + pkg.name + '” `main` must be a string'); + mainError.code = 'INVALID_PACKAGE_MAIN'; + return cb(mainError); + } + if (pkg.main === '.' || pkg.main === './') { + pkg.main = 'index'; + } + loadAsFile(path.resolve(x, pkg.main), pkg, function (err, m, pkg) { + if (err) return cb(err); + if (m) return cb(null, m, pkg); + if (!pkg) return loadAsFile(path.join(x, 'index'), pkg, cb); + + var dir = path.resolve(x, pkg.main); + loadAsDirectory(dir, pkg, function (err, n, pkg) { + if (err) return cb(err); + if (n) return cb(null, n, pkg); + loadAsFile(path.join(x, 'index'), pkg, cb); + }); + }); + return; + } + + loadAsFile(path.join(x, '/index'), pkg, cb); + }); + }); + }); + } + + function processDirs(cb, dirs) { + if (dirs.length === 0) return cb(null, undefined); + var dir = dirs[0]; + + isDirectory(path.dirname(dir), isdir); + + function isdir(err, isdir) { + if (err) return cb(err); + if (!isdir) return processDirs(cb, dirs.slice(1)); + loadAsFile(dir, opts.package, onfile); + } + + function onfile(err, m, pkg) { + if (err) return cb(err); + if (m) return cb(null, m, pkg); + loadAsDirectory(dir, opts.package, ondir); + } + + function ondir(err, n, pkg) { + if (err) return cb(err); + if (n) return cb(null, n, pkg); + processDirs(cb, dirs.slice(1)); + } + } + function loadNodeModules(x, start, cb) { + var thunk = function () { return getPackageCandidates(x, start, opts); }; + processDirs( + cb, + packageIterator ? packageIterator(x, start, thunk, opts) : thunk() + ); + } +}; diff --git a/mybulma/node_modules/resolve/lib/caller.js b/mybulma/node_modules/resolve/lib/caller.js new file mode 100644 index 0000000..b14a280 --- /dev/null +++ b/mybulma/node_modules/resolve/lib/caller.js @@ -0,0 +1,8 @@ +module.exports = function () { + // see https://code.google.com/p/v8/wiki/JavaScriptStackTraceApi + var origPrepareStackTrace = Error.prepareStackTrace; + Error.prepareStackTrace = function (_, stack) { return stack; }; + var stack = (new Error()).stack; + Error.prepareStackTrace = origPrepareStackTrace; + return stack[2].getFileName(); +}; diff --git a/mybulma/node_modules/resolve/lib/core.js b/mybulma/node_modules/resolve/lib/core.js new file mode 100644 index 0000000..ecc5b2e --- /dev/null +++ b/mybulma/node_modules/resolve/lib/core.js @@ -0,0 +1,52 @@ +var current = (process.versions && process.versions.node && process.versions.node.split('.')) || []; + +function specifierIncluded(specifier) { + var parts = specifier.split(' '); + var op = parts.length > 1 ? parts[0] : '='; + var versionParts = (parts.length > 1 ? 
parts[1] : parts[0]).split('.'); + + for (var i = 0; i < 3; ++i) { + var cur = parseInt(current[i] || 0, 10); + var ver = parseInt(versionParts[i] || 0, 10); + if (cur === ver) { + continue; // eslint-disable-line no-restricted-syntax, no-continue + } + if (op === '<') { + return cur < ver; + } else if (op === '>=') { + return cur >= ver; + } + return false; + } + return op === '>='; +} + +function matchesRange(range) { + var specifiers = range.split(/ ?&& ?/); + if (specifiers.length === 0) { return false; } + for (var i = 0; i < specifiers.length; ++i) { + if (!specifierIncluded(specifiers[i])) { return false; } + } + return true; +} + +function versionIncluded(specifierValue) { + if (typeof specifierValue === 'boolean') { return specifierValue; } + if (specifierValue && typeof specifierValue === 'object') { + for (var i = 0; i < specifierValue.length; ++i) { + if (matchesRange(specifierValue[i])) { return true; } + } + return false; + } + return matchesRange(specifierValue); +} + +var data = require('./core.json'); + +var core = {}; +for (var mod in data) { // eslint-disable-line no-restricted-syntax + if (Object.prototype.hasOwnProperty.call(data, mod)) { + core[mod] = versionIncluded(data[mod]); + } +} +module.exports = core; diff --git a/mybulma/node_modules/resolve/lib/core.json b/mybulma/node_modules/resolve/lib/core.json new file mode 100644 index 0000000..058584b --- /dev/null +++ b/mybulma/node_modules/resolve/lib/core.json @@ -0,0 +1,153 @@ +{ + "assert": true, + "node:assert": [">= 14.18 && < 15", ">= 16"], + "assert/strict": ">= 15", + "node:assert/strict": ">= 16", + "async_hooks": ">= 8", + "node:async_hooks": [">= 14.18 && < 15", ">= 16"], + "buffer_ieee754": ">= 0.5 && < 0.9.7", + "buffer": true, + "node:buffer": [">= 14.18 && < 15", ">= 16"], + "child_process": true, + "node:child_process": [">= 14.18 && < 15", ">= 16"], + "cluster": ">= 0.5", + "node:cluster": [">= 14.18 && < 15", ">= 16"], + "console": true, + "node:console": [">= 14.18 && < 15", ">= 16"], + "constants": true, + "node:constants": [">= 14.18 && < 15", ">= 16"], + "crypto": true, + "node:crypto": [">= 14.18 && < 15", ">= 16"], + "_debug_agent": ">= 1 && < 8", + "_debugger": "< 8", + "dgram": true, + "node:dgram": [">= 14.18 && < 15", ">= 16"], + "diagnostics_channel": [">= 14.17 && < 15", ">= 15.1"], + "node:diagnostics_channel": [">= 14.18 && < 15", ">= 16"], + "dns": true, + "node:dns": [">= 14.18 && < 15", ">= 16"], + "dns/promises": ">= 15", + "node:dns/promises": ">= 16", + "domain": ">= 0.7.12", + "node:domain": [">= 14.18 && < 15", ">= 16"], + "events": true, + "node:events": [">= 14.18 && < 15", ">= 16"], + "freelist": "< 6", + "fs": true, + "node:fs": [">= 14.18 && < 15", ">= 16"], + "fs/promises": [">= 10 && < 10.1", ">= 14"], + "node:fs/promises": [">= 14.18 && < 15", ">= 16"], + "_http_agent": ">= 0.11.1", + "node:_http_agent": [">= 14.18 && < 15", ">= 16"], + "_http_client": ">= 0.11.1", + "node:_http_client": [">= 14.18 && < 15", ">= 16"], + "_http_common": ">= 0.11.1", + "node:_http_common": [">= 14.18 && < 15", ">= 16"], + "_http_incoming": ">= 0.11.1", + "node:_http_incoming": [">= 14.18 && < 15", ">= 16"], + "_http_outgoing": ">= 0.11.1", + "node:_http_outgoing": [">= 14.18 && < 15", ">= 16"], + "_http_server": ">= 0.11.1", + "node:_http_server": [">= 14.18 && < 15", ">= 16"], + "http": true, + "node:http": [">= 14.18 && < 15", ">= 16"], + "http2": ">= 8.8", + "node:http2": [">= 14.18 && < 15", ">= 16"], + "https": true, + "node:https": [">= 14.18 && < 15", ">= 16"], + "inspector": ">= 
8", + "node:inspector": [">= 14.18 && < 15", ">= 16"], + "_linklist": "< 8", + "module": true, + "node:module": [">= 14.18 && < 15", ">= 16"], + "net": true, + "node:net": [">= 14.18 && < 15", ">= 16"], + "node-inspect/lib/_inspect": ">= 7.6 && < 12", + "node-inspect/lib/internal/inspect_client": ">= 7.6 && < 12", + "node-inspect/lib/internal/inspect_repl": ">= 7.6 && < 12", + "os": true, + "node:os": [">= 14.18 && < 15", ">= 16"], + "path": true, + "node:path": [">= 14.18 && < 15", ">= 16"], + "path/posix": ">= 15.3", + "node:path/posix": ">= 16", + "path/win32": ">= 15.3", + "node:path/win32": ">= 16", + "perf_hooks": ">= 8.5", + "node:perf_hooks": [">= 14.18 && < 15", ">= 16"], + "process": ">= 1", + "node:process": [">= 14.18 && < 15", ">= 16"], + "punycode": ">= 0.5", + "node:punycode": [">= 14.18 && < 15", ">= 16"], + "querystring": true, + "node:querystring": [">= 14.18 && < 15", ">= 16"], + "readline": true, + "node:readline": [">= 14.18 && < 15", ">= 16"], + "readline/promises": ">= 17", + "node:readline/promises": ">= 17", + "repl": true, + "node:repl": [">= 14.18 && < 15", ">= 16"], + "smalloc": ">= 0.11.5 && < 3", + "_stream_duplex": ">= 0.9.4", + "node:_stream_duplex": [">= 14.18 && < 15", ">= 16"], + "_stream_transform": ">= 0.9.4", + "node:_stream_transform": [">= 14.18 && < 15", ">= 16"], + "_stream_wrap": ">= 1.4.1", + "node:_stream_wrap": [">= 14.18 && < 15", ">= 16"], + "_stream_passthrough": ">= 0.9.4", + "node:_stream_passthrough": [">= 14.18 && < 15", ">= 16"], + "_stream_readable": ">= 0.9.4", + "node:_stream_readable": [">= 14.18 && < 15", ">= 16"], + "_stream_writable": ">= 0.9.4", + "node:_stream_writable": [">= 14.18 && < 15", ">= 16"], + "stream": true, + "node:stream": [">= 14.18 && < 15", ">= 16"], + "stream/consumers": ">= 16.7", + "node:stream/consumers": ">= 16.7", + "stream/promises": ">= 15", + "node:stream/promises": ">= 16", + "stream/web": ">= 16.5", + "node:stream/web": ">= 16.5", + "string_decoder": true, + "node:string_decoder": [">= 14.18 && < 15", ">= 16"], + "sys": [">= 0.4 && < 0.7", ">= 0.8"], + "node:sys": [">= 14.18 && < 15", ">= 16"], + "node:test": ">= 18", + "timers": true, + "node:timers": [">= 14.18 && < 15", ">= 16"], + "timers/promises": ">= 15", + "node:timers/promises": ">= 16", + "_tls_common": ">= 0.11.13", + "node:_tls_common": [">= 14.18 && < 15", ">= 16"], + "_tls_legacy": ">= 0.11.3 && < 10", + "_tls_wrap": ">= 0.11.3", + "node:_tls_wrap": [">= 14.18 && < 15", ">= 16"], + "tls": true, + "node:tls": [">= 14.18 && < 15", ">= 16"], + "trace_events": ">= 10", + "node:trace_events": [">= 14.18 && < 15", ">= 16"], + "tty": true, + "node:tty": [">= 14.18 && < 15", ">= 16"], + "url": true, + "node:url": [">= 14.18 && < 15", ">= 16"], + "util": true, + "node:util": [">= 14.18 && < 15", ">= 16"], + "util/types": ">= 15.3", + "node:util/types": ">= 16", + "v8/tools/arguments": ">= 10 && < 12", + "v8/tools/codemap": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/consarray": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/csvparser": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/logreader": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/profile_view": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/splaytree": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8": ">= 1", + "node:v8": [">= 14.18 && < 15", ">= 16"], + "vm": true, + "node:vm": [">= 14.18 && < 15", ">= 16"], + "wasi": ">= 13.4 && < 13.5", + "worker_threads": ">= 11.7", + "node:worker_threads": [">= 14.18 && < 15", ">= 16"], + "zlib": ">= 0.5", + "node:zlib": [">= 14.18 && < 
15", ">= 16"] +} diff --git a/mybulma/node_modules/resolve/lib/homedir.js b/mybulma/node_modules/resolve/lib/homedir.js new file mode 100644 index 0000000..5ffdf73 --- /dev/null +++ b/mybulma/node_modules/resolve/lib/homedir.js @@ -0,0 +1,24 @@ +'use strict'; + +var os = require('os'); + +// adapted from https://github.com/sindresorhus/os-homedir/blob/11e089f4754db38bb535e5a8416320c4446e8cfd/index.js + +module.exports = os.homedir || function homedir() { + var home = process.env.HOME; + var user = process.env.LOGNAME || process.env.USER || process.env.LNAME || process.env.USERNAME; + + if (process.platform === 'win32') { + return process.env.USERPROFILE || process.env.HOMEDRIVE + process.env.HOMEPATH || home || null; + } + + if (process.platform === 'darwin') { + return home || (user ? '/Users/' + user : null); + } + + if (process.platform === 'linux') { + return home || (process.getuid() === 0 ? '/root' : (user ? '/home/' + user : null)); // eslint-disable-line no-extra-parens + } + + return home || null; +}; diff --git a/mybulma/node_modules/resolve/lib/is-core.js b/mybulma/node_modules/resolve/lib/is-core.js new file mode 100644 index 0000000..537f5c7 --- /dev/null +++ b/mybulma/node_modules/resolve/lib/is-core.js @@ -0,0 +1,5 @@ +var isCoreModule = require('is-core-module'); + +module.exports = function isCore(x) { + return isCoreModule(x); +}; diff --git a/mybulma/node_modules/resolve/lib/node-modules-paths.js b/mybulma/node_modules/resolve/lib/node-modules-paths.js new file mode 100644 index 0000000..1cff010 --- /dev/null +++ b/mybulma/node_modules/resolve/lib/node-modules-paths.js @@ -0,0 +1,42 @@ +var path = require('path'); +var parse = path.parse || require('path-parse'); // eslint-disable-line global-require + +var getNodeModulesDirs = function getNodeModulesDirs(absoluteStart, modules) { + var prefix = '/'; + if ((/^([A-Za-z]:)/).test(absoluteStart)) { + prefix = ''; + } else if ((/^\\\\/).test(absoluteStart)) { + prefix = '\\\\'; + } + + var paths = [absoluteStart]; + var parsed = parse(absoluteStart); + while (parsed.dir !== paths[paths.length - 1]) { + paths.push(parsed.dir); + parsed = parse(parsed.dir); + } + + return paths.reduce(function (dirs, aPath) { + return dirs.concat(modules.map(function (moduleDir) { + return path.resolve(prefix, aPath, moduleDir); + })); + }, []); +}; + +module.exports = function nodeModulesPaths(start, opts, request) { + var modules = opts && opts.moduleDirectory + ? [].concat(opts.moduleDirectory) + : ['node_modules']; + + if (opts && typeof opts.paths === 'function') { + return opts.paths( + request, + start, + function () { return getNodeModulesDirs(start, modules); }, + opts + ); + } + + var dirs = getNodeModulesDirs(start, modules); + return opts && opts.paths ? dirs.concat(opts.paths) : dirs; +}; diff --git a/mybulma/node_modules/resolve/lib/normalize-options.js b/mybulma/node_modules/resolve/lib/normalize-options.js new file mode 100644 index 0000000..4b56904 --- /dev/null +++ b/mybulma/node_modules/resolve/lib/normalize-options.js @@ -0,0 +1,10 @@ +module.exports = function (x, opts) { + /** + * This file is purposefully a passthrough. It's expected that third-party + * environments will override it at runtime in order to inject special logic + * into `resolve` (by manipulating the options). One such example is the PnP + * code path in Yarn. 
+ */ + + return opts || {}; +}; diff --git a/mybulma/node_modules/resolve/lib/sync.js b/mybulma/node_modules/resolve/lib/sync.js new file mode 100644 index 0000000..0b6cd58 --- /dev/null +++ b/mybulma/node_modules/resolve/lib/sync.js @@ -0,0 +1,208 @@ +var isCore = require('is-core-module'); +var fs = require('fs'); +var path = require('path'); +var getHomedir = require('./homedir'); +var caller = require('./caller'); +var nodeModulesPaths = require('./node-modules-paths'); +var normalizeOptions = require('./normalize-options'); + +var realpathFS = process.platform !== 'win32' && fs.realpathSync && typeof fs.realpathSync.native === 'function' ? fs.realpathSync.native : fs.realpathSync; + +var homedir = getHomedir(); +var defaultPaths = function () { + return [ + path.join(homedir, '.node_modules'), + path.join(homedir, '.node_libraries') + ]; +}; + +var defaultIsFile = function isFile(file) { + try { + var stat = fs.statSync(file, { throwIfNoEntry: false }); + } catch (e) { + if (e && (e.code === 'ENOENT' || e.code === 'ENOTDIR')) return false; + throw e; + } + return !!stat && (stat.isFile() || stat.isFIFO()); +}; + +var defaultIsDir = function isDirectory(dir) { + try { + var stat = fs.statSync(dir, { throwIfNoEntry: false }); + } catch (e) { + if (e && (e.code === 'ENOENT' || e.code === 'ENOTDIR')) return false; + throw e; + } + return !!stat && stat.isDirectory(); +}; + +var defaultRealpathSync = function realpathSync(x) { + try { + return realpathFS(x); + } catch (realpathErr) { + if (realpathErr.code !== 'ENOENT') { + throw realpathErr; + } + } + return x; +}; + +var maybeRealpathSync = function maybeRealpathSync(realpathSync, x, opts) { + if (opts && opts.preserveSymlinks === false) { + return realpathSync(x); + } + return x; +}; + +var defaultReadPackageSync = function defaultReadPackageSync(readFileSync, pkgfile) { + var body = readFileSync(pkgfile); + try { + var pkg = JSON.parse(body); + return pkg; + } catch (jsonErr) {} +}; + +var getPackageCandidates = function getPackageCandidates(x, start, opts) { + var dirs = nodeModulesPaths(start, opts, x); + for (var i = 0; i < dirs.length; i++) { + dirs[i] = path.join(dirs[i], x); + } + return dirs; +}; + +module.exports = function resolveSync(x, options) { + if (typeof x !== 'string') { + throw new TypeError('Path must be a string.'); + } + var opts = normalizeOptions(x, options); + + var isFile = opts.isFile || defaultIsFile; + var readFileSync = opts.readFileSync || fs.readFileSync; + var isDirectory = opts.isDirectory || defaultIsDir; + var realpathSync = opts.realpathSync || defaultRealpathSync; + var readPackageSync = opts.readPackageSync || defaultReadPackageSync; + if (opts.readFileSync && opts.readPackageSync) { + throw new TypeError('`readFileSync` and `readPackageSync` are mutually exclusive.'); + } + var packageIterator = opts.packageIterator; + + var extensions = opts.extensions || ['.js']; + var includeCoreModules = opts.includeCoreModules !== false; + var basedir = opts.basedir || path.dirname(caller()); + var parent = opts.filename || basedir; + + opts.paths = opts.paths || defaultPaths(); + + // ensure that `basedir` is an absolute path at this point, resolving against the process' current working directory + var absoluteStart = maybeRealpathSync(realpathSync, path.resolve(basedir), opts); + + if ((/^(?:\.\.?(?:\/|$)|\/|([A-Za-z]:)?[/\\])/).test(x)) { + var res = path.resolve(absoluteStart, x); + if (x === '.' || x === '..' 
|| x.slice(-1) === '/') res += '/'; + var m = loadAsFileSync(res) || loadAsDirectorySync(res); + if (m) return maybeRealpathSync(realpathSync, m, opts); + } else if (includeCoreModules && isCore(x)) { + return x; + } else { + var n = loadNodeModulesSync(x, absoluteStart); + if (n) return maybeRealpathSync(realpathSync, n, opts); + } + + var err = new Error("Cannot find module '" + x + "' from '" + parent + "'"); + err.code = 'MODULE_NOT_FOUND'; + throw err; + + function loadAsFileSync(x) { + var pkg = loadpkg(path.dirname(x)); + + if (pkg && pkg.dir && pkg.pkg && opts.pathFilter) { + var rfile = path.relative(pkg.dir, x); + var r = opts.pathFilter(pkg.pkg, x, rfile); + if (r) { + x = path.resolve(pkg.dir, r); // eslint-disable-line no-param-reassign + } + } + + if (isFile(x)) { + return x; + } + + for (var i = 0; i < extensions.length; i++) { + var file = x + extensions[i]; + if (isFile(file)) { + return file; + } + } + } + + function loadpkg(dir) { + if (dir === '' || dir === '/') return; + if (process.platform === 'win32' && (/^\w:[/\\]*$/).test(dir)) { + return; + } + if ((/[/\\]node_modules[/\\]*$/).test(dir)) return; + + var pkgfile = path.join(maybeRealpathSync(realpathSync, dir, opts), 'package.json'); + + if (!isFile(pkgfile)) { + return loadpkg(path.dirname(dir)); + } + + var pkg = readPackageSync(readFileSync, pkgfile); + + if (pkg && opts.packageFilter) { + // v2 will pass pkgfile + pkg = opts.packageFilter(pkg, /*pkgfile,*/ dir); // eslint-disable-line spaced-comment + } + + return { pkg: pkg, dir: dir }; + } + + function loadAsDirectorySync(x) { + var pkgfile = path.join(maybeRealpathSync(realpathSync, x, opts), '/package.json'); + if (isFile(pkgfile)) { + try { + var pkg = readPackageSync(readFileSync, pkgfile); + } catch (e) {} + + if (pkg && opts.packageFilter) { + // v2 will pass pkgfile + pkg = opts.packageFilter(pkg, /*pkgfile,*/ x); // eslint-disable-line spaced-comment + } + + if (pkg && pkg.main) { + if (typeof pkg.main !== 'string') { + var mainError = new TypeError('package “' + pkg.name + '” `main` must be a string'); + mainError.code = 'INVALID_PACKAGE_MAIN'; + throw mainError; + } + if (pkg.main === '.' || pkg.main === './') { + pkg.main = 'index'; + } + try { + var m = loadAsFileSync(path.resolve(x, pkg.main)); + if (m) return m; + var n = loadAsDirectorySync(path.resolve(x, pkg.main)); + if (n) return n; + } catch (e) {} + } + } + + return loadAsFileSync(path.join(x, '/index')); + } + + function loadNodeModulesSync(x, start) { + var thunk = function () { return getPackageCandidates(x, start, opts); }; + var dirs = packageIterator ? 
packageIterator(x, start, thunk, opts) : thunk(); + + for (var i = 0; i < dirs.length; i++) { + var dir = dirs[i]; + if (isDirectory(path.dirname(dir))) { + var m = loadAsFileSync(dir); + if (m) return m; + var n = loadAsDirectorySync(dir); + if (n) return n; + } + } + } +}; diff --git a/mybulma/node_modules/resolve/package.json b/mybulma/node_modules/resolve/package.json new file mode 100644 index 0000000..7177e0f --- /dev/null +++ b/mybulma/node_modules/resolve/package.json @@ -0,0 +1,71 @@ +{ + "name": "resolve", + "description": "resolve like require.resolve() on behalf of files asynchronously and synchronously", + "version": "1.22.1", + "repository": { + "type": "git", + "url": "git://github.com/browserify/resolve.git" + }, + "bin": { + "resolve": "./bin/resolve" + }, + "main": "index.js", + "keywords": [ + "resolve", + "require", + "node", + "module" + ], + "scripts": { + "prepack": "npmignore --auto --commentLines=autogenerated", + "prepublishOnly": "safe-publish-latest && cp node_modules/is-core-module/core.json ./lib/ ||:", + "prepublish": "not-in-publish || npm run prepublishOnly", + "prelint": "eclint check $(git ls-files | xargs find 2> /dev/null | grep -vE 'node_modules|\\.git')", + "lint": "eslint --ext=js,mjs --no-eslintrc -c .eslintrc . 'bin/**'", + "pretests-only": "cd ./test/resolver/nested_symlinks && node mylib/sync && node mylib/async", + "tests-only": "tape test/*.js", + "pretest": "npm run lint", + "test": "npm run --silent tests-only", + "posttest": "npm run test:multirepo && aud --production", + "test:multirepo": "cd ./test/resolver/multirepo && npm install && npm test" + }, + "devDependencies": { + "@ljharb/eslint-config": "^21.0.0", + "array.prototype.map": "^1.0.4", + "aud": "^2.0.0", + "copy-dir": "^1.3.0", + "eclint": "^2.8.1", + "eslint": "=8.8.0", + "in-publish": "^2.0.1", + "mkdirp": "^0.5.5", + "mv": "^2.1.1", + "npmignore": "^0.3.0", + "object-keys": "^1.1.1", + "rimraf": "^2.7.1", + "safe-publish-latest": "^2.0.0", + "semver": "^6.3.0", + "tap": "0.4.13", + "tape": "^5.5.3", + "tmp": "^0.0.31" + }, + "license": "MIT", + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "dependencies": { + "is-core-module": "^2.9.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "publishConfig": { + "ignore": [ + ".github/workflows", + "appveyor.yml" + ] + } +} diff --git a/mybulma/node_modules/resolve/readme.markdown b/mybulma/node_modules/resolve/readme.markdown new file mode 100644 index 0000000..ad34d60 --- /dev/null +++ b/mybulma/node_modules/resolve/readme.markdown @@ -0,0 +1,301 @@ +# resolve [![Version Badge][2]][1] + +implements the [node `require.resolve()` algorithm](https://nodejs.org/api/modules.html#modules_all_together) such that you can `require.resolve()` on behalf of a file asynchronously and synchronously + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![dependency status][5]][6] +[![dev dependency status][7]][8] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][11]][1] + +# example + +asynchronously resolve: + +```js +var resolve = require('resolve/async'); // or, require('resolve') +resolve('tap', { basedir: __dirname }, function (err, res) { + if (err) console.error(err); + else console.log(res); +}); +``` + +``` +$ node example/async.js 
+/home/substack/projects/node-resolve/node_modules/tap/lib/main.js +``` + +synchronously resolve: + +```js +var resolve = require('resolve/sync'); // or, `require('resolve').sync +var res = resolve('tap', { basedir: __dirname }); +console.log(res); +``` + +``` +$ node example/sync.js +/home/substack/projects/node-resolve/node_modules/tap/lib/main.js +``` + +# methods + +```js +var resolve = require('resolve'); +var async = require('resolve/async'); +var sync = require('resolve/sync'); +``` + +For both the synchronous and asynchronous methods, errors may have any of the following `err.code` values: + +- `MODULE_NOT_FOUND`: the given path string (`id`) could not be resolved to a module +- `INVALID_BASEDIR`: the specified `opts.basedir` doesn't exist, or is not a directory +- `INVALID_PACKAGE_MAIN`: a `package.json` was encountered with an invalid `main` property (eg. not a string) + +## resolve(id, opts={}, cb) + +Asynchronously resolve the module path string `id` into `cb(err, res [, pkg])`, where `pkg` (if defined) is the data from `package.json`. + +options are: + +* opts.basedir - directory to begin resolving from + +* opts.package - `package.json` data applicable to the module being loaded + +* opts.extensions - array of file extensions to search in order + +* opts.includeCoreModules - set to `false` to exclude node core modules (e.g. `fs`) from the search + +* opts.readFile - how to read files asynchronously + +* opts.isFile - function to asynchronously test whether a file exists + +* opts.isDirectory - function to asynchronously test whether a file exists and is a directory + +* opts.realpath - function to asynchronously resolve a potential symlink to its real path + +* `opts.readPackage(readFile, pkgfile, cb)` - function to asynchronously read and parse a package.json file + * readFile - the passed `opts.readFile` or `fs.readFile` if not specified + * pkgfile - path to package.json + * cb - callback + +* `opts.packageFilter(pkg, pkgfile, dir)` - transform the parsed package.json contents before looking at the "main" field + * pkg - package data + * pkgfile - path to package.json + * dir - directory that contains package.json + +* `opts.pathFilter(pkg, path, relativePath)` - transform a path within a package + * pkg - package data + * path - the path being resolved + * relativePath - the path relative from the package.json location + * returns - a relative path that will be joined from the package.json location + +* opts.paths - require.paths array to use if nothing is found on the normal `node_modules` recursive walk (probably don't use this) + + For advanced users, `paths` can also be a `opts.paths(request, start, opts)` function + * request - the import specifier being resolved + * start - lookup path + * getNodeModulesDirs - a thunk (no-argument function) that returns the paths using standard `node_modules` resolution + * opts - the resolution options + +* `opts.packageIterator(request, start, opts)` - return the list of candidate paths where the packages sources may be found (probably don't use this) + * request - the import specifier being resolved + * start - lookup path + * getPackageCandidates - a thunk (no-argument function) that returns the paths using standard `node_modules` resolution + * opts - the resolution options + +* opts.moduleDirectory - directory (or directories) in which to recursively look for modules. default: `"node_modules"` + +* opts.preserveSymlinks - if true, doesn't resolve `basedir` to real path before resolving. 
+This is the way Node resolves dependencies when executed with the [--preserve-symlinks](https://nodejs.org/api/all.html#cli_preserve_symlinks) flag. +**Note:** this property is currently `true` by default but it will be changed to +`false` in the next major version because *Node's resolution algorithm does not preserve symlinks by default*. + +default `opts` values: + +```js +{ + paths: [], + basedir: __dirname, + extensions: ['.js'], + includeCoreModules: true, + readFile: fs.readFile, + isFile: function isFile(file, cb) { + fs.stat(file, function (err, stat) { + if (!err) { + return cb(null, stat.isFile() || stat.isFIFO()); + } + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') return cb(null, false); + return cb(err); + }); + }, + isDirectory: function isDirectory(dir, cb) { + fs.stat(dir, function (err, stat) { + if (!err) { + return cb(null, stat.isDirectory()); + } + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') return cb(null, false); + return cb(err); + }); + }, + realpath: function realpath(file, cb) { + var realpath = typeof fs.realpath.native === 'function' ? fs.realpath.native : fs.realpath; + realpath(file, function (realPathErr, realPath) { + if (realPathErr && realPathErr.code !== 'ENOENT') cb(realPathErr); + else cb(null, realPathErr ? file : realPath); + }); + }, + readPackage: function defaultReadPackage(readFile, pkgfile, cb) { + readFile(pkgfile, function (readFileErr, body) { + if (readFileErr) cb(readFileErr); + else { + try { + var pkg = JSON.parse(body); + cb(null, pkg); + } catch (jsonErr) { + cb(null); + } + } + }); + }, + moduleDirectory: 'node_modules', + preserveSymlinks: true +} +``` + +## resolve.sync(id, opts) + +Synchronously resolve the module path string `id`, returning the result and +throwing an error when `id` can't be resolved. + +options are: + +* opts.basedir - directory to begin resolving from + +* opts.extensions - array of file extensions to search in order + +* opts.includeCoreModules - set to `false` to exclude node core modules (e.g. 
`fs`) from the search + +* opts.readFileSync - how to read files synchronously + +* opts.isFile - function to synchronously test whether a file exists + +* opts.isDirectory - function to synchronously test whether a file exists and is a directory + +* opts.realpathSync - function to synchronously resolve a potential symlink to its real path + +* `opts.readPackageSync(readFileSync, pkgfile)` - function to synchronously read and parse a package.json file + * readFileSync - the passed `opts.readFileSync` or `fs.readFileSync` if not specified + * pkgfile - path to package.json + +* `opts.packageFilter(pkg, dir)` - transform the parsed package.json contents before looking at the "main" field + * pkg - package data + * dir - directory that contains package.json (Note: the second argument will change to "pkgfile" in v2) + +* `opts.pathFilter(pkg, path, relativePath)` - transform a path within a package + * pkg - package data + * path - the path being resolved + * relativePath - the path relative from the package.json location + * returns - a relative path that will be joined from the package.json location + +* opts.paths - require.paths array to use if nothing is found on the normal `node_modules` recursive walk (probably don't use this) + + For advanced users, `paths` can also be a `opts.paths(request, start, opts)` function + * request - the import specifier being resolved + * start - lookup path + * getNodeModulesDirs - a thunk (no-argument function) that returns the paths using standard `node_modules` resolution + * opts - the resolution options + +* `opts.packageIterator(request, start, opts)` - return the list of candidate paths where the packages sources may be found (probably don't use this) + * request - the import specifier being resolved + * start - lookup path + * getPackageCandidates - a thunk (no-argument function) that returns the paths using standard `node_modules` resolution + * opts - the resolution options + +* opts.moduleDirectory - directory (or directories) in which to recursively look for modules. default: `"node_modules"` + +* opts.preserveSymlinks - if true, doesn't resolve `basedir` to real path before resolving. +This is the way Node resolves dependencies when executed with the [--preserve-symlinks](https://nodejs.org/api/all.html#cli_preserve_symlinks) flag. +**Note:** this property is currently `true` by default but it will be changed to +`false` in the next major version because *Node's resolution algorithm does not preserve symlinks by default*. + +default `opts` values: + +```js +{ + paths: [], + basedir: __dirname, + extensions: ['.js'], + includeCoreModules: true, + readFileSync: fs.readFileSync, + isFile: function isFile(file) { + try { + var stat = fs.statSync(file); + } catch (e) { + if (e && (e.code === 'ENOENT' || e.code === 'ENOTDIR')) return false; + throw e; + } + return stat.isFile() || stat.isFIFO(); + }, + isDirectory: function isDirectory(dir) { + try { + var stat = fs.statSync(dir); + } catch (e) { + if (e && (e.code === 'ENOENT' || e.code === 'ENOTDIR')) return false; + throw e; + } + return stat.isDirectory(); + }, + realpathSync: function realpathSync(file) { + try { + var realpath = typeof fs.realpathSync.native === 'function' ? 
fs.realpathSync.native : fs.realpathSync; + return realpath(file); + } catch (realPathErr) { + if (realPathErr.code !== 'ENOENT') { + throw realPathErr; + } + } + return file; + }, + readPackageSync: function defaultReadPackageSync(readFileSync, pkgfile) { + var body = readFileSync(pkgfile); + try { + var pkg = JSON.parse(body); + return pkg; + } catch (jsonErr) {} + }, + moduleDirectory: 'node_modules', + preserveSymlinks: true +} +``` + +# install + +With [npm](https://npmjs.org) do: + +```sh +npm install resolve +``` + +# license + +MIT + +[1]: https://npmjs.org/package/resolve +[2]: https://versionbadg.es/browserify/resolve.svg +[5]: https://david-dm.org/browserify/resolve.svg +[6]: https://david-dm.org/browserify/resolve +[7]: https://david-dm.org/browserify/resolve/dev-status.svg +[8]: https://david-dm.org/browserify/resolve#info=devDependencies +[11]: https://nodei.co/npm/resolve.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/resolve.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/resolve.svg +[downloads-url]: https://npm-stat.com/charts.html?package=resolve +[codecov-image]: https://codecov.io/gh/browserify/resolve/branch/main/graphs/badge.svg +[codecov-url]: https://app.codecov.io/gh/browserify/resolve/ +[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/browserify/resolve +[actions-url]: https://github.com/browserify/resolve/actions diff --git a/mybulma/node_modules/resolve/sync.js b/mybulma/node_modules/resolve/sync.js new file mode 100644 index 0000000..cd0ee04 --- /dev/null +++ b/mybulma/node_modules/resolve/sync.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = require('./lib/sync'); diff --git a/mybulma/node_modules/resolve/test/core.js b/mybulma/node_modules/resolve/test/core.js new file mode 100644 index 0000000..a477adc --- /dev/null +++ b/mybulma/node_modules/resolve/test/core.js @@ -0,0 +1,88 @@ +var test = require('tape'); +var keys = require('object-keys'); +var semver = require('semver'); + +var resolve = require('../'); + +var brokenNode = semver.satisfies(process.version, '11.11 - 11.13'); + +test('core modules', function (t) { + t.test('isCore()', function (st) { + st.ok(resolve.isCore('fs')); + st.ok(resolve.isCore('net')); + st.ok(resolve.isCore('http')); + + st.ok(!resolve.isCore('seq')); + st.ok(!resolve.isCore('../')); + + st.ok(!resolve.isCore('toString')); + + st.end(); + }); + + t.test('core list', function (st) { + var cores = keys(resolve.core); + st.plan(cores.length); + + for (var i = 0; i < cores.length; ++i) { + var mod = cores[i]; + // note: this must be require, not require.resolve, due to https://github.com/nodejs/node/issues/43274 + var requireFunc = function () { require(mod); }; // eslint-disable-line no-loop-func + t.comment(mod + ': ' + resolve.core[mod]); + if (resolve.core[mod]) { + st.doesNotThrow(requireFunc, mod + ' supported; requiring does not throw'); + } else if (brokenNode) { + st.ok(true, 'this version of node is broken: attempting to require things that fail to resolve breaks "home_paths" tests'); + } else { + st.throws(requireFunc, mod + ' not supported; requiring throws'); + } + } + + st.end(); + }); + + t.test('core via repl module', { skip: !resolve.core.repl }, function (st) { + var libs = require('repl')._builtinLibs; // eslint-disable-line no-underscore-dangle + if (!libs) { + st.skip('module.builtinModules does not exist'); + return st.end(); + } + for (var i = 0; i < libs.length; ++i) { + var mod = 
libs[i]; + st.ok(resolve.core[mod], mod + ' is a core module'); + st.doesNotThrow( + function () { require(mod); }, // eslint-disable-line no-loop-func + 'requiring ' + mod + ' does not throw' + ); + } + st.end(); + }); + + t.test('core via builtinModules list', { skip: !resolve.core.module }, function (st) { + var libs = require('module').builtinModules; + if (!libs) { + st.skip('module.builtinModules does not exist'); + return st.end(); + } + var blacklist = [ + '_debug_agent', + 'v8/tools/tickprocessor-driver', + 'v8/tools/SourceMap', + 'v8/tools/tickprocessor', + 'v8/tools/profile' + ]; + for (var i = 0; i < libs.length; ++i) { + var mod = libs[i]; + if (blacklist.indexOf(mod) === -1) { + st.ok(resolve.core[mod], mod + ' is a core module'); + st.doesNotThrow( + function () { require(mod); }, // eslint-disable-line no-loop-func + 'requiring ' + mod + ' does not throw' + ); + } + } + st.end(); + }); + + t.end(); +}); diff --git a/mybulma/node_modules/resolve/test/dotdot.js b/mybulma/node_modules/resolve/test/dotdot.js new file mode 100644 index 0000000..3080665 --- /dev/null +++ b/mybulma/node_modules/resolve/test/dotdot.js @@ -0,0 +1,29 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('dotdot', function (t) { + t.plan(4); + var dir = path.join(__dirname, '/dotdot/abc'); + + resolve('..', { basedir: dir }, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(__dirname, 'dotdot/index.js')); + }); + + resolve('.', { basedir: dir }, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(dir, 'index.js')); + }); +}); + +test('dotdot sync', function (t) { + t.plan(2); + var dir = path.join(__dirname, '/dotdot/abc'); + + var a = resolve.sync('..', { basedir: dir }); + t.equal(a, path.join(__dirname, 'dotdot/index.js')); + + var b = resolve.sync('.', { basedir: dir }); + t.equal(b, path.join(dir, 'index.js')); +}); diff --git a/mybulma/node_modules/resolve/test/dotdot/abc/index.js b/mybulma/node_modules/resolve/test/dotdot/abc/index.js new file mode 100644 index 0000000..67f2534 --- /dev/null +++ b/mybulma/node_modules/resolve/test/dotdot/abc/index.js @@ -0,0 +1,2 @@ +var x = require('..'); +console.log(x); diff --git a/mybulma/node_modules/resolve/test/dotdot/index.js b/mybulma/node_modules/resolve/test/dotdot/index.js new file mode 100644 index 0000000..643f9fc --- /dev/null +++ b/mybulma/node_modules/resolve/test/dotdot/index.js @@ -0,0 +1 @@ +module.exports = 'whatever'; diff --git a/mybulma/node_modules/resolve/test/faulty_basedir.js b/mybulma/node_modules/resolve/test/faulty_basedir.js new file mode 100644 index 0000000..5f2141a --- /dev/null +++ b/mybulma/node_modules/resolve/test/faulty_basedir.js @@ -0,0 +1,29 @@ +var test = require('tape'); +var path = require('path'); +var resolve = require('../'); + +test('faulty basedir must produce error in windows', { skip: process.platform !== 'win32' }, function (t) { + t.plan(1); + + var resolverDir = 'C:\\a\\b\\c\\d'; + + resolve('tape/lib/test.js', { basedir: resolverDir }, function (err, res, pkg) { + t.equal(!!err, true); + }); +}); + +test('non-existent basedir should not throw when preserveSymlinks is false', function (t) { + t.plan(2); + + var opts = { + basedir: path.join(path.sep, 'unreal', 'path', 'that', 'does', 'not', 'exist'), + preserveSymlinks: false + }; + + var module = './dotdot/abc'; + + resolve(module, opts, function (err, res) { + t.equal(err.code, 'MODULE_NOT_FOUND'); + t.equal(res, undefined); + }); +}); diff --git 
a/mybulma/node_modules/resolve/test/filter.js b/mybulma/node_modules/resolve/test/filter.js new file mode 100644 index 0000000..8f8cccd --- /dev/null +++ b/mybulma/node_modules/resolve/test/filter.js @@ -0,0 +1,34 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('filter', function (t) { + t.plan(4); + var dir = path.join(__dirname, 'resolver'); + var packageFilterArgs; + resolve('./baz', { + basedir: dir, + packageFilter: function (pkg, pkgfile) { + pkg.main = 'doom'; // eslint-disable-line no-param-reassign + packageFilterArgs = [pkg, pkgfile]; + return pkg; + } + }, function (err, res, pkg) { + if (err) t.fail(err); + + t.equal(res, path.join(dir, 'baz/doom.js'), 'changing the package "main" works'); + + var packageData = packageFilterArgs[0]; + t.equal(pkg, packageData, 'first packageFilter argument is "pkg"'); + t.equal(packageData.main, 'doom', 'package "main" was altered'); + + var packageFile = packageFilterArgs[1]; + t.equal( + packageFile, + path.join(dir, 'baz/package.json'), + 'second packageFilter argument is "pkgfile"' + ); + + t.end(); + }); +}); diff --git a/mybulma/node_modules/resolve/test/filter_sync.js b/mybulma/node_modules/resolve/test/filter_sync.js new file mode 100644 index 0000000..8a43b98 --- /dev/null +++ b/mybulma/node_modules/resolve/test/filter_sync.js @@ -0,0 +1,33 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('filter', function (t) { + var dir = path.join(__dirname, 'resolver'); + var packageFilterArgs; + var res = resolve.sync('./baz', { + basedir: dir, + // NOTE: in v2.x, this will be `pkg, pkgfile, dir`, but must remain "broken" here in v1.x for compatibility + packageFilter: function (pkg, /*pkgfile,*/ dir) { // eslint-disable-line spaced-comment + pkg.main = 'doom'; // eslint-disable-line no-param-reassign + packageFilterArgs = 'is 1.x' ? [pkg, dir] : [pkg, pkgfile, dir]; // eslint-disable-line no-constant-condition, no-undef + return pkg; + } + }); + + t.equal(res, path.join(dir, 'baz/doom.js'), 'changing the package "main" works'); + + var packageData = packageFilterArgs[0]; + t.equal(packageData.main, 'doom', 'package "main" was altered'); + + if (!'is 1.x') { // eslint-disable-line no-constant-condition + var packageFile = packageFilterArgs[1]; + t.equal(packageFile, path.join(dir, 'baz', 'package.json'), 'package.json path is correct'); + } + + var packageDir = packageFilterArgs['is 1.x' ? 1 : 2]; // eslint-disable-line no-constant-condition + // eslint-disable-next-line no-constant-condition + t.equal(packageDir, path.join(dir, 'baz'), ('is 1.x' ? 
'second' : 'third') + ' packageFilter argument is "dir"'); + + t.end(); +}); diff --git a/mybulma/node_modules/resolve/test/home_paths.js b/mybulma/node_modules/resolve/test/home_paths.js new file mode 100644 index 0000000..3b8c9b3 --- /dev/null +++ b/mybulma/node_modules/resolve/test/home_paths.js @@ -0,0 +1,127 @@ +'use strict'; + +var fs = require('fs'); +var homedir = require('../lib/homedir'); +var path = require('path'); + +var test = require('tape'); +var mkdirp = require('mkdirp'); +var rimraf = require('rimraf'); +var mv = require('mv'); +var copyDir = require('copy-dir'); +var tmp = require('tmp'); + +var HOME = homedir(); + +var hnm = path.join(HOME, '.node_modules'); +var hnl = path.join(HOME, '.node_libraries'); + +var resolve = require('../async'); + +function makeDir(t, dir, cb) { + mkdirp(dir, function (err) { + if (err) { + cb(err); + } else { + t.teardown(function cleanup() { + rimraf.sync(dir); + }); + cb(); + } + }); +} + +function makeTempDir(t, dir, cb) { + if (fs.existsSync(dir)) { + var tmpResult = tmp.dirSync(); + t.teardown(tmpResult.removeCallback); + var backup = path.join(tmpResult.name, path.basename(dir)); + mv(dir, backup, function (err) { + if (err) { + cb(err); + } else { + t.teardown(function () { + mv(backup, dir, cb); + }); + makeDir(t, dir, cb); + } + }); + } else { + makeDir(t, dir, cb); + } +} + +test('homedir module paths', function (t) { + t.plan(7); + + makeTempDir(t, hnm, function (err) { + t.error(err, 'no error with HNM temp dir'); + if (err) { + return t.end(); + } + + var bazHNMDir = path.join(hnm, 'baz'); + var dotMainDir = path.join(hnm, 'dot_main'); + copyDir.sync(path.join(__dirname, 'resolver/baz'), bazHNMDir); + copyDir.sync(path.join(__dirname, 'resolver/dot_main'), dotMainDir); + + var bazPkg = { name: 'baz', main: 'quux.js' }; + var dotMainPkg = { main: 'index' }; + + var bazHNMmain = path.join(bazHNMDir, 'quux.js'); + t.equal(require.resolve('baz'), bazHNMmain, 'sanity check: require.resolve finds HNM `baz`'); + var dotMainMain = path.join(dotMainDir, 'index.js'); + t.equal(require.resolve('dot_main'), dotMainMain, 'sanity check: require.resolve finds `dot_main`'); + + makeTempDir(t, hnl, function (err) { + t.error(err, 'no error with HNL temp dir'); + if (err) { + return t.end(); + } + var bazHNLDir = path.join(hnl, 'baz'); + copyDir.sync(path.join(__dirname, 'resolver/baz'), bazHNLDir); + + var dotSlashMainDir = path.join(hnl, 'dot_slash_main'); + var dotSlashMainMain = path.join(dotSlashMainDir, 'index.js'); + var dotSlashMainPkg = { main: 'index' }; + copyDir.sync(path.join(__dirname, 'resolver/dot_slash_main'), dotSlashMainDir); + + t.equal(require.resolve('baz'), bazHNMmain, 'sanity check: require.resolve finds HNM `baz`'); + t.equal(require.resolve('dot_slash_main'), dotSlashMainMain, 'sanity check: require.resolve finds HNL `dot_slash_main`'); + + t.test('with temp dirs', function (st) { + st.plan(3); + + st.test('just in `$HOME/.node_modules`', function (s2t) { + s2t.plan(3); + + resolve('dot_main', function (err, res, pkg) { + s2t.error(err, 'no error resolving `dot_main`'); + s2t.equal(res, dotMainMain, '`dot_main` resolves in `$HOME/.node_modules`'); + s2t.deepEqual(pkg, dotMainPkg); + }); + }); + + st.test('just in `$HOME/.node_libraries`', function (s2t) { + s2t.plan(3); + + resolve('dot_slash_main', function (err, res, pkg) { + s2t.error(err, 'no error resolving `dot_slash_main`'); + s2t.equal(res, dotSlashMainMain, '`dot_slash_main` resolves in `$HOME/.node_libraries`'); + s2t.deepEqual(pkg, dotSlashMainPkg); + }); + 
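+                // no basedir option is passed in these lookups, so resolve() starts from the
+                // calling file's directory; the module is only found via the default fallback
+                // paths under $HOME ($HOME/.node_modules and $HOME/.node_libraries), which is
+                // exactly what this test populates above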
}); + + st.test('in `$HOME/.node_libraries` and `$HOME/.node_modules`', function (s2t) { + s2t.plan(3); + + resolve('baz', function (err, res, pkg) { + s2t.error(err, 'no error resolving `baz`'); + s2t.equal(res, bazHNMmain, '`baz` resolves in `$HOME/.node_modules` when in both'); + s2t.deepEqual(pkg, bazPkg); + }); + }); + }); + }); + }); +}); diff --git a/mybulma/node_modules/resolve/test/home_paths_sync.js b/mybulma/node_modules/resolve/test/home_paths_sync.js new file mode 100644 index 0000000..5d2c56f --- /dev/null +++ b/mybulma/node_modules/resolve/test/home_paths_sync.js @@ -0,0 +1,114 @@ +'use strict'; + +var fs = require('fs'); +var homedir = require('../lib/homedir'); +var path = require('path'); + +var test = require('tape'); +var mkdirp = require('mkdirp'); +var rimraf = require('rimraf'); +var mv = require('mv'); +var copyDir = require('copy-dir'); +var tmp = require('tmp'); + +var HOME = homedir(); + +var hnm = path.join(HOME, '.node_modules'); +var hnl = path.join(HOME, '.node_libraries'); + +var resolve = require('../sync'); + +function makeDir(t, dir, cb) { + mkdirp(dir, function (err) { + if (err) { + cb(err); + } else { + t.teardown(function cleanup() { + rimraf.sync(dir); + }); + cb(); + } + }); +} + +function makeTempDir(t, dir, cb) { + if (fs.existsSync(dir)) { + var tmpResult = tmp.dirSync(); + t.teardown(tmpResult.removeCallback); + var backup = path.join(tmpResult.name, path.basename(dir)); + mv(dir, backup, function (err) { + if (err) { + cb(err); + } else { + t.teardown(function () { + mv(backup, dir, cb); + }); + makeDir(t, dir, cb); + } + }); + } else { + makeDir(t, dir, cb); + } +} + +test('homedir module paths', function (t) { + t.plan(7); + + makeTempDir(t, hnm, function (err) { + t.error(err, 'no error with HNM temp dir'); + if (err) { + return t.end(); + } + + var bazHNMDir = path.join(hnm, 'baz'); + var dotMainDir = path.join(hnm, 'dot_main'); + copyDir.sync(path.join(__dirname, 'resolver/baz'), bazHNMDir); + copyDir.sync(path.join(__dirname, 'resolver/dot_main'), dotMainDir); + + var bazHNMmain = path.join(bazHNMDir, 'quux.js'); + t.equal(require.resolve('baz'), bazHNMmain, 'sanity check: require.resolve finds HNM `baz`'); + var dotMainMain = path.join(dotMainDir, 'index.js'); + t.equal(require.resolve('dot_main'), dotMainMain, 'sanity check: require.resolve finds `dot_main`'); + + makeTempDir(t, hnl, function (err) { + t.error(err, 'no error with HNL temp dir'); + if (err) { + return t.end(); + } + var bazHNLDir = path.join(hnl, 'baz'); + copyDir.sync(path.join(__dirname, 'resolver/baz'), bazHNLDir); + + var dotSlashMainDir = path.join(hnl, 'dot_slash_main'); + var dotSlashMainMain = path.join(dotSlashMainDir, 'index.js'); + copyDir.sync(path.join(__dirname, 'resolver/dot_slash_main'), dotSlashMainDir); + + t.equal(require.resolve('baz'), bazHNMmain, 'sanity check: require.resolve finds HNM `baz`'); + t.equal(require.resolve('dot_slash_main'), dotSlashMainMain, 'sanity check: require.resolve finds HNL `dot_slash_main`'); + + t.test('with temp dirs', function (st) { + st.plan(3); + + st.test('just in `$HOME/.node_modules`', function (s2t) { + s2t.plan(1); + + var res = resolve('dot_main'); + s2t.equal(res, dotMainMain, '`dot_main` resolves in `$HOME/.node_modules`'); + }); + + st.test('just in `$HOME/.node_libraries`', function (s2t) { + s2t.plan(1); + + var res = resolve('dot_slash_main'); + s2t.equal(res, dotSlashMainMain, '`dot_slash_main` resolves in `$HOME/.node_libraries`'); + }); + + st.test('in `$HOME/.node_libraries` and `$HOME/.node_modules`', 
function (s2t) { + s2t.plan(1); + + var res = resolve('baz'); + s2t.equal(res, bazHNMmain, '`baz` resolves in `$HOME/.node_modules` when in both'); + }); + }); + }); + }); +}); diff --git a/mybulma/node_modules/resolve/test/mock.js b/mybulma/node_modules/resolve/test/mock.js new file mode 100644 index 0000000..6116275 --- /dev/null +++ b/mybulma/node_modules/resolve/test/mock.js @@ -0,0 +1,315 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('mock', function (t) { + t.plan(8); + + var files = {}; + files[path.resolve('/foo/bar/baz.js')] = 'beep'; + + var dirs = {}; + dirs[path.resolve('/foo/bar')] = true; + + function opts(basedir) { + return { + basedir: path.resolve(basedir), + isFile: function (file, cb) { + cb(null, Object.prototype.hasOwnProperty.call(files, path.resolve(file))); + }, + isDirectory: function (dir, cb) { + cb(null, !!dirs[path.resolve(dir)]); + }, + readFile: function (file, cb) { + cb(null, files[path.resolve(file)]); + }, + realpath: function (file, cb) { + cb(null, file); + } + }; + } + + resolve('./baz', opts('/foo/bar'), function (err, res, pkg) { + if (err) return t.fail(err); + t.equal(res, path.resolve('/foo/bar/baz.js')); + t.equal(pkg, undefined); + }); + + resolve('./baz.js', opts('/foo/bar'), function (err, res, pkg) { + if (err) return t.fail(err); + t.equal(res, path.resolve('/foo/bar/baz.js')); + t.equal(pkg, undefined); + }); + + resolve('baz', opts('/foo/bar'), function (err, res) { + t.equal(err.message, "Cannot find module 'baz' from '" + path.resolve('/foo/bar') + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); + + resolve('../baz', opts('/foo/bar'), function (err, res) { + t.equal(err.message, "Cannot find module '../baz' from '" + path.resolve('/foo/bar') + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); +}); + +test('mock from package', function (t) { + t.plan(8); + + var files = {}; + files[path.resolve('/foo/bar/baz.js')] = 'beep'; + + var dirs = {}; + dirs[path.resolve('/foo/bar')] = true; + + function opts(basedir) { + return { + basedir: path.resolve(basedir), + isFile: function (file, cb) { + cb(null, Object.prototype.hasOwnProperty.call(files, file)); + }, + isDirectory: function (dir, cb) { + cb(null, !!dirs[path.resolve(dir)]); + }, + 'package': { main: 'bar' }, + readFile: function (file, cb) { + cb(null, files[file]); + }, + realpath: function (file, cb) { + cb(null, file); + } + }; + } + + resolve('./baz', opts('/foo/bar'), function (err, res, pkg) { + if (err) return t.fail(err); + t.equal(res, path.resolve('/foo/bar/baz.js')); + t.equal(pkg && pkg.main, 'bar'); + }); + + resolve('./baz.js', opts('/foo/bar'), function (err, res, pkg) { + if (err) return t.fail(err); + t.equal(res, path.resolve('/foo/bar/baz.js')); + t.equal(pkg && pkg.main, 'bar'); + }); + + resolve('baz', opts('/foo/bar'), function (err, res) { + t.equal(err.message, "Cannot find module 'baz' from '" + path.resolve('/foo/bar') + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); + + resolve('../baz', opts('/foo/bar'), function (err, res) { + t.equal(err.message, "Cannot find module '../baz' from '" + path.resolve('/foo/bar') + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); +}); + +test('mock package', function (t) { + t.plan(2); + + var files = {}; + files[path.resolve('/foo/node_modules/bar/baz.js')] = 'beep'; + files[path.resolve('/foo/node_modules/bar/package.json')] = JSON.stringify({ + main: './baz.js' + }); + + var dirs = {}; + dirs[path.resolve('/foo')] = true; + 
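+    // the node_modules walk calls isDirectory() on each candidate's parent directory
+    // (for this fixture, /foo/node_modules below) before it attempts to load the package
+    // from that candidate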
dirs[path.resolve('/foo/node_modules')] = true; + + function opts(basedir) { + return { + basedir: path.resolve(basedir), + isFile: function (file, cb) { + cb(null, Object.prototype.hasOwnProperty.call(files, path.resolve(file))); + }, + isDirectory: function (dir, cb) { + cb(null, !!dirs[path.resolve(dir)]); + }, + readFile: function (file, cb) { + cb(null, files[path.resolve(file)]); + }, + realpath: function (file, cb) { + cb(null, file); + } + }; + } + + resolve('bar', opts('/foo'), function (err, res, pkg) { + if (err) return t.fail(err); + t.equal(res, path.resolve('/foo/node_modules/bar/baz.js')); + t.equal(pkg && pkg.main, './baz.js'); + }); +}); + +test('mock package from package', function (t) { + t.plan(2); + + var files = {}; + files[path.resolve('/foo/node_modules/bar/baz.js')] = 'beep'; + files[path.resolve('/foo/node_modules/bar/package.json')] = JSON.stringify({ + main: './baz.js' + }); + + var dirs = {}; + dirs[path.resolve('/foo')] = true; + dirs[path.resolve('/foo/node_modules')] = true; + + function opts(basedir) { + return { + basedir: path.resolve(basedir), + isFile: function (file, cb) { + cb(null, Object.prototype.hasOwnProperty.call(files, path.resolve(file))); + }, + isDirectory: function (dir, cb) { + cb(null, !!dirs[path.resolve(dir)]); + }, + 'package': { main: 'bar' }, + readFile: function (file, cb) { + cb(null, files[path.resolve(file)]); + }, + realpath: function (file, cb) { + cb(null, file); + } + }; + } + + resolve('bar', opts('/foo'), function (err, res, pkg) { + if (err) return t.fail(err); + t.equal(res, path.resolve('/foo/node_modules/bar/baz.js')); + t.equal(pkg && pkg.main, './baz.js'); + }); +}); + +test('symlinked', function (t) { + t.plan(4); + + var files = {}; + files[path.resolve('/foo/bar/baz.js')] = 'beep'; + files[path.resolve('/foo/bar/symlinked/baz.js')] = 'beep'; + + var dirs = {}; + dirs[path.resolve('/foo/bar')] = true; + dirs[path.resolve('/foo/bar/symlinked')] = true; + + function opts(basedir) { + return { + preserveSymlinks: false, + basedir: path.resolve(basedir), + isFile: function (file, cb) { + cb(null, Object.prototype.hasOwnProperty.call(files, path.resolve(file))); + }, + isDirectory: function (dir, cb) { + cb(null, !!dirs[path.resolve(dir)]); + }, + readFile: function (file, cb) { + cb(null, files[path.resolve(file)]); + }, + realpath: function (file, cb) { + var resolved = path.resolve(file); + + if (resolved.indexOf('symlinked') >= 0) { + cb(null, resolved); + return; + } + + var ext = path.extname(resolved); + + if (ext) { + var dir = path.dirname(resolved); + var base = path.basename(resolved); + cb(null, path.join(dir, 'symlinked', base)); + } else { + cb(null, path.join(resolved, 'symlinked')); + } + } + }; + } + + resolve('./baz', opts('/foo/bar'), function (err, res, pkg) { + if (err) return t.fail(err); + t.equal(res, path.resolve('/foo/bar/symlinked/baz.js')); + t.equal(pkg, undefined); + }); + + resolve('./baz.js', opts('/foo/bar'), function (err, res, pkg) { + if (err) return t.fail(err); + t.equal(res, path.resolve('/foo/bar/symlinked/baz.js')); + t.equal(pkg, undefined); + }); +}); + +test('readPackage', function (t) { + t.plan(3); + + var files = {}; + files[path.resolve('/foo/node_modules/bar/something-else.js')] = 'beep'; + files[path.resolve('/foo/node_modules/bar/package.json')] = JSON.stringify({ + main: './baz.js' + }); + files[path.resolve('/foo/node_modules/bar/baz.js')] = 'boop'; + + var dirs = {}; + dirs[path.resolve('/foo')] = true; + dirs[path.resolve('/foo/node_modules')] = true; + + function 
opts(basedir) { + return { + basedir: path.resolve(basedir), + isFile: function (file, cb) { + cb(null, Object.prototype.hasOwnProperty.call(files, path.resolve(file))); + }, + isDirectory: function (dir, cb) { + cb(null, !!dirs[path.resolve(dir)]); + }, + 'package': { main: 'bar' }, + readFile: function (file, cb) { + cb(null, files[path.resolve(file)]); + }, + realpath: function (file, cb) { + cb(null, file); + } + }; + } + + t.test('with readFile', function (st) { + st.plan(3); + + resolve('bar', opts('/foo'), function (err, res, pkg) { + st.error(err); + st.equal(res, path.resolve('/foo/node_modules/bar/baz.js')); + st.equal(pkg && pkg.main, './baz.js'); + }); + }); + + var readPackage = function (readFile, file, cb) { + var barPackage = path.join('bar', 'package.json'); + if (file.slice(-barPackage.length) === barPackage) { + cb(null, { main: './something-else.js' }); + } else { + cb(null, JSON.parse(files[path.resolve(file)])); + } + }; + + t.test('with readPackage', function (st) { + st.plan(3); + + var options = opts('/foo'); + delete options.readFile; + options.readPackage = readPackage; + resolve('bar', options, function (err, res, pkg) { + st.error(err); + st.equal(res, path.resolve('/foo/node_modules/bar/something-else.js')); + st.equal(pkg && pkg.main, './something-else.js'); + }); + }); + + t.test('with readFile and readPackage', function (st) { + st.plan(1); + + var options = opts('/foo'); + options.readPackage = readPackage; + resolve('bar', options, function (err) { + st.throws(function () { throw err; }, TypeError, 'errors when both readFile and readPackage are provided'); + }); + }); +}); diff --git a/mybulma/node_modules/resolve/test/mock_sync.js b/mybulma/node_modules/resolve/test/mock_sync.js new file mode 100644 index 0000000..c5a7e2a --- /dev/null +++ b/mybulma/node_modules/resolve/test/mock_sync.js @@ -0,0 +1,214 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('mock', function (t) { + t.plan(4); + + var files = {}; + files[path.resolve('/foo/bar/baz.js')] = 'beep'; + + var dirs = {}; + dirs[path.resolve('/foo/bar')] = true; + + function opts(basedir) { + return { + basedir: path.resolve(basedir), + isFile: function (file) { + return Object.prototype.hasOwnProperty.call(files, path.resolve(file)); + }, + isDirectory: function (dir) { + return !!dirs[path.resolve(dir)]; + }, + readFileSync: function (file) { + return files[path.resolve(file)]; + }, + realpathSync: function (file) { + return file; + } + }; + } + + t.equal( + resolve.sync('./baz', opts('/foo/bar')), + path.resolve('/foo/bar/baz.js') + ); + + t.equal( + resolve.sync('./baz.js', opts('/foo/bar')), + path.resolve('/foo/bar/baz.js') + ); + + t.throws(function () { + resolve.sync('baz', opts('/foo/bar')); + }); + + t.throws(function () { + resolve.sync('../baz', opts('/foo/bar')); + }); +}); + +test('mock package', function (t) { + t.plan(1); + + var files = {}; + files[path.resolve('/foo/node_modules/bar/baz.js')] = 'beep'; + files[path.resolve('/foo/node_modules/bar/package.json')] = JSON.stringify({ + main: './baz.js' + }); + + var dirs = {}; + dirs[path.resolve('/foo')] = true; + dirs[path.resolve('/foo/node_modules')] = true; + + function opts(basedir) { + return { + basedir: path.resolve(basedir), + isFile: function (file) { + return Object.prototype.hasOwnProperty.call(files, path.resolve(file)); + }, + isDirectory: function (dir) { + return !!dirs[path.resolve(dir)]; + }, + readFileSync: function (file) { + return files[path.resolve(file)]; + }, + 
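+            // realpathSync below is the identity function: the in-memory fixture has no
+            // symlinks, so a path's "real" path is just the path itself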
realpathSync: function (file) { + return file; + } + }; + } + + t.equal( + resolve.sync('bar', opts('/foo')), + path.resolve('/foo/node_modules/bar/baz.js') + ); +}); + +test('symlinked', function (t) { + t.plan(2); + + var files = {}; + files[path.resolve('/foo/bar/baz.js')] = 'beep'; + files[path.resolve('/foo/bar/symlinked/baz.js')] = 'beep'; + + var dirs = {}; + dirs[path.resolve('/foo/bar')] = true; + dirs[path.resolve('/foo/bar/symlinked')] = true; + + function opts(basedir) { + return { + preserveSymlinks: false, + basedir: path.resolve(basedir), + isFile: function (file) { + return Object.prototype.hasOwnProperty.call(files, path.resolve(file)); + }, + isDirectory: function (dir) { + return !!dirs[path.resolve(dir)]; + }, + readFileSync: function (file) { + return files[path.resolve(file)]; + }, + realpathSync: function (file) { + var resolved = path.resolve(file); + + if (resolved.indexOf('symlinked') >= 0) { + return resolved; + } + + var ext = path.extname(resolved); + + if (ext) { + var dir = path.dirname(resolved); + var base = path.basename(resolved); + return path.join(dir, 'symlinked', base); + } + return path.join(resolved, 'symlinked'); + } + }; + } + + t.equal( + resolve.sync('./baz', opts('/foo/bar')), + path.resolve('/foo/bar/symlinked/baz.js') + ); + + t.equal( + resolve.sync('./baz.js', opts('/foo/bar')), + path.resolve('/foo/bar/symlinked/baz.js') + ); +}); + +test('readPackageSync', function (t) { + t.plan(3); + + var files = {}; + files[path.resolve('/foo/node_modules/bar/something-else.js')] = 'beep'; + files[path.resolve('/foo/node_modules/bar/package.json')] = JSON.stringify({ + main: './baz.js' + }); + files[path.resolve('/foo/node_modules/bar/baz.js')] = 'boop'; + + var dirs = {}; + dirs[path.resolve('/foo')] = true; + dirs[path.resolve('/foo/node_modules')] = true; + + function opts(basedir, useReadPackage) { + return { + basedir: path.resolve(basedir), + isFile: function (file) { + return Object.prototype.hasOwnProperty.call(files, path.resolve(file)); + }, + isDirectory: function (dir) { + return !!dirs[path.resolve(dir)]; + }, + readFileSync: useReadPackage ? 
null : function (file) { + return files[path.resolve(file)]; + }, + realpathSync: function (file) { + return file; + } + }; + } + t.test('with readFile', function (st) { + st.plan(1); + + st.equal( + resolve.sync('bar', opts('/foo')), + path.resolve('/foo/node_modules/bar/baz.js') + ); + }); + + var readPackageSync = function (readFileSync, file) { + if (file.indexOf(path.join('bar', 'package.json')) >= 0) { + return { main: './something-else.js' }; + } + return JSON.parse(files[path.resolve(file)]); + }; + + t.test('with readPackage', function (st) { + st.plan(1); + + var options = opts('/foo'); + delete options.readFileSync; + options.readPackageSync = readPackageSync; + + st.equal( + resolve.sync('bar', options), + path.resolve('/foo/node_modules/bar/something-else.js') + ); + }); + + t.test('with readFile and readPackage', function (st) { + st.plan(1); + + var options = opts('/foo'); + options.readPackageSync = readPackageSync; + st.throws( + function () { resolve.sync('bar', options); }, + TypeError, + 'errors when both readFile and readPackage are provided' + ); + }); +}); + diff --git a/mybulma/node_modules/resolve/test/module_dir.js b/mybulma/node_modules/resolve/test/module_dir.js new file mode 100644 index 0000000..b50e5bb --- /dev/null +++ b/mybulma/node_modules/resolve/test/module_dir.js @@ -0,0 +1,56 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('moduleDirectory strings', function (t) { + t.plan(4); + var dir = path.join(__dirname, 'module_dir'); + var xopts = { + basedir: dir, + moduleDirectory: 'xmodules' + }; + resolve('aaa', xopts, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(dir, '/xmodules/aaa/index.js')); + }); + + var yopts = { + basedir: dir, + moduleDirectory: 'ymodules' + }; + resolve('aaa', yopts, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(dir, '/ymodules/aaa/index.js')); + }); +}); + +test('moduleDirectory array', function (t) { + t.plan(6); + var dir = path.join(__dirname, 'module_dir'); + var aopts = { + basedir: dir, + moduleDirectory: ['xmodules', 'ymodules', 'zmodules'] + }; + resolve('aaa', aopts, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(dir, '/xmodules/aaa/index.js')); + }); + + var bopts = { + basedir: dir, + moduleDirectory: ['zmodules', 'ymodules', 'xmodules'] + }; + resolve('aaa', bopts, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(dir, '/ymodules/aaa/index.js')); + }); + + var copts = { + basedir: dir, + moduleDirectory: ['xmodules', 'ymodules', 'zmodules'] + }; + resolve('bbb', copts, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(dir, '/zmodules/bbb/main.js')); + }); +}); diff --git a/mybulma/node_modules/resolve/test/module_dir/xmodules/aaa/index.js b/mybulma/node_modules/resolve/test/module_dir/xmodules/aaa/index.js new file mode 100644 index 0000000..dd7cf7b --- /dev/null +++ b/mybulma/node_modules/resolve/test/module_dir/xmodules/aaa/index.js @@ -0,0 +1 @@ +module.exports = function (x) { return x * 100; }; diff --git a/mybulma/node_modules/resolve/test/module_dir/ymodules/aaa/index.js b/mybulma/node_modules/resolve/test/module_dir/ymodules/aaa/index.js new file mode 100644 index 0000000..ef2d4d4 --- /dev/null +++ b/mybulma/node_modules/resolve/test/module_dir/ymodules/aaa/index.js @@ -0,0 +1 @@ +module.exports = function (x) { return x + 100; }; diff --git a/mybulma/node_modules/resolve/test/module_dir/zmodules/bbb/main.js 
b/mybulma/node_modules/resolve/test/module_dir/zmodules/bbb/main.js new file mode 100644 index 0000000..e8ba629 --- /dev/null +++ b/mybulma/node_modules/resolve/test/module_dir/zmodules/bbb/main.js @@ -0,0 +1 @@ +module.exports = function (n) { return n * 111; }; diff --git a/mybulma/node_modules/resolve/test/module_dir/zmodules/bbb/package.json b/mybulma/node_modules/resolve/test/module_dir/zmodules/bbb/package.json new file mode 100644 index 0000000..c13b8cf --- /dev/null +++ b/mybulma/node_modules/resolve/test/module_dir/zmodules/bbb/package.json @@ -0,0 +1,3 @@ +{ + "main": "main.js" +} diff --git a/mybulma/node_modules/resolve/test/node-modules-paths.js b/mybulma/node_modules/resolve/test/node-modules-paths.js new file mode 100644 index 0000000..675441d --- /dev/null +++ b/mybulma/node_modules/resolve/test/node-modules-paths.js @@ -0,0 +1,143 @@ +var test = require('tape'); +var path = require('path'); +var parse = path.parse || require('path-parse'); +var keys = require('object-keys'); + +var nodeModulesPaths = require('../lib/node-modules-paths'); + +var verifyDirs = function verifyDirs(t, start, dirs, moduleDirectories, paths) { + var moduleDirs = [].concat(moduleDirectories || 'node_modules'); + if (paths) { + for (var k = 0; k < paths.length; ++k) { + moduleDirs.push(path.basename(paths[k])); + } + } + + var foundModuleDirs = {}; + var uniqueDirs = {}; + var parsedDirs = {}; + for (var i = 0; i < dirs.length; ++i) { + var parsed = parse(dirs[i]); + if (!foundModuleDirs[parsed.base]) { foundModuleDirs[parsed.base] = 0; } + foundModuleDirs[parsed.base] += 1; + parsedDirs[parsed.dir] = true; + uniqueDirs[dirs[i]] = true; + } + t.equal(keys(parsedDirs).length >= start.split(path.sep).length, true, 'there are >= dirs than "start" has'); + var foundModuleDirNames = keys(foundModuleDirs); + t.deepEqual(foundModuleDirNames, moduleDirs, 'all desired module dirs were found'); + t.equal(keys(uniqueDirs).length, dirs.length, 'all dirs provided were unique'); + + var counts = {}; + for (var j = 0; j < foundModuleDirNames.length; ++j) { + counts[foundModuleDirs[j]] = true; + } + t.equal(keys(counts).length, 1, 'all found module directories had the same count'); +}; + +test('node-modules-paths', function (t) { + t.test('no options', function (t) { + var start = path.join(__dirname, 'resolver'); + var dirs = nodeModulesPaths(start); + + verifyDirs(t, start, dirs); + + t.end(); + }); + + t.test('empty options', function (t) { + var start = path.join(__dirname, 'resolver'); + var dirs = nodeModulesPaths(start, {}); + + verifyDirs(t, start, dirs); + + t.end(); + }); + + t.test('with paths=array option', function (t) { + var start = path.join(__dirname, 'resolver'); + var paths = ['a', 'b']; + var dirs = nodeModulesPaths(start, { paths: paths }); + + verifyDirs(t, start, dirs, null, paths); + + t.end(); + }); + + t.test('with paths=function option', function (t) { + var paths = function paths(request, absoluteStart, getNodeModulesDirs, opts) { + return getNodeModulesDirs().concat(path.join(absoluteStart, 'not node modules', request)); + }; + + var start = path.join(__dirname, 'resolver'); + var dirs = nodeModulesPaths(start, { paths: paths }, 'pkg'); + + verifyDirs(t, start, dirs, null, [path.join(start, 'not node modules', 'pkg')]); + + t.end(); + }); + + t.test('with paths=function skipping node modules resolution', function (t) { + var paths = function paths(request, absoluteStart, getNodeModulesDirs, opts) { + return []; + }; + var start = path.join(__dirname, 'resolver'); + var dirs = 
nodeModulesPaths(start, { paths: paths }); + t.deepEqual(dirs, [], 'no node_modules was computed'); + t.end(); + }); + + t.test('with moduleDirectory option', function (t) { + var start = path.join(__dirname, 'resolver'); + var moduleDirectory = 'not node modules'; + var dirs = nodeModulesPaths(start, { moduleDirectory: moduleDirectory }); + + verifyDirs(t, start, dirs, moduleDirectory); + + t.end(); + }); + + t.test('with 1 moduleDirectory and paths options', function (t) { + var start = path.join(__dirname, 'resolver'); + var paths = ['a', 'b']; + var moduleDirectory = 'not node modules'; + var dirs = nodeModulesPaths(start, { paths: paths, moduleDirectory: moduleDirectory }); + + verifyDirs(t, start, dirs, moduleDirectory, paths); + + t.end(); + }); + + t.test('with 1+ moduleDirectory and paths options', function (t) { + var start = path.join(__dirname, 'resolver'); + var paths = ['a', 'b']; + var moduleDirectories = ['not node modules', 'other modules']; + var dirs = nodeModulesPaths(start, { paths: paths, moduleDirectory: moduleDirectories }); + + verifyDirs(t, start, dirs, moduleDirectories, paths); + + t.end(); + }); + + t.test('combine paths correctly on Windows', function (t) { + var start = 'C:\\Users\\username\\myProject\\src'; + var paths = []; + var moduleDirectories = ['node_modules', start]; + var dirs = nodeModulesPaths(start, { paths: paths, moduleDirectory: moduleDirectories }); + + t.equal(dirs.indexOf(path.resolve(start)) > -1, true, 'should contain start dir'); + + t.end(); + }); + + t.test('combine paths correctly on non-Windows', { skip: process.platform === 'win32' }, function (t) { + var start = '/Users/username/git/myProject/src'; + var paths = []; + var moduleDirectories = ['node_modules', '/Users/username/git/myProject/src']; + var dirs = nodeModulesPaths(start, { paths: paths, moduleDirectory: moduleDirectories }); + + t.equal(dirs.indexOf(path.resolve(start)) > -1, true, 'should contain start dir'); + + t.end(); + }); +}); diff --git a/mybulma/node_modules/resolve/test/node_path.js b/mybulma/node_modules/resolve/test/node_path.js new file mode 100644 index 0000000..e463d6c --- /dev/null +++ b/mybulma/node_modules/resolve/test/node_path.js @@ -0,0 +1,70 @@ +var fs = require('fs'); +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('$NODE_PATH', function (t) { + t.plan(8); + + var isDir = function (dir, cb) { + if (dir === '/node_path' || dir === 'node_path/x') { + return cb(null, true); + } + fs.stat(dir, function (err, stat) { + if (!err) { + return cb(null, stat.isDirectory()); + } + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') return cb(null, false); + return cb(err); + }); + }; + + resolve('aaa', { + paths: [ + path.join(__dirname, '/node_path/x'), + path.join(__dirname, '/node_path/y') + ], + basedir: __dirname, + isDirectory: isDir + }, function (err, res) { + t.error(err); + t.equal(res, path.join(__dirname, '/node_path/x/aaa/index.js'), 'aaa resolves'); + }); + + resolve('bbb', { + paths: [ + path.join(__dirname, '/node_path/x'), + path.join(__dirname, '/node_path/y') + ], + basedir: __dirname, + isDirectory: isDir + }, function (err, res) { + t.error(err); + t.equal(res, path.join(__dirname, '/node_path/y/bbb/index.js'), 'bbb resolves'); + }); + + resolve('ccc', { + paths: [ + path.join(__dirname, '/node_path/x'), + path.join(__dirname, '/node_path/y') + ], + basedir: __dirname, + isDirectory: isDir + }, function (err, res) { + t.error(err); + t.equal(res, path.join(__dirname, 
'/node_path/x/ccc/index.js'), 'ccc resolves'); + }); + + // ensure that relative paths still resolve against the regular `node_modules` correctly + resolve('tap', { + paths: [ + 'node_path' + ], + basedir: path.join(__dirname, 'node_path/x'), + isDirectory: isDir + }, function (err, res) { + var root = require('tap/package.json').main; // eslint-disable-line global-require + t.error(err); + t.equal(res, path.resolve(__dirname, '..', 'node_modules/tap', root), 'tap resolves'); + }); +}); diff --git a/mybulma/node_modules/resolve/test/node_path/x/aaa/index.js b/mybulma/node_modules/resolve/test/node_path/x/aaa/index.js new file mode 100644 index 0000000..ad70d0b --- /dev/null +++ b/mybulma/node_modules/resolve/test/node_path/x/aaa/index.js @@ -0,0 +1 @@ +module.exports = 'A'; diff --git a/mybulma/node_modules/resolve/test/node_path/x/ccc/index.js b/mybulma/node_modules/resolve/test/node_path/x/ccc/index.js new file mode 100644 index 0000000..a64132e --- /dev/null +++ b/mybulma/node_modules/resolve/test/node_path/x/ccc/index.js @@ -0,0 +1 @@ +module.exports = 'C'; diff --git a/mybulma/node_modules/resolve/test/node_path/y/bbb/index.js b/mybulma/node_modules/resolve/test/node_path/y/bbb/index.js new file mode 100644 index 0000000..4d0f32e --- /dev/null +++ b/mybulma/node_modules/resolve/test/node_path/y/bbb/index.js @@ -0,0 +1 @@ +module.exports = 'B'; diff --git a/mybulma/node_modules/resolve/test/node_path/y/ccc/index.js b/mybulma/node_modules/resolve/test/node_path/y/ccc/index.js new file mode 100644 index 0000000..793315e --- /dev/null +++ b/mybulma/node_modules/resolve/test/node_path/y/ccc/index.js @@ -0,0 +1 @@ +module.exports = 'CY'; diff --git a/mybulma/node_modules/resolve/test/nonstring.js b/mybulma/node_modules/resolve/test/nonstring.js new file mode 100644 index 0000000..ef63c40 --- /dev/null +++ b/mybulma/node_modules/resolve/test/nonstring.js @@ -0,0 +1,9 @@ +var test = require('tape'); +var resolve = require('../'); + +test('nonstring', function (t) { + t.plan(1); + resolve(555, function (err, res, pkg) { + t.ok(err); + }); +}); diff --git a/mybulma/node_modules/resolve/test/pathfilter.js b/mybulma/node_modules/resolve/test/pathfilter.js new file mode 100644 index 0000000..16519ae --- /dev/null +++ b/mybulma/node_modules/resolve/test/pathfilter.js @@ -0,0 +1,75 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +var resolverDir = path.join(__dirname, '/pathfilter/deep_ref'); + +var pathFilterFactory = function (t) { + return function (pkg, x, remainder) { + t.equal(pkg.version, '1.2.3'); + t.equal(x, path.join(resolverDir, 'node_modules/deep/ref')); + t.equal(remainder, 'ref'); + return 'alt'; + }; +}; + +test('#62: deep module references and the pathFilter', function (t) { + t.test('deep/ref.js', function (st) { + st.plan(3); + + resolve('deep/ref', { basedir: resolverDir }, function (err, res, pkg) { + if (err) st.fail(err); + + st.equal(pkg.version, '1.2.3'); + st.equal(res, path.join(resolverDir, 'node_modules/deep/ref.js')); + }); + + var res = resolve.sync('deep/ref', { basedir: resolverDir }); + st.equal(res, path.join(resolverDir, 'node_modules/deep/ref.js')); + }); + + t.test('deep/deeper/ref', function (st) { + st.plan(4); + + resolve( + 'deep/deeper/ref', + { basedir: resolverDir }, + function (err, res, pkg) { + if (err) t.fail(err); + st.notEqual(pkg, undefined); + st.equal(pkg.version, '1.2.3'); + st.equal(res, path.join(resolverDir, 'node_modules/deep/deeper/ref.js')); + } + ); + + var res = resolve.sync( + 
'deep/deeper/ref', + { basedir: resolverDir } + ); + st.equal(res, path.join(resolverDir, 'node_modules/deep/deeper/ref.js')); + }); + + t.test('deep/ref alt', function (st) { + st.plan(8); + + var pathFilter = pathFilterFactory(st); + + var res = resolve.sync( + 'deep/ref', + { basedir: resolverDir, pathFilter: pathFilter } + ); + st.equal(res, path.join(resolverDir, 'node_modules/deep/alt.js')); + + resolve( + 'deep/ref', + { basedir: resolverDir, pathFilter: pathFilter }, + function (err, res, pkg) { + if (err) st.fail(err); + st.equal(res, path.join(resolverDir, 'node_modules/deep/alt.js')); + st.end(); + } + ); + }); + + t.end(); +}); diff --git a/mybulma/node_modules/resolve/test/pathfilter/deep_ref/main.js b/mybulma/node_modules/resolve/test/pathfilter/deep_ref/main.js new file mode 100644 index 0000000..e69de29 diff --git a/mybulma/node_modules/resolve/test/precedence.js b/mybulma/node_modules/resolve/test/precedence.js new file mode 100644 index 0000000..2febb59 --- /dev/null +++ b/mybulma/node_modules/resolve/test/precedence.js @@ -0,0 +1,23 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); + +test('precedence', function (t) { + t.plan(3); + var dir = path.join(__dirname, 'precedence/aaa'); + + resolve('./', { basedir: dir }, function (err, res, pkg) { + t.ifError(err); + t.equal(res, path.join(dir, 'index.js')); + t.equal(pkg.name, 'resolve'); + }); +}); + +test('./ should not load ${dir}.js', function (t) { // eslint-disable-line no-template-curly-in-string + t.plan(1); + var dir = path.join(__dirname, 'precedence/bbb'); + + resolve('./', { basedir: dir }, function (err, res, pkg) { + t.ok(err); + }); +}); diff --git a/mybulma/node_modules/resolve/test/precedence/aaa.js b/mybulma/node_modules/resolve/test/precedence/aaa.js new file mode 100644 index 0000000..b83a3e7 --- /dev/null +++ b/mybulma/node_modules/resolve/test/precedence/aaa.js @@ -0,0 +1 @@ +module.exports = 'wtf'; diff --git a/mybulma/node_modules/resolve/test/precedence/aaa/index.js b/mybulma/node_modules/resolve/test/precedence/aaa/index.js new file mode 100644 index 0000000..e0f8f6a --- /dev/null +++ b/mybulma/node_modules/resolve/test/precedence/aaa/index.js @@ -0,0 +1 @@ +module.exports = 'okok'; diff --git a/mybulma/node_modules/resolve/test/precedence/aaa/main.js b/mybulma/node_modules/resolve/test/precedence/aaa/main.js new file mode 100644 index 0000000..93542a9 --- /dev/null +++ b/mybulma/node_modules/resolve/test/precedence/aaa/main.js @@ -0,0 +1 @@ +console.log(require('./')); diff --git a/mybulma/node_modules/resolve/test/precedence/bbb.js b/mybulma/node_modules/resolve/test/precedence/bbb.js new file mode 100644 index 0000000..2298f47 --- /dev/null +++ b/mybulma/node_modules/resolve/test/precedence/bbb.js @@ -0,0 +1 @@ +module.exports = '>_<'; diff --git a/mybulma/node_modules/resolve/test/precedence/bbb/main.js b/mybulma/node_modules/resolve/test/precedence/bbb/main.js new file mode 100644 index 0000000..716b81d --- /dev/null +++ b/mybulma/node_modules/resolve/test/precedence/bbb/main.js @@ -0,0 +1 @@ +console.log(require('./')); // should throw diff --git a/mybulma/node_modules/resolve/test/resolver.js b/mybulma/node_modules/resolve/test/resolver.js new file mode 100644 index 0000000..4903165 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver.js @@ -0,0 +1,595 @@ +var path = require('path'); +var test = require('tape'); +var resolve = require('../'); +var async = require('../async'); + +test('`./async` entry point', function (t) { + 
t.equal(resolve, async, '`./async` entry point is the same as `main`'); + t.end(); +}); + +test('async foo', function (t) { + t.plan(12); + var dir = path.join(__dirname, 'resolver'); + + resolve('./foo', { basedir: dir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'foo.js')); + t.equal(pkg && pkg.name, 'resolve'); + }); + + resolve('./foo.js', { basedir: dir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'foo.js')); + t.equal(pkg && pkg.name, 'resolve'); + }); + + resolve('./foo', { basedir: dir, 'package': { main: 'resolver' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'foo.js')); + t.equal(pkg && pkg.main, 'resolver'); + }); + + resolve('./foo.js', { basedir: dir, 'package': { main: 'resolver' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'foo.js')); + t.equal(pkg.main, 'resolver'); + }); + + resolve('./foo', { basedir: dir, filename: path.join(dir, 'baz.js') }, function (err, res) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'foo.js')); + }); + + resolve('foo', { basedir: dir }, function (err) { + t.equal(err.message, "Cannot find module 'foo' from '" + path.resolve(dir) + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); + + // Test that filename is reported as the "from" value when passed. + resolve('foo', { basedir: dir, filename: path.join(dir, 'baz.js') }, function (err) { + t.equal(err.message, "Cannot find module 'foo' from '" + path.join(dir, 'baz.js') + "'"); + }); +}); + +test('bar', function (t) { + t.plan(6); + var dir = path.join(__dirname, 'resolver'); + + resolve('foo', { basedir: dir + '/bar' }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'bar/node_modules/foo/index.js')); + t.equal(pkg, undefined); + }); + + resolve('foo', { basedir: dir + '/bar' }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'bar/node_modules/foo/index.js')); + t.equal(pkg, undefined); + }); + + resolve('foo', { basedir: dir + '/bar', 'package': { main: 'bar' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'bar/node_modules/foo/index.js')); + t.equal(pkg.main, 'bar'); + }); +}); + +test('baz', function (t) { + t.plan(4); + var dir = path.join(__dirname, 'resolver'); + + resolve('./baz', { basedir: dir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'baz/quux.js')); + t.equal(pkg.main, 'quux.js'); + }); + + resolve('./baz', { basedir: dir, 'package': { main: 'resolver' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'baz/quux.js')); + t.equal(pkg.main, 'quux.js'); + }); +}); + +test('biz', function (t) { + t.plan(24); + var dir = path.join(__dirname, 'resolver/biz/node_modules'); + + resolve('./grux', { basedir: dir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'grux/index.js')); + t.equal(pkg, undefined); + }); + + resolve('./grux', { basedir: dir, 'package': { main: 'biz' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'grux/index.js')); + t.equal(pkg.main, 'biz'); + }); + + resolve('./garply', { basedir: dir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'garply/lib/index.js')); + t.equal(pkg.main, './lib'); + }); + + resolve('./garply', { basedir: dir, 'package': { main: 'biz' } }, function (err, res, pkg) { + if (err) t.fail(err); + 
t.equal(res, path.join(dir, 'garply/lib/index.js')); + t.equal(pkg.main, './lib'); + }); + + resolve('tiv', { basedir: dir + '/grux' }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'tiv/index.js')); + t.equal(pkg, undefined); + }); + + resolve('tiv', { basedir: dir + '/grux', 'package': { main: 'grux' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'tiv/index.js')); + t.equal(pkg.main, 'grux'); + }); + + resolve('tiv', { basedir: dir + '/garply' }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'tiv/index.js')); + t.equal(pkg, undefined); + }); + + resolve('tiv', { basedir: dir + '/garply', 'package': { main: './lib' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'tiv/index.js')); + t.equal(pkg.main, './lib'); + }); + + resolve('grux', { basedir: dir + '/tiv' }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'grux/index.js')); + t.equal(pkg, undefined); + }); + + resolve('grux', { basedir: dir + '/tiv', 'package': { main: 'tiv' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'grux/index.js')); + t.equal(pkg.main, 'tiv'); + }); + + resolve('garply', { basedir: dir + '/tiv' }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'garply/lib/index.js')); + t.equal(pkg.main, './lib'); + }); + + resolve('garply', { basedir: dir + '/tiv', 'package': { main: 'tiv' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'garply/lib/index.js')); + t.equal(pkg.main, './lib'); + }); +}); + +test('quux', function (t) { + t.plan(2); + var dir = path.join(__dirname, 'resolver/quux'); + + resolve('./foo', { basedir: dir, 'package': { main: 'quux' } }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'foo/index.js')); + t.equal(pkg.main, 'quux'); + }); +}); + +test('normalize', function (t) { + t.plan(2); + var dir = path.join(__dirname, 'resolver/biz/node_modules/grux'); + + resolve('../grux', { basedir: dir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'index.js')); + t.equal(pkg, undefined); + }); +}); + +test('cup', function (t) { + t.plan(5); + var dir = path.join(__dirname, 'resolver'); + + resolve('./cup', { basedir: dir, extensions: ['.js', '.coffee'] }, function (err, res) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'cup.coffee')); + }); + + resolve('./cup.coffee', { basedir: dir }, function (err, res) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'cup.coffee')); + }); + + resolve('./cup', { basedir: dir, extensions: ['.js'] }, function (err, res) { + t.equal(err.message, "Cannot find module './cup' from '" + path.resolve(dir) + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); + + // Test that filename is reported as the "from" value when passed. 
+ resolve('./cup', { basedir: dir, extensions: ['.js'], filename: path.join(dir, 'cupboard.js') }, function (err, res) { + t.equal(err.message, "Cannot find module './cup' from '" + path.join(dir, 'cupboard.js') + "'"); + }); +}); + +test('mug', function (t) { + t.plan(3); + var dir = path.join(__dirname, 'resolver'); + + resolve('./mug', { basedir: dir }, function (err, res) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'mug.js')); + }); + + resolve('./mug', { basedir: dir, extensions: ['.coffee', '.js'] }, function (err, res) { + if (err) t.fail(err); + t.equal(res, path.join(dir, '/mug.coffee')); + }); + + resolve('./mug', { basedir: dir, extensions: ['.js', '.coffee'] }, function (err, res) { + t.equal(res, path.join(dir, '/mug.js')); + }); +}); + +test('other path', function (t) { + t.plan(6); + var resolverDir = path.join(__dirname, 'resolver'); + var dir = path.join(resolverDir, 'bar'); + var otherDir = path.join(resolverDir, 'other_path'); + + resolve('root', { basedir: dir, paths: [otherDir] }, function (err, res) { + if (err) t.fail(err); + t.equal(res, path.join(resolverDir, 'other_path/root.js')); + }); + + resolve('lib/other-lib', { basedir: dir, paths: [otherDir] }, function (err, res) { + if (err) t.fail(err); + t.equal(res, path.join(resolverDir, 'other_path/lib/other-lib.js')); + }); + + resolve('root', { basedir: dir }, function (err, res) { + t.equal(err.message, "Cannot find module 'root' from '" + path.resolve(dir) + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); + + resolve('zzz', { basedir: dir, paths: [otherDir] }, function (err, res) { + t.equal(err.message, "Cannot find module 'zzz' from '" + path.resolve(dir) + "'"); + t.equal(err.code, 'MODULE_NOT_FOUND'); + }); +}); + +test('path iterator', function (t) { + t.plan(2); + + var resolverDir = path.join(__dirname, 'resolver'); + + var exactIterator = function (x, start, getPackageCandidates, opts) { + return [path.join(resolverDir, x)]; + }; + + resolve('baz', { packageIterator: exactIterator }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(resolverDir, 'baz/quux.js')); + t.equal(pkg && pkg.name, 'baz'); + }); +}); + +test('incorrect main', function (t) { + t.plan(1); + + var resolverDir = path.join(__dirname, 'resolver'); + var dir = path.join(resolverDir, 'incorrect_main'); + + resolve('./incorrect_main', { basedir: resolverDir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'index.js')); + }); +}); + +test('missing index', function (t) { + t.plan(2); + + var resolverDir = path.join(__dirname, 'resolver'); + resolve('./missing_index', { basedir: resolverDir }, function (err, res, pkg) { + t.ok(err instanceof Error); + t.equal(err && err.code, 'MODULE_NOT_FOUND', 'error has correct error code'); + }); +}); + +test('missing main', function (t) { + t.plan(1); + + var resolverDir = path.join(__dirname, 'resolver'); + + resolve('./missing_main', { basedir: resolverDir }, function (err, res, pkg) { + t.equal(err && err.code, 'MODULE_NOT_FOUND', 'error has correct error code'); + }); +}); + +test('null main', function (t) { + t.plan(1); + + var resolverDir = path.join(__dirname, 'resolver'); + + resolve('./null_main', { basedir: resolverDir }, function (err, res, pkg) { + t.equal(err && err.code, 'MODULE_NOT_FOUND', 'error has correct error code'); + }); +}); + +test('main: false', function (t) { + t.plan(2); + + var basedir = path.join(__dirname, 'resolver'); + var dir = path.join(basedir, 'false_main'); + resolve('./false_main', { 
basedir: basedir }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal( + res, + path.join(dir, 'index.js'), + '`"main": false`: resolves to `index.js`' + ); + t.deepEqual(pkg, { + name: 'false_main', + main: false + }); + }); +}); + +test('without basedir', function (t) { + t.plan(1); + + var dir = path.join(__dirname, 'resolver/without_basedir'); + var tester = require(path.join(dir, 'main.js')); // eslint-disable-line global-require + + tester(t, function (err, res, pkg) { + if (err) { + t.fail(err); + } else { + t.equal(res, path.join(dir, 'node_modules/mymodule.js')); + } + }); +}); + +test('#52 - incorrectly resolves module-paths like "./someFolder/" when there is a file of the same name', function (t) { + t.plan(2); + + var dir = path.join(__dirname, 'resolver'); + + resolve('./foo', { basedir: path.join(dir, 'same_names') }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'same_names/foo.js')); + }); + + resolve('./foo/', { basedir: path.join(dir, 'same_names') }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'same_names/foo/index.js')); + }); +}); + +test('#211 - incorrectly resolves module-paths like "." when from inside a folder with a sibling file of the same name', function (t) { + t.plan(2); + + var dir = path.join(__dirname, 'resolver'); + + resolve('./', { basedir: path.join(dir, 'same_names/foo') }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'same_names/foo/index.js')); + }); + + resolve('.', { basedir: path.join(dir, 'same_names/foo') }, function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'same_names/foo/index.js')); + }); +}); + +test('async: #121 - treating an existing file as a dir when no basedir', function (t) { + var testFile = path.basename(__filename); + + t.test('sanity check', function (st) { + st.plan(1); + resolve('./' + testFile, function (err, res, pkg) { + if (err) t.fail(err); + st.equal(res, __filename, 'sanity check'); + }); + }); + + t.test('with a fake directory', function (st) { + st.plan(4); + + resolve('./' + testFile + '/blah', function (err, res, pkg) { + st.ok(err, 'there is an error'); + st.notOk(res, 'no result'); + + st.equal(err && err.code, 'MODULE_NOT_FOUND', 'error code matches require.resolve'); + st.equal( + err && err.message, + 'Cannot find module \'./' + testFile + '/blah\' from \'' + __dirname + '\'', + 'can not find nonexistent module' + ); + st.end(); + }); + }); + + t.end(); +}); + +test('async dot main', function (t) { + var start = new Date(); + t.plan(3); + resolve('./resolver/dot_main', function (err, ret) { + t.notOk(err); + t.equal(ret, path.join(__dirname, 'resolver/dot_main/index.js')); + t.ok(new Date() - start < 50, 'resolve.sync timedout'); + t.end(); + }); +}); + +test('async dot slash main', function (t) { + var start = new Date(); + t.plan(3); + resolve('./resolver/dot_slash_main', function (err, ret) { + t.notOk(err); + t.equal(ret, path.join(__dirname, 'resolver/dot_slash_main/index.js')); + t.ok(new Date() - start < 50, 'resolve.sync timedout'); + t.end(); + }); +}); + +test('not a directory', function (t) { + t.plan(6); + var path = './foo'; + resolve(path, { basedir: __filename }, function (err, res, pkg) { + t.ok(err, 'a non-directory errors'); + t.equal(arguments.length, 1); + t.equal(res, undefined); + t.equal(pkg, undefined); + + t.equal(err && err.message, 'Cannot find module \'' + path + '\' from \'' + __filename + '\''); + t.equal(err && err.code, 
'MODULE_NOT_FOUND'); + }); +}); + +test('non-string "main" field in package.json', function (t) { + t.plan(5); + + var dir = path.join(__dirname, 'resolver'); + resolve('./invalid_main', { basedir: dir }, function (err, res, pkg) { + t.ok(err, 'errors on non-string main'); + t.equal(err.message, 'package “invalid_main” `main` must be a string'); + t.equal(err.code, 'INVALID_PACKAGE_MAIN'); + t.equal(res, undefined, 'res is undefined'); + t.equal(pkg, undefined, 'pkg is undefined'); + }); +}); + +test('non-string "main" field in package.json', function (t) { + t.plan(5); + + var dir = path.join(__dirname, 'resolver'); + resolve('./invalid_main', { basedir: dir }, function (err, res, pkg) { + t.ok(err, 'errors on non-string main'); + t.equal(err.message, 'package “invalid_main” `main` must be a string'); + t.equal(err.code, 'INVALID_PACKAGE_MAIN'); + t.equal(res, undefined, 'res is undefined'); + t.equal(pkg, undefined, 'pkg is undefined'); + }); +}); + +test('browser field in package.json', function (t) { + t.plan(3); + + var dir = path.join(__dirname, 'resolver'); + resolve( + './browser_field', + { + basedir: dir, + packageFilter: function packageFilter(pkg) { + if (pkg.browser) { + pkg.main = pkg.browser; // eslint-disable-line no-param-reassign + delete pkg.browser; // eslint-disable-line no-param-reassign + } + return pkg; + } + }, + function (err, res, pkg) { + if (err) t.fail(err); + t.equal(res, path.join(dir, 'browser_field', 'b.js')); + t.equal(pkg && pkg.main, 'b'); + t.equal(pkg && pkg.browser, undefined); + } + ); +}); + +test('absolute paths', function (t) { + t.plan(4); + + var extensionless = __filename.slice(0, -path.extname(__filename).length); + + resolve(__filename, function (err, res) { + t.equal( + res, + __filename, + 'absolute path to this file resolves' + ); + }); + resolve(extensionless, function (err, res) { + t.equal( + res, + __filename, + 'extensionless absolute path to this file resolves' + ); + }); + resolve(__filename, { basedir: process.cwd() }, function (err, res) { + t.equal( + res, + __filename, + 'absolute path to this file with a basedir resolves' + ); + }); + resolve(extensionless, { basedir: process.cwd() }, function (err, res) { + t.equal( + res, + __filename, + 'extensionless absolute path to this file with a basedir resolves' + ); + }); +}); + +test('malformed package.json', function (t) { + /* eslint operator-linebreak: ["error", "before"], function-paren-newline: "off" */ + t.plan( + (3 * 3) // 3 sets of 3 assertions in the final callback + + 2 // 1 readPackage call with malformed package.json + ); + + var basedir = path.join(__dirname, 'resolver/malformed_package_json'); + var expected = path.join(basedir, 'index.js'); + + resolve('./index.js', { basedir: basedir }, function (err, res, pkg) { + t.error(err, 'no error'); + t.equal(res, expected, 'malformed package.json is silently ignored'); + t.equal(pkg, undefined, 'malformed package.json gives an undefined `pkg` argument'); + }); + + resolve( + './index.js', + { + basedir: basedir, + packageFilter: function (pkg, pkgfile, dir) { + t.fail('should not reach here'); + } + }, + function (err, res, pkg) { + t.error(err, 'with packageFilter: no error'); + t.equal(res, expected, 'with packageFilter: malformed package.json is silently ignored'); + t.equal(pkg, undefined, 'with packageFilter: malformed package.json gives an undefined `pkg` argument'); + } + ); + + resolve( + './index.js', + { + basedir: basedir, + readPackage: function (readFile, pkgfile, cb) { + t.equal(pkgfile, path.join(basedir, 
'package.json'), 'readPackageSync: `pkgfile` is package.json path'); + readFile(pkgfile, function (err, result) { + try { + cb(null, JSON.parse(result)); + } catch (e) { + t.ok(e instanceof SyntaxError, 'readPackage: malformed package.json parses as a syntax error'); + cb(null); + } + }); + } + }, + function (err, res, pkg) { + t.error(err, 'with readPackage: no error'); + t.equal(res, expected, 'with readPackage: malformed package.json is silently ignored'); + t.equal(pkg, undefined, 'with readPackage: malformed package.json gives an undefined `pkg` argument'); + } + ); +}); diff --git a/mybulma/node_modules/resolve/test/resolver/baz/doom.js b/mybulma/node_modules/resolve/test/resolver/baz/doom.js new file mode 100644 index 0000000..e69de29 diff --git a/mybulma/node_modules/resolve/test/resolver/baz/package.json b/mybulma/node_modules/resolve/test/resolver/baz/package.json new file mode 100644 index 0000000..2f77720 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/baz/package.json @@ -0,0 +1,4 @@ +{ + "name": "baz", + "main": "quux.js" +} diff --git a/mybulma/node_modules/resolve/test/resolver/baz/quux.js b/mybulma/node_modules/resolve/test/resolver/baz/quux.js new file mode 100644 index 0000000..bd816ea --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/baz/quux.js @@ -0,0 +1 @@ +module.exports = 1; diff --git a/mybulma/node_modules/resolve/test/resolver/browser_field/a.js b/mybulma/node_modules/resolve/test/resolver/browser_field/a.js new file mode 100644 index 0000000..e69de29 diff --git a/mybulma/node_modules/resolve/test/resolver/browser_field/b.js b/mybulma/node_modules/resolve/test/resolver/browser_field/b.js new file mode 100644 index 0000000..e69de29 diff --git a/mybulma/node_modules/resolve/test/resolver/browser_field/package.json b/mybulma/node_modules/resolve/test/resolver/browser_field/package.json new file mode 100644 index 0000000..bf406f0 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/browser_field/package.json @@ -0,0 +1,5 @@ +{ + "name": "browser_field", + "main": "a", + "browser": "b" +} diff --git a/mybulma/node_modules/resolve/test/resolver/cup.coffee b/mybulma/node_modules/resolve/test/resolver/cup.coffee new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/cup.coffee @@ -0,0 +1 @@ + diff --git a/mybulma/node_modules/resolve/test/resolver/dot_main/index.js b/mybulma/node_modules/resolve/test/resolver/dot_main/index.js new file mode 100644 index 0000000..bd816ea --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/dot_main/index.js @@ -0,0 +1 @@ +module.exports = 1; diff --git a/mybulma/node_modules/resolve/test/resolver/dot_main/package.json b/mybulma/node_modules/resolve/test/resolver/dot_main/package.json new file mode 100644 index 0000000..d7f4fc8 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/dot_main/package.json @@ -0,0 +1,3 @@ +{ + "main": "." 
+} diff --git a/mybulma/node_modules/resolve/test/resolver/dot_slash_main/index.js b/mybulma/node_modules/resolve/test/resolver/dot_slash_main/index.js new file mode 100644 index 0000000..bd816ea --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/dot_slash_main/index.js @@ -0,0 +1 @@ +module.exports = 1; diff --git a/mybulma/node_modules/resolve/test/resolver/dot_slash_main/package.json b/mybulma/node_modules/resolve/test/resolver/dot_slash_main/package.json new file mode 100644 index 0000000..f51287b --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/dot_slash_main/package.json @@ -0,0 +1,3 @@ +{ + "main": "./" +} diff --git a/mybulma/node_modules/resolve/test/resolver/false_main/index.js b/mybulma/node_modules/resolve/test/resolver/false_main/index.js new file mode 100644 index 0000000..e69de29 diff --git a/mybulma/node_modules/resolve/test/resolver/false_main/package.json b/mybulma/node_modules/resolve/test/resolver/false_main/package.json new file mode 100644 index 0000000..a7416c0 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/false_main/package.json @@ -0,0 +1,4 @@ +{ + "name": "false_main", + "main": false +} diff --git a/mybulma/node_modules/resolve/test/resolver/foo.js b/mybulma/node_modules/resolve/test/resolver/foo.js new file mode 100644 index 0000000..bd816ea --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/foo.js @@ -0,0 +1 @@ +module.exports = 1; diff --git a/mybulma/node_modules/resolve/test/resolver/incorrect_main/index.js b/mybulma/node_modules/resolve/test/resolver/incorrect_main/index.js new file mode 100644 index 0000000..bc1fb0a --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/incorrect_main/index.js @@ -0,0 +1,2 @@ +// this is the actual main file 'index.js', not 'wrong.js' like the package.json would indicate +module.exports = 1; diff --git a/mybulma/node_modules/resolve/test/resolver/incorrect_main/package.json b/mybulma/node_modules/resolve/test/resolver/incorrect_main/package.json new file mode 100644 index 0000000..b718804 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/incorrect_main/package.json @@ -0,0 +1,3 @@ +{ + "main": "wrong.js" +} diff --git a/mybulma/node_modules/resolve/test/resolver/invalid_main/package.json b/mybulma/node_modules/resolve/test/resolver/invalid_main/package.json new file mode 100644 index 0000000..0590748 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/invalid_main/package.json @@ -0,0 +1,7 @@ +{ + "name": "invalid_main", + "main": [ + "why is this a thing", + "srsly omg wtf" + ] +} diff --git a/mybulma/node_modules/resolve/test/resolver/malformed_package_json/index.js b/mybulma/node_modules/resolve/test/resolver/malformed_package_json/index.js new file mode 100644 index 0000000..e69de29 diff --git a/mybulma/node_modules/resolve/test/resolver/malformed_package_json/package.json b/mybulma/node_modules/resolve/test/resolver/malformed_package_json/package.json new file mode 100644 index 0000000..98232c6 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/malformed_package_json/package.json @@ -0,0 +1 @@ +{ diff --git a/mybulma/node_modules/resolve/test/resolver/mug.coffee b/mybulma/node_modules/resolve/test/resolver/mug.coffee new file mode 100644 index 0000000..e69de29 diff --git a/mybulma/node_modules/resolve/test/resolver/mug.js b/mybulma/node_modules/resolve/test/resolver/mug.js new file mode 100644 index 0000000..e69de29 diff --git a/mybulma/node_modules/resolve/test/resolver/multirepo/lerna.json 
b/mybulma/node_modules/resolve/test/resolver/multirepo/lerna.json new file mode 100644 index 0000000..d6707ca --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/multirepo/lerna.json @@ -0,0 +1,6 @@ +{ + "packages": [ + "packages/*" + ], + "version": "0.0.0" +} diff --git a/mybulma/node_modules/resolve/test/resolver/multirepo/package.json b/mybulma/node_modules/resolve/test/resolver/multirepo/package.json new file mode 100644 index 0000000..8508f9d --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/multirepo/package.json @@ -0,0 +1,20 @@ +{ + "name": "monorepo-symlink-test", + "private": true, + "version": "0.0.0", + "description": "", + "main": "index.js", + "scripts": { + "postinstall": "lerna bootstrap", + "test": "node packages/package-a" + }, + "author": "", + "license": "MIT", + "dependencies": { + "jquery": "^3.3.1", + "resolve": "../../../" + }, + "devDependencies": { + "lerna": "^3.4.3" + } +} diff --git a/mybulma/node_modules/resolve/test/resolver/multirepo/packages/package-a/index.js b/mybulma/node_modules/resolve/test/resolver/multirepo/packages/package-a/index.js new file mode 100644 index 0000000..8875a32 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/multirepo/packages/package-a/index.js @@ -0,0 +1,35 @@ +'use strict'; + +var assert = require('assert'); +var path = require('path'); +var resolve = require('resolve'); + +var basedir = __dirname + '/node_modules/@my-scope/package-b'; + +var expected = path.join(__dirname, '../../node_modules/jquery/dist/jquery.js'); + +/* + * preserveSymlinks === false + * will search NPM package from + * - packages/package-b/node_modules + * - packages/node_modules + * - node_modules + */ +assert.equal(resolve.sync('jquery', { basedir: basedir, preserveSymlinks: false }), expected); +assert.equal(resolve.sync('../../node_modules/jquery', { basedir: basedir, preserveSymlinks: false }), expected); + +/* + * preserveSymlinks === true + * will search NPM package from + * - packages/package-a/node_modules/@my-scope/packages/package-b/node_modules + * - packages/package-a/node_modules/@my-scope/packages/node_modules + * - packages/package-a/node_modules/@my-scope/node_modules + * - packages/package-a/node_modules/node_modules + * - packages/package-a/node_modules + * - packages/node_modules + * - node_modules + */ +assert.equal(resolve.sync('jquery', { basedir: basedir, preserveSymlinks: true }), expected); +assert.equal(resolve.sync('../../../../../node_modules/jquery', { basedir: basedir, preserveSymlinks: true }), expected); + +console.log(' * all monorepo paths successfully resolved through symlinks'); diff --git a/mybulma/node_modules/resolve/test/resolver/multirepo/packages/package-a/package.json b/mybulma/node_modules/resolve/test/resolver/multirepo/packages/package-a/package.json new file mode 100644 index 0000000..204de51 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/multirepo/packages/package-a/package.json @@ -0,0 +1,14 @@ +{ + "name": "@my-scope/package-a", + "version": "0.0.0", + "private": true, + "description": "", + "license": "MIT", + "main": "index.js", + "scripts": { + "test": "echo \"Error: run tests from root\" && exit 1" + }, + "dependencies": { + "@my-scope/package-b": "^0.0.0" + } +} diff --git a/mybulma/node_modules/resolve/test/resolver/multirepo/packages/package-b/index.js b/mybulma/node_modules/resolve/test/resolver/multirepo/packages/package-b/index.js new file mode 100644 index 0000000..e69de29 diff --git 
a/mybulma/node_modules/resolve/test/resolver/multirepo/packages/package-b/package.json b/mybulma/node_modules/resolve/test/resolver/multirepo/packages/package-b/package.json new file mode 100644 index 0000000..f57c3b5 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/multirepo/packages/package-b/package.json @@ -0,0 +1,14 @@ +{ + "name": "@my-scope/package-b", + "private": true, + "version": "0.0.0", + "description": "", + "license": "MIT", + "main": "index.js", + "scripts": { + "test": "echo \"Error: run tests from root\" && exit 1" + }, + "dependencies": { + "@my-scope/package-a": "^0.0.0" + } +} diff --git a/mybulma/node_modules/resolve/test/resolver/nested_symlinks/mylib/async.js b/mybulma/node_modules/resolve/test/resolver/nested_symlinks/mylib/async.js new file mode 100644 index 0000000..9b4846a --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/nested_symlinks/mylib/async.js @@ -0,0 +1,26 @@ +var a = require.resolve('buffer/').replace(process.cwd(), '$CWD'); +var b; +var c; + +var test = function test() { + console.log(a, ': require.resolve, preserveSymlinks ' + (process.execArgv.indexOf('preserve-symlinks') > -1 ? 'true' : 'false')); + console.log(b, ': preserveSymlinks true'); + console.log(c, ': preserveSymlinks false'); + + if (a !== b && a !== c) { + throw 'async: no match'; + } + console.log('async: success! a matched either b or c\n'); +}; + +require('resolve')('buffer/', { preserveSymlinks: true }, function (err, result) { + if (err) { throw err; } + b = result.replace(process.cwd(), '$CWD'); + if (b && c) { test(); } +}); +require('resolve')('buffer/', { preserveSymlinks: false }, function (err, result) { + if (err) { throw err; } + c = result.replace(process.cwd(), '$CWD'); + if (b && c) { test(); } +}); + diff --git a/mybulma/node_modules/resolve/test/resolver/nested_symlinks/mylib/package.json b/mybulma/node_modules/resolve/test/resolver/nested_symlinks/mylib/package.json new file mode 100644 index 0000000..acfe9e9 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/nested_symlinks/mylib/package.json @@ -0,0 +1,15 @@ +{ + "name": "mylib", + "version": "0.0.0", + "description": "", + "private": true, + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "buffer": "*" + } +} diff --git a/mybulma/node_modules/resolve/test/resolver/nested_symlinks/mylib/sync.js b/mybulma/node_modules/resolve/test/resolver/nested_symlinks/mylib/sync.js new file mode 100644 index 0000000..3283efc --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/nested_symlinks/mylib/sync.js @@ -0,0 +1,12 @@ +var a = require.resolve('buffer/').replace(process.cwd(), '$CWD'); +var b = require('resolve').sync('buffer/', { preserveSymlinks: true }).replace(process.cwd(), '$CWD'); +var c = require('resolve').sync('buffer/', { preserveSymlinks: false }).replace(process.cwd(), '$CWD'); + +console.log(a, ': require.resolve, preserveSymlinks ' + (process.execArgv.indexOf('preserve-symlinks') > -1 ? 'true' : 'false')); +console.log(b, ': preserveSymlinks true'); +console.log(c, ': preserveSymlinks false'); + +if (a !== b && a !== c) { + throw 'sync: no match'; +} +console.log('sync: success! 
a matched either b or c\n'); diff --git a/mybulma/node_modules/resolve/test/resolver/other_path/lib/other-lib.js b/mybulma/node_modules/resolve/test/resolver/other_path/lib/other-lib.js new file mode 100644 index 0000000..e69de29 diff --git a/mybulma/node_modules/resolve/test/resolver/other_path/root.js b/mybulma/node_modules/resolve/test/resolver/other_path/root.js new file mode 100644 index 0000000..e69de29 diff --git a/mybulma/node_modules/resolve/test/resolver/quux/foo/index.js b/mybulma/node_modules/resolve/test/resolver/quux/foo/index.js new file mode 100644 index 0000000..bd816ea --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/quux/foo/index.js @@ -0,0 +1 @@ +module.exports = 1; diff --git a/mybulma/node_modules/resolve/test/resolver/same_names/foo.js b/mybulma/node_modules/resolve/test/resolver/same_names/foo.js new file mode 100644 index 0000000..888cae3 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/same_names/foo.js @@ -0,0 +1 @@ +module.exports = 42; diff --git a/mybulma/node_modules/resolve/test/resolver/same_names/foo/index.js b/mybulma/node_modules/resolve/test/resolver/same_names/foo/index.js new file mode 100644 index 0000000..bd816ea --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/same_names/foo/index.js @@ -0,0 +1 @@ +module.exports = 1; diff --git a/mybulma/node_modules/resolve/test/resolver/symlinked/_/node_modules/foo.js b/mybulma/node_modules/resolve/test/resolver/symlinked/_/node_modules/foo.js new file mode 100644 index 0000000..e69de29 diff --git a/mybulma/node_modules/resolve/test/resolver/symlinked/_/symlink_target/.gitkeep b/mybulma/node_modules/resolve/test/resolver/symlinked/_/symlink_target/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/mybulma/node_modules/resolve/test/resolver/symlinked/package/bar.js b/mybulma/node_modules/resolve/test/resolver/symlinked/package/bar.js new file mode 100644 index 0000000..cb1c2c0 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/symlinked/package/bar.js @@ -0,0 +1 @@ +module.exports = 'bar'; diff --git a/mybulma/node_modules/resolve/test/resolver/symlinked/package/package.json b/mybulma/node_modules/resolve/test/resolver/symlinked/package/package.json new file mode 100644 index 0000000..8e1b585 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/symlinked/package/package.json @@ -0,0 +1,3 @@ +{ + "main": "bar.js" +} \ No newline at end of file diff --git a/mybulma/node_modules/resolve/test/resolver/without_basedir/main.js b/mybulma/node_modules/resolve/test/resolver/without_basedir/main.js new file mode 100644 index 0000000..5b31975 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver/without_basedir/main.js @@ -0,0 +1,5 @@ +var resolve = require('../../../'); + +module.exports = function (t, cb) { + resolve('mymodule', null, cb); +}; diff --git a/mybulma/node_modules/resolve/test/resolver_sync.js b/mybulma/node_modules/resolve/test/resolver_sync.js new file mode 100644 index 0000000..53453d6 --- /dev/null +++ b/mybulma/node_modules/resolve/test/resolver_sync.js @@ -0,0 +1,726 @@ +var path = require('path'); +var fs = require('fs'); +var test = require('tape'); + +var resolve = require('../'); +var sync = require('../sync'); + +var requireResolveSupportsPaths = require.resolve.length > 1 + && !(/^v12\.[012]\./).test(process.version); // broken in v12.0-12.2, see https://github.com/nodejs/node/issues/27794 + +test('`./sync` entry point', function (t) { + t.equal(resolve.sync, sync, '`./sync` entry point is the same as `.sync` 
on `main`'); + t.end(); +}); + +test('foo', function (t) { + var dir = path.join(__dirname, 'resolver'); + + t.equal( + resolve.sync('./foo', { basedir: dir }), + path.join(dir, 'foo.js'), + './foo' + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('./foo', { basedir: dir }), + require.resolve('./foo', { paths: [dir] }), + './foo: resolve.sync === require.resolve' + ); + } + + t.equal( + resolve.sync('./foo.js', { basedir: dir }), + path.join(dir, 'foo.js'), + './foo.js' + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('./foo.js', { basedir: dir }), + require.resolve('./foo.js', { paths: [dir] }), + './foo.js: resolve.sync === require.resolve' + ); + } + + t.equal( + resolve.sync('./foo.js', { basedir: dir, filename: path.join(dir, 'bar.js') }), + path.join(dir, 'foo.js') + ); + + t.throws(function () { + resolve.sync('foo', { basedir: dir }); + }); + + // Test that filename is reported as the "from" value when passed. + t.throws( + function () { + resolve.sync('foo', { basedir: dir, filename: path.join(dir, 'bar.js') }); + }, + { + name: 'Error', + message: "Cannot find module 'foo' from '" + path.join(dir, 'bar.js') + "'" + } + ); + + t.end(); +}); + +test('bar', function (t) { + var dir = path.join(__dirname, 'resolver'); + + var basedir = path.join(dir, 'bar'); + + t.equal( + resolve.sync('foo', { basedir: basedir }), + path.join(dir, 'bar/node_modules/foo/index.js'), + 'foo in bar' + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('foo', { basedir: basedir }), + require.resolve('foo', { paths: [basedir] }), + 'foo in bar: resolve.sync === require.resolve' + ); + } + + t.end(); +}); + +test('baz', function (t) { + var dir = path.join(__dirname, 'resolver'); + + t.equal( + resolve.sync('./baz', { basedir: dir }), + path.join(dir, 'baz/quux.js'), + './baz' + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('./baz', { basedir: dir }), + require.resolve('./baz', { paths: [dir] }), + './baz: resolve.sync === require.resolve' + ); + } + + t.end(); +}); + +test('biz', function (t) { + var dir = path.join(__dirname, 'resolver/biz/node_modules'); + + t.equal( + resolve.sync('./grux', { basedir: dir }), + path.join(dir, 'grux/index.js') + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('./grux', { basedir: dir }), + require.resolve('./grux', { paths: [dir] }), + './grux: resolve.sync === require.resolve' + ); + } + + var tivDir = path.join(dir, 'grux'); + t.equal( + resolve.sync('tiv', { basedir: tivDir }), + path.join(dir, 'tiv/index.js') + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('tiv', { basedir: tivDir }), + require.resolve('tiv', { paths: [tivDir] }), + 'tiv: resolve.sync === require.resolve' + ); + } + + var gruxDir = path.join(dir, 'tiv'); + t.equal( + resolve.sync('grux', { basedir: gruxDir }), + path.join(dir, 'grux/index.js') + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('grux', { basedir: gruxDir }), + require.resolve('grux', { paths: [gruxDir] }), + 'grux: resolve.sync === require.resolve' + ); + } + + t.end(); +}); + +test('normalize', function (t) { + var dir = path.join(__dirname, 'resolver/biz/node_modules/grux'); + + t.equal( + resolve.sync('../grux', { basedir: dir }), + path.join(dir, 'index.js') + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('../grux', { basedir: dir }), + require.resolve('../grux', { paths: [dir] }), + '../grux: resolve.sync === require.resolve' + ); + } + + t.end(); +}); + +test('cup', function (t) { + 
var dir = path.join(__dirname, 'resolver'); + + t.equal( + resolve.sync('./cup', { + basedir: dir, + extensions: ['.js', '.coffee'] + }), + path.join(dir, 'cup.coffee'), + './cup -> ./cup.coffee' + ); + + t.equal( + resolve.sync('./cup.coffee', { basedir: dir }), + path.join(dir, 'cup.coffee'), + './cup.coffee' + ); + + t.throws(function () { + resolve.sync('./cup', { + basedir: dir, + extensions: ['.js'] + }); + }); + + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('./cup.coffee', { basedir: dir, extensions: ['.js', '.coffee'] }), + require.resolve('./cup.coffee', { paths: [dir] }), + './cup.coffee: resolve.sync === require.resolve' + ); + } + + t.end(); +}); + +test('mug', function (t) { + var dir = path.join(__dirname, 'resolver'); + + t.equal( + resolve.sync('./mug', { basedir: dir }), + path.join(dir, 'mug.js'), + './mug -> ./mug.js' + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('./mug', { basedir: dir }), + require.resolve('./mug', { paths: [dir] }), + './mug: resolve.sync === require.resolve' + ); + } + + t.equal( + resolve.sync('./mug', { + basedir: dir, + extensions: ['.coffee', '.js'] + }), + path.join(dir, 'mug.coffee'), + './mug -> ./mug.coffee' + ); + + t.equal( + resolve.sync('./mug', { + basedir: dir, + extensions: ['.js', '.coffee'] + }), + path.join(dir, 'mug.js'), + './mug -> ./mug.js' + ); + + t.end(); +}); + +test('other path', function (t) { + var resolverDir = path.join(__dirname, 'resolver'); + var dir = path.join(resolverDir, 'bar'); + var otherDir = path.join(resolverDir, 'other_path'); + + t.equal( + resolve.sync('root', { + basedir: dir, + paths: [otherDir] + }), + path.join(resolverDir, 'other_path/root.js') + ); + + t.equal( + resolve.sync('lib/other-lib', { + basedir: dir, + paths: [otherDir] + }), + path.join(resolverDir, 'other_path/lib/other-lib.js') + ); + + t.throws(function () { + resolve.sync('root', { basedir: dir }); + }); + + t.throws(function () { + resolve.sync('zzz', { + basedir: dir, + paths: [otherDir] + }); + }); + + t.end(); +}); + +test('path iterator', function (t) { + var resolverDir = path.join(__dirname, 'resolver'); + + var exactIterator = function (x, start, getPackageCandidates, opts) { + return [path.join(resolverDir, x)]; + }; + + t.equal( + resolve.sync('baz', { packageIterator: exactIterator }), + path.join(resolverDir, 'baz/quux.js') + ); + + t.end(); +}); + +test('incorrect main', function (t) { + var resolverDir = path.join(__dirname, 'resolver'); + var dir = path.join(resolverDir, 'incorrect_main'); + + t.equal( + resolve.sync('./incorrect_main', { basedir: resolverDir }), + path.join(dir, 'index.js') + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('./incorrect_main', { basedir: resolverDir }), + require.resolve('./incorrect_main', { paths: [resolverDir] }), + './incorrect_main: resolve.sync === require.resolve' + ); + } + + t.end(); +}); + +test('missing index', function (t) { + t.plan(requireResolveSupportsPaths ? 
2 : 1); + + var resolverDir = path.join(__dirname, 'resolver'); + try { + resolve.sync('./missing_index', { basedir: resolverDir }); + t.fail('did not fail'); + } catch (err) { + t.equal(err && err.code, 'MODULE_NOT_FOUND', 'error has correct error code'); + } + if (requireResolveSupportsPaths) { + try { + require.resolve('./missing_index', { basedir: resolverDir }); + t.fail('require.resolve did not fail'); + } catch (err) { + t.equal(err && err.code, 'MODULE_NOT_FOUND', 'error has correct error code'); + } + } +}); + +test('missing main', function (t) { + var resolverDir = path.join(__dirname, 'resolver'); + + try { + resolve.sync('./missing_main', { basedir: resolverDir }); + t.fail('require.resolve did not fail'); + } catch (err) { + t.equal(err && err.code, 'MODULE_NOT_FOUND', 'error has correct error code'); + } + if (requireResolveSupportsPaths) { + try { + resolve.sync('./missing_main', { basedir: resolverDir }); + t.fail('require.resolve did not fail'); + } catch (err) { + t.equal(err && err.code, 'MODULE_NOT_FOUND', 'error has correct error code'); + } + } + + t.end(); +}); + +test('null main', function (t) { + var resolverDir = path.join(__dirname, 'resolver'); + + try { + resolve.sync('./null_main', { basedir: resolverDir }); + t.fail('require.resolve did not fail'); + } catch (err) { + t.equal(err && err.code, 'MODULE_NOT_FOUND', 'error has correct error code'); + } + if (requireResolveSupportsPaths) { + try { + resolve.sync('./null_main', { basedir: resolverDir }); + t.fail('require.resolve did not fail'); + } catch (err) { + t.equal(err && err.code, 'MODULE_NOT_FOUND', 'error has correct error code'); + } + } + + t.end(); +}); + +test('main: false', function (t) { + var basedir = path.join(__dirname, 'resolver'); + var dir = path.join(basedir, 'false_main'); + t.equal( + resolve.sync('./false_main', { basedir: basedir }), + path.join(dir, 'index.js'), + '`"main": false`: resolves to `index.js`' + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('./false_main', { basedir: basedir }), + require.resolve('./false_main', { paths: [basedir] }), + '`"main": false`: resolve.sync === require.resolve' + ); + } + + t.end(); +}); + +var stubStatSync = function stubStatSync(fn) { + var statSync = fs.statSync; + try { + fs.statSync = function () { + throw new EvalError('Unknown Error'); + }; + return fn(); + } finally { + fs.statSync = statSync; + } +}; + +test('#79 - re-throw non ENOENT errors from stat', function (t) { + var dir = path.join(__dirname, 'resolver'); + + stubStatSync(function () { + t.throws(function () { + resolve.sync('foo', { basedir: dir }); + }, /Unknown Error/); + }); + + t.end(); +}); + +test('#52 - incorrectly resolves module-paths like "./someFolder/" when there is a file of the same name', function (t) { + var dir = path.join(__dirname, 'resolver'); + var basedir = path.join(dir, 'same_names'); + + t.equal( + resolve.sync('./foo', { basedir: basedir }), + path.join(dir, 'same_names/foo.js') + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('./foo', { basedir: basedir }), + require.resolve('./foo', { paths: [basedir] }), + './foo: resolve.sync === require.resolve' + ); + } + + t.equal( + resolve.sync('./foo/', { basedir: basedir }), + path.join(dir, 'same_names/foo/index.js') + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('./foo/', { basedir: basedir }), + require.resolve('./foo/', { paths: [basedir] }), + './foo/: resolve.sync === require.resolve' + ); + } + + t.end(); +}); + +test('#211 - incorrectly 
resolves module-paths like "." when from inside a folder with a sibling file of the same name', function (t) { + var dir = path.join(__dirname, 'resolver'); + var basedir = path.join(dir, 'same_names/foo'); + + t.equal( + resolve.sync('./', { basedir: basedir }), + path.join(dir, 'same_names/foo/index.js'), + './' + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('./', { basedir: basedir }), + require.resolve('./', { paths: [basedir] }), + './: resolve.sync === require.resolve' + ); + } + + t.equal( + resolve.sync('.', { basedir: basedir }), + path.join(dir, 'same_names/foo/index.js'), + '.' + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('.', { basedir: basedir }), + require.resolve('.', { paths: [basedir] }), + '.: resolve.sync === require.resolve', + { todo: true } + ); + } + + t.end(); +}); + +test('sync: #121 - treating an existing file as a dir when no basedir', function (t) { + var testFile = path.basename(__filename); + + t.test('sanity check', function (st) { + st.equal( + resolve.sync('./' + testFile), + __filename, + 'sanity check' + ); + st.equal( + resolve.sync('./' + testFile), + require.resolve('./' + testFile), + 'sanity check: resolve.sync === require.resolve' + ); + + st.end(); + }); + + t.test('with a fake directory', function (st) { + function run() { return resolve.sync('./' + testFile + '/blah'); } + + st.throws(run, 'throws an error'); + + try { + run(); + } catch (e) { + st.equal(e.code, 'MODULE_NOT_FOUND', 'error code matches require.resolve'); + st.equal( + e.message, + 'Cannot find module \'./' + testFile + '/blah\' from \'' + __dirname + '\'', + 'can not find nonexistent module' + ); + } + + st.end(); + }); + + t.end(); +}); + +test('sync dot main', function (t) { + var start = new Date(); + + t.equal( + resolve.sync('./resolver/dot_main'), + path.join(__dirname, 'resolver/dot_main/index.js'), + './resolver/dot_main' + ); + t.equal( + resolve.sync('./resolver/dot_main'), + require.resolve('./resolver/dot_main'), + './resolver/dot_main: resolve.sync === require.resolve' + ); + + t.ok(new Date() - start < 50, 'resolve.sync timedout'); + + t.end(); +}); + +test('sync dot slash main', function (t) { + var start = new Date(); + + t.equal( + resolve.sync('./resolver/dot_slash_main'), + path.join(__dirname, 'resolver/dot_slash_main/index.js') + ); + t.equal( + resolve.sync('./resolver/dot_slash_main'), + require.resolve('./resolver/dot_slash_main'), + './resolver/dot_slash_main: resolve.sync === require.resolve' + ); + + t.ok(new Date() - start < 50, 'resolve.sync timedout'); + + t.end(); +}); + +test('not a directory', function (t) { + var path = './foo'; + try { + resolve.sync(path, { basedir: __filename }); + t.fail(); + } catch (err) { + t.ok(err, 'a non-directory errors'); + t.equal(err && err.message, 'Cannot find module \'' + path + "' from '" + __filename + "'"); + t.equal(err && err.code, 'MODULE_NOT_FOUND'); + } + t.end(); +}); + +test('non-string "main" field in package.json', function (t) { + var dir = path.join(__dirname, 'resolver'); + try { + var result = resolve.sync('./invalid_main', { basedir: dir }); + t.equal(result, undefined, 'result should not exist'); + t.fail('should not get here'); + } catch (err) { + t.ok(err, 'errors on non-string main'); + t.equal(err.message, 'package “invalid_main” `main` must be a string'); + t.equal(err.code, 'INVALID_PACKAGE_MAIN'); + } + t.end(); +}); + +test('non-string "main" field in package.json', function (t) { + var dir = path.join(__dirname, 'resolver'); + try { + var 
result = resolve.sync('./invalid_main', { basedir: dir }); + t.equal(result, undefined, 'result should not exist'); + t.fail('should not get here'); + } catch (err) { + t.ok(err, 'errors on non-string main'); + t.equal(err.message, 'package “invalid_main” `main` must be a string'); + t.equal(err.code, 'INVALID_PACKAGE_MAIN'); + } + t.end(); +}); + +test('browser field in package.json', function (t) { + var dir = path.join(__dirname, 'resolver'); + var res = resolve.sync('./browser_field', { + basedir: dir, + packageFilter: function packageFilter(pkg) { + if (pkg.browser) { + pkg.main = pkg.browser; // eslint-disable-line no-param-reassign + delete pkg.browser; // eslint-disable-line no-param-reassign + } + return pkg; + } + }); + t.equal(res, path.join(dir, 'browser_field', 'b.js')); + t.end(); +}); + +test('absolute paths', function (t) { + var extensionless = __filename.slice(0, -path.extname(__filename).length); + + t.equal( + resolve.sync(__filename), + __filename, + 'absolute path to this file resolves' + ); + t.equal( + resolve.sync(__filename), + require.resolve(__filename), + 'absolute path to this file: resolve.sync === require.resolve' + ); + + t.equal( + resolve.sync(extensionless), + __filename, + 'extensionless absolute path to this file resolves' + ); + t.equal( + resolve.sync(__filename), + require.resolve(__filename), + 'absolute path to this file: resolve.sync === require.resolve' + ); + + t.equal( + resolve.sync(__filename, { basedir: process.cwd() }), + __filename, + 'absolute path to this file with a basedir resolves' + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync(__filename, { basedir: process.cwd() }), + require.resolve(__filename, { paths: [process.cwd()] }), + 'absolute path to this file + basedir: resolve.sync === require.resolve' + ); + } + + t.equal( + resolve.sync(extensionless, { basedir: process.cwd() }), + __filename, + 'extensionless absolute path to this file with a basedir resolves' + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync(extensionless, { basedir: process.cwd() }), + require.resolve(extensionless, { paths: [process.cwd()] }), + 'extensionless absolute path to this file + basedir: resolve.sync === require.resolve' + ); + } + + t.end(); +}); + +test('malformed package.json', function (t) { + t.plan(5 + (requireResolveSupportsPaths ? 
1 : 0)); + + var basedir = path.join(__dirname, 'resolver/malformed_package_json'); + var expected = path.join(basedir, 'index.js'); + + t.equal( + resolve.sync('./index.js', { basedir: basedir }), + expected, + 'malformed package.json is silently ignored' + ); + if (requireResolveSupportsPaths) { + t.equal( + resolve.sync('./index.js', { basedir: basedir }), + require.resolve('./index.js', { paths: [basedir] }), + 'malformed package.json: resolve.sync === require.resolve' + ); + } + + var res1 = resolve.sync( + './index.js', + { + basedir: basedir, + packageFilter: function (pkg, pkgfile, dir) { + t.fail('should not reach here'); + } + } + ); + + t.equal( + res1, + expected, + 'with packageFilter: malformed package.json is silently ignored' + ); + + var res2 = resolve.sync( + './index.js', + { + basedir: basedir, + readPackageSync: function (readFileSync, pkgfile) { + t.equal(pkgfile, path.join(basedir, 'package.json'), 'readPackageSync: `pkgfile` is package.json path'); + var result = String(readFileSync(pkgfile)); + try { + return JSON.parse(result); + } catch (e) { + t.ok(e instanceof SyntaxError, 'readPackageSync: malformed package.json parses as a syntax error'); + } + } + } + ); + + t.equal( + res2, + expected, + 'with readPackageSync: malformed package.json is silently ignored' + ); +}); diff --git a/mybulma/node_modules/resolve/test/shadowed_core.js b/mybulma/node_modules/resolve/test/shadowed_core.js new file mode 100644 index 0000000..3a5f4fc --- /dev/null +++ b/mybulma/node_modules/resolve/test/shadowed_core.js @@ -0,0 +1,54 @@ +var test = require('tape'); +var resolve = require('../'); +var path = require('path'); + +test('shadowed core modules still return core module', function (t) { + t.plan(2); + + resolve('util', { basedir: path.join(__dirname, 'shadowed_core') }, function (err, res) { + t.ifError(err); + t.equal(res, 'util'); + }); +}); + +test('shadowed core modules still return core module [sync]', function (t) { + t.plan(1); + + var res = resolve.sync('util', { basedir: path.join(__dirname, 'shadowed_core') }); + + t.equal(res, 'util'); +}); + +test('shadowed core modules return shadow when appending `/`', function (t) { + t.plan(2); + + resolve('util/', { basedir: path.join(__dirname, 'shadowed_core') }, function (err, res) { + t.ifError(err); + t.equal(res, path.join(__dirname, 'shadowed_core/node_modules/util/index.js')); + }); +}); + +test('shadowed core modules return shadow when appending `/` [sync]', function (t) { + t.plan(1); + + var res = resolve.sync('util/', { basedir: path.join(__dirname, 'shadowed_core') }); + + t.equal(res, path.join(__dirname, 'shadowed_core/node_modules/util/index.js')); +}); + +test('shadowed core modules return shadow with `includeCoreModules: false`', function (t) { + t.plan(2); + + resolve('util', { basedir: path.join(__dirname, 'shadowed_core'), includeCoreModules: false }, function (err, res) { + t.ifError(err); + t.equal(res, path.join(__dirname, 'shadowed_core/node_modules/util/index.js')); + }); +}); + +test('shadowed core modules return shadow with `includeCoreModules: false` [sync]', function (t) { + t.plan(1); + + var res = resolve.sync('util', { basedir: path.join(__dirname, 'shadowed_core'), includeCoreModules: false }); + + t.equal(res, path.join(__dirname, 'shadowed_core/node_modules/util/index.js')); +}); diff --git a/mybulma/node_modules/resolve/test/shadowed_core/node_modules/util/index.js b/mybulma/node_modules/resolve/test/shadowed_core/node_modules/util/index.js new file mode 100644 index 0000000..e69de29 diff 
--git a/mybulma/node_modules/resolve/test/subdirs.js b/mybulma/node_modules/resolve/test/subdirs.js new file mode 100644 index 0000000..b7b8450 --- /dev/null +++ b/mybulma/node_modules/resolve/test/subdirs.js @@ -0,0 +1,13 @@ +var test = require('tape'); +var resolve = require('../'); +var path = require('path'); + +test('subdirs', function (t) { + t.plan(2); + + var dir = path.join(__dirname, '/subdirs'); + resolve('a/b/c/x.json', { basedir: dir }, function (err, res) { + t.ifError(err); + t.equal(res, path.join(dir, 'node_modules/a/b/c/x.json')); + }); +}); diff --git a/mybulma/node_modules/resolve/test/symlinks.js b/mybulma/node_modules/resolve/test/symlinks.js new file mode 100644 index 0000000..35f881a --- /dev/null +++ b/mybulma/node_modules/resolve/test/symlinks.js @@ -0,0 +1,176 @@ +var path = require('path'); +var fs = require('fs'); +var test = require('tape'); +var map = require('array.prototype.map'); +var resolve = require('../'); + +var symlinkDir = path.join(__dirname, 'resolver', 'symlinked', 'symlink'); +var packageDir = path.join(__dirname, 'resolver', 'symlinked', '_', 'node_modules', 'package'); +var modADir = path.join(__dirname, 'symlinks', 'source', 'node_modules', 'mod-a'); +var symlinkModADir = path.join(__dirname, 'symlinks', 'dest', 'node_modules', 'mod-a'); +try { + fs.unlinkSync(symlinkDir); +} catch (err) {} +try { + fs.unlinkSync(packageDir); +} catch (err) {} +try { + fs.unlinkSync(modADir); +} catch (err) {} +try { + fs.unlinkSync(symlinkModADir); +} catch (err) {} + +try { + fs.symlinkSync('./_/symlink_target', symlinkDir, 'dir'); +} catch (err) { + // if fails then it is probably on Windows and lets try to create a junction + fs.symlinkSync(path.join(__dirname, 'resolver', 'symlinked', '_', 'symlink_target') + '\\', symlinkDir, 'junction'); +} +try { + fs.symlinkSync('../../package', packageDir, 'dir'); +} catch (err) { + // if fails then it is probably on Windows and lets try to create a junction + fs.symlinkSync(path.join(__dirname, '..', '..', 'package') + '\\', packageDir, 'junction'); +} +try { + fs.symlinkSync('../../source/node_modules/mod-a', symlinkModADir, 'dir'); +} catch (err) { + // if fails then it is probably on Windows and lets try to create a junction + fs.symlinkSync(path.join(__dirname, '..', '..', 'source', 'node_modules', 'mod-a') + '\\', symlinkModADir, 'junction'); +} + +test('symlink', function (t) { + t.plan(2); + + resolve('foo', { basedir: symlinkDir, preserveSymlinks: false }, function (err, res, pkg) { + t.error(err); + t.equal(res, path.join(__dirname, 'resolver', 'symlinked', '_', 'node_modules', 'foo.js')); + }); +}); + +test('sync symlink when preserveSymlinks = true', function (t) { + t.plan(4); + + resolve('foo', { basedir: symlinkDir }, function (err, res, pkg) { + t.ok(err, 'there is an error'); + t.notOk(res, 'no result'); + + t.equal(err && err.code, 'MODULE_NOT_FOUND', 'error code matches require.resolve'); + t.equal( + err && err.message, + 'Cannot find module \'foo\' from \'' + symlinkDir + '\'', + 'can not find nonexistent module' + ); + }); +}); + +test('sync symlink', function (t) { + var start = new Date(); + t.doesNotThrow(function () { + t.equal( + resolve.sync('foo', { basedir: symlinkDir, preserveSymlinks: false }), + path.join(__dirname, 'resolver', 'symlinked', '_', 'node_modules', 'foo.js') + ); + }); + t.ok(new Date() - start < 50, 'resolve.sync timedout'); + t.end(); +}); + +test('sync symlink when preserveSymlinks = true', function (t) { + t.throws(function () { + resolve.sync('foo', { basedir: 
symlinkDir }); + }, /Cannot find module 'foo'/); + t.end(); +}); + +test('sync symlink from node_modules to other dir when preserveSymlinks = false', function (t) { + var basedir = path.join(__dirname, 'resolver', 'symlinked', '_'); + var fn = resolve.sync('package', { basedir: basedir, preserveSymlinks: false }); + + t.equal(fn, path.resolve(__dirname, 'resolver/symlinked/package/bar.js')); + t.end(); +}); + +test('async symlink from node_modules to other dir when preserveSymlinks = false', function (t) { + t.plan(2); + var basedir = path.join(__dirname, 'resolver', 'symlinked', '_'); + resolve('package', { basedir: basedir, preserveSymlinks: false }, function (err, result) { + t.notOk(err, 'no error'); + t.equal(result, path.resolve(__dirname, 'resolver/symlinked/package/bar.js')); + }); +}); + +test('packageFilter', function (t) { + function relative(x) { + return path.relative(__dirname, x); + } + + function testPackageFilter(preserveSymlinks) { + return function (st) { + st.plan('is 1.x' ? 3 : 5); // eslint-disable-line no-constant-condition + + var destMain = 'symlinks/dest/node_modules/mod-a/index.js'; + var destPkg = 'symlinks/dest/node_modules/mod-a/package.json'; + var sourceMain = 'symlinks/source/node_modules/mod-a/index.js'; + var sourcePkg = 'symlinks/source/node_modules/mod-a/package.json'; + var destDir = path.join(__dirname, 'symlinks', 'dest'); + + /* eslint multiline-comment-style: 0 */ + /* v2.x will restore these tests + var packageFilterPath = []; + var actualPath = resolve.sync('mod-a', { + basedir: destDir, + preserveSymlinks: preserveSymlinks, + packageFilter: function (pkg, pkgfile, dir) { + packageFilterPath.push(pkgfile); + } + }); + st.equal( + relative(actualPath), + path.normalize(preserveSymlinks ? destMain : sourceMain), + 'sync: actual path is correct' + ); + st.deepEqual( + map(packageFilterPath, relative), + map(preserveSymlinks ? [destPkg, destPkg] : [sourcePkg, sourcePkg], path.normalize), + 'sync: packageFilter pkgfile arg is correct' + ); + */ + + var asyncPackageFilterPath = []; + resolve( + 'mod-a', + { + basedir: destDir, + preserveSymlinks: preserveSymlinks, + packageFilter: function (pkg, pkgfile) { + asyncPackageFilterPath.push(pkgfile); + } + }, + function (err, actualPath) { + st.error(err, 'no error'); + st.equal( + relative(actualPath), + path.normalize(preserveSymlinks ? destMain : sourceMain), + 'async: actual path is correct' + ); + st.deepEqual( + map(asyncPackageFilterPath, relative), + map( + preserveSymlinks ? 
[destPkg, destPkg, destPkg] : [sourcePkg, sourcePkg, sourcePkg], + path.normalize + ), + 'async: packageFilter pkgfile arg is correct' + ); + } + ); + }; + } + + t.test('preserveSymlinks: false', testPackageFilter(false)); + + t.test('preserveSymlinks: true', testPackageFilter(true)); + + t.end(); +}); diff --git a/mybulma/node_modules/retry/.npmignore b/mybulma/node_modules/retry/.npmignore new file mode 100644 index 0000000..432f285 --- /dev/null +++ b/mybulma/node_modules/retry/.npmignore @@ -0,0 +1,3 @@ +/node_modules/* +npm-debug.log +coverage diff --git a/mybulma/node_modules/retry/.travis.yml b/mybulma/node_modules/retry/.travis.yml new file mode 100644 index 0000000..bcde212 --- /dev/null +++ b/mybulma/node_modules/retry/.travis.yml @@ -0,0 +1,15 @@ +language: node_js +node_js: + - "4" +before_install: + - pip install --user codecov +after_success: + - codecov --file coverage/lcov.info --disable search +# travis encrypt [subdomain]:[api token]@[room id] +# notifications: +# email: false +# campfire: +# rooms: +# secure: xyz +# on_failure: always +# on_success: always diff --git a/mybulma/node_modules/retry/License b/mybulma/node_modules/retry/License new file mode 100644 index 0000000..0b58de3 --- /dev/null +++ b/mybulma/node_modules/retry/License @@ -0,0 +1,21 @@ +Copyright (c) 2011: +Tim Koschützki (tim@debuggable.com) +Felix Geisendörfer (felix@debuggable.com) + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. 
diff --git a/mybulma/node_modules/retry/Makefile b/mybulma/node_modules/retry/Makefile new file mode 100644 index 0000000..1968d8f --- /dev/null +++ b/mybulma/node_modules/retry/Makefile @@ -0,0 +1,18 @@ +SHELL := /bin/bash + +release-major: test + npm version major -m "Release %s" + git push + npm publish + +release-minor: test + npm version minor -m "Release %s" + git push + npm publish + +release-patch: test + npm version patch -m "Release %s" + git push + npm publish + +.PHONY: test release-major release-minor release-patch diff --git a/mybulma/node_modules/retry/README.md b/mybulma/node_modules/retry/README.md new file mode 100644 index 0000000..16e28ec --- /dev/null +++ b/mybulma/node_modules/retry/README.md @@ -0,0 +1,227 @@ + +[![Build Status](https://secure.travis-ci.org/tim-kos/node-retry.png?branch=master)](http://travis-ci.org/tim-kos/node-retry "Check this project's build status on TravisCI") +[![codecov](https://codecov.io/gh/tim-kos/node-retry/branch/master/graph/badge.svg)](https://codecov.io/gh/tim-kos/node-retry) + + +# retry + +Abstraction for exponential and custom retry strategies for failed operations. + +## Installation + + npm install retry + +## Current Status + +This module has been tested and is ready to be used. + +## Tutorial + +The example below will retry a potentially failing `dns.resolve` operation +`10` times using an exponential backoff strategy. With the default settings, this +means the last attempt is made after `17 minutes and 3 seconds`. + +``` javascript +var dns = require('dns'); +var retry = require('retry'); + +function faultTolerantResolve(address, cb) { + var operation = retry.operation(); + + operation.attempt(function(currentAttempt) { + dns.resolve(address, function(err, addresses) { + if (operation.retry(err)) { + return; + } + + cb(err ? operation.mainError() : null, addresses); + }); + }); +} + +faultTolerantResolve('nodejs.org', function(err, addresses) { + console.log(err, addresses); +}); +``` + +Of course you can also configure the factors that go into the exponential +backoff. See the API documentation below for all available settings. +currentAttempt is an int representing the number of attempts so far. + +``` javascript +var operation = retry.operation({ + retries: 5, + factor: 3, + minTimeout: 1 * 1000, + maxTimeout: 60 * 1000, + randomize: true, +}); +``` + +## API + +### retry.operation([options]) + +Creates a new `RetryOperation` object. `options` is the same as `retry.timeouts()`'s `options`, with two additions: + +* `forever`: Whether to retry forever, defaults to `false`. +* `unref`: Whether to [unref](https://nodejs.org/api/timers.html#timers_unref) the setTimeout's, defaults to `false`. +* `maxRetryTime`: The maximum time (in milliseconds) that the retried operation is allowed to run. Default is `Infinity`. + +### retry.timeouts([options]) + +Returns an array of timeouts. All time `options` and return values are in +milliseconds. If `options` is an array, a copy of that array is returned. + +`options` is a JS object that can contain any of the following keys: + +* `retries`: The maximum amount of times to retry the operation. Default is `10`. Seting this to `1` means `do it once, then retry it once`. +* `factor`: The exponential factor to use. Default is `2`. +* `minTimeout`: The number of milliseconds before starting the first retry. Default is `1000`. +* `maxTimeout`: The maximum number of milliseconds between two retries. Default is `Infinity`. 
+* `randomize`: Randomizes the timeouts by multiplying with a factor between `1` and `2`. Default is `false`. + +The formula used to calculate the individual timeouts is: + +``` +Math.min(random * minTimeout * Math.pow(factor, attempt), maxTimeout) +``` + +Have a look at [this article][article] for a better explanation of the approach. + +If you want to tune your `factor` / `times` settings to attempt the last retry +after a certain amount of time, you can use wolfram alpha. For example in order +to tune for `10` attempts in `5 minutes`, you can use this equation: + +![screenshot](https://github.com/tim-kos/node-retry/raw/master/equation.gif) + +Explaining the various values from left to right: + +* `k = 0 ... 9`: The `retries` value (10) +* `1000`: The `minTimeout` value in ms (1000) +* `x^k`: No need to change this, `x` will be your resulting factor +* `5 * 60 * 1000`: The desired total amount of time for retrying in ms (5 minutes) + +To make this a little easier for you, use wolfram alpha to do the calculations: + + + +[article]: http://dthain.blogspot.com/2009/02/exponential-backoff-in-distributed.html + +### retry.createTimeout(attempt, opts) + +Returns a new `timeout` (integer in milliseconds) based on the given parameters. + +`attempt` is an integer representing for which retry the timeout should be calculated. If your retry operation was executed 4 times you had one attempt and 3 retries. If you then want to calculate a new timeout, you should set `attempt` to 4 (attempts are zero-indexed). + +`opts` can include `factor`, `minTimeout`, `randomize` (boolean) and `maxTimeout`. They are documented above. + +`retry.createTimeout()` is used internally by `retry.timeouts()` and is public for you to be able to create your own timeouts for reinserting an item, see [issue #13](https://github.com/tim-kos/node-retry/issues/13). + +### retry.wrap(obj, [options], [methodNames]) + +Wrap all functions of the `obj` with retry. Optionally you can pass operation options and +an array of method names which need to be wrapped. + +``` +retry.wrap(obj) + +retry.wrap(obj, ['method1', 'method2']) + +retry.wrap(obj, {retries: 3}) + +retry.wrap(obj, {retries: 3}, ['method1', 'method2']) +``` +The `options` object can take any options that the usual call to `retry.operation` can take. + +### new RetryOperation(timeouts, [options]) + +Creates a new `RetryOperation` where `timeouts` is an array where each value is +a timeout given in milliseconds. + +Available options: +* `forever`: Whether to retry forever, defaults to `false`. +* `unref`: Whether to [unref](https://nodejs.org/api/timers.html#timers_unref) the setTimeout's, defaults to `false`. + +If `forever` is true, the following changes happen: +* `RetryOperation.errors()` will only output an array of one item: the last error. +* `RetryOperation` will repeatedly use the `timeouts` array. Once all of its timeouts have been used up, it restarts with the first timeout, then uses the second and so on. + +#### retryOperation.errors() + +Returns an array of all errors that have been passed to `retryOperation.retry()` so far. The +returned array has the errors ordered chronologically based on when they were passed to +`retryOperation.retry()`, which means the first passed error is at index zero and the last is +at the last index. + +#### retryOperation.mainError() + +A reference to the error object that occurred most frequently. Errors are +compared using the `error.message` property.
+ +If multiple error messages occurred the same number of times, the last error +object with that message is returned. + +If no errors occurred so far, the value is `null`. + +#### retryOperation.attempt(fn, timeoutOps) + +Defines the function `fn` that is to be retried and executes it for the first +time right away. The `fn` function can receive an optional `currentAttempt` parameter that represents the number of attempts to execute `fn` so far. + +Optionally defines `timeoutOps` which is an object having a property `timeout` in milliseconds and a property `cb` callback function. +Whenever your retry operation takes longer than `timeout` to execute, the timeout callback function `cb` is called. + + +#### retryOperation.try(fn) + +This is an alias for `retryOperation.attempt(fn)`. This is deprecated. Please use `retryOperation.attempt(fn)` instead. + +#### retryOperation.start(fn) + +This is an alias for `retryOperation.attempt(fn)`. This is deprecated. Please use `retryOperation.attempt(fn)` instead. + +#### retryOperation.retry(error) + +Returns `false` when no `error` value is given, or the maximum amount of retries +has been reached. + +Otherwise it returns `true`, and retries the operation after the timeout for +the current attempt number. + +#### retryOperation.stop() + +Allows you to stop the operation being retried. Useful for aborting the operation on a fatal error etc. + +#### retryOperation.reset() + +Resets the internal state of the operation object, so that you can call `attempt()` again as if this was a new operation object. + +#### retryOperation.attempts() + +Returns an int representing the number of attempts it took to call `fn` before it was successful. + +## License + +retry is licensed under the MIT license. + + +# Changelog + +0.10.0 Adding `stop` functionality, thanks to @maxnachlinger. + +0.9.0 Adding `unref` functionality, thanks to @satazor. + +0.8.0 Implementing retry.wrap. + +0.7.0 Some bug fixes and made retry.createTimeout() public. Fixed issues [#10](https://github.com/tim-kos/node-retry/issues/10), [#12](https://github.com/tim-kos/node-retry/issues/12), and [#13](https://github.com/tim-kos/node-retry/issues/13). + +0.6.0 Introduced optional timeOps parameter for the attempt() function which is an object having a property timeout in milliseconds and a property cb callback function. Whenever your retry operation takes longer than timeout to execute, the timeout callback function cb is called. + +0.5.0 Some minor refactoring. + +0.4.0 Changed retryOperation.try() to retryOperation.attempt(). Deprecated the aliases start() and try() for it. + +0.3.0 Added retryOperation.start() which is an alias for retryOperation.try(). + +0.2.0 Added attempts() function and parameter to retryOperation.try() representing the number of attempts it took to call fn().
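(Editor's illustration, not part of the vendored `retry` README above: a minimal sketch of how the documented defaults produce the "17 minutes and 3 seconds" figure quoted in the Tutorial section, using only the public `retry.timeouts()` API added later in this diff. It assumes the defaults listed above, with `randomize` off so the `random` term is `1`.)

``` javascript
// Editor's sketch: reproduce the backoff schedule implied by
// Math.min(random * minTimeout * Math.pow(factor, attempt), maxTimeout)
// with the documented defaults (retries: 10, factor: 2, minTimeout: 1000).
var retry = require('retry');

var timeouts = retry.timeouts();
console.log(timeouts); // [1000, 2000, 4000, 8000, ..., 512000] (ten waits)

// Total wait before the 11th and final attempt:
// 1000 * (2^10 - 1) = 1023000 ms = 1023 s = 17 minutes and 3 seconds.
var totalMs = timeouts.reduce(function (sum, ms) { return sum + ms; }, 0);
console.log(totalMs); // 1023000
```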
diff --git a/mybulma/node_modules/retry/equation.gif b/mybulma/node_modules/retry/equation.gif new file mode 100644 index 0000000..9710723 Binary files /dev/null and b/mybulma/node_modules/retry/equation.gif differ diff --git a/mybulma/node_modules/retry/example/dns.js b/mybulma/node_modules/retry/example/dns.js new file mode 100644 index 0000000..446729b --- /dev/null +++ b/mybulma/node_modules/retry/example/dns.js @@ -0,0 +1,31 @@ +var dns = require('dns'); +var retry = require('../lib/retry'); + +function faultTolerantResolve(address, cb) { + var opts = { + retries: 2, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: 2 * 1000, + randomize: true + }; + var operation = retry.operation(opts); + + operation.attempt(function(currentAttempt) { + dns.resolve(address, function(err, addresses) { + if (operation.retry(err)) { + return; + } + + cb(operation.mainError(), operation.errors(), addresses); + }); + }); +} + +faultTolerantResolve('nodejs.org', function(err, errors, addresses) { + console.warn('err:'); + console.log(err); + + console.warn('addresses:'); + console.log(addresses); +}); \ No newline at end of file diff --git a/mybulma/node_modules/retry/example/stop.js b/mybulma/node_modules/retry/example/stop.js new file mode 100644 index 0000000..e1ceafe --- /dev/null +++ b/mybulma/node_modules/retry/example/stop.js @@ -0,0 +1,40 @@ +var retry = require('../lib/retry'); + +function attemptAsyncOperation(someInput, cb) { + var opts = { + retries: 2, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: 2 * 1000, + randomize: true + }; + var operation = retry.operation(opts); + + operation.attempt(function(currentAttempt) { + failingAsyncOperation(someInput, function(err, result) { + + if (err && err.message === 'A fatal error') { + operation.stop(); + return cb(err); + } + + if (operation.retry(err)) { + return; + } + + cb(operation.mainError(), operation.errors(), result); + }); + }); +} + +attemptAsyncOperation('test input', function(err, errors, result) { + console.warn('err:'); + console.log(err); + + console.warn('result:'); + console.log(result); +}); + +function failingAsyncOperation(input, cb) { + return setImmediate(cb.bind(null, new Error('A fatal error'))); +} diff --git a/mybulma/node_modules/retry/index.js b/mybulma/node_modules/retry/index.js new file mode 100644 index 0000000..ee62f3a --- /dev/null +++ b/mybulma/node_modules/retry/index.js @@ -0,0 +1 @@ +module.exports = require('./lib/retry'); \ No newline at end of file diff --git a/mybulma/node_modules/retry/lib/retry.js b/mybulma/node_modules/retry/lib/retry.js new file mode 100644 index 0000000..dcb5768 --- /dev/null +++ b/mybulma/node_modules/retry/lib/retry.js @@ -0,0 +1,100 @@ +var RetryOperation = require('./retry_operation'); + +exports.operation = function(options) { + var timeouts = exports.timeouts(options); + return new RetryOperation(timeouts, { + forever: options && options.forever, + unref: options && options.unref, + maxRetryTime: options && options.maxRetryTime + }); +}; + +exports.timeouts = function(options) { + if (options instanceof Array) { + return [].concat(options); + } + + var opts = { + retries: 10, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: Infinity, + randomize: false + }; + for (var key in options) { + opts[key] = options[key]; + } + + if (opts.minTimeout > opts.maxTimeout) { + throw new Error('minTimeout is greater than maxTimeout'); + } + + var timeouts = []; + for (var i = 0; i < opts.retries; i++) { + timeouts.push(this.createTimeout(i, opts)); + } + + if (options && options.forever 
&& !timeouts.length) { + timeouts.push(this.createTimeout(i, opts)); + } + + // sort the array numerically ascending + timeouts.sort(function(a,b) { + return a - b; + }); + + return timeouts; +}; + +exports.createTimeout = function(attempt, opts) { + var random = (opts.randomize) + ? (Math.random() + 1) + : 1; + + var timeout = Math.round(random * opts.minTimeout * Math.pow(opts.factor, attempt)); + timeout = Math.min(timeout, opts.maxTimeout); + + return timeout; +}; + +exports.wrap = function(obj, options, methods) { + if (options instanceof Array) { + methods = options; + options = null; + } + + if (!methods) { + methods = []; + for (var key in obj) { + if (typeof obj[key] === 'function') { + methods.push(key); + } + } + } + + for (var i = 0; i < methods.length; i++) { + var method = methods[i]; + var original = obj[method]; + + obj[method] = function retryWrapper(original) { + var op = exports.operation(options); + var args = Array.prototype.slice.call(arguments, 1); + var callback = args.pop(); + + args.push(function(err) { + if (op.retry(err)) { + return; + } + if (err) { + arguments[0] = op.mainError(); + } + callback.apply(this, arguments); + }); + + op.attempt(function() { + original.apply(obj, args); + }); + }.bind(obj, original); + obj[method].options = options; + } +}; diff --git a/mybulma/node_modules/retry/lib/retry_operation.js b/mybulma/node_modules/retry/lib/retry_operation.js new file mode 100644 index 0000000..1e56469 --- /dev/null +++ b/mybulma/node_modules/retry/lib/retry_operation.js @@ -0,0 +1,158 @@ +function RetryOperation(timeouts, options) { + // Compatibility for the old (timeouts, retryForever) signature + if (typeof options === 'boolean') { + options = { forever: options }; + } + + this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); + this._timeouts = timeouts; + this._options = options || {}; + this._maxRetryTime = options && options.maxRetryTime || Infinity; + this._fn = null; + this._errors = []; + this._attempts = 1; + this._operationTimeout = null; + this._operationTimeoutCb = null; + this._timeout = null; + this._operationStart = null; + + if (this._options.forever) { + this._cachedTimeouts = this._timeouts.slice(0); + } +} +module.exports = RetryOperation; + +RetryOperation.prototype.reset = function() { + this._attempts = 1; + this._timeouts = this._originalTimeouts; +} + +RetryOperation.prototype.stop = function() { + if (this._timeout) { + clearTimeout(this._timeout); + } + + this._timeouts = []; + this._cachedTimeouts = null; +}; + +RetryOperation.prototype.retry = function(err) { + if (this._timeout) { + clearTimeout(this._timeout); + } + + if (!err) { + return false; + } + var currentTime = new Date().getTime(); + if (err && currentTime - this._operationStart >= this._maxRetryTime) { + this._errors.unshift(new Error('RetryOperation timeout occurred')); + return false; + } + + this._errors.push(err); + + var timeout = this._timeouts.shift(); + if (timeout === undefined) { + if (this._cachedTimeouts) { + // retry forever, only keep last error + this._errors.splice(this._errors.length - 1, this._errors.length); + this._timeouts = this._cachedTimeouts.slice(0); + timeout = this._timeouts.shift(); + } else { + return false; + } + } + + var self = this; + var timer = setTimeout(function() { + self._attempts++; + + if (self._operationTimeoutCb) { + self._timeout = setTimeout(function() { + self._operationTimeoutCb(self._attempts); + }, self._operationTimeout); + + if (self._options.unref) { + self._timeout.unref(); + } + } + + 
self._fn(self._attempts); + }, timeout); + + if (this._options.unref) { + timer.unref(); + } + + return true; +}; + +RetryOperation.prototype.attempt = function(fn, timeoutOps) { + this._fn = fn; + + if (timeoutOps) { + if (timeoutOps.timeout) { + this._operationTimeout = timeoutOps.timeout; + } + if (timeoutOps.cb) { + this._operationTimeoutCb = timeoutOps.cb; + } + } + + var self = this; + if (this._operationTimeoutCb) { + this._timeout = setTimeout(function() { + self._operationTimeoutCb(); + }, self._operationTimeout); + } + + this._operationStart = new Date().getTime(); + + this._fn(this._attempts); +}; + +RetryOperation.prototype.try = function(fn) { + console.log('Using RetryOperation.try() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = function(fn) { + console.log('Using RetryOperation.start() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = RetryOperation.prototype.try; + +RetryOperation.prototype.errors = function() { + return this._errors; +}; + +RetryOperation.prototype.attempts = function() { + return this._attempts; +}; + +RetryOperation.prototype.mainError = function() { + if (this._errors.length === 0) { + return null; + } + + var counts = {}; + var mainError = null; + var mainErrorCount = 0; + + for (var i = 0; i < this._errors.length; i++) { + var error = this._errors[i]; + var message = error.message; + var count = (counts[message] || 0) + 1; + + counts[message] = count; + + if (count >= mainErrorCount) { + mainError = error; + mainErrorCount = count; + } + } + + return mainError; +}; diff --git a/mybulma/node_modules/retry/package.json b/mybulma/node_modules/retry/package.json new file mode 100644 index 0000000..73c7259 --- /dev/null +++ b/mybulma/node_modules/retry/package.json @@ -0,0 +1,32 @@ +{ + "author": "Tim Koschützki (http://debuggable.com/)", + "name": "retry", + "description": "Abstraction for exponential and custom retry strategies for failed operations.", + "license": "MIT", + "version": "0.12.0", + "homepage": "https://github.com/tim-kos/node-retry", + "repository": { + "type": "git", + "url": "git://github.com/tim-kos/node-retry.git" + }, + "directories": { + "lib": "./lib" + }, + "main": "index", + "engines": { + "node": ">= 4" + }, + "dependencies": {}, + "devDependencies": { + "fake": "0.2.0", + "istanbul": "^0.4.5", + "tape": "^4.8.0" + }, + "scripts": { + "test": "./node_modules/.bin/istanbul cover ./node_modules/tape/bin/tape ./test/integration/*.js", + "release:major": "env SEMANTIC=major npm run release", + "release:minor": "env SEMANTIC=minor npm run release", + "release:patch": "env SEMANTIC=patch npm run release", + "release": "npm version ${SEMANTIC:-patch} -m \"Release %s\" && git push && git push --tags && npm publish" + } +} diff --git a/mybulma/node_modules/retry/test/common.js b/mybulma/node_modules/retry/test/common.js new file mode 100644 index 0000000..2247206 --- /dev/null +++ b/mybulma/node_modules/retry/test/common.js @@ -0,0 +1,10 @@ +var common = module.exports; +var path = require('path'); + +var rootDir = path.join(__dirname, '..'); +common.dir = { + lib: rootDir + '/lib' +}; + +common.assert = require('assert'); +common.fake = require('fake'); \ No newline at end of file diff --git a/mybulma/node_modules/retry/test/integration/test-forever.js b/mybulma/node_modules/retry/test/integration/test-forever.js new file mode 100644 index 0000000..b41307c --- /dev/null +++ b/mybulma/node_modules/retry/test/integration/test-forever.js @@ -0,0 +1,24 @@ +var common = 
require('../common'); +var assert = common.assert; +var retry = require(common.dir.lib + '/retry'); + +(function testForeverUsesFirstTimeout() { + var operation = retry.operation({ + retries: 0, + minTimeout: 100, + maxTimeout: 100, + forever: true + }); + + operation.attempt(function(numAttempt) { + console.log('>numAttempt', numAttempt); + var err = new Error("foo"); + if (numAttempt == 10) { + operation.stop(); + } + + if (operation.retry(err)) { + return; + } + }); +})(); diff --git a/mybulma/node_modules/retry/test/integration/test-retry-operation.js b/mybulma/node_modules/retry/test/integration/test-retry-operation.js new file mode 100644 index 0000000..e351bb6 --- /dev/null +++ b/mybulma/node_modules/retry/test/integration/test-retry-operation.js @@ -0,0 +1,258 @@ +var common = require('../common'); +var assert = common.assert; +var fake = common.fake.create(); +var retry = require(common.dir.lib + '/retry'); + +(function testReset() { + var error = new Error('some error'); + var operation = retry.operation([1, 2, 3]); + var attempts = 0; + + var finalCallback = fake.callback('finalCallback'); + fake.expectAnytime(finalCallback); + + var expectedFinishes = 1; + var finishes = 0; + + var fn = function() { + operation.attempt(function(currentAttempt) { + attempts++; + assert.equal(currentAttempt, attempts); + if (operation.retry(error)) { + return; + } + + finishes++ + assert.equal(expectedFinishes, finishes); + assert.strictEqual(attempts, 4); + assert.strictEqual(operation.attempts(), attempts); + assert.strictEqual(operation.mainError(), error); + + if (finishes < 2) { + attempts = 0; + expectedFinishes++; + operation.reset(); + fn() + } else { + finalCallback(); + } + }); + }; + + fn(); +})(); + +(function testErrors() { + var operation = retry.operation(); + + var error = new Error('some error'); + var error2 = new Error('some other error'); + operation._errors.push(error); + operation._errors.push(error2); + + assert.deepEqual(operation.errors(), [error, error2]); +})(); + +(function testMainErrorReturnsMostFrequentError() { + var operation = retry.operation(); + var error = new Error('some error'); + var error2 = new Error('some other error'); + + operation._errors.push(error); + operation._errors.push(error2); + operation._errors.push(error); + + assert.strictEqual(operation.mainError(), error); +})(); + +(function testMainErrorReturnsLastErrorOnEqualCount() { + var operation = retry.operation(); + var error = new Error('some error'); + var error2 = new Error('some other error'); + + operation._errors.push(error); + operation._errors.push(error2); + + assert.strictEqual(operation.mainError(), error2); +})(); + +(function testAttempt() { + var operation = retry.operation(); + var fn = new Function(); + + var timeoutOpts = { + timeout: 1, + cb: function() {} + }; + operation.attempt(fn, timeoutOpts); + + assert.strictEqual(fn, operation._fn); + assert.strictEqual(timeoutOpts.timeout, operation._operationTimeout); + assert.strictEqual(timeoutOpts.cb, operation._operationTimeoutCb); +})(); + +(function testRetry() { + var error = new Error('some error'); + var operation = retry.operation([1, 2, 3]); + var attempts = 0; + + var finalCallback = fake.callback('finalCallback'); + fake.expectAnytime(finalCallback); + + var fn = function() { + operation.attempt(function(currentAttempt) { + attempts++; + assert.equal(currentAttempt, attempts); + if (operation.retry(error)) { + return; + } + + assert.strictEqual(attempts, 4); + assert.strictEqual(operation.attempts(), attempts); + 
assert.strictEqual(operation.mainError(), error); + finalCallback(); + }); + }; + + fn(); +})(); + +(function testRetryForever() { + var error = new Error('some error'); + var operation = retry.operation({ retries: 3, forever: true }); + var attempts = 0; + + var finalCallback = fake.callback('finalCallback'); + fake.expectAnytime(finalCallback); + + var fn = function() { + operation.attempt(function(currentAttempt) { + attempts++; + assert.equal(currentAttempt, attempts); + if (attempts !== 6 && operation.retry(error)) { + return; + } + + assert.strictEqual(attempts, 6); + assert.strictEqual(operation.attempts(), attempts); + assert.strictEqual(operation.mainError(), error); + finalCallback(); + }); + }; + + fn(); +})(); + +(function testRetryForeverNoRetries() { + var error = new Error('some error'); + var delay = 50 + var operation = retry.operation({ + retries: null, + forever: true, + minTimeout: delay, + maxTimeout: delay + }); + + var attempts = 0; + var startTime = new Date().getTime(); + + var finalCallback = fake.callback('finalCallback'); + fake.expectAnytime(finalCallback); + + var fn = function() { + operation.attempt(function(currentAttempt) { + attempts++; + assert.equal(currentAttempt, attempts); + if (attempts !== 4 && operation.retry(error)) { + return; + } + + var endTime = new Date().getTime(); + var minTime = startTime + (delay * 3); + var maxTime = minTime + 20 // add a little headroom for code execution time + assert(endTime >= minTime) + assert(endTime < maxTime) + assert.strictEqual(attempts, 4); + assert.strictEqual(operation.attempts(), attempts); + assert.strictEqual(operation.mainError(), error); + finalCallback(); + }); + }; + + fn(); +})(); + +(function testStop() { + var error = new Error('some error'); + var operation = retry.operation([1, 2, 3]); + var attempts = 0; + + var finalCallback = fake.callback('finalCallback'); + fake.expectAnytime(finalCallback); + + var fn = function() { + operation.attempt(function(currentAttempt) { + attempts++; + assert.equal(currentAttempt, attempts); + + if (attempts === 2) { + operation.stop(); + + assert.strictEqual(attempts, 2); + assert.strictEqual(operation.attempts(), attempts); + assert.strictEqual(operation.mainError(), error); + finalCallback(); + } + + if (operation.retry(error)) { + return; + } + }); + }; + + fn(); +})(); + +(function testMaxRetryTime() { + var error = new Error('some error'); + var maxRetryTime = 30; + var operation = retry.operation({ + minTimeout: 1, + maxRetryTime: maxRetryTime + }); + var attempts = 0; + + var finalCallback = fake.callback('finalCallback'); + fake.expectAnytime(finalCallback); + + var longAsyncFunction = function (wait, callback){ + setTimeout(callback, wait); + }; + + var fn = function() { + var startTime = new Date().getTime(); + operation.attempt(function(currentAttempt) { + attempts++; + assert.equal(currentAttempt, attempts); + + if (attempts !== 2) { + if (operation.retry(error)) { + return; + } + } else { + var curTime = new Date().getTime(); + longAsyncFunction(maxRetryTime - (curTime - startTime - 1), function(){ + if (operation.retry(error)) { + assert.fail('timeout should be occurred'); + return; + } + + assert.strictEqual(operation.mainError(), error); + finalCallback(); + }); + } + }); + }; + + fn(); +})(); diff --git a/mybulma/node_modules/retry/test/integration/test-retry-wrap.js b/mybulma/node_modules/retry/test/integration/test-retry-wrap.js new file mode 100644 index 0000000..3d2b6bf --- /dev/null +++ 
b/mybulma/node_modules/retry/test/integration/test-retry-wrap.js @@ -0,0 +1,101 @@ +var common = require('../common'); +var assert = common.assert; +var fake = common.fake.create(); +var retry = require(common.dir.lib + '/retry'); + +function getLib() { + return { + fn1: function() {}, + fn2: function() {}, + fn3: function() {} + }; +} + +(function wrapAll() { + var lib = getLib(); + retry.wrap(lib); + assert.equal(lib.fn1.name, 'bound retryWrapper'); + assert.equal(lib.fn2.name, 'bound retryWrapper'); + assert.equal(lib.fn3.name, 'bound retryWrapper'); +}()); + +(function wrapAllPassOptions() { + var lib = getLib(); + retry.wrap(lib, {retries: 2}); + assert.equal(lib.fn1.name, 'bound retryWrapper'); + assert.equal(lib.fn2.name, 'bound retryWrapper'); + assert.equal(lib.fn3.name, 'bound retryWrapper'); + assert.equal(lib.fn1.options.retries, 2); + assert.equal(lib.fn2.options.retries, 2); + assert.equal(lib.fn3.options.retries, 2); +}()); + +(function wrapDefined() { + var lib = getLib(); + retry.wrap(lib, ['fn2', 'fn3']); + assert.notEqual(lib.fn1.name, 'bound retryWrapper'); + assert.equal(lib.fn2.name, 'bound retryWrapper'); + assert.equal(lib.fn3.name, 'bound retryWrapper'); +}()); + +(function wrapDefinedAndPassOptions() { + var lib = getLib(); + retry.wrap(lib, {retries: 2}, ['fn2', 'fn3']); + assert.notEqual(lib.fn1.name, 'bound retryWrapper'); + assert.equal(lib.fn2.name, 'bound retryWrapper'); + assert.equal(lib.fn3.name, 'bound retryWrapper'); + assert.equal(lib.fn2.options.retries, 2); + assert.equal(lib.fn3.options.retries, 2); +}()); + +(function runWrappedWithoutError() { + var callbackCalled; + var lib = {method: function(a, b, callback) { + assert.equal(a, 1); + assert.equal(b, 2); + assert.equal(typeof callback, 'function'); + callback(); + }}; + retry.wrap(lib); + lib.method(1, 2, function() { + callbackCalled = true; + }); + assert.ok(callbackCalled); +}()); + +(function runWrappedSeveralWithoutError() { + var callbacksCalled = 0; + var lib = { + fn1: function (a, callback) { + assert.equal(a, 1); + assert.equal(typeof callback, 'function'); + callback(); + }, + fn2: function (a, callback) { + assert.equal(a, 2); + assert.equal(typeof callback, 'function'); + callback(); + } + }; + retry.wrap(lib, {}, ['fn1', 'fn2']); + lib.fn1(1, function() { + callbacksCalled++; + }); + lib.fn2(2, function() { + callbacksCalled++; + }); + assert.equal(callbacksCalled, 2); +}()); + +(function runWrappedWithError() { + var callbackCalled; + var lib = {method: function(callback) { + callback(new Error('Some error')); + }}; + retry.wrap(lib, {retries: 1}); + lib.method(function(err) { + callbackCalled = true; + assert.ok(err instanceof Error); + }); + assert.ok(!callbackCalled); +}()); diff --git a/mybulma/node_modules/retry/test/integration/test-timeouts.js b/mybulma/node_modules/retry/test/integration/test-timeouts.js new file mode 100644 index 0000000..7206b0f --- /dev/null +++ b/mybulma/node_modules/retry/test/integration/test-timeouts.js @@ -0,0 +1,69 @@ +var common = require('../common'); +var assert = common.assert; +var retry = require(common.dir.lib + '/retry'); + +(function testDefaultValues() { + var timeouts = retry.timeouts(); + + assert.equal(timeouts.length, 10); + assert.equal(timeouts[0], 1000); + assert.equal(timeouts[1], 2000); + assert.equal(timeouts[2], 4000); +})(); + +(function testDefaultValuesWithRandomize() { + var minTimeout = 5000; + var timeouts = retry.timeouts({ + minTimeout: minTimeout, + randomize: true + }); + + assert.equal(timeouts.length, 10); + 
assert.ok(timeouts[0] > minTimeout); + assert.ok(timeouts[1] > timeouts[0]); + assert.ok(timeouts[2] > timeouts[1]); +})(); + +(function testPassedTimeoutsAreUsed() { + var timeoutsArray = [1000, 2000, 3000]; + var timeouts = retry.timeouts(timeoutsArray); + assert.deepEqual(timeouts, timeoutsArray); + assert.notStrictEqual(timeouts, timeoutsArray); +})(); + +(function testTimeoutsAreWithinBoundaries() { + var minTimeout = 1000; + var maxTimeout = 10000; + var timeouts = retry.timeouts({ + minTimeout: minTimeout, + maxTimeout: maxTimeout + }); + for (var i = 0; i < timeouts; i++) { + assert.ok(timeouts[i] >= minTimeout); + assert.ok(timeouts[i] <= maxTimeout); + } +})(); + +(function testTimeoutsAreIncremental() { + var timeouts = retry.timeouts(); + var lastTimeout = timeouts[0]; + for (var i = 0; i < timeouts; i++) { + assert.ok(timeouts[i] > lastTimeout); + lastTimeout = timeouts[i]; + } +})(); + +(function testTimeoutsAreIncrementalForFactorsLessThanOne() { + var timeouts = retry.timeouts({ + retries: 3, + factor: 0.5 + }); + + var expected = [250, 500, 1000]; + assert.deepEqual(expected, timeouts); +})(); + +(function testRetries() { + var timeouts = retry.timeouts({retries: 2}); + assert.strictEqual(timeouts.length, 2); +})(); diff --git a/mybulma/node_modules/rimraf/CHANGELOG.md b/mybulma/node_modules/rimraf/CHANGELOG.md new file mode 100644 index 0000000..f116f14 --- /dev/null +++ b/mybulma/node_modules/rimraf/CHANGELOG.md @@ -0,0 +1,65 @@ +# v3.0 + +- Add `--preserve-root` option to executable (default true) +- Drop support for Node.js below version 6 + +# v2.7 + +- Make `glob` an optional dependency + +# 2.6 + +- Retry on EBUSY on non-windows platforms as well +- Make `rimraf.sync` 10000% more reliable on Windows + +# 2.5 + +- Handle Windows EPERM when lstat-ing read-only dirs +- Add glob option to pass options to glob + +# 2.4 + +- Add EPERM to delay/retry loop +- Add `disableGlob` option + +# 2.3 + +- Make maxBusyTries and emfileWait configurable +- Handle weird SunOS unlink-dir issue +- Glob the CLI arg for better Windows support + +# 2.2 + +- Handle ENOENT properly on Windows +- Allow overriding fs methods +- Treat EPERM as indicative of non-empty dir +- Remove optional graceful-fs dep +- Consistently return null error instead of undefined on success +- win32: Treat ENOTEMPTY the same as EBUSY +- Add `rimraf` binary + +# 2.1 + +- Fix SunOS error code for a non-empty directory +- Try rmdir before readdir +- Treat EISDIR like EPERM +- Remove chmod +- Remove lstat polyfill, node 0.7 is not supported + +# 2.0 + +- Fix myGid call to check process.getgid +- Simplify the EBUSY backoff logic. +- Use fs.lstat in node >= 0.7.9 +- Remove gently option +- remove fiber implementation +- Delete files that are marked read-only + +# 1.0 + +- Allow ENOENT in sync method +- Throw when no callback is provided +- Make opts.gently an absolute path +- use 'stat' if 'lstat' is not available +- Consistent error naming, and rethrow non-ENOENT stat errors +- add fiber implementation diff --git a/mybulma/node_modules/rimraf/LICENSE b/mybulma/node_modules/rimraf/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/mybulma/node_modules/rimraf/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/mybulma/node_modules/rimraf/README.md b/mybulma/node_modules/rimraf/README.md new file mode 100644 index 0000000..423b8cf --- /dev/null +++ b/mybulma/node_modules/rimraf/README.md @@ -0,0 +1,101 @@ +[![Build Status](https://travis-ci.org/isaacs/rimraf.svg?branch=master)](https://travis-ci.org/isaacs/rimraf) [![Dependency Status](https://david-dm.org/isaacs/rimraf.svg)](https://david-dm.org/isaacs/rimraf) [![devDependency Status](https://david-dm.org/isaacs/rimraf/dev-status.svg)](https://david-dm.org/isaacs/rimraf#info=devDependencies) + +The [UNIX command](http://en.wikipedia.org/wiki/Rm_(Unix)) `rm -rf` for node. + +Install with `npm install rimraf`, or just drop rimraf.js somewhere. + +## API + +`rimraf(f, [opts], callback)` + +The first parameter will be interpreted as a globbing pattern for files. If you +want to disable globbing you can do so with `opts.disableGlob` (defaults to +`false`). This might be handy, for instance, if you have filenames that contain +globbing wildcard characters. + +The callback will be called with an error if there is one. Certain +errors are handled for you: + +* Windows: `EBUSY` and `ENOTEMPTY` - rimraf will back off a maximum of + `opts.maxBusyTries` times before giving up, adding 100ms of wait + between each attempt. The default `maxBusyTries` is 3. +* `ENOENT` - If the file doesn't exist, rimraf will return + successfully, since your desired outcome is already the case. +* `EMFILE` - Since `readdir` requires opening a file descriptor, it's + possible to hit `EMFILE` if too many file descriptors are in use. + In the sync case, there's nothing to be done for this. But in the + async case, rimraf will gradually back off with timeouts up to + `opts.emfileWait` ms, which defaults to 1000. + +## options + +* unlink, chmod, stat, lstat, rmdir, readdir, + unlinkSync, chmodSync, statSync, lstatSync, rmdirSync, readdirSync + + In order to use a custom file system library, you can override + specific fs functions on the options object. + + If any of these functions are present on the options object, then + the supplied function will be used instead of the default fs + method. + + Sync methods are only relevant for `rimraf.sync()`, of course. + + For example: + + ```javascript + var myCustomFS = require('some-custom-fs') + + rimraf('some-thing', myCustomFS, callback) + ``` + +* maxBusyTries + + If an `EBUSY`, `ENOTEMPTY`, or `EPERM` error code is encountered + on Windows systems, then rimraf will retry with a linear backoff + wait of 100ms longer on each try. The default maxBusyTries is 3. + + Only relevant for async usage. + +* emfileWait + + If an `EMFILE` error is encountered, then rimraf will retry + repeatedly with a linear backoff of 1ms longer on each try, until + the timeout counter hits this max. The default limit is 1000. + + If you repeatedly encounter `EMFILE` errors, then consider using + [graceful-fs](http://npm.im/graceful-fs) in your program. + + Only relevant for async usage. 
+ +* glob + + Set to `false` to disable [glob](http://npm.im/glob) pattern + matching. + + Set to an object to pass options to the glob module. The default + glob options are `{ nosort: true, silent: true }`. + + Glob version 6 is used in this module. + + Relevant for both sync and async usage. + +* disableGlob + + Set to any non-falsey value to disable globbing entirely. + (Equivalent to setting `glob: false`.) + +## rimraf.sync + +It can remove stuff synchronously, too. But that's not so good. Use +the async API. It's better. + +## CLI + +If installed with `npm install rimraf -g` it can be used as a global +command `rimraf [ ...]` which is useful for cross platform support. + +## mkdirp + +If you need to create a directory recursively, check out +[mkdirp](https://github.com/substack/node-mkdirp). diff --git a/mybulma/node_modules/rimraf/bin.js b/mybulma/node_modules/rimraf/bin.js new file mode 100644 index 0000000..023814c --- /dev/null +++ b/mybulma/node_modules/rimraf/bin.js @@ -0,0 +1,68 @@ +#!/usr/bin/env node + +const rimraf = require('./') + +const path = require('path') + +const isRoot = arg => /^(\/|[a-zA-Z]:\\)$/.test(path.resolve(arg)) +const filterOutRoot = arg => { + const ok = preserveRoot === false || !isRoot(arg) + if (!ok) { + console.error(`refusing to remove ${arg}`) + console.error('Set --no-preserve-root to allow this') + } + return ok +} + +let help = false +let dashdash = false +let noglob = false +let preserveRoot = true +const args = process.argv.slice(2).filter(arg => { + if (dashdash) + return !!arg + else if (arg === '--') + dashdash = true + else if (arg === '--no-glob' || arg === '-G') + noglob = true + else if (arg === '--glob' || arg === '-g') + noglob = false + else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/)) + help = true + else if (arg === '--preserve-root') + preserveRoot = true + else if (arg === '--no-preserve-root') + preserveRoot = false + else + return !!arg +}).filter(arg => !preserveRoot || filterOutRoot(arg)) + +const go = n => { + if (n >= args.length) + return + const options = noglob ? { glob: false } : {} + rimraf(args[n], options, er => { + if (er) + throw er + go(n+1) + }) +} + +if (help || args.length === 0) { + // If they didn't ask for help, then this is not a "success" + const log = help ? console.log : console.error + log('Usage: rimraf [ ...]') + log('') + log(' Deletes all files and folders at "path" recursively.') + log('') + log('Options:') + log('') + log(' -h, --help Display this usage info') + log(' -G, --no-glob Do not expand glob patterns in arguments') + log(' -g, --glob Expand glob patterns in arguments (default)') + log(' --preserve-root Do not remove \'/\' (default)') + log(' --no-preserve-root Do not treat \'/\' specially') + log(' -- Stop parsing flags') + process.exit(help ? 0 : 1) +} else + go(0) diff --git a/mybulma/node_modules/rimraf/package.json b/mybulma/node_modules/rimraf/package.json new file mode 100644 index 0000000..1bf8d5e --- /dev/null +++ b/mybulma/node_modules/rimraf/package.json @@ -0,0 +1,32 @@ +{ + "name": "rimraf", + "version": "3.0.2", + "main": "rimraf.js", + "description": "A deep deletion module for node (like `rm -rf`)", + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": "git://github.com/isaacs/rimraf.git", + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags", + "test": "tap test/*.js" + }, + "bin": "./bin.js", + "dependencies": { + "glob": "^7.1.3" + }, + "files": [ + "LICENSE", + "README.md", + "bin.js", + "rimraf.js" + ], + "devDependencies": { + "mkdirp": "^0.5.1", + "tap": "^12.1.1" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } +} diff --git a/mybulma/node_modules/rimraf/rimraf.js b/mybulma/node_modules/rimraf/rimraf.js new file mode 100644 index 0000000..34da417 --- /dev/null +++ b/mybulma/node_modules/rimraf/rimraf.js @@ -0,0 +1,360 @@ +const assert = require("assert") +const path = require("path") +const fs = require("fs") +let glob = undefined +try { + glob = require("glob") +} catch (_err) { + // treat glob as optional. +} + +const defaultGlobOpts = { + nosort: true, + silent: true +} + +// for EMFILE handling +let timeout = 0 + +const isWindows = (process.platform === "win32") + +const defaults = options => { + const methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(m => { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) + + options.maxBusyTries = options.maxBusyTries || 3 + options.emfileWait = options.emfileWait || 1000 + if (options.glob === false) { + options.disableGlob = true + } + if (options.disableGlob !== true && glob === undefined) { + throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') + } + options.disableGlob = options.disableGlob || false + options.glob = options.glob || defaultGlobOpts +} + +const rimraf = (p, options, cb) => { + if (typeof options === 'function') { + cb = options + options = {} + } + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert.equal(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + defaults(options) + + let busyTries = 0 + let errState = null + let n = 0 + + const next = (er) => { + errState = errState || er + if (--n === 0) + cb(errState) + } + + const afterGlob = (er, results) => { + if (er) + return cb(er) + + n = results.length + if (n === 0) + return cb() + + results.forEach(p => { + const CB = (er) => { + if (er) { + if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && + busyTries < options.maxBusyTries) { + busyTries ++ + // try again, with the same exact callback as this one. + return setTimeout(() => rimraf_(p, options, CB), busyTries * 100) + } + + // this one won't happen if graceful-fs is used. + if (er.code === "EMFILE" && timeout < options.emfileWait) { + return setTimeout(() => rimraf_(p, options, CB), timeout ++) + } + + // already gone + if (er.code === "ENOENT") er = null + } + + timeout = 0 + next(er) + } + rimraf_(p, options, CB) + }) + } + + if (options.disableGlob || !glob.hasMagic(p)) + return afterGlob(null, [p]) + + options.lstat(p, (er, stat) => { + if (!er) + return afterGlob(null, [p]) + + glob(p, options.glob, afterGlob) + }) + +} + +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. 
readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +const rimraf_ = (p, options, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, (er, st) => { + if (er && er.code === "ENOENT") + return cb(null) + + // Windows can EPERM on stat. Life is suffering. + if (er && er.code === "EPERM" && isWindows) + fixWinEPERM(p, options, er, cb) + + if (st && st.isDirectory()) + return rmdir(p, options, er, cb) + + options.unlink(p, er => { + if (er) { + if (er.code === "ENOENT") + return cb(null) + if (er.code === "EPERM") + return (isWindows) + ? fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + if (er.code === "EISDIR") + return rmdir(p, options, er, cb) + } + return cb(er) + }) + }) +} + +const fixWinEPERM = (p, options, er, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.chmod(p, 0o666, er2 => { + if (er2) + cb(er2.code === "ENOENT" ? null : er) + else + options.stat(p, (er3, stats) => { + if (er3) + cb(er3.code === "ENOENT" ? null : er) + else if (stats.isDirectory()) + rmdir(p, options, er, cb) + else + options.unlink(p, cb) + }) + }) +} + +const fixWinEPERMSync = (p, options, er) => { + assert(p) + assert(options) + + try { + options.chmodSync(p, 0o666) + } catch (er2) { + if (er2.code === "ENOENT") + return + else + throw er + } + + let stats + try { + stats = options.statSync(p) + } catch (er3) { + if (er3.code === "ENOENT") + return + else + throw er + } + + if (stats.isDirectory()) + rmdirSync(p, options, er) + else + options.unlinkSync(p) +} + +const rmdir = (p, options, originalEr, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. + options.rmdir(p, er => { + if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) + rmkids(p, options, cb) + else if (er && er.code === "ENOTDIR") + cb(originalEr) + else + cb(er) + }) +} + +const rmkids = (p, options, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.readdir(p, (er, files) => { + if (er) + return cb(er) + let n = files.length + if (n === 0) + return options.rmdir(p, cb) + let errState + files.forEach(f => { + rimraf(path.join(p, f), options, er => { + if (errState) + return + if (er) + return cb(errState = er) + if (--n === 0) + options.rmdir(p, cb) + }) + }) + }) +} + +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. 
+const rimrafSync = (p, options) => { + options = options || {} + defaults(options) + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + let results + + if (options.disableGlob || !glob.hasMagic(p)) { + results = [p] + } else { + try { + options.lstatSync(p) + results = [p] + } catch (er) { + results = glob.sync(p, options.glob) + } + } + + if (!results.length) + return + + for (let i = 0; i < results.length; i++) { + const p = results[i] + + let st + try { + st = options.lstatSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + + // Windows can EPERM on stat. Life is suffering. + if (er.code === "EPERM" && isWindows) + fixWinEPERMSync(p, options, er) + } + + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) + rmdirSync(p, options, null) + else + options.unlinkSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "EPERM") + return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + if (er.code !== "EISDIR") + throw er + + rmdirSync(p, options, er) + } + } +} + +const rmdirSync = (p, options, originalEr) => { + assert(p) + assert(options) + + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "ENOTDIR") + throw originalEr + if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") + rmkidsSync(p, options) + } +} + +const rmkidsSync = (p, options) => { + assert(p) + assert(options) + options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) + + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. + // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. + const retries = isWindows ? 100 : 1 + let i = 0 + do { + let threw = true + try { + const ret = options.rmdirSync(p, options) + threw = false + return ret + } finally { + if (++i < retries && threw) + continue + } + } while (true) +} + +module.exports = rimraf +rimraf.sync = rimrafSync diff --git a/mybulma/node_modules/safe-buffer/LICENSE b/mybulma/node_modules/safe-buffer/LICENSE new file mode 100644 index 0000000..0c068ce --- /dev/null +++ b/mybulma/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/mybulma/node_modules/safe-buffer/README.md b/mybulma/node_modules/safe-buffer/README.md new file mode 100644 index 0000000..e9a81af --- /dev/null +++ b/mybulma/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. 
+ +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. + +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. 
+ +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. 
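A minimal sketch of that ambiguity (illustrative only — these calls all go through the same overloaded constructor):

```js
// Same constructor, three very different results:
new Buffer('abc')      // 3 bytes: the UTF-8 encoding of the string 'abc'
new Buffer([1, 2, 3])  // 3 bytes: 0x01 0x02 0x03
new Buffer(1024)       // 1024 bytes of *uninitialized* memory on older Node.js versions
```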
+ +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. 
We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. 
But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/mybulma/node_modules/safe-buffer/index.d.ts b/mybulma/node_modules/safe-buffer/index.d.ts new file mode 100644 index 0000000..e9fed80 --- /dev/null +++ b/mybulma/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + 
indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. 
+ * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/mybulma/node_modules/safe-buffer/index.js b/mybulma/node_modules/safe-buffer/index.js new file mode 100644 index 0000000..f8d3ec9 --- /dev/null +++ b/mybulma/node_modules/safe-buffer/index.js @@ -0,0 +1,65 @@ +/*! safe-buffer. MIT License. Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/mybulma/node_modules/safe-buffer/package.json b/mybulma/node_modules/safe-buffer/package.json new file mode 100644 index 0000000..f2869e2 --- /dev/null +++ 
b/mybulma/node_modules/safe-buffer/package.json @@ -0,0 +1,51 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.2.1", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^5.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/mybulma/node_modules/safer-buffer/LICENSE b/mybulma/node_modules/safer-buffer/LICENSE new file mode 100644 index 0000000..4fe9e6f --- /dev/null +++ b/mybulma/node_modules/safer-buffer/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Nikita Skovoroda + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/mybulma/node_modules/safer-buffer/Porting-Buffer.md b/mybulma/node_modules/safer-buffer/Porting-Buffer.md new file mode 100644 index 0000000..68d86ba --- /dev/null +++ b/mybulma/node_modules/safer-buffer/Porting-Buffer.md @@ -0,0 +1,268 @@ +# Porting to the Buffer.from/Buffer.alloc API + + +## Overview + +- [Variant 1: Drop support for Node.js ≤ 4.4.x and 5.0.0 — 5.9.x.](#variant-1) (*recommended*) +- [Variant 2: Use a polyfill](#variant-2) +- [Variant 3: manual detection, with safeguards](#variant-3) + +### Finding problematic bits of code using grep + +Just run `grep -nrE '[^a-zA-Z](Slow)?Buffer\s*\(' --exclude-dir node_modules`. + +It will find all the potentially unsafe places in your own code (with some considerably unlikely +exceptions). + +### Finding problematic bits of code using Node.js 8 + +If you’re using Node.js ≥ 8.0.0 (which is recommended), Node.js exposes multiple options that help with finding the relevant pieces of code: + +- `--trace-warnings` will make Node.js show a stack trace for this warning and other warnings that are printed by Node.js. +- `--trace-deprecation` does the same thing, but only for deprecation warnings. 
+- `--pending-deprecation` will show more types of deprecation warnings. In particular, it will show the `Buffer()` deprecation warning, even on Node.js 8. + +You can set these flags using an environment variable: + +```console +$ export NODE_OPTIONS='--trace-warnings --pending-deprecation' +$ cat example.js +'use strict'; +const foo = new Buffer('foo'); +$ node example.js +(node:7147) [DEP0005] DeprecationWarning: The Buffer() and new Buffer() constructors are not recommended for use due to security and usability concerns. Please use the new Buffer.alloc(), Buffer.allocUnsafe(), or Buffer.from() construction methods instead. + at showFlaggedDeprecation (buffer.js:127:13) + at new Buffer (buffer.js:148:3) + at Object. (/path/to/example.js:2:13) + [... more stack trace lines ...] +``` + +### Finding problematic bits of code using linters + +Eslint rules [no-buffer-constructor](https://eslint.org/docs/rules/no-buffer-constructor) +or +[node/no-deprecated-api](https://github.com/mysticatea/eslint-plugin-node/blob/master/docs/rules/no-deprecated-api.md) +also find calls to deprecated `Buffer()` API. Those rules are included in some pre-sets. + +There is a drawback, though, that it doesn't always +[work correctly](https://github.com/chalker/safer-buffer#why-not-safe-buffer) when `Buffer` is +overriden e.g. with a polyfill, so recommended is a combination of this and some other method +described above. + + +## Variant 1: Drop support for Node.js ≤ 4.4.x and 5.0.0 — 5.9.x. + +This is the recommended solution nowadays that would imply only minimal overhead. + +The Node.js 5.x release line has been unsupported since July 2016, and the Node.js 4.x release line reaches its End of Life in April 2018 (→ [Schedule](https://github.com/nodejs/Release#release-schedule)). This means that these versions of Node.js will *not* receive any updates, even in case of security issues, so using these release lines should be avoided, if at all possible. + +What you would do in this case is to convert all `new Buffer()` or `Buffer()` calls to use `Buffer.alloc()` or `Buffer.from()`, in the following way: + +- For `new Buffer(number)`, replace it with `Buffer.alloc(number)`. +- For `new Buffer(string)` (or `new Buffer(string, encoding)`), replace it with `Buffer.from(string)` (or `Buffer.from(string, encoding)`). +- For all other combinations of arguments (these are much rarer), also replace `new Buffer(...arguments)` with `Buffer.from(...arguments)`. + +Note that `Buffer.alloc()` is also _faster_ on the current Node.js versions than +`new Buffer(size).fill(0)`, which is what you would otherwise need to ensure zero-filling. + +Enabling eslint rule [no-buffer-constructor](https://eslint.org/docs/rules/no-buffer-constructor) +or +[node/no-deprecated-api](https://github.com/mysticatea/eslint-plugin-node/blob/master/docs/rules/no-deprecated-api.md) +is recommended to avoid accidential unsafe Buffer API usage. + +There is also a [JSCodeshift codemod](https://github.com/joyeecheung/node-dep-codemod#dep005) +for automatically migrating Buffer constructors to `Buffer.alloc()` or `Buffer.from()`. +Note that it currently only works with cases where the arguments are literals or where the +constructor is invoked with two arguments. 
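As a sketch of the conversions listed above (the inputs `size` and `str` are hypothetical placeholders):

```js
var size = 16
var str = 'hello world'

// new Buffer(size)          ->  Buffer.alloc(size)   (zero-filled)
var zeroed = Buffer.alloc(size)

// new Buffer(str, 'utf8')   ->  Buffer.from(str, 'utf8')
var encoded = Buffer.from(str, 'utf8')

// new Buffer([0x01, 0x02])  ->  Buffer.from([0x01, 0x02])
var fromArray = Buffer.from([0x01, 0x02])
```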
+ +_If you currently support those older Node.js versions and dropping them would be a semver-major change +for you, or if you support older branches of your packages, consider using [Variant 2](#variant-2) +or [Variant 3](#variant-3) on older branches, so people using those older branches will also receive +the fix. That way, you will eradicate potential issues caused by unguarded Buffer API usage and +your users will not observe a runtime deprecation warning when running your code on Node.js 10._ + + +## Variant 2: Use a polyfill + +Utilize [safer-buffer](https://www.npmjs.com/package/safer-buffer) as a polyfill to support older +Node.js versions. + +You would take exacly the same steps as in [Variant 1](#variant-1), but with a polyfill +`const Buffer = require('safer-buffer').Buffer` in all files where you use the new `Buffer` api. + +Make sure that you do not use old `new Buffer` API — in any files where the line above is added, +using old `new Buffer()` API will _throw_. It will be easy to notice that in CI, though. + +Alternatively, you could use [buffer-from](https://www.npmjs.com/package/buffer-from) and/or +[buffer-alloc](https://www.npmjs.com/package/buffer-alloc) [ponyfills](https://ponyfill.com/) — +those are great, the only downsides being 4 deps in the tree and slightly more code changes to +migrate off them (as you would be using e.g. `Buffer.from` under a different name). If you need only +`Buffer.from` polyfilled — `buffer-from` alone which comes with no extra dependencies. + +_Alternatively, you could use [safe-buffer](https://www.npmjs.com/package/safe-buffer) — it also +provides a polyfill, but takes a different approach which has +[it's drawbacks](https://github.com/chalker/safer-buffer#why-not-safe-buffer). It will allow you +to also use the older `new Buffer()` API in your code, though — but that's arguably a benefit, as +it is problematic, can cause issues in your code, and will start emitting runtime deprecation +warnings starting with Node.js 10._ + +Note that in either case, it is important that you also remove all calls to the old Buffer +API manually — just throwing in `safe-buffer` doesn't fix the problem by itself, it just provides +a polyfill for the new API. I have seen people doing that mistake. + +Enabling eslint rule [no-buffer-constructor](https://eslint.org/docs/rules/no-buffer-constructor) +or +[node/no-deprecated-api](https://github.com/mysticatea/eslint-plugin-node/blob/master/docs/rules/no-deprecated-api.md) +is recommended. + +_Don't forget to drop the polyfill usage once you drop support for Node.js < 4.5.0._ + + +## Variant 3 — manual detection, with safeguards + +This is useful if you create Buffer instances in only a few places (e.g. one), or you have your own +wrapper around them. + +### Buffer(0) + +This special case for creating empty buffers can be safely replaced with `Buffer.concat([])`, which +returns the same result all the way down to Node.js 0.8.x. + +### Buffer(notNumber) + +Before: + +```js +var buf = new Buffer(notNumber, encoding); +``` + +After: + +```js +var buf; +if (Buffer.from && Buffer.from !== Uint8Array.from) { + buf = Buffer.from(notNumber, encoding); +} else { + if (typeof notNumber === 'number') + throw new Error('The "size" argument must be of type number.'); + buf = new Buffer(notNumber, encoding); +} +``` + +`encoding` is optional. 
+ +Note that the `typeof notNumber` before `new Buffer` is required (for cases when `notNumber` argument is not +hard-coded) and _is not caused by the deprecation of Buffer constructor_ — it's exactly _why_ the +Buffer constructor is deprecated. Ecosystem packages lacking this type-check caused numereous +security issues — situations when unsanitized user input could end up in the `Buffer(arg)` create +problems ranging from DoS to leaking sensitive information to the attacker from the process memory. + +When `notNumber` argument is hardcoded (e.g. literal `"abc"` or `[0,1,2]`), the `typeof` check can +be omitted. + +Also note that using TypeScript does not fix this problem for you — when libs written in +`TypeScript` are used from JS, or when user input ends up there — it behaves exactly as pure JS, as +all type checks are translation-time only and are not present in the actual JS code which TS +compiles to. + +### Buffer(number) + +For Node.js 0.10.x (and below) support: + +```js +var buf; +if (Buffer.alloc) { + buf = Buffer.alloc(number); +} else { + buf = new Buffer(number); + buf.fill(0); +} +``` + +Otherwise (Node.js ≥ 0.12.x): + +```js +const buf = Buffer.alloc ? Buffer.alloc(number) : new Buffer(number).fill(0); +``` + +## Regarding Buffer.allocUnsafe + +Be extra cautious when using `Buffer.allocUnsafe`: + * Don't use it if you don't have a good reason to + * e.g. you probably won't ever see a performance difference for small buffers, in fact, those + might be even faster with `Buffer.alloc()`, + * if your code is not in the hot code path — you also probably won't notice a difference, + * keep in mind that zero-filling minimizes the potential risks. + * If you use it, make sure that you never return the buffer in a partially-filled state, + * if you are writing to it sequentially — always truncate it to the actuall written length + +Errors in handling buffers allocated with `Buffer.allocUnsafe` could result in various issues, +ranged from undefined behaviour of your code to sensitive data (user input, passwords, certs) +leaking to the remote attacker. + +_Note that the same applies to `new Buffer` usage without zero-filling, depending on the Node.js +version (and lacking type checks also adds DoS to the list of potential problems)._ + + +## FAQ + + +### What is wrong with the `Buffer` constructor? + +The `Buffer` constructor could be used to create a buffer in many different ways: + +- `new Buffer(42)` creates a `Buffer` of 42 bytes. Before Node.js 8, this buffer contained + *arbitrary memory* for performance reasons, which could include anything ranging from + program source code to passwords and encryption keys. +- `new Buffer('abc')` creates a `Buffer` that contains the UTF-8-encoded version of + the string `'abc'`. A second argument could specify another encoding: For example, + `new Buffer(string, 'base64')` could be used to convert a Base64 string into the original + sequence of bytes that it represents. +- There are several other combinations of arguments. + +This meant that, in code like `var buffer = new Buffer(foo);`, *it is not possible to tell +what exactly the contents of the generated buffer are* without knowing the type of `foo`. + +Sometimes, the value of `foo` comes from an external source. 
For example, this function
+could be exposed as a service on a web server, converting a UTF-8 string into its Base64 form:
+
+```js
+function stringToBase64(req, res) {
+  // The request body should have the format of `{ string: 'foobar' }`
+  const rawBytes = new Buffer(req.body.string)
+  const encoded = rawBytes.toString('base64')
+  res.end({ encoded: encoded })
+}
+```
+
+Note that this code does *not* validate the type of `req.body.string`:
+
+- `req.body.string` is expected to be a string. If this is the case, all goes well.
+- `req.body.string` is controlled by the client that sends the request.
+- If `req.body.string` is the *number* `50`, `rawBytes` would be 50 bytes:
+  - Before Node.js 8, the content would be uninitialized
+  - Since Node.js 8, the content would be 50 bytes with the value `0`
+
+Because of the missing type check, an attacker could intentionally send a number
+as part of the request. Using this, they can either:
+
+- Read uninitialized memory. This **will** leak passwords, encryption keys and other
+  kinds of sensitive information. (Information leak)
+- Force the program to allocate a large amount of memory. For example, when specifying
+  `500000000` as the input value, each request will allocate 500MB of memory.
+  This can be used to either exhaust the memory available to the program completely
+  and make it crash, or slow it down significantly. (Denial of Service)
+
+Both of these scenarios are considered serious security issues in a real-world
+web server context.
+
+When using `Buffer.from(req.body.string)` instead, passing a number will always
+throw an exception, giving controlled behaviour that can always be
+handled by the program.
+
+
+### The `Buffer()` constructor has been deprecated for a while. Is this really an issue?
+
+Surveys of code in the `npm` ecosystem have shown that the `Buffer()` constructor is still
+widely used. This includes new code, and overall usage of such code has actually been
+*increasing*.
diff --git a/mybulma/node_modules/safer-buffer/Readme.md b/mybulma/node_modules/safer-buffer/Readme.md
new file mode 100644
index 0000000..14b0822
--- /dev/null
+++ b/mybulma/node_modules/safer-buffer/Readme.md
@@ -0,0 +1,156 @@
+# safer-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![javascript style guide][standard-image]][standard-url] [![Security Responsible Disclosure][security-image]][security-url]
+
+[travis-image]: https://travis-ci.org/ChALkeR/safer-buffer.svg?branch=master
+[travis-url]: https://travis-ci.org/ChALkeR/safer-buffer
+[npm-image]: https://img.shields.io/npm/v/safer-buffer.svg
+[npm-url]: https://npmjs.org/package/safer-buffer
+[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg
+[standard-url]: https://standardjs.com
+[security-image]: https://img.shields.io/badge/Security-Responsible%20Disclosure-green.svg
+[security-url]: https://github.com/nodejs/security-wg/blob/master/processes/responsible_disclosure_template.md
+
+Modern Buffer API polyfill without footguns, working on Node.js from 0.8 to current.
+
+## How to use?
+
+First, port all `Buffer()` and `new Buffer()` calls to the `Buffer.alloc()` and `Buffer.from()` API.
+
+Then, to achieve compatibility with outdated Node.js versions (`<4.5.0` and 5.x `<5.9.0`), use
+`const Buffer = require('safer-buffer').Buffer` in all files where you make calls to the new
+Buffer API.
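+
+For illustration, the resulting drop-in usage looks roughly like this (a sketch under the setup
+described above, not an excerpt from the package):
+
+```js
+const Buffer = require('safer-buffer').Buffer
+
+const zeroed = Buffer.alloc(16)               // always zero-filled
+const parsed = Buffer.from('deadbeef', 'hex') // contents fully determined by the input
+
+// In this file the deprecated constructor is no longer callable, so a stray
+// `new Buffer(10)` now throws instead of silently allocating uninitialized memory.
+```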
_Use `var` instead of `const` if you need that for your Node.js version range support._
+
+Also, see the
+[porting Buffer](https://github.com/ChALkeR/safer-buffer/blob/master/Porting-Buffer.md) guide.
+
+## Do I need it?
+
+Hopefully not — dropping support for outdated Node.js versions should be fine nowadays, and that
+is the recommended path forward. You _do_ need to port to `Buffer.alloc()` and `Buffer.from()`,
+though.
+
+See the [porting guide](https://github.com/ChALkeR/safer-buffer/blob/master/Porting-Buffer.md)
+for a better description.
+
+## Why not [safe-buffer](https://npmjs.com/safe-buffer)?
+
+_In short: while `safe-buffer` serves as a polyfill for the new API, it allows old API usage and
+itself contains footguns._
+
+`safe-buffer` could be used safely to get the new API while still keeping support for older
+Node.js versions (like this module), but while analyzing ecosystem usage of the old Buffer API
+I found out that `safe-buffer` itself causes problems in some cases.
+
+For example, consider the following snippet:
+
+```console
+$ cat example.unsafe.js
+console.log(Buffer(20))
+$ ./node-v6.13.0-linux-x64/bin/node example.unsafe.js
+
+$ standard example.unsafe.js
+standard: Use JavaScript Standard Style (https://standardjs.com)
+  /home/chalker/repo/safer-buffer/example.unsafe.js:2:13: 'Buffer()' was deprecated since v6. Use 'Buffer.alloc()' or 'Buffer.from()' (use 'https://www.npmjs.com/package/safe-buffer' for '<4.5.0') instead.
+```
+
+This allocates an uninitialized chunk of memory and writes it to the console.
+The [standard](https://www.npmjs.com/package/standard) linter (among others) catches that and warns
+people to avoid using the unsafe API.
+
+Let's now throw in `safe-buffer`!
+
+```console
+$ cat example.safe-buffer.js
+const Buffer = require('safe-buffer').Buffer
+console.log(Buffer(20))
+$ standard example.safe-buffer.js
+$ ./node-v6.13.0-linux-x64/bin/node example.safe-buffer.js
+
+```
+
+See the problem? Adding in `safe-buffer` _magically removes the lint warning_, but the behavior
+remains identical to what we had before, and when launched on Node.js 6.x LTS — this dumps out
+chunks of uninitialized memory.
+_And this code will still emit runtime warnings on Node.js 10.x and above._
+
+That was done by design. I first considered changing `safe-buffer`, prohibiting old API usage or
+emitting warnings on it, but that significantly diverges from the `safe-buffer` design. After some
+discussion, it was decided to move my approach into a separate package, and _this is that separate
+package_.
+
+This footgun is not imaginary — I observed top-downloaded packages doing that kind of thing,
+«fixing» the lint warning by blindly including `safe-buffer` without any actual changes.
+
+Also in some cases, even if the API _was_ migrated to the safe Buffer API — a random pull request
+can bring unsafe Buffer API usage back to the codebase by adding new calls — and that could go
+unnoticed even if you have a linter prohibiting that (because of the reason stated above), and even
+pass CI.
_I also observed that being done in popular packages._
+
+Some examples:
+ * [webdriverio](https://github.com/webdriverio/webdriverio/commit/05cbd3167c12e4930f09ef7cf93b127ba4effae4#diff-124380949022817b90b622871837d56cR31)
+   (a module with 548 759 downloads/month),
+ * [websocket-stream](https://github.com/maxogden/websocket-stream/commit/c9312bd24d08271687d76da0fe3c83493871cf61)
+   (218 288 d/m, fix in [maxogden/websocket-stream#142](https://github.com/maxogden/websocket-stream/pull/142)),
+ * [node-serialport](https://github.com/node-serialport/node-serialport/commit/e8d9d2b16c664224920ce1c895199b1ce2def48c)
+   (113 138 d/m, fix in [node-serialport/node-serialport#1510](https://github.com/node-serialport/node-serialport/pull/1510)),
+ * [karma](https://github.com/karma-runner/karma/commit/3d94b8cf18c695104ca195334dc75ff054c74eec)
+   (3 973 193 d/m, fix in [karma-runner/karma#2947](https://github.com/karma-runner/karma/pull/2947)),
+ * [spdy-transport](https://github.com/spdy-http2/spdy-transport/commit/5375ac33f4a62a4f65bcfc2827447d42a5dbe8b1)
+   (5 970 727 d/m, fix in [spdy-http2/spdy-transport#53](https://github.com/spdy-http2/spdy-transport/pull/53)),
+ * and there are a lot more across the ecosystem.
+
+I filed a PR at
+[mysticatea/eslint-plugin-node#110](https://github.com/mysticatea/eslint-plugin-node/pull/110) to
+partially fix that (for cases when that lint rule is used), but it is a semver-major change for
+linter rules and presets, so it would take significant time for that to reach actual setups.
+_It also hasn't been released yet (2018-03-20)._
+
+Also, `safer-buffer` discourages the usage of `.allocUnsafe()`, which is often used by mistake.
+It still supports it behind an explicit concern barrier, by placing it under
+`require('safer-buffer/dangerous')`.
+
+## But isn't throwing bad?
+
+Not really. It's an error that could be noticed and fixed early, instead of causing havoc later,
+as unguarded `new Buffer()` calls that end up receiving user input can.
+
+This package affects only the files where `var Buffer = require('safer-buffer').Buffer` was added, so
+it is really simple to keep track of things and make sure that you don't mix old API usage with that.
+Also, CI should catch anything that you might have missed.
+
+New commits, if tested, won't introduce new unsafe Buffer API usage this way.
+_Node.js 10.x also deals with that by printing a runtime deprecation warning._
+
+### Would it affect third-party modules?
+
+No, unless you explicitly do an awful thing like monkey-patching or overriding the built-in `Buffer`.
+Don't do that.
+
+### But I don't want throwing…
+
+That is also fine!
+
+Also, it could be the better option in some cases, e.g. when you don't have comprehensive enough
+test coverage.
+
+In that case — just don't override `Buffer` and use
+`var SaferBuffer = require('safer-buffer').Buffer` instead.
+
+That way, everything using `Buffer` natively would still work, but there would be two drawbacks:
+
+* `Buffer.from`/`Buffer.alloc` won't be polyfilled — use `SaferBuffer.from` and
+  `SaferBuffer.alloc` instead.
+* You are still open to accidentally using the insecure deprecated API — use a linter to catch that.
+
+Note that using a linter to catch accidental `Buffer` constructor usage in this case is strongly
+recommended. `Buffer` is not overridden in this use case, so linters won't get confused.
+
+## «Without footguns»?
+
+Well, it is still possible to do _some_ things with the `Buffer` API, e.g. accessing the `.buffer`
+property on older versions and duping things from there.
You shouldn't do that in your code, probably.
+
+The intention is to remove the most significant footguns that affect lots of packages in the
+ecosystem, and to do it in the proper way.
+
+Also, this package doesn't protect against security issues affecting some Node.js versions, so for
+usage in your own production code, it is still recommended to update to a Node.js version
+[supported by upstream](https://github.com/nodejs/release#release-schedule).
diff --git a/mybulma/node_modules/safer-buffer/dangerous.js b/mybulma/node_modules/safer-buffer/dangerous.js
new file mode 100644
index 0000000..ca41fdc
--- /dev/null
+++ b/mybulma/node_modules/safer-buffer/dangerous.js
@@ -0,0 +1,58 @@
+/* eslint-disable node/no-deprecated-api */
+
+'use strict'
+
+var buffer = require('buffer')
+var Buffer = buffer.Buffer
+var safer = require('./safer.js')
+var Safer = safer.Buffer
+
+var dangerous = {}
+
+var key
+
+for (key in safer) {
+  if (!safer.hasOwnProperty(key)) continue
+  dangerous[key] = safer[key]
+}
+
+var Dangereous = dangerous.Buffer = {}
+
+// Copy Safer API
+for (key in Safer) {
+  if (!Safer.hasOwnProperty(key)) continue
+  Dangereous[key] = Safer[key]
+}
+
+// Copy those missing unsafe methods, if they are present
+for (key in Buffer) {
+  if (!Buffer.hasOwnProperty(key)) continue
+  if (Dangereous.hasOwnProperty(key)) continue
+  Dangereous[key] = Buffer[key]
+}
+
+if (!Dangereous.allocUnsafe) {
+  Dangereous.allocUnsafe = function (size) {
+    if (typeof size !== 'number') {
+      throw new TypeError('The "size" argument must be of type number. Received type ' + typeof size)
+    }
+    if (size < 0 || size >= 2 * (1 << 30)) {
+      throw new RangeError('The value "' + size + '" is invalid for option "size"')
+    }
+    return Buffer(size)
+  }
+}
+
+if (!Dangereous.allocUnsafeSlow) {
+  Dangereous.allocUnsafeSlow = function (size) {
+    if (typeof size !== 'number') {
+      throw new TypeError('The "size" argument must be of type number. 
Received type ' + typeof size) + } + if (size < 0 || size >= 2 * (1 << 30)) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } + return buffer.SlowBuffer(size) + } +} + +module.exports = dangerous diff --git a/mybulma/node_modules/safer-buffer/package.json b/mybulma/node_modules/safer-buffer/package.json new file mode 100644 index 0000000..d452b04 --- /dev/null +++ b/mybulma/node_modules/safer-buffer/package.json @@ -0,0 +1,34 @@ +{ + "name": "safer-buffer", + "version": "2.1.2", + "description": "Modern Buffer API polyfill without footguns", + "main": "safer.js", + "scripts": { + "browserify-test": "browserify --external tape tests.js > browserify-tests.js && tape browserify-tests.js", + "test": "standard && tape tests.js" + }, + "author": { + "name": "Nikita Skovoroda", + "email": "chalkerx@gmail.com", + "url": "https://github.com/ChALkeR" + }, + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/ChALkeR/safer-buffer.git" + }, + "bugs": { + "url": "https://github.com/ChALkeR/safer-buffer/issues" + }, + "devDependencies": { + "standard": "^11.0.1", + "tape": "^4.9.0" + }, + "files": [ + "Porting-Buffer.md", + "Readme.md", + "tests.js", + "dangerous.js", + "safer.js" + ] +} diff --git a/mybulma/node_modules/safer-buffer/safer.js b/mybulma/node_modules/safer-buffer/safer.js new file mode 100644 index 0000000..37c7e1a --- /dev/null +++ b/mybulma/node_modules/safer-buffer/safer.js @@ -0,0 +1,77 @@ +/* eslint-disable node/no-deprecated-api */ + +'use strict' + +var buffer = require('buffer') +var Buffer = buffer.Buffer + +var safer = {} + +var key + +for (key in buffer) { + if (!buffer.hasOwnProperty(key)) continue + if (key === 'SlowBuffer' || key === 'Buffer') continue + safer[key] = buffer[key] +} + +var Safer = safer.Buffer = {} +for (key in Buffer) { + if (!Buffer.hasOwnProperty(key)) continue + if (key === 'allocUnsafe' || key === 'allocUnsafeSlow') continue + Safer[key] = Buffer[key] +} + +safer.Buffer.prototype = Buffer.prototype + +if (!Safer.from || Safer.from === Uint8Array.from) { + Safer.from = function (value, encodingOrOffset, length) { + if (typeof value === 'number') { + throw new TypeError('The "value" argument must not be of type number. Received type ' + typeof value) + } + if (value && typeof value.length === 'undefined') { + throw new TypeError('The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type ' + typeof value) + } + return Buffer(value, encodingOrOffset, length) + } +} + +if (!Safer.alloc) { + Safer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('The "size" argument must be of type number. 
Received type ' + typeof size) + } + if (size < 0 || size >= 2 * (1 << 30)) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } + var buf = Buffer(size) + if (!fill || fill.length === 0) { + buf.fill(0) + } else if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + return buf + } +} + +if (!safer.kStringMaxLength) { + try { + safer.kStringMaxLength = process.binding('buffer').kStringMaxLength + } catch (e) { + // we can't determine kStringMaxLength in environments where process.binding + // is unsupported, so let's not set it + } +} + +if (!safer.constants) { + safer.constants = { + MAX_LENGTH: safer.kMaxLength + } + if (safer.kStringMaxLength) { + safer.constants.MAX_STRING_LENGTH = safer.kStringMaxLength + } +} + +module.exports = safer diff --git a/mybulma/node_modules/safer-buffer/tests.js b/mybulma/node_modules/safer-buffer/tests.js new file mode 100644 index 0000000..7ed2777 --- /dev/null +++ b/mybulma/node_modules/safer-buffer/tests.js @@ -0,0 +1,406 @@ +/* eslint-disable node/no-deprecated-api */ + +'use strict' + +var test = require('tape') + +var buffer = require('buffer') + +var index = require('./') +var safer = require('./safer') +var dangerous = require('./dangerous') + +/* Inheritance tests */ + +test('Default is Safer', function (t) { + t.equal(index, safer) + t.notEqual(safer, dangerous) + t.notEqual(index, dangerous) + t.end() +}) + +test('Is not a function', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(typeof impl, 'object') + t.equal(typeof impl.Buffer, 'object') + }); + [buffer].forEach(function (impl) { + t.equal(typeof impl, 'object') + t.equal(typeof impl.Buffer, 'function') + }) + t.end() +}) + +test('Constructor throws', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.throws(function () { impl.Buffer() }) + t.throws(function () { impl.Buffer(0) }) + t.throws(function () { impl.Buffer('a') }) + t.throws(function () { impl.Buffer('a', 'utf-8') }) + t.throws(function () { return new impl.Buffer() }) + t.throws(function () { return new impl.Buffer(0) }) + t.throws(function () { return new impl.Buffer('a') }) + t.throws(function () { return new impl.Buffer('a', 'utf-8') }) + }) + t.end() +}) + +test('Safe methods exist', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(typeof impl.Buffer.alloc, 'function', 'alloc') + t.equal(typeof impl.Buffer.from, 'function', 'from') + }) + t.end() +}) + +test('Unsafe methods exist only in Dangerous', function (t) { + [index, safer].forEach(function (impl) { + t.equal(typeof impl.Buffer.allocUnsafe, 'undefined') + t.equal(typeof impl.Buffer.allocUnsafeSlow, 'undefined') + }); + [dangerous].forEach(function (impl) { + t.equal(typeof impl.Buffer.allocUnsafe, 'function') + t.equal(typeof impl.Buffer.allocUnsafeSlow, 'function') + }) + t.end() +}) + +test('Generic methods/properties are defined and equal', function (t) { + ['poolSize', 'isBuffer', 'concat', 'byteLength'].forEach(function (method) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], buffer.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +test('Built-in buffer static methods/properties are inherited', function (t) { + Object.keys(buffer).forEach(function (method) { + if (method === 'SlowBuffer' || method === 'Buffer') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl[method], 
buffer[method], method) + t.notEqual(typeof impl[method], 'undefined', method) + }) + }) + t.end() +}) + +test('Built-in Buffer static methods/properties are inherited', function (t) { + Object.keys(buffer.Buffer).forEach(function (method) { + if (method === 'allocUnsafe' || method === 'allocUnsafeSlow') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], buffer.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +test('.prototype property of Buffer is inherited', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer.prototype, buffer.Buffer.prototype, 'prototype') + t.notEqual(typeof impl.Buffer.prototype, 'undefined', 'prototype') + }) + t.end() +}) + +test('All Safer methods are present in Dangerous', function (t) { + Object.keys(safer).forEach(function (method) { + if (method === 'Buffer') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl[method], safer[method], method) + if (method !== 'kStringMaxLength') { + t.notEqual(typeof impl[method], 'undefined', method) + } + }) + }) + Object.keys(safer.Buffer).forEach(function (method) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], safer.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +test('Safe methods from Dangerous methods are present in Safer', function (t) { + Object.keys(dangerous).forEach(function (method) { + if (method === 'Buffer') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl[method], dangerous[method], method) + if (method !== 'kStringMaxLength') { + t.notEqual(typeof impl[method], 'undefined', method) + } + }) + }) + Object.keys(dangerous.Buffer).forEach(function (method) { + if (method === 'allocUnsafe' || method === 'allocUnsafeSlow') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], dangerous.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +/* Behaviour tests */ + +test('Methods return Buffers', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(0))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(0, 10))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(0, 'a'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(10))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(10, 'x'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(9, 'ab'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from(''))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from('string'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from('string', 'utf-8'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from([0, 42, 3]))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from(new Uint8Array([0, 42, 3])))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from([]))) + }); + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + t.ok(buffer.Buffer.isBuffer(dangerous.Buffer[method](0))) + t.ok(buffer.Buffer.isBuffer(dangerous.Buffer[method](10))) + }) + t.end() +}) + +test('Constructor is buffer.Buffer', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer.alloc(0).constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(0, 10).constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(0, 'a').constructor, 
buffer.Buffer) + t.equal(impl.Buffer.alloc(10).constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(10, 'x').constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(9, 'ab').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('string').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('string', 'utf-8').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64').constructor, buffer.Buffer) + t.equal(impl.Buffer.from([0, 42, 3]).constructor, buffer.Buffer) + t.equal(impl.Buffer.from(new Uint8Array([0, 42, 3])).constructor, buffer.Buffer) + t.equal(impl.Buffer.from([]).constructor, buffer.Buffer) + }); + [0, 10, 100].forEach(function (arg) { + t.equal(dangerous.Buffer.allocUnsafe(arg).constructor, buffer.Buffer) + t.equal(dangerous.Buffer.allocUnsafeSlow(arg).constructor, buffer.SlowBuffer(0).constructor) + }) + t.end() +}) + +test('Invalid calls throw', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.throws(function () { impl.Buffer.from(0) }) + t.throws(function () { impl.Buffer.from(10) }) + t.throws(function () { impl.Buffer.from(10, 'utf-8') }) + t.throws(function () { impl.Buffer.from('string', 'invalid encoding') }) + t.throws(function () { impl.Buffer.from(-10) }) + t.throws(function () { impl.Buffer.from(1e90) }) + t.throws(function () { impl.Buffer.from(Infinity) }) + t.throws(function () { impl.Buffer.from(-Infinity) }) + t.throws(function () { impl.Buffer.from(NaN) }) + t.throws(function () { impl.Buffer.from(null) }) + t.throws(function () { impl.Buffer.from(undefined) }) + t.throws(function () { impl.Buffer.from() }) + t.throws(function () { impl.Buffer.from({}) }) + t.throws(function () { impl.Buffer.alloc('') }) + t.throws(function () { impl.Buffer.alloc('string') }) + t.throws(function () { impl.Buffer.alloc('string', 'utf-8') }) + t.throws(function () { impl.Buffer.alloc('b25ldHdvdGhyZWU=', 'base64') }) + t.throws(function () { impl.Buffer.alloc(-10) }) + t.throws(function () { impl.Buffer.alloc(1e90) }) + t.throws(function () { impl.Buffer.alloc(2 * (1 << 30)) }) + t.throws(function () { impl.Buffer.alloc(Infinity) }) + t.throws(function () { impl.Buffer.alloc(-Infinity) }) + t.throws(function () { impl.Buffer.alloc(null) }) + t.throws(function () { impl.Buffer.alloc(undefined) }) + t.throws(function () { impl.Buffer.alloc() }) + t.throws(function () { impl.Buffer.alloc([]) }) + t.throws(function () { impl.Buffer.alloc([0, 42, 3]) }) + t.throws(function () { impl.Buffer.alloc({}) }) + }); + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + t.throws(function () { dangerous.Buffer[method]('') }) + t.throws(function () { dangerous.Buffer[method]('string') }) + t.throws(function () { dangerous.Buffer[method]('string', 'utf-8') }) + t.throws(function () { dangerous.Buffer[method](2 * (1 << 30)) }) + t.throws(function () { dangerous.Buffer[method](Infinity) }) + if (dangerous.Buffer[method] === buffer.Buffer.allocUnsafe) { + t.skip('Skipping, older impl of allocUnsafe coerced negative sizes to 0') + } else { + t.throws(function () { dangerous.Buffer[method](-10) }) + t.throws(function () { dangerous.Buffer[method](-1e90) }) + t.throws(function () { dangerous.Buffer[method](-Infinity) }) + } + t.throws(function () { dangerous.Buffer[method](null) }) + t.throws(function () { dangerous.Buffer[method](undefined) }) + t.throws(function () { dangerous.Buffer[method]() }) + t.throws(function () { dangerous.Buffer[method]([]) }) + t.throws(function () { 
dangerous.Buffer[method]([0, 42, 3]) }) + t.throws(function () { dangerous.Buffer[method]({}) }) + }) + t.end() +}) + +test('Buffers have appropriate lengths', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer.alloc(0).length, 0) + t.equal(impl.Buffer.alloc(10).length, 10) + t.equal(impl.Buffer.from('').length, 0) + t.equal(impl.Buffer.from('string').length, 6) + t.equal(impl.Buffer.from('string', 'utf-8').length, 6) + t.equal(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64').length, 11) + t.equal(impl.Buffer.from([0, 42, 3]).length, 3) + t.equal(impl.Buffer.from(new Uint8Array([0, 42, 3])).length, 3) + t.equal(impl.Buffer.from([]).length, 0) + }); + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + t.equal(dangerous.Buffer[method](0).length, 0) + t.equal(dangerous.Buffer[method](10).length, 10) + }) + t.end() +}) + +test('Buffers have appropriate lengths (2)', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true; + [ safer.Buffer.alloc, + dangerous.Buffer.allocUnsafe, + dangerous.Buffer.allocUnsafeSlow + ].forEach(function (method) { + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 1e5) + var buf = method(length) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + } + }) + t.ok(ok) + t.end() +}) + +test('.alloc(size) is zero-filled and has correct length', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var buf = index.Buffer.alloc(length) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + var j + for (j = 0; j < length; j++) { + if (buf[j] !== 0) ok = false + } + buf.fill(1) + for (j = 0; j < length; j++) { + if (buf[j] !== 1) ok = false + } + } + t.ok(ok) + t.end() +}) + +test('.allocUnsafe / .allocUnsafeSlow are fillable and have correct lengths', function (t) { + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var buf = dangerous.Buffer[method](length) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + buf.fill(0, 0, length) + var j + for (j = 0; j < length; j++) { + if (buf[j] !== 0) ok = false + } + buf.fill(1, 0, length) + for (j = 0; j < length; j++) { + if (buf[j] !== 1) ok = false + } + } + t.ok(ok, method) + }) + t.end() +}) + +test('.alloc(size, fill) is `fill`-filled', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var fill = Math.round(Math.random() * 255) + var buf = index.Buffer.alloc(length, fill) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + for (var j = 0; j < length; j++) { + if (buf[j] !== fill) ok = false + } + } + t.ok(ok) + t.end() +}) + +test('.alloc(size, fill) is `fill`-filled', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var fill = Math.round(Math.random() * 255) + var buf = index.Buffer.alloc(length, fill) + if 
(!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + for (var j = 0; j < length; j++) { + if (buf[j] !== fill) ok = false + } + } + t.ok(ok) + t.deepEqual(index.Buffer.alloc(9, 'a'), index.Buffer.alloc(9, 97)) + t.notDeepEqual(index.Buffer.alloc(9, 'a'), index.Buffer.alloc(9, 98)) + + var tmp = new buffer.Buffer(2) + tmp.fill('ok') + if (tmp[1] === tmp[0]) { + // Outdated Node.js + t.deepEqual(index.Buffer.alloc(5, 'ok'), index.Buffer.from('ooooo')) + } else { + t.deepEqual(index.Buffer.alloc(5, 'ok'), index.Buffer.from('okoko')) + } + t.notDeepEqual(index.Buffer.alloc(5, 'ok'), index.Buffer.from('kokok')) + + t.end() +}) + +test('safer.Buffer.from returns results same as Buffer constructor', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.deepEqual(impl.Buffer.from(''), new buffer.Buffer('')) + t.deepEqual(impl.Buffer.from('string'), new buffer.Buffer('string')) + t.deepEqual(impl.Buffer.from('string', 'utf-8'), new buffer.Buffer('string', 'utf-8')) + t.deepEqual(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64'), new buffer.Buffer('b25ldHdvdGhyZWU=', 'base64')) + t.deepEqual(impl.Buffer.from([0, 42, 3]), new buffer.Buffer([0, 42, 3])) + t.deepEqual(impl.Buffer.from(new Uint8Array([0, 42, 3])), new buffer.Buffer(new Uint8Array([0, 42, 3]))) + t.deepEqual(impl.Buffer.from([]), new buffer.Buffer([])) + }) + t.end() +}) + +test('safer.Buffer.from returns consistent results', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.deepEqual(impl.Buffer.from(''), impl.Buffer.alloc(0)) + t.deepEqual(impl.Buffer.from([]), impl.Buffer.alloc(0)) + t.deepEqual(impl.Buffer.from(new Uint8Array([])), impl.Buffer.alloc(0)) + t.deepEqual(impl.Buffer.from('string', 'utf-8'), impl.Buffer.from('string')) + t.deepEqual(impl.Buffer.from('string'), impl.Buffer.from([115, 116, 114, 105, 110, 103])) + t.deepEqual(impl.Buffer.from('string'), impl.Buffer.from(impl.Buffer.from('string'))) + t.deepEqual(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64'), impl.Buffer.from('onetwothree')) + t.notDeepEqual(impl.Buffer.from('b25ldHdvdGhyZWU='), impl.Buffer.from('onetwothree')) + }) + t.end() +}) diff --git a/mybulma/node_modules/sass-graph/LICENSE b/mybulma/node_modules/sass-graph/LICENSE new file mode 100644 index 0000000..4ec017f --- /dev/null +++ b/mybulma/node_modules/sass-graph/LICENSE @@ -0,0 +1,7 @@ +Copyright 2014 Michael Mifsud + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/mybulma/node_modules/sass-graph/bin/sassgraph b/mybulma/node_modules/sass-graph/bin/sassgraph new file mode 100644 index 0000000..0d6f842 --- /dev/null +++ b/mybulma/node_modules/sass-graph/bin/sassgraph @@ -0,0 +1,122 @@ +#!/usr/bin/env node +var fs = require('fs'); +var path = require('path'); + +var command, directory, file; + +var yargs = require('yargs') + .usage('Usage: $0 [options] [file]') + // .demand(1) + + .command('ancestors', 'Output the ancestors') + .command('descendents', 'Output the descendents') + + .example('$0 ancestors -I src src/ src/_footer.scss', 'outputs the ancestors of src/_footer.scss') + + .option('I', { + alias: 'load-path', + default: [process.cwd()], + describe: 'Add directories to the sass load path', + type: 'array', + }) + + .option('e', { + alias: 'extensions', + default: ['scss', 'sass'], + describe: 'File extensions to include in the graph', + type: 'array', + }) + + .option('f', { + alias: 'follow', + default: false, + describe: 'Follow symbolic links', + type: 'bool', + }) + + .option('j', { + alias: 'json', + default: false, + describe: 'Output the index in json', + type: 'bool', + }) + + .version() + .alias('v', 'version') + + .help('h') + .alias('h', 'help'); + +var argv = yargs.argv; + +if (argv._.length === 0) { + yargs.showHelp(); + process.exit(1); +} + +if (['ancestors', 'descendents'].indexOf(argv._[0]) !== -1) { + command = argv._.shift(); +} + +if (argv._.length && path.extname(argv._[0]) === '') { + directory = argv._.shift(); +} + +if (argv._.length && path.extname(argv._[0])) { + file = argv._.shift(); +} + + +try { + if (!directory) { + throw new Error('Missing directory'); + } + + if (!command && !argv.json) { + throw new Error('Missing command'); + } + + if (!file && (command === 'ancestors' || command === 'descendents')) { + throw new Error(command + ' command requires a file'); + } + + var loadPaths = argv.loadPath; + if(process.env.SASS_PATH) { + loadPaths = loadPaths.concat(process.env.SASS_PATH.split(/:/).map(function(f) { + return path.resolve(f); + })); + } + + var graph = require('../').parseDir(directory, { + extensions: argv.extensions, + loadPaths: loadPaths, + follow: argv.follow, + }); + + if(argv.json) { + console.log(JSON.stringify(graph.index, null, 4)); + process.exit(0); + } + + if (command === 'ancestors') { + graph.visitAncestors(path.resolve(file), function(f) { + console.log(f); + }); + } + + if (command === 'descendents') { + graph.visitDescendents(path.resolve(file), function(f) { + console.log(f); + }); + } +} catch(e) { + if (e.code === 'ENOENT') { + console.error('Error: no such file or directory "' + e.path + '"'); + } + else { + console.log('Error: ' + e.message); + } + + // console.log(e.stack); + process.exit(1); +} diff --git a/mybulma/node_modules/sass-graph/package.json b/mybulma/node_modules/sass-graph/package.json new file mode 100644 index 0000000..1ca02db --- /dev/null +++ b/mybulma/node_modules/sass-graph/package.json @@ -0,0 +1,41 @@ +{ + "name": "sass-graph", + "version": "4.0.1", + "description": "Parse sass files and extract a graph of imports", + "license": "MIT", + "repository": "xzyfer/sass-graph", + "author": "xzyfer", + "main": "sass-graph.js", + "directories": { + "bin": "./bin" + }, + "scripts": { + "test": "nyc mocha", + "coverage": "nyc report --reporter=text-lcov | coveralls" + }, + "keywords": [ + "sass", + "graph" + ], + "dependencies": { + "glob": "^7.0.0", + "lodash": "^4.17.11", + "scss-tokenizer": "^0.4.3", + "yargs": "^17.2.1" + }, + "devDependencies": { + 
"assert": "^1.3.0", + "chai": "^4.1.2", + "coveralls": "^3.0.0", + "mocha": "^5.2.0", + "nyc": "^13.1.0" + }, + "engines": { + "node": ">=12" + }, + "files": [ + "bin", + "parse-imports.js", + "sass-graph.js" + ] +} diff --git a/mybulma/node_modules/sass-graph/parse-imports.js b/mybulma/node_modules/sass-graph/parse-imports.js new file mode 100644 index 0000000..634253e --- /dev/null +++ b/mybulma/node_modules/sass-graph/parse-imports.js @@ -0,0 +1,64 @@ +var tokenizer = require('scss-tokenizer'); + +function parseImports(content, isIndentedSyntax) { + var tokens = tokenizer.tokenize(content); + var results = []; + var tmp = ''; + var inImport = false; + var inParen = false; + var prevToken = tokens[0]; + + var i, token; + for (i = 1; i < tokens.length; i++) { + token = tokens[i]; + + if (inImport && !inParen && token[0] === 'string') { + results.push(token[1]); + } + else if (token[1] === 'import' && prevToken[1] === '@') { + if (inImport && !isIndentedSyntax) { + throw new Error('Encountered invalid @import syntax.'); + } + + inImport = true; + } + else if (inImport && !inParen && (token[0] === 'ident' || token[0] === '/')) { + tmp += token[1]; + } + else if (inImport && !inParen && (token[0] === 'space' || token[0] === 'newline')) { + if (tmp !== '') { + results.push(tmp); + tmp = ''; + + if (isIndentedSyntax) { + inImport = false; + } + } + } + else if (inImport && token[0] === ';') { + inImport = false; + + if (tmp !== '') { + results.push(tmp); + tmp = ''; + } + } + else if (inImport && token[0] === '(') { + inParen = true; + tmp = ''; + } + else if (inParen && token[0] === ')') { + inParen = false; + } + + prevToken = token; + } + + if (tmp !== '') { + results.push(tmp); + } + + return results; +} + +module.exports = parseImports; diff --git a/mybulma/node_modules/sass-graph/readme.md b/mybulma/node_modules/sass-graph/readme.md new file mode 100644 index 0000000..16346e2 --- /dev/null +++ b/mybulma/node_modules/sass-graph/readme.md @@ -0,0 +1,130 @@ +# Sass Graph + +Parses Sass files in a directory and exposes a graph of dependencies + +[![Build Status](https://travis-ci.org/xzyfer/sass-graph.svg?branch=master)](https://travis-ci.org/xzyfer/sass-graph) +[![Coverage Status](https://coveralls.io/repos/github/xzyfer/sass-graph/badge.svg?branch=master)](https://coveralls.io/github/xzyfer/sass-graph?branch=master) +[![npm version](https://badge.fury.io/js/sass-graph.svg)](http://badge.fury.io/js/sass-graph) +[![Dependency Status](https://david-dm.org/xzyfer/sass-graph.svg?theme=shields.io)](https://david-dm.org/xzyfer/sass-graph) +[![devDependency Status](https://david-dm.org/xzyfer/sass-graph/dev-status.svg?theme=shields.io)](https://david-dm.org/xzyfer/sass-graph#info=devDependencies) + +## Install + +Install with [npm](https://npmjs.org/package/sass-graph) + +``` +npm install --save-dev sass-graph +``` + +## Usage + +Usage as a Node library: + +```js +var sassGraph = require('./sass-graph'); +``` + +Usage as a command line tool: + +The command line tool will parse a graph and then either display ancestors, descendents or both. 
+ +``` +$ ./bin/sassgraph --help +Usage: bin/sassgraph [options] [file] + +Commands: + ancestors Output the ancestors + descendents Output the descendents + +Options: + -I, --load-path Add directories to the sass load path + -e, --extensions File extensions to include in the graph + -j, --json Output the index in json + -h, --help Show help + -v, --version Show version number + +Examples: + ./bin/sassgraph descendents test/fixtures test/fixtures/a.scss + /path/to/test/fixtures/b.scss + /path/to/test/fixtures/_c.scss +``` + +## API + +#### parseDir + +Parses a directory and builds a dependency graph of all requested file extensions. + +#### parseFile + +Parses a file and builds its dependency graph. + +## Options + +#### loadPaths + +Type: `Array` +Default: `[process.cwd]` + +Directories to use when resolved `@import` directives. + +#### extensions + +Type: `Array` +Default: `['scss', 'sass']` + +File types to be parsed. + +#### follow + +Type: `Boolean` +Default: `false` + +Follow symbolic links. + +#### exclude + +Type: `RegExp` +Default: `undefined` + +Exclude files matching regular expression. + +## Example + +```js +var sassGraph = require('./sass-graph'); +console.log(sassGraph.parseDir('test/fixtures')); + +//{ index: {, +// '/path/to/test/fixtures/a.scss': { +// imports: ['b.scss'], +// importedBy: [], +// }, +// '/path/to/test/fixtures/b.scss': { +// imports: ['_c.scss'], +// importedBy: ['a.scss'], +// }, +// '/path/to/test/fixtures/_c.scss': { +// imports: [], +// importedBy: ['b/scss'], +// }, +//}} +``` + +## Running Mocha tests + +You can run the tests by executing the following commands: + +``` +npm install +npm test +``` + +## Authors + +Sass graph was originally written by [Lachlan Donald](http://lachlan.me). +It is now maintained by [Michael Mifsud](http://twitter.com/xzyfer). + +## License + +MIT diff --git a/mybulma/node_modules/sass-graph/sass-graph.js b/mybulma/node_modules/sass-graph/sass-graph.js new file mode 100644 index 0000000..e3a272d --- /dev/null +++ b/mybulma/node_modules/sass-graph/sass-graph.js @@ -0,0 +1,171 @@ +'use strict'; + +var fs = require('fs'); +var path = require('path'); +var _ = require('lodash'); +var glob = require('glob'); +var parseImports = require('./parse-imports'); + +// resolve a sass module to a path +function resolveSassPath(sassPath, loadPaths, extensions) { + // trim sass file extensions + var re = new RegExp('(\.('+extensions.join('|')+'))$', 'i'); + var sassPathName = sassPath.replace(re, ''); + // check all load paths + var i, j, length = loadPaths.length, scssPath, partialPath; + for (i = 0; i < length; i++) { + for (j = 0; j < extensions.length; j++) { + scssPath = path.normalize(loadPaths[i] + '/' + sassPathName + '.' + extensions[j]); + try { + if (fs.lstatSync(scssPath).isFile()) { + return scssPath; + } + } catch (e) {} + } + + // special case for _partials + for (j = 0; j < extensions.length; j++) { + scssPath = path.normalize(loadPaths[i] + '/' + sassPathName + '.' + extensions[j]); + partialPath = path.join(path.dirname(scssPath), '_' + path.basename(scssPath)); + try { + if (fs.lstatSync(partialPath).isFile()) { + return partialPath; + } + } catch (e) {} + } + } + + // File to import not found or unreadable so we assume this is a custom import + return false; +} + +function Graph(options, dir) { + this.dir = dir; + this.extensions = options.extensions || []; + this.exclude = options.exclude instanceof RegExp ? 
options.exclude : null; + this.index = {}; + this.follow = options.follow || false; + this.loadPaths = _(options.loadPaths).map(function(p) { + return path.resolve(p); + }).value(); + + if (dir) { + var graph = this; + _.each(glob.sync(dir+'/**/*.@('+this.extensions.join('|')+')', { dot: true, nodir: true, follow: this.follow }), function(file) { + try { + graph.addFile(path.resolve(file)); + } catch (e) {} + }); + } +} + +// add a sass file to the graph +Graph.prototype.addFile = function(filepath, parent) { + if (this.exclude !== null && this.exclude.test(filepath)) return; + + var entry = this.index[filepath] = this.index[filepath] || { + imports: [], + importedBy: [], + modified: fs.statSync(filepath).mtime + }; + + var resolvedParent; + var isIndentedSyntax = path.extname(filepath) === '.sass'; + var imports = parseImports(fs.readFileSync(filepath, 'utf-8'), isIndentedSyntax); + var cwd = path.dirname(filepath); + + var i, length = imports.length, loadPaths, resolved; + for (i = 0; i < length; i++) { + loadPaths = _([cwd, this.dir]).concat(this.loadPaths).filter().uniq().value(); + resolved = resolveSassPath(imports[i], loadPaths, this.extensions); + if (!resolved) continue; + + // check exclcude regex + if (this.exclude !== null && this.exclude.test(resolved)) continue; + + // recurse into dependencies if not already enumerated + if (!_.includes(entry.imports, resolved)) { + entry.imports.push(resolved); + this.addFile(fs.realpathSync(resolved), filepath); + } + } + + // add link back to parent + if (parent) { + resolvedParent = _(parent).intersection(this.loadPaths).value(); + + if (resolvedParent) { + resolvedParent = parent.substr(parent.indexOf(resolvedParent)); + } else { + resolvedParent = parent; + } + + // check exclcude regex + if (!(this.exclude !== null && this.exclude.test(resolvedParent))) { + entry.importedBy.push(resolvedParent); + } + } +}; + +// visits all files that are ancestors of the provided file +Graph.prototype.visitAncestors = function(filepath, callback) { + this.visit(filepath, callback, function(err, node) { + if (err || !node) return []; + return node.importedBy; + }); +}; + +// visits all files that are descendents of the provided file +Graph.prototype.visitDescendents = function(filepath, callback) { + this.visit(filepath, callback, function(err, node) { + if (err || !node) return []; + return node.imports; + }); +}; + +// a generic visitor that uses an edgeCallback to find the edges to traverse for a node +Graph.prototype.visit = function(filepath, callback, edgeCallback, visited) { + filepath = fs.realpathSync(filepath); + var visited = visited || []; + if (!this.index.hasOwnProperty(filepath)) { + edgeCallback('Graph doesn\'t contain ' + filepath, null); + } + var edges = edgeCallback(null, this.index[filepath]); + + var i, length = edges.length; + for (i = 0; i < length; i++) { + if (!_.includes(visited, edges[i])) { + visited.push(edges[i]); + callback(edges[i], this.index[edges[i]]); + this.visit(edges[i], callback, edgeCallback, visited); + } + } +}; + +function processOptions(options) { + return Object.assign({ + loadPaths: [process.cwd()], + extensions: ['scss', 'sass'], + }, options); +} + +module.exports.parseFile = function(filepath, options) { + if (fs.lstatSync(filepath).isFile()) { + filepath = path.resolve(filepath); + options = processOptions(options); + var graph = new Graph(options); + graph.addFile(filepath); + return graph; + } + // throws +}; + +module.exports.parseDir = function(dirpath, options) { + if 
(fs.lstatSync(dirpath).isDirectory()) { + dirpath = path.resolve(dirpath); + options = processOptions(options); + var graph = new Graph(options, dirpath); + return graph; + } + // throws +}; diff --git a/mybulma/node_modules/scss-tokenizer/LICENSE b/mybulma/node_modules/scss-tokenizer/LICENSE new file mode 100644 index 0000000..eb44a43 --- /dev/null +++ b/mybulma/node_modules/scss-tokenizer/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 sasstools + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/mybulma/node_modules/scss-tokenizer/README.md b/mybulma/node_modules/scss-tokenizer/README.md new file mode 100644 index 0000000..97f4c8d --- /dev/null +++ b/mybulma/node_modules/scss-tokenizer/README.md @@ -0,0 +1,50 @@ +# scss-tokenizer +A tokenizer for Sass' SCSS syntax + +![https://travis-ci.org/sasstools/scss-tokenizer.svg?branch=master](https://img.shields.io/travis/sasstools/scss-tokenizer.svg) +![https://www.npmjs.com/package/scss-tokenizer](https://img.shields.io/npm/v/scss-tokenizer.svg) +![https://github.com/sasstools/scss-tokenizer/issues](https://img.shields.io/github/issues/sasstools/scss-tokenizer.svg) +![](https://img.shields.io/github/license/sasstools/scss-tokenizer.svg) + +# Install + +``` +npm install scss-tokenizer +``` + +# Usage + +```js +var scss = require('scss-tokenizer'); +scss.tokenize(css); +``` + +# API + +### `tokenize` + +Tokenizes source `css` and returns an ordered array of tokens with positional +data. + +```js +var tokenizer = require('scss-tokenizer'); +var tokens = tokenize.tokenize(css); +``` + +Arguments: + +* `css (string|#toString)`: String with input CSS or any object + with `toString()` method, like file stream. +* `opts (object) optional`: options: + * `from`: the path to the source CSS file. You should always set `from`, + because it is used in map generation and in syntax error messages. + +# Test + +``` +npm test +``` + +## Attribution + +This project started as a fork of the [PostCSS](https://github.com/postcss/postcss) tokenizer. 
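+
+As a rough, hedged illustration of the token structure described in the API section above (the exact
+token kinds and positional fields come from the tokenizer internals, so treat the comments below as
+approximate rather than authoritative):
+
+```js
+var scss = require('scss-tokenizer');
+
+var tokens = scss.tokenize('a { color: red; }');
+
+// Each token is an array: the token kind first (e.g. 'word', 'space',
+// '{', ':', ';', '}'), then the matched text, then positional data.
+tokens.forEach(function (token) {
+  console.log(token[0], JSON.stringify(token[1]));
+});
+```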
diff --git a/mybulma/node_modules/scss-tokenizer/index.js b/mybulma/node_modules/scss-tokenizer/index.js new file mode 100644 index 0000000..51d496c --- /dev/null +++ b/mybulma/node_modules/scss-tokenizer/index.js @@ -0,0 +1 @@ +module.exports = require('./lib/entry').default; diff --git a/mybulma/node_modules/scss-tokenizer/lib/entry.js b/mybulma/node_modules/scss-tokenizer/lib/entry.js new file mode 100644 index 0000000..a9fc589 --- /dev/null +++ b/mybulma/node_modules/scss-tokenizer/lib/entry.js @@ -0,0 +1,23 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _input = require('./input'); + +var _input2 = _interopRequireDefault(_input); + +var _tokenize = require('./tokenize'); + +var _tokenize2 = _interopRequireDefault(_tokenize); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var scss = {}; +scss.tokenize = function (css) { + var input = new _input2.default(css); + return (0, _tokenize2.default)(input); +}; + +exports.default = scss; \ No newline at end of file diff --git a/mybulma/node_modules/scss-tokenizer/lib/input.js b/mybulma/node_modules/scss-tokenizer/lib/input.js new file mode 100644 index 0000000..3477a14 --- /dev/null +++ b/mybulma/node_modules/scss-tokenizer/lib/input.js @@ -0,0 +1,64 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +var _previousMap = require('./previous-map'); + +var _previousMap2 = _interopRequireDefault(_previousMap); + +var _path = require('path'); + +var _path2 = _interopRequireDefault(_path); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var sequence = 0; + +var Input = function () { + function Input(css) { + var opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + _classCallCheck(this, Input); + + this.css = css.toString(); + + if (this.css[0] === '\uFEFF' || this.css[0] === '\uFFFE') { + this.css = this.css.slice(1); + } + + if (opts.from) this.file = _path2.default.resolve(opts.from); + + var map = new _previousMap2.default(this.css, opts, this.id); + if (map.text) { + this.map = map; + var file = map.consumer().file; + if (!this.file && file) this.file = this.mapResolve(file); + } + + if (this.file) { + this.from = this.file; + } else { + sequence += 1; + this.id = ''; + this.from = this.id; + } + if (this.map) this.map.file = this.from; + } + + _createClass(Input, [{ + key: 'mapResolve', + value: function mapResolve(file) { + return _path2.default.resolve(this.map.consumer().sourceRoot || '.', file); + } + }]); + + return Input; +}(); + +exports.default = Input; \ No newline at end of file diff --git a/mybulma/node_modules/scss-tokenizer/lib/previous-map.js b/mybulma/node_modules/scss-tokenizer/lib/previous-map.js new file mode 100644 index 0000000..e34ef3c --- /dev/null +++ b/mybulma/node_modules/scss-tokenizer/lib/previous-map.js @@ -0,0 +1,123 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; + +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +var _jsBase = require('js-base64'); + +var _sourceMap = require('source-map'); + +var _sourceMap2 = _interopRequireDefault(_sourceMap); + +var _path = require('path'); + +var _path2 = _interopRequireDefault(_path); + +var _fs = require('fs'); + +var _fs2 = _interopRequireDefault(_fs); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var PreviousMap = function () { + function PreviousMap(css, opts) { + _classCallCheck(this, PreviousMap); + + this.loadAnnotation(css); + this.inline = this.startWith(this.annotation, 'data:'); + + var prev = opts.map ? 
opts.map.prev : undefined; + var text = this.loadMap(opts.from, prev); + if (text) this.text = text; + } + + _createClass(PreviousMap, [{ + key: 'consumer', + value: function consumer() { + if (!this.consumerCache) { + this.consumerCache = new _sourceMap2.default.SourceMapConsumer(this.text); + } + return this.consumerCache; + } + }, { + key: 'withContent', + value: function withContent() { + return !!(this.consumer().sourcesContent && this.consumer().sourcesContent.length > 0); + } + }, { + key: 'startWith', + value: function startWith(string, start) { + if (!string) return false; + return string.substr(0, start.length) === start; + } + }, { + key: 'loadAnnotation', + value: function loadAnnotation(css) { + var match = css.match(/\/\*\s*# sourceMappingURL=((?:(?!sourceMappingURL=).)*)\s*\*\//); + if (match) this.annotation = match[1].trim(); + } + }, { + key: 'decodeInline', + value: function decodeInline(text) { + var utfd64 = 'data:application/json;charset=utf-8;base64,'; + var utf64 = 'data:application/json;charset=utf8;base64,'; + var b64 = 'data:application/json;base64,'; + var uri = 'data:application/json,'; + + if (this.startWith(text, uri)) { + return decodeURIComponent(text.substr(uri.length)); + } else if (this.startWith(text, base64)) { + return _jsBase.Base64.decode(text.substr(base64.length)); + } else if (this.startWith(text, utf64)) { + return _jsBase.Base64.decode(text.substr(utf64.length)); + } else if (this.startWith(text, utfd64)) { + return _jsBase.Base64.decode(text.substr(utfd64.length)); + } else { + var encoding = text.match(/data:application\/json;([^,]+),/)[1]; + throw new Error('Unsupported source map encoding ' + encoding); + } + } + }, { + key: 'loadMap', + value: function loadMap(file, prev) { + if (prev === false) return false; + + if (prev) { + if (typeof prev === 'string') { + return prev; + } else if (prev instanceof _sourceMap2.default.SourceMapConsumer) { + return _sourceMap2.default.SourceMapGenerator.fromSourceMap(prev).toString(); + } else if (prev instanceof _sourceMap2.default.SourceMapGenerator) { + return prev.toString(); + } else if ((typeof prev === 'undefined' ? 
'undefined' : _typeof(prev)) === 'object' && prev.mappings) { + return JSON.stringify(prev); + } else { + throw new Error('Unsupported previous source map format: ' + prev.toString()); + } + } else if (this.inline) { + return this.decodeInline(this.annotation); + } else if (this.annotation) { + var map = this.annotation; + if (file) map = _path2.default.join(_path2.default.dirname(file), map); + + this.root = _path2.default.dirname(map); + if (_fs2.default.existsSync && _fs2.default.existsSync(map)) { + return _fs2.default.readFileSync(map, 'utf-8').toString().trim(); + } else { + return false; + } + } + } + }]); + + return PreviousMap; +}(); + +exports.default = PreviousMap; \ No newline at end of file diff --git a/mybulma/node_modules/scss-tokenizer/lib/tokenize-comment.js b/mybulma/node_modules/scss-tokenizer/lib/tokenize-comment.js new file mode 100644 index 0000000..70f8413 --- /dev/null +++ b/mybulma/node_modules/scss-tokenizer/lib/tokenize-comment.js @@ -0,0 +1,154 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = tokenize; + +var _input = require('./input'); + +var _input2 = _interopRequireDefault(_input); + +var _tokenizeString = require('./tokenize-string'); + +var _tokenizeString2 = _interopRequireDefault(_tokenizeString); + +var _tokenizeInterpolant2 = require('./tokenize-interpolant'); + +var _tokenizeInterpolant3 = _interopRequireDefault(_tokenizeInterpolant2); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var newline = '\n'.charCodeAt(0), + space = ' '.charCodeAt(0), + feed = '\f'.charCodeAt(0), + tab = '\t'.charCodeAt(0), + cr = '\r'.charCodeAt(0), + hash = '#'.charCodeAt(0), + backslash = '\\'.charCodeAt(0), + slash = '/'.charCodeAt(0), + openCurly = '{'.charCodeAt(0), + closeCurly = '}'.charCodeAt(0), + asterisk = '*'.charCodeAt(0), + wordEnd = /[ \n\t\r\(\)\{\},:;@!'"\\]|\*(?=\/)|#(?={)/g; + +function tokenize(input, l, p, o) { + var tokens = []; + var css = input.css.valueOf(); + + var code = void 0, + next = void 0, + lines = void 0, + last = void 0, + content = void 0, + escape = void 0, + nextLine = void 0, + nextOffset = void 0, + escaped = void 0, + escapePos = void 0, + inInterpolant = void 0, + inComment = void 0, + inString = void 0; + + var length = css.length; + var offset = o || -1; + var line = l || 1; + var pos = p || 0; + + loop: while (pos < length) { + code = css.charCodeAt(pos); + + if (code === newline) { + offset = pos; + line += 1; + } + + switch (code) { + case space: + case tab: + case cr: + case feed: + next = pos; + do { + next += 1; + code = css.charCodeAt(next); + if (code === newline) { + offset = next; + line += 1; + } + } while (code === space || code === tab || code === cr || code === feed); + + tokens.push(['space', css.slice(pos, next)]); + pos = next - 1; + break; + + case newline: + tokens.push(['newline', '\n', line, pos - offset]); + break; + + case closeCurly: + tokens.push(['endInterpolant', '}', line, pos - offset]); + break; + + case backslash: + next = pos; + escape = true; + while (css.charCodeAt(next + 1) === backslash) { + next += 1; + escape = !escape; + } + code = css.charCodeAt(next + 1); + if (escape && code !== slash && code !== space && code !== newline && code !== tab && code !== cr && code !== feed) { + next += 1; + } + tokens.push(['word', css.slice(pos, next + 1), line, pos - offset, line, next - offset]); + pos = next; + break; + + default: + + if (code === asterisk && css.charCodeAt(pos + 1) === slash) { 
+ next = pos; + pos = next - 1; + break loop; + } + + if (code === hash && css.charCodeAt(pos + 1) === openCurly) { + tokens.push(['startInterpolant', '#{', line, pos + 1 - offset]); + next = pos + 1; + + var _tokenizeInterpolant = (0, _tokenizeInterpolant3.default)(input, line, next + 1, offset), + t = _tokenizeInterpolant.tokens, + _l = _tokenizeInterpolant.line, + _p = _tokenizeInterpolant.pos, + _o = _tokenizeInterpolant.offset; + + tokens = tokens.concat(t); + next = _p; + line = _l; + offset = _o; + + pos = next; + break; + } + + wordEnd.lastIndex = pos + 1; + wordEnd.test(css); + if (wordEnd.lastIndex === 0) { + next = css.length - 1; + } else { + next = wordEnd.lastIndex - 2; + } + + tokens.push(['word', css.slice(pos, next + 1), line, pos - offset, line, next - offset]); + + pos = next; + + break; + } + + pos++; + } + + return { tokens: tokens, line: line, pos: pos, offset: offset }; +} \ No newline at end of file diff --git a/mybulma/node_modules/scss-tokenizer/lib/tokenize-interpolant.js b/mybulma/node_modules/scss-tokenizer/lib/tokenize-interpolant.js new file mode 100644 index 0000000..8fcef5b --- /dev/null +++ b/mybulma/node_modules/scss-tokenizer/lib/tokenize-interpolant.js @@ -0,0 +1,304 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = tokenize; + +var _input = require('./input'); + +var _input2 = _interopRequireDefault(_input); + +var _tokenizeString2 = require('./tokenize-string'); + +var _tokenizeString3 = _interopRequireDefault(_tokenizeString2); + +var _tokenizeComment2 = require('./tokenize-comment'); + +var _tokenizeComment3 = _interopRequireDefault(_tokenizeComment2); + +var _tokenizeInterpolant2 = require('./tokenize-interpolant'); + +var _tokenizeInterpolant3 = _interopRequireDefault(_tokenizeInterpolant2); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var singleQuote = "'".charCodeAt(0), + doubleQuote = '"'.charCodeAt(0), + dollar = '$'.charCodeAt(0), + hash = '#'.charCodeAt(0), + backslash = '\\'.charCodeAt(0), + slash = '/'.charCodeAt(0), + newline = '\n'.charCodeAt(0), + space = ' '.charCodeAt(0), + feed = '\f'.charCodeAt(0), + tab = '\t'.charCodeAt(0), + cr = '\r'.charCodeAt(0), + openBracket = '('.charCodeAt(0), + closeBracket = ')'.charCodeAt(0), + openCurly = '{'.charCodeAt(0), + closeCurly = '}'.charCodeAt(0), + semicolon = ';'.charCodeAt(0), + asterisk = '*'.charCodeAt(0), + colon = ':'.charCodeAt(0), + at = '@'.charCodeAt(0), + comma = ','.charCodeAt(0), + plus = '+'.charCodeAt(0), + minus = '-'.charCodeAt(0), + decComb = '>'.charCodeAt(0), + adjComb = '~'.charCodeAt(0), + number = /[+-]?(\d+(\.\d+)?|\.\d+)|(e[+-]\d+)/gi, + sQuoteEnd = /(.*?)[^\\](?=((#{)|'))/gm, + dQuoteEnd = /(.*?)[^\\](?=((#{)|"))/gm, + wordEnd = /[ \n\t\r\(\)\{\},:;@!'"\\]|\/(?=\*)|#(?={)/g, + ident = /-?([a-z_]|\\[^\\])([a-z-_0-9]|\\[^\\])*/gi; + +function tokenize(input, l, p, o) { + var tokens = []; + var css = input.css.valueOf(); + + var code = void 0, + next = void 0, + quote = void 0, + lines = void 0, + last = void 0, + content = void 0, + escape = void 0, + nextLine = void 0, + nextOffset = void 0, + escaped = void 0, + escapePos = void 0, + inInterpolant = void 0, + inComment = void 0, + inString = void 0; + + var length = css.length; + var offset = o || -1; + var line = l || 1; + var pos = p || 0; + + loop: while (pos < length) { + code = css.charCodeAt(pos); + + if (code === newline) { + offset = pos; + line += 1; + } + + switch (code) { + case space: + case tab: + case cr: + case feed: + next = pos; + do { + next += 1; + code = css.charCodeAt(next); + if (code === newline) { + offset = next; + line += 1; + } + } while (code === space || code === tab || code === cr || code === feed); + + tokens.push(['space', css.slice(pos, next)]); + pos = next - 1; + break; + + case newline: + tokens.push(['newline', '\n', line, pos - offset]); + break; + + case plus: + tokens.push(['+', '+', line, pos - offset]); + break; + + case minus: + tokens.push(['-', '-', line, pos - offset]); + break; + + case decComb: + tokens.push(['>', '>', line, pos - offset]); + break; + + case adjComb: + tokens.push(['~', '~', line, pos - offset]); + break; + + case openCurly: + tokens.push(['{', '{', line, pos - offset]); + break; + + case closeCurly: + tokens.push(['endInterpolant', '}', line, pos - offset]); + break loop; + + case comma: + tokens.push([',', ',', line, pos - offset]); + break; + + case dollar: + tokens.push(['$', '$', line, pos - offset]); + break; + + case colon: + tokens.push([':', ':', line, pos - offset]); + break; + + case semicolon: + tokens.push([';', ';', line, pos - offset]); + break; + + case openBracket: + tokens.push(['(', '(', line, pos - offset]); + break; + + case closeBracket: + tokens.push([')', ')', line, pos - offset]); + break; + + case singleQuote: + case doubleQuote: + quote = code === singleQuote ? 
"'" : '"'; + tokens.push([quote, quote, line, pos - offset]); + next = pos + 1; + + var _tokenizeString = (0, _tokenizeString3.default)(input, line, next, offset, quote), + t = _tokenizeString.tokens, + _l = _tokenizeString.line, + _p = _tokenizeString.pos, + _o = _tokenizeString.offset; + + tokens = tokens.concat(t); + next = _p; + line = _l; + offset = _o; + + pos = next; + break; + + case at: + tokens.push(['@', '@', line, pos - offset]); + break; + + case backslash: + next = pos; + escape = true; + while (css.charCodeAt(next + 1) === backslash) { + next += 1; + escape = !escape; + } + code = css.charCodeAt(next + 1); + if (escape && code !== space && code !== newline && code !== tab && code !== cr && code !== feed) { + next += 1; + } + tokens.push(['word', css.slice(pos, next + 1), line, pos - offset, line, next - offset]); + pos = next; + break; + + default: + ident.lastIndex = pos; + number.lastIndex = pos; + wordEnd.lastIndex = pos; + + if (code === slash && css.charCodeAt(pos + 1) === asterisk) { + inComment = true; + tokens.push(['startComment', '/*', line, pos + 1 - offset]); + next = pos + 1; + + var _tokenizeComment = (0, _tokenizeComment3.default)(input, line, next + 1, offset), + _t = _tokenizeComment.tokens, + _l2 = _tokenizeComment.line, + _p2 = _tokenizeComment.pos, + _o2 = _tokenizeComment.offset; + + tokens = tokens.concat(_t); + next = _p2; + line = _l2; + offset = _o2; + + pos = next; + break; + } + + if (code === asterisk && css.charCodeAt(pos + 1) !== slash) { + tokens.push(['*', '*', line, pos - offset]); + break; + } + + if (inComment && code === asterisk && css.charCodeAt(pos + 1) === slash) { + inComment = false; + tokens.push(['endComment', '*/', line, pos + 1 - offset]); + pos += 2; + break; + } + + if (code === slash && css.charCodeAt(pos + 1) !== slash) { + tokens.push(['/', '/', line, pos - offset]); + pos += 2; + break; + } + + if (code === hash && css.charCodeAt(pos + 1) === openCurly) { + inInterpolant = true; + tokens.push(['startInterpolant', '#{', line, pos + 1 - offset]); + next = pos + 1; + + var _tokenizeInterpolant = (0, _tokenizeInterpolant3.default)(input, line, next + 1), + _t2 = _tokenizeInterpolant.tokens, + _p3 = _tokenizeInterpolant.pos; + + tokens = tokens.concat(_t2); + next = _p3; + + pos = next; + break; + } + + if (code === slash && css.charCodeAt(pos + 1) === slash) { + next = css.indexOf('\n\n', pos + 2); + next = next > 0 ? 
next : css.length; + + tokens.push(['scssComment', css.slice(pos, next), line, pos - offset, line, next - offset]); + + pos = next; + break; + } + + if (ident.test(css) && (ident.lastIndex = pos || 1) && ident.exec(css).index === pos) { + next = ident.lastIndex - 1; + + tokens.push(['ident', css.slice(pos, next + 1), line, pos - offset, line, next - offset]); + + pos = next; + break; + } + + if (number.test(css) && (number.lastIndex = pos || 1) && number.exec(css).index === pos) { + next = number.lastIndex - 1; + + tokens.push(['number', css.slice(pos, next + 1), line, pos - offset, line, next - offset]); + + pos = next; + break; + } + + wordEnd.lastIndex = pos + 1; + wordEnd.test(css); + if (wordEnd.lastIndex === 0) { + next = css.length - 1; + } else { + next = wordEnd.lastIndex - 2; + } + + tokens.push(['word', css.slice(pos, next + 1), line, pos - offset, line, next - offset]); + + pos = next; + + break; + } + + pos++; + } + + return { tokens: tokens, line: line, pos: pos, offset: offset }; +} \ No newline at end of file diff --git a/mybulma/node_modules/scss-tokenizer/lib/tokenize-string.js b/mybulma/node_modules/scss-tokenizer/lib/tokenize-string.js new file mode 100644 index 0000000..6fc2fe6 --- /dev/null +++ b/mybulma/node_modules/scss-tokenizer/lib/tokenize-string.js @@ -0,0 +1,136 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = tokenize; + +var _input = require('./input'); + +var _input2 = _interopRequireDefault(_input); + +var _tokenizeString = require('./tokenize-string'); + +var _tokenizeString2 = _interopRequireDefault(_tokenizeString); + +var _tokenizeInterpolant2 = require('./tokenize-interpolant'); + +var _tokenizeInterpolant3 = _interopRequireDefault(_tokenizeInterpolant2); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var singleQuote = "'".charCodeAt(0), + doubleQuote = '"'.charCodeAt(0), + newline = '\n'.charCodeAt(0), + space = ' '.charCodeAt(0), + feed = '\f'.charCodeAt(0), + tab = '\t'.charCodeAt(0), + cr = '\r'.charCodeAt(0), + hash = '#'.charCodeAt(0), + backslash = '\\'.charCodeAt(0), + slash = '/'.charCodeAt(0), + openCurly = '{'.charCodeAt(0), + closeCurly = '}'.charCodeAt(0), + interpolantEnd = /([.\s]*?)[^\\](?=(}))/gm, + sQuoteEnd = /([.\s]*?)[^\\](?=((#{)|'))/gm, + dQuoteEnd = /([.\s]*?)[^\\](?=((#{)|"))/gm; + +function tokenize(input, l, p, o, quote) { + var tokens = []; + var css = input.css.valueOf(); + + var code = void 0, + next = void 0, + lines = void 0, + last = void 0, + content = void 0, + escape = void 0, + nextLine = void 0, + nextOffset = void 0, + escaped = void 0, + escapePos = void 0, + inInterpolant = void 0, + inComment = void 0, + inString = void 0; + + var length = css.length; + var offset = o || -1; + var line = l || 1; + var pos = p || 0; + + var quoteEnd = quote === "'" ? 
sQuoteEnd : dQuoteEnd; + var quoteChar = quote.charCodeAt(0); + + loop: while (pos < length) { + code = css.charCodeAt(pos); + + if (code === newline) { + offset = pos; + line += 1; + } + + switch (code) { + + case closeCurly: + tokens.push(['endInterpolant', '}', line, pos - offset]); + break; + + case quoteChar: + tokens.push([quote, quote, line, pos - offset]); + break loop; + + case backslash: + next = pos; + escape = true; + while (css.charCodeAt(next + 1) === backslash) { + next += 1; + escape = !escape; + } + code = css.charCodeAt(next + 1); + if (escape && code !== slash && code !== space && code !== newline && code !== tab && code !== cr && code !== feed) { + next += 1; + } + tokens.push(['string', css.slice(pos, next + 1), line, pos - offset, line, next - offset]); + pos = next; + break; + + default: + if (code === hash && css.charCodeAt(pos + 1) === openCurly) { + tokens.push(['startInterpolant', '#{', line, pos + 1 - offset]); + next = pos + 1; + + var _tokenizeInterpolant = (0, _tokenizeInterpolant3.default)(input, line, next + 1, offset), + t = _tokenizeInterpolant.tokens, + _l = _tokenizeInterpolant.line, + _p = _tokenizeInterpolant.pos, + _o = _tokenizeInterpolant.offset; + + tokens = tokens.concat(t); + next = _p; + line = _l; + offset = _o; + + pos = next; + } else { + quoteEnd.lastIndex = pos; + quoteEnd.test(css); + + if (quoteEnd.lastIndex === 0) { + next = css.length - 1; + } else { + next = quoteEnd.lastIndex - 1; + } + + tokens.push(['string', css.slice(pos, next + 1), line, pos - offset, line, next - offset]); + + pos = next; + } + + break; + } + + pos++; + } + + return { tokens: tokens, line: line, pos: pos, offset: offset }; +} \ No newline at end of file diff --git a/mybulma/node_modules/scss-tokenizer/lib/tokenize.js b/mybulma/node_modules/scss-tokenizer/lib/tokenize.js new file mode 100644 index 0000000..4ef702a --- /dev/null +++ b/mybulma/node_modules/scss-tokenizer/lib/tokenize.js @@ -0,0 +1,312 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = tokenize; + +var _input = require('./input'); + +var _input2 = _interopRequireDefault(_input); + +var _tokenizeString2 = require('./tokenize-string'); + +var _tokenizeString3 = _interopRequireDefault(_tokenizeString2); + +var _tokenizeComment2 = require('./tokenize-comment'); + +var _tokenizeComment3 = _interopRequireDefault(_tokenizeComment2); + +var _tokenizeInterpolant2 = require('./tokenize-interpolant'); + +var _tokenizeInterpolant3 = _interopRequireDefault(_tokenizeInterpolant2); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var singleQuote = "'".charCodeAt(0), + doubleQuote = '"'.charCodeAt(0), + dollar = '$'.charCodeAt(0), + hash = '#'.charCodeAt(0), + backslash = '\\'.charCodeAt(0), + slash = '/'.charCodeAt(0), + newline = '\n'.charCodeAt(0), + space = ' '.charCodeAt(0), + feed = '\f'.charCodeAt(0), + tab = '\t'.charCodeAt(0), + cr = '\r'.charCodeAt(0), + openBracket = '('.charCodeAt(0), + closeBracket = ')'.charCodeAt(0), + openCurly = '{'.charCodeAt(0), + closeCurly = '}'.charCodeAt(0), + semicolon = ';'.charCodeAt(0), + asterisk = '*'.charCodeAt(0), + colon = ':'.charCodeAt(0), + at = '@'.charCodeAt(0), + comma = ','.charCodeAt(0), + plus = '+'.charCodeAt(0), + minus = '-'.charCodeAt(0), + decComb = '>'.charCodeAt(0), + adjComb = '~'.charCodeAt(0), + number = /[+-]?(\d+(\.\d+)?|\.\d+)|(e[+-]\d+)/gi, + sQuoteEnd = /(.*?)[^\\](?=((#{)|'))/gm, + dQuoteEnd = /(.*?)[^\\](?=((#{)|"))/gm, + wordEnd = /[ \n\t\r\(\)\{\},:;@!'"\\]|\/(?=\*)|#(?={)/g, + ident = /-?([a-z_]|\\[^\\])([a-z-_0-9]|\\[^\\])*/gi; + +function tokenize(input, l, p) { + var tokens = []; + var css = input.css.valueOf(); + + var code = void 0, + next = void 0, + quote = void 0, + lines = void 0, + last = void 0, + content = void 0, + escape = void 0, + nextLine = void 0, + nextOffset = void 0, + escaped = void 0, + escapePos = void 0, + inInterpolant = void 0, + inComment = void 0, + inString = void 0; + + var length = css.length; + var offset = -1; + var line = l || 1; + var pos = p || 0; + + while (pos < length) { + code = css.charCodeAt(pos); + + if (code === newline) { + offset = pos; + line += 1; + } + + switch (code) { + case space: + case tab: + case cr: + case feed: + next = pos; + do { + next += 1; + code = css.charCodeAt(next); + if (code === newline) { + offset = next; + line += 1; + } + } while (code === space || code === tab || code === cr || code === feed); + + tokens.push(['space', css.slice(pos, next)]); + pos = next - 1; + break; + + case newline: + tokens.push(['newline', '\n', line, pos - offset]); + break; + + case plus: + tokens.push(['+', '+', line, pos - offset]); + break; + + case minus: + tokens.push(['-', '-', line, pos - offset]); + break; + + case decComb: + tokens.push(['>', '>', line, pos - offset]); + break; + + case adjComb: + tokens.push(['~', '~', line, pos - offset]); + break; + + case openCurly: + tokens.push(['{', '{', line, pos - offset]); + break; + + case closeCurly: + if (inInterpolant) { + inInterpolant = false; + tokens.push(['endInterpolant', '}', line, pos - offset]); + } else { + tokens.push(['}', '}', line, pos - offset]); + } + break; + + case comma: + tokens.push([',', ',', line, pos - offset]); + break; + + case dollar: + tokens.push(['$', '$', line, pos - offset]); + break; + + case colon: + tokens.push([':', ':', line, pos - offset]); + break; + + case semicolon: + tokens.push([';', ';', line, pos - offset]); + break; + + case openBracket: + tokens.push(['(', '(', line, pos - offset]); + break; + + case closeBracket: + tokens.push([')', ')', line, pos - offset]); + break; + + case singleQuote: + case doubleQuote: + quote = code === singleQuote ? 
"'" : '"'; + tokens.push([quote, quote, line, pos - offset]); + next = pos + 1; + + var _tokenizeString = (0, _tokenizeString3.default)(input, line, next, offset, quote), + t = _tokenizeString.tokens, + _l = _tokenizeString.line, + _p = _tokenizeString.pos, + o = _tokenizeString.offset; + + tokens = tokens.concat(t); + next = _p; + line = _l; + offset = o; + + pos = next; + break; + + case at: + tokens.push(['@', '@', line, pos - offset]); + break; + + case backslash: + next = pos; + escape = true; + while (css.charCodeAt(next + 1) === backslash) { + next += 1; + escape = !escape; + } + code = css.charCodeAt(next + 1); + if (escape && code !== space && code !== newline && code !== tab && code !== cr && code !== feed) { + next += 1; + } + tokens.push(['word', css.slice(pos, next + 1), line, pos - offset, line, next - offset]); + pos = next; + break; + + default: + ident.lastIndex = pos; + number.lastIndex = pos; + wordEnd.lastIndex = pos; + + if (code === slash && css.charCodeAt(pos + 1) === asterisk) { + inComment = true; + tokens.push(['startComment', '/*', line, pos + 1 - offset]); + next = pos + 1; + + var _tokenizeComment = (0, _tokenizeComment3.default)(input, line, next + 1, offset), + _t = _tokenizeComment.tokens, + _l2 = _tokenizeComment.line, + _p2 = _tokenizeComment.pos, + _o = _tokenizeComment.offset; + + tokens = tokens.concat(_t); + next = _p2; + line = _l2; + offset = _o; + + pos = next; + break; + } + + if (code === asterisk && css.charCodeAt(pos + 1) !== slash) { + tokens.push(['*', '*', line, pos - offset]); + break; + } + + if (inComment && code === asterisk && css.charCodeAt(pos + 1) === slash) { + inComment = false; + tokens.push(['endComment', '*/', line, pos + 1 - offset]); + pos += 2; + break; + } + + if (code === slash && css.charCodeAt(pos + 1) !== slash) { + tokens.push(['/', '/', line, pos - offset]); + break; + } + + if (code === hash && css.charCodeAt(pos + 1) === openCurly) { + inInterpolant = true; + tokens.push(['startInterpolant', '#{', line, pos + 1 - offset]); + next = pos + 1; + + var _tokenizeInterpolant = (0, _tokenizeInterpolant3.default)(input, line, next + 1, offset), + _t2 = _tokenizeInterpolant.tokens, + _l3 = _tokenizeInterpolant.line, + _p3 = _tokenizeInterpolant.pos, + _o2 = _tokenizeInterpolant.offset; + + tokens = tokens.concat(_t2); + next = _p3; + line = _l3; + offset = _o2; + + pos = next; + break; + } + + if (code === slash && css.charCodeAt(pos + 1) === slash) { + next = css.indexOf('\n', pos + 2); + next = (next > 0 ? 
next : css.length) - 1; + + tokens.push(['scssComment', css.slice(pos, next + 1), line, pos - offset, line, next - offset]); + + pos = next; + break; + } + + if (ident.test(css) && (ident.lastIndex = pos || 1) && ident.exec(css).index === pos) { + next = ident.lastIndex - 1; + + tokens.push(['ident', css.slice(pos, next + 1), line, pos - offset, line, next - offset]); + + pos = next; + break; + } + + if (number.test(css) && (number.lastIndex = pos || 1) && number.exec(css).index === pos) { + next = number.lastIndex - 1; + + tokens.push(['number', css.slice(pos, next + 1), line, pos - offset, line, next - offset]); + + pos = next; + break; + } + + wordEnd.lastIndex = pos + 1; + wordEnd.test(css); + if (wordEnd.lastIndex === 0) { + next = css.length - 1; + } else { + next = wordEnd.lastIndex - 2; + } + + tokens.push(['word', css.slice(pos, next + 1), line, pos - offset, line, next - offset]); + + pos = next; + + break; + } + + pos++; + } + + return tokens; +} \ No newline at end of file diff --git a/mybulma/node_modules/scss-tokenizer/package.json b/mybulma/node_modules/scss-tokenizer/package.json new file mode 100644 index 0000000..ddd56fb --- /dev/null +++ b/mybulma/node_modules/scss-tokenizer/package.json @@ -0,0 +1,46 @@ +{ + "name": "scss-tokenizer", + "version": "0.4.3", + "description": "A tokenzier for Sass' SCSS syntax", + "main": "index.js", + "scripts": { + "test": "jest", + "clean": "rm lib/*", + "build": "npm run clean; babel src/ --out-dir lib", + "prepublishOnly": "npm run build" + }, + "files": [ + "index.js", + "lib" + ], + "repository": { + "type": "git", + "url": "https://github.com/sasstools/scss-tokenizer.git" + }, + "keywords": [ + "parser", + "tokenizer", + "sass", + "scss", + "libsass" + ], + "author": "xzyfer", + "license": "MIT", + "bugs": { + "url": "https://github.com/sasstools/scss-tokenizer/issues" + }, + "homepage": "https://github.com/sasstools/scss-tokenizer", + "dependencies": { + "js-base64": "^2.4.9", + "source-map": "^0.7.3" + }, + "devDependencies": { + "babel-cli": "^6.26.0", + "babel-core": "^6.26.3", + "babel-jest": "^23.6.0", + "babel-preset-env": "^1.7.0", + "glob": "^7.1.3", + "jest": "^23.6.0", + "sass-spec": "3.5.1" + } +} diff --git a/mybulma/node_modules/semver/bin/semver.js b/mybulma/node_modules/semver/bin/semver.js new file mode 100644 index 0000000..8d1b557 --- /dev/null +++ b/mybulma/node_modules/semver/bin/semver.js @@ -0,0 +1,183 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. 
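+//
+// Rough flow: parse the CLI flags, keep only arguments that parse as valid
+// SemVer versions (coercing them first when --coerce is given), apply any
+// --range filters, then sort, clean, optionally increment, and print the
+// surviving versions.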
+ +const argv = process.argv.slice(2) + +let versions = [] + +const range = [] + +let inc = null + +const version = require('../package.json').version + +let loose = false + +let includePrerelease = false + +let coerce = false + +let rtl = false + +let identifier + +const semver = require('../') + +let reverse = false + +let options = {} + +const main = () => { + if (!argv.length) { + return help() + } + while (argv.length) { + let a = argv.shift() + const indexOfEqualSign = a.indexOf('=') + if (indexOfEqualSign !== -1) { + const value = a.slice(indexOfEqualSign + 1) + a = a.slice(0, indexOfEqualSign) + argv.unshift(value) + } + switch (a) { + case '-rv': case '-rev': case '--rev': case '--reverse': + reverse = true + break + case '-l': case '--loose': + loose = true + break + case '-p': case '--include-prerelease': + includePrerelease = true + break + case '-v': case '--version': + versions.push(argv.shift()) + break + case '-i': case '--inc': case '--increment': + switch (argv[0]) { + case 'major': case 'minor': case 'patch': case 'prerelease': + case 'premajor': case 'preminor': case 'prepatch': + inc = argv.shift() + break + default: + inc = 'patch' + break + } + break + case '--preid': + identifier = argv.shift() + break + case '-r': case '--range': + range.push(argv.shift()) + break + case '-c': case '--coerce': + coerce = true + break + case '--rtl': + rtl = true + break + case '--ltr': + rtl = false + break + case '-h': case '--help': case '-?': + return help() + default: + versions.push(a) + break + } + } + + options = { loose: loose, includePrerelease: includePrerelease, rtl: rtl } + + versions = versions.map((v) => { + return coerce ? (semver.coerce(v, options) || { version: v }).version : v + }).filter((v) => { + return semver.valid(v) + }) + if (!versions.length) { + return fail() + } + if (inc && (versions.length !== 1 || range.length)) { + return failInc() + } + + for (let i = 0, l = range.length; i < l; i++) { + versions = versions.filter((v) => { + return semver.satisfies(v, range[i], options) + }) + if (!versions.length) { + return fail() + } + } + return success(versions) +} + +const failInc = () => { + console.error('--inc can only be used on a single version with no range') + fail() +} + +const fail = () => process.exit(1) + +const success = () => { + const compare = reverse ? 'rcompare' : 'compare' + versions.sort((a, b) => { + return semver[compare](a, b, options) + }).map((v) => { + return semver.clean(v, options) + }).map((v) => { + return inc ? semver.inc(v, inc, options, identifier) : v + }).forEach((v, i, _) => { + console.log(v) + }) +} + +const help = () => console.log( +`SemVer ${version} + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] <version> [<version> [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range <range> + Print versions that match the specified range. + +-i --increment [<level>] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. + +--preid <identifier> + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments.
+ +-l --loose + Interpret versions and ranges loosely + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them.`) + +main() diff --git a/mybulma/node_modules/semver/classes/comparator.js b/mybulma/node_modules/semver/classes/comparator.js new file mode 100644 index 0000000..62cd204 --- /dev/null +++ b/mybulma/node_modules/semver/classes/comparator.js @@ -0,0 +1,136 @@ +const ANY = Symbol('SemVer ANY') +// hoisted class for cyclic dependency +class Comparator { + static get ANY () { + return ANY + } + + constructor (comp, options) { + options = parseOptions(options) + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) + } + + parse (comp) { + const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + const m = comp.match(r) + + if (!m) { + throw new TypeError(`Invalid comparator: ${comp}`) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
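+    // m[2] is the version portion of the comparator; when it is absent the
+    // comparator matches every version, represented by the ANY sentinel.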
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } + } + + toString () { + return this.value + } + + test (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) + } + + intersects (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false, + } + } + + if (this.operator === '') { + if (this.value === '') { + return true + } + return new Range(comp.value, options).test(this.value) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + return new Range(this.value, options).test(comp.semver) + } + + const sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + const sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + const sameSemVer = this.semver.version === comp.semver.version + const differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + const oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<') + const oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>') + + return ( + sameDirectionIncreasing || + sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || + oppositeDirectionsGreaterThan + ) + } +} + +module.exports = Comparator + +const parseOptions = require('../internal/parse-options') +const { re, t } = require('../internal/re') +const cmp = require('../functions/cmp') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const Range = require('./range') diff --git a/mybulma/node_modules/semver/classes/index.js b/mybulma/node_modules/semver/classes/index.js new file mode 100644 index 0000000..5e3f5c9 --- /dev/null +++ b/mybulma/node_modules/semver/classes/index.js @@ -0,0 +1,5 @@ +module.exports = { + SemVer: require('./semver.js'), + Range: require('./range.js'), + Comparator: require('./comparator.js'), +} diff --git a/mybulma/node_modules/semver/classes/range.js b/mybulma/node_modules/semver/classes/range.js new file mode 100644 index 0000000..a791d91 --- /dev/null +++ b/mybulma/node_modules/semver/classes/range.js @@ -0,0 +1,522 @@ +// hoisted class for cyclic dependency +class Range { + constructor (range, options) { + options = parseOptions(options) + + if (range instanceof Range) { + if ( + range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease + ) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + // just put it in the set and return + this.raw = range.value + this.set = [[range]] + this.format() + return this + } + + this.options = options + this.loose = !!options.loose + 
this.includePrerelease = !!options.includePrerelease + + // First, split based on boolean or || + this.raw = range + this.set = range + .split('||') + // map the range to a 2d array of comparators + .map(r => this.parseRange(r.trim())) + // throw out any comparator lists that are empty + // this generally means that it was not a valid range, which is allowed + // in loose mode, but will still throw if the WHOLE range is invalid. + .filter(c => c.length) + + if (!this.set.length) { + throw new TypeError(`Invalid SemVer Range: ${range}`) + } + + // if we have any that are not the null set, throw out null sets. + if (this.set.length > 1) { + // keep the first one, in case they're all null sets + const first = this.set[0] + this.set = this.set.filter(c => !isNullSet(c[0])) + if (this.set.length === 0) { + this.set = [first] + } else if (this.set.length > 1) { + // if we have any that are *, then the range is just * + for (const c of this.set) { + if (c.length === 1 && isAny(c[0])) { + this.set = [c] + break + } + } + } + } + + this.format() + } + + format () { + this.range = this.set + .map((comps) => { + return comps.join(' ').trim() + }) + .join('||') + .trim() + return this.range + } + + toString () { + return this.range + } + + parseRange (range) { + range = range.trim() + + // memoize range parsing for performance. + // this is a very hot path, and fully deterministic. + const memoOpts = Object.keys(this.options).join(',') + const memoKey = `parseRange:${memoOpts}:${range}` + const cached = cache.get(memoKey) + if (cached) { + return cached + } + + const loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + + // normalize spaces + range = range.split(/\s+/).join(' ') + + // At this point, the range is completely trimmed and + // ready to be split into comparators. 
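+    // Each space-separated comparator below is desugared (carets, tildes,
+    // x-ranges, stars) into primitive comparators (>=, <, =) before
+    // Comparator instances are constructed.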
+ + let rangeList = range + .split(' ') + .map(comp => parseComparator(comp, this.options)) + .join(' ') + .split(/\s+/) + // >=0.0.0 is equivalent to * + .map(comp => replaceGTE0(comp, this.options)) + + if (loose) { + // in loose mode, throw out any that are not valid comparators + rangeList = rangeList.filter(comp => { + debug('loose invalid filter', comp, this.options) + return !!comp.match(re[t.COMPARATORLOOSE]) + }) + } + debug('range list', rangeList) + + // if any comparators are the null set, then replace with JUST null set + // if more than one comparator, remove any * comparators + // also, don't include the same comparator more than once + const rangeMap = new Map() + const comparators = rangeList.map(comp => new Comparator(comp, this.options)) + for (const comp of comparators) { + if (isNullSet(comp)) { + return [comp] + } + rangeMap.set(comp.value, comp) + } + if (rangeMap.size > 1 && rangeMap.has('')) { + rangeMap.delete('') + } + + const result = [...rangeMap.values()] + cache.set(memoKey, result) + return result + } + + intersects (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some((thisComparators) => { + return ( + isSatisfiable(thisComparators, options) && + range.set.some((rangeComparators) => { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every((thisComparator) => { + return rangeComparators.every((rangeComparator) => { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) + } + + // if ANY of the sets match ALL of its comparators, then pass + test (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (let i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false + } +} +module.exports = Range + +const LRU = require('lru-cache') +const cache = new LRU({ max: 1000 }) + +const parseOptions = require('../internal/parse-options') +const Comparator = require('./comparator') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const { + re, + t, + comparatorTrimReplace, + tildeTrimReplace, + caretTrimReplace, +} = require('../internal/re') + +const isNullSet = c => c.value === '<0.0.0-0' +const isAny = c => c.value === '' + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +const isSatisfiable = (comparators, options) => { + let result = true + const remainingComparators = comparators.slice() + let testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every((otherComparator) => { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
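+// e.g. '^1.2.3' desugars to '>=1.2.3 <2.0.0-0' and '1.x' to '>=1.0.0 <2.0.0-0'.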
+const parseComparator = (comp, options) => { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +const isX = id => !id || id.toLowerCase() === 'x' || id === '*' + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 +// ~0.0.1 --> >=0.0.1 <0.1.0-0 +const replaceTildes = (comp, options) => + comp.trim().split(/\s+/).map((c) => { + return replaceTilde(c, options) + }).join(' ') + +const replaceTilde = (comp, options) => { + const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, (_, M, m, p, pr) => { + debug('tilde', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0 <${+M + 1}.0.0-0` + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0-0 + ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` + } else if (pr) { + debug('replaceTilde pr', pr) + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } else { + // ~1.2.3 == >=1.2.3 <1.3.0-0 + ret = `>=${M}.${m}.${p + } <${M}.${+m + 1}.0-0` + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 +// ^1.2.3 --> >=1.2.3 <2.0.0-0 +// ^1.2.0 --> >=1.2.0 <2.0.0-0 +// ^0.0.1 --> >=0.0.1 <0.0.2-0 +// ^0.1.0 --> >=0.1.0 <0.2.0-0 +const replaceCarets = (comp, options) => + comp.trim().split(/\s+/).map((c) => { + return replaceCaret(c, options) + }).join(' ') + +const replaceCaret = (comp, options) => { + debug('caret', comp, options) + const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + const z = options.includePrerelease ? '-0' : '' + return comp.replace(r, (_, M, m, p, pr) => { + debug('caret', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` + } else if (isX(p)) { + if (M === '0') { + ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` + } else { + ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${+M + 1}.0.0-0` + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p + }${z} <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p + }${z} <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p + } <${+M + 1}.0.0-0` + } + } + + debug('caret return', ret) + return ret + }) +} + +const replaceXRanges = (comp, options) => { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map((c) => { + return replaceXRange(c, options) + }).join(' ') +} + +const replaceXRange = (comp, options) => { + comp = comp.trim() + const r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, (ret, gtlt, M, m, p, pr) => { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + const xM = isX(M) + const xm = xM || isX(m) + const xp = xm || isX(p) + const anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + if (gtlt === '<') { + pr = '-0' + } + + ret = `${gtlt + M}.${m}.${p}${pr}` + } else if (xm) { + ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` + } else if (xp) { + ret = `>=${M}.${m}.0${pr + } <${M}.${+m + 1}.0-0` + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +const replaceStars = (comp, options) => { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[t.STAR], '') +} + +const replaceGTE0 = (comp, options) => { + debug('replaceGTE0', comp, options) + return comp.trim() + .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 +const hyphenReplace = incPr => ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) => { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = `>=${fM}.0.0${incPr ? '-0' : ''}` + } else if (isX(fp)) { + from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}` + } else if (fpr) { + from = `>=${from}` + } else { + from = `>=${from}${incPr ? '-0' : ''}` + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = `<${+tM + 1}.0.0-0` + } else if (isX(tp)) { + to = `<${tM}.${+tm + 1}.0-0` + } else if (tpr) { + to = `<=${tM}.${tm}.${tp}-${tpr}` + } else if (incPr) { + to = `<${tM}.${tm}.${+tp + 1}-0` + } else { + to = `<=${to}` + } + + return (`${from} ${to}`).trim() +} + +const testSet = (set, version, options) => { + for (let i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
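+    // A version with a prerelease tag only satisfies the range if some
+    // comparator in the set has a prerelease on the same major.minor.patch.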
+ for (let i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === Comparator.ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + const allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} diff --git a/mybulma/node_modules/semver/classes/semver.js b/mybulma/node_modules/semver/classes/semver.js new file mode 100644 index 0000000..af62955 --- /dev/null +++ b/mybulma/node_modules/semver/classes/semver.js @@ -0,0 +1,287 @@ +const debug = require('../internal/debug') +const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants') +const { re, t } = require('../internal/re') + +const parseOptions = require('../internal/parse-options') +const { compareIdentifiers } = require('../internal/identifiers') +class SemVer { + constructor (version, options) { + options = parseOptions(options) + + if (version instanceof SemVer) { + if (version.loose === !!options.loose && + version.includePrerelease === !!options.includePrerelease) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError(`Invalid Version: ${version}`) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError( + `version is longer than ${MAX_LENGTH} characters` + ) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + // this isn't actually relevant for versions, but keep it so that we + // don't run into trouble passing this.options around. + this.includePrerelease = !!options.includePrerelease + + const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError(`Invalid Version: ${version}`) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map((id) => { + if (/^[0-9]+$/.test(id)) { + const num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? 
m[5].split('.') : [] + this.format() + } + + format () { + this.version = `${this.major}.${this.minor}.${this.patch}` + if (this.prerelease.length) { + this.version += `-${this.prerelease.join('.')}` + } + return this.version + } + + toString () { + return this.version + } + + compare (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + if (typeof other === 'string' && other === this.version) { + return 0 + } + other = new SemVer(other, this.options) + } + + if (other.version === this.version) { + return 0 + } + + return this.compareMain(other) || this.comparePre(other) + } + + compareMain (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return ( + compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) + ) + } + + comparePre (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + let i = 0 + do { + const a = this.prerelease[i] + const b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + compareBuild (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + let i = 0 + do { + const a = this.build[i] + const b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + // preminor will bump the version up to the next minor release, and immediately + // down to pre-release. premajor and prepatch work the same way. + inc (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if ( + this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0 + ) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. 
+ // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. + case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + let i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (compareIdentifiers(this.prerelease[0], identifier) === 0) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break + + default: + throw new Error(`invalid increment argument: ${release}`) + } + this.format() + this.raw = this.version + return this + } +} + +module.exports = SemVer diff --git a/mybulma/node_modules/semver/functions/clean.js b/mybulma/node_modules/semver/functions/clean.js new file mode 100644 index 0000000..811fe6b --- /dev/null +++ b/mybulma/node_modules/semver/functions/clean.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const clean = (version, options) => { + const s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} +module.exports = clean diff --git a/mybulma/node_modules/semver/functions/cmp.js b/mybulma/node_modules/semver/functions/cmp.js new file mode 100644 index 0000000..4011909 --- /dev/null +++ b/mybulma/node_modules/semver/functions/cmp.js @@ -0,0 +1,52 @@ +const eq = require('./eq') +const neq = require('./neq') +const gt = require('./gt') +const gte = require('./gte') +const lt = require('./lt') +const lte = require('./lte') + +const cmp = (a, op, b, loose) => { + switch (op) { + case '===': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a === b + + case '!==': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError(`Invalid operator: ${op}`) + } +} +module.exports = cmp diff --git a/mybulma/node_modules/semver/functions/coerce.js b/mybulma/node_modules/semver/functions/coerce.js new file mode 100644 index 0000000..2e01452 --- /dev/null +++ b/mybulma/node_modules/semver/functions/coerce.js @@ -0,0 +1,52 @@ +const SemVer = require('../classes/semver') +const parse = require('./parse') +const { re, t } = require('../internal/re') + +const coerce = (version, options) => { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + let match = null + if (!options.rtl) { + match = version.match(re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. 
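+    // COERCERTL is a shared module-level /g regexp, so its lastIndex has to be
+    // advanced by hand here and reset before returning.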
+ let next + while ((next = re[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + re[t.COERCERTL].lastIndex = -1 + } + + if (match === null) { + return null + } + + return parse(`${match[2]}.${match[3] || '0'}.${match[4] || '0'}`, options) +} +module.exports = coerce diff --git a/mybulma/node_modules/semver/functions/compare-build.js b/mybulma/node_modules/semver/functions/compare-build.js new file mode 100644 index 0000000..9eb881b --- /dev/null +++ b/mybulma/node_modules/semver/functions/compare-build.js @@ -0,0 +1,7 @@ +const SemVer = require('../classes/semver') +const compareBuild = (a, b, loose) => { + const versionA = new SemVer(a, loose) + const versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} +module.exports = compareBuild diff --git a/mybulma/node_modules/semver/functions/compare-loose.js b/mybulma/node_modules/semver/functions/compare-loose.js new file mode 100644 index 0000000..4881fbe --- /dev/null +++ b/mybulma/node_modules/semver/functions/compare-loose.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const compareLoose = (a, b) => compare(a, b, true) +module.exports = compareLoose diff --git a/mybulma/node_modules/semver/functions/compare.js b/mybulma/node_modules/semver/functions/compare.js new file mode 100644 index 0000000..748b7af --- /dev/null +++ b/mybulma/node_modules/semver/functions/compare.js @@ -0,0 +1,5 @@ +const SemVer = require('../classes/semver') +const compare = (a, b, loose) => + new SemVer(a, loose).compare(new SemVer(b, loose)) + +module.exports = compare diff --git a/mybulma/node_modules/semver/functions/diff.js b/mybulma/node_modules/semver/functions/diff.js new file mode 100644 index 0000000..87200ef --- /dev/null +++ b/mybulma/node_modules/semver/functions/diff.js @@ -0,0 +1,23 @@ +const parse = require('./parse') +const eq = require('./eq') + +const diff = (version1, version2) => { + if (eq(version1, version2)) { + return null + } else { + const v1 = parse(version1) + const v2 = parse(version2) + const hasPre = v1.prerelease.length || v2.prerelease.length + const prefix = hasPre ? 'pre' : '' + const defaultResult = hasPre ? 
'prerelease' : '' + for (const key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } + } + return defaultResult // may be undefined + } +} +module.exports = diff diff --git a/mybulma/node_modules/semver/functions/eq.js b/mybulma/node_modules/semver/functions/eq.js new file mode 100644 index 0000000..271fed9 --- /dev/null +++ b/mybulma/node_modules/semver/functions/eq.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const eq = (a, b, loose) => compare(a, b, loose) === 0 +module.exports = eq diff --git a/mybulma/node_modules/semver/functions/gt.js b/mybulma/node_modules/semver/functions/gt.js new file mode 100644 index 0000000..d9b2156 --- /dev/null +++ b/mybulma/node_modules/semver/functions/gt.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const gt = (a, b, loose) => compare(a, b, loose) > 0 +module.exports = gt diff --git a/mybulma/node_modules/semver/functions/gte.js b/mybulma/node_modules/semver/functions/gte.js new file mode 100644 index 0000000..5aeaa63 --- /dev/null +++ b/mybulma/node_modules/semver/functions/gte.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const gte = (a, b, loose) => compare(a, b, loose) >= 0 +module.exports = gte diff --git a/mybulma/node_modules/semver/functions/inc.js b/mybulma/node_modules/semver/functions/inc.js new file mode 100644 index 0000000..62d1da2 --- /dev/null +++ b/mybulma/node_modules/semver/functions/inc.js @@ -0,0 +1,18 @@ +const SemVer = require('../classes/semver') + +const inc = (version, release, options, identifier) => { + if (typeof (options) === 'string') { + identifier = options + options = undefined + } + + try { + return new SemVer( + version instanceof SemVer ? version.version : version, + options + ).inc(release, identifier).version + } catch (er) { + return null + } +} +module.exports = inc diff --git a/mybulma/node_modules/semver/functions/lt.js b/mybulma/node_modules/semver/functions/lt.js new file mode 100644 index 0000000..b440ab7 --- /dev/null +++ b/mybulma/node_modules/semver/functions/lt.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const lt = (a, b, loose) => compare(a, b, loose) < 0 +module.exports = lt diff --git a/mybulma/node_modules/semver/functions/lte.js b/mybulma/node_modules/semver/functions/lte.js new file mode 100644 index 0000000..6dcc956 --- /dev/null +++ b/mybulma/node_modules/semver/functions/lte.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const lte = (a, b, loose) => compare(a, b, loose) <= 0 +module.exports = lte diff --git a/mybulma/node_modules/semver/functions/major.js b/mybulma/node_modules/semver/functions/major.js new file mode 100644 index 0000000..4283165 --- /dev/null +++ b/mybulma/node_modules/semver/functions/major.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const major = (a, loose) => new SemVer(a, loose).major +module.exports = major diff --git a/mybulma/node_modules/semver/functions/minor.js b/mybulma/node_modules/semver/functions/minor.js new file mode 100644 index 0000000..57b3455 --- /dev/null +++ b/mybulma/node_modules/semver/functions/minor.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const minor = (a, loose) => new SemVer(a, loose).minor +module.exports = minor diff --git a/mybulma/node_modules/semver/functions/neq.js b/mybulma/node_modules/semver/functions/neq.js new file mode 100644 index 0000000..f944c01 --- /dev/null +++ b/mybulma/node_modules/semver/functions/neq.js @@ -0,0 +1,3 @@ +const compare = 
require('./compare') +const neq = (a, b, loose) => compare(a, b, loose) !== 0 +module.exports = neq diff --git a/mybulma/node_modules/semver/functions/parse.js b/mybulma/node_modules/semver/functions/parse.js new file mode 100644 index 0000000..a66663a --- /dev/null +++ b/mybulma/node_modules/semver/functions/parse.js @@ -0,0 +1,33 @@ +const { MAX_LENGTH } = require('../internal/constants') +const { re, t } = require('../internal/re') +const SemVer = require('../classes/semver') + +const parseOptions = require('../internal/parse-options') +const parse = (version, options) => { + options = parseOptions(options) + + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + if (version.length > MAX_LENGTH) { + return null + } + + const r = options.loose ? re[t.LOOSE] : re[t.FULL] + if (!r.test(version)) { + return null + } + + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} + +module.exports = parse diff --git a/mybulma/node_modules/semver/functions/patch.js b/mybulma/node_modules/semver/functions/patch.js new file mode 100644 index 0000000..63afca2 --- /dev/null +++ b/mybulma/node_modules/semver/functions/patch.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const patch = (a, loose) => new SemVer(a, loose).patch +module.exports = patch diff --git a/mybulma/node_modules/semver/functions/prerelease.js b/mybulma/node_modules/semver/functions/prerelease.js new file mode 100644 index 0000000..06aa132 --- /dev/null +++ b/mybulma/node_modules/semver/functions/prerelease.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const prerelease = (version, options) => { + const parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null +} +module.exports = prerelease diff --git a/mybulma/node_modules/semver/functions/rcompare.js b/mybulma/node_modules/semver/functions/rcompare.js new file mode 100644 index 0000000..0ac509e --- /dev/null +++ b/mybulma/node_modules/semver/functions/rcompare.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const rcompare = (a, b, loose) => compare(b, a, loose) +module.exports = rcompare diff --git a/mybulma/node_modules/semver/functions/rsort.js b/mybulma/node_modules/semver/functions/rsort.js new file mode 100644 index 0000000..82404c5 --- /dev/null +++ b/mybulma/node_modules/semver/functions/rsort.js @@ -0,0 +1,3 @@ +const compareBuild = require('./compare-build') +const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) +module.exports = rsort diff --git a/mybulma/node_modules/semver/functions/satisfies.js b/mybulma/node_modules/semver/functions/satisfies.js new file mode 100644 index 0000000..50af1c1 --- /dev/null +++ b/mybulma/node_modules/semver/functions/satisfies.js @@ -0,0 +1,10 @@ +const Range = require('../classes/range') +const satisfies = (version, range, options) => { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} +module.exports = satisfies diff --git a/mybulma/node_modules/semver/functions/sort.js b/mybulma/node_modules/semver/functions/sort.js new file mode 100644 index 0000000..4d10917 --- /dev/null +++ b/mybulma/node_modules/semver/functions/sort.js @@ -0,0 +1,3 @@ +const compareBuild = require('./compare-build') +const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) +module.exports = sort diff --git a/mybulma/node_modules/semver/functions/valid.js b/mybulma/node_modules/semver/functions/valid.js new file mode 100644 index 0000000..f27bae1 --- /dev/null +++ b/mybulma/node_modules/semver/functions/valid.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const valid = (version, options) => { + const v = parse(version, options) + return v ? v.version : null +} +module.exports = valid diff --git a/mybulma/node_modules/semver/internal/constants.js b/mybulma/node_modules/semver/internal/constants.js new file mode 100644 index 0000000..4f0de59 --- /dev/null +++ b/mybulma/node_modules/semver/internal/constants.js @@ -0,0 +1,17 @@ +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +const SEMVER_SPEC_VERSION = '2.0.0' + +const MAX_LENGTH = 256 +const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || +/* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +const MAX_SAFE_COMPONENT_LENGTH = 16 + +module.exports = { + SEMVER_SPEC_VERSION, + MAX_LENGTH, + MAX_SAFE_INTEGER, + MAX_SAFE_COMPONENT_LENGTH, +} diff --git a/mybulma/node_modules/semver/internal/debug.js b/mybulma/node_modules/semver/internal/debug.js new file mode 100644 index 0000000..1c00e13 --- /dev/null +++ b/mybulma/node_modules/semver/internal/debug.js @@ -0,0 +1,9 @@ +const debug = ( + typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG) +) ? 
(...args) => console.error('SEMVER', ...args) + : () => {} + +module.exports = debug diff --git a/mybulma/node_modules/semver/internal/identifiers.js b/mybulma/node_modules/semver/internal/identifiers.js new file mode 100644 index 0000000..e612d0a --- /dev/null +++ b/mybulma/node_modules/semver/internal/identifiers.js @@ -0,0 +1,23 @@ +const numeric = /^[0-9]+$/ +const compareIdentifiers = (a, b) => { + const anum = numeric.test(a) + const bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? -1 + : 1 +} + +const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) + +module.exports = { + compareIdentifiers, + rcompareIdentifiers, +} diff --git a/mybulma/node_modules/semver/internal/parse-options.js b/mybulma/node_modules/semver/internal/parse-options.js new file mode 100644 index 0000000..bbd9ec7 --- /dev/null +++ b/mybulma/node_modules/semver/internal/parse-options.js @@ -0,0 +1,11 @@ +// parse out just the options we care about so we always get a consistent +// obj with keys in a consistent order. +const opts = ['includePrerelease', 'loose', 'rtl'] +const parseOptions = options => + !options ? {} + : typeof options !== 'object' ? { loose: true } + : opts.filter(k => options[k]).reduce((o, k) => { + o[k] = true + return o + }, {}) +module.exports = parseOptions diff --git a/mybulma/node_modules/semver/internal/re.js b/mybulma/node_modules/semver/internal/re.js new file mode 100644 index 0000000..ed88398 --- /dev/null +++ b/mybulma/node_modules/semver/internal/re.js @@ -0,0 +1,182 @@ +const { MAX_SAFE_COMPONENT_LENGTH } = require('./constants') +const debug = require('./debug') +exports = module.exports = {} + +// The actual regexps go on exports.re +const re = exports.re = [] +const src = exports.src = [] +const t = exports.t = {} +let R = 0 + +const createToken = (name, value, isGlobal) => { + const index = R++ + debug(name, index, value) + t[name] = index + src[index] = value + re[index] = new RegExp(value, isGlobal ? 'g' : undefined) +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') +createToken('NUMERICIDENTIFIERLOOSE', '[0-9]+') + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +createToken('NONNUMERICIDENTIFIER', '\\d*[a-zA-Z-][a-zA-Z0-9-]*') + +// ## Main Version +// Three dot-separated numeric identifiers. + +createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})`) + +createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})`) + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. 
+ +createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] +}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) + +createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] +}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +createToken('BUILDIDENTIFIER', '[0-9A-Za-z-]+') + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] +}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +createToken('FULLPLAIN', `v?${src[t.MAINVERSION] +}${src[t.PRERELEASE]}?${ + src[t.BUILD]}?`) + +createToken('FULL', `^${src[t.FULLPLAIN]}$`) + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] +}${src[t.PRERELEASELOOSE]}?${ + src[t.BUILD]}?`) + +createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) + +createToken('GTLT', '((?:<|>)?=?)') + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) +createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) + +createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:${src[t.PRERELEASE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:${src[t.PRERELEASELOOSE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) +createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +createToken('COERCE', `${'(^|[^\\d])' + + '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:$|[^\\d])`) +createToken('COERCERTL', src[t.COERCE], true) + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +createToken('LONETILDE', '(?:~>?)') + +createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) +exports.tildeTrimReplace = '$1~' + +createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) +createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) + +// Caret ranges. 
+// Meaning is "at least and backwards compatible with" +createToken('LONECARET', '(?:\\^)') + +createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) +exports.caretTrimReplace = '$1^' + +createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) +createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) +createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] +}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) +exports.comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAIN]})` + + `\\s*$`) + +createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAINLOOSE]})` + + `\\s*$`) + +// Star ranges basically just allow anything at all. +createToken('STAR', '(<|>)?=?\\s*\\*') +// >=0.0.0 is like a star +createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') +createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') diff --git a/mybulma/node_modules/semver/node_modules/lru-cache/LICENSE b/mybulma/node_modules/semver/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/mybulma/node_modules/semver/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/mybulma/node_modules/semver/node_modules/lru-cache/README.md b/mybulma/node_modules/semver/node_modules/lru-cache/README.md new file mode 100644 index 0000000..435dfeb --- /dev/null +++ b/mybulma/node_modules/semver/node_modules/lru-cache/README.md @@ -0,0 +1,166 @@ +# lru cache + +A cache object that deletes the least-recently-used items. 
+ +[![Build Status](https://travis-ci.org/isaacs/node-lru-cache.svg?branch=master)](https://travis-ci.org/isaacs/node-lru-cache) [![Coverage Status](https://coveralls.io/repos/isaacs/node-lru-cache/badge.svg?service=github)](https://coveralls.io/github/isaacs/node-lru-cache) + +## Installation: + +```javascript +npm install lru-cache --save +``` + +## Usage: + +```javascript +var LRU = require("lru-cache") + , options = { max: 500 + , length: function (n, key) { return n * 2 + key.length } + , dispose: function (key, n) { n.close() } + , maxAge: 1000 * 60 * 60 } + , cache = new LRU(options) + , otherCache = new LRU(50) // sets just the max size + +cache.set("key", "value") +cache.get("key") // "value" + +// non-string keys ARE fully supported +// but note that it must be THE SAME object, not +// just a JSON-equivalent object. +var someObject = { a: 1 } +cache.set(someObject, 'a value') +// Object keys are not toString()-ed +cache.set('[object Object]', 'a different value') +assert.equal(cache.get(someObject), 'a value') +// A similar object with same keys/values won't work, +// because it's a different object identity +assert.equal(cache.get({ a: 1 }), undefined) + +cache.reset() // empty the cache +``` + +If you put more stuff in it, then items will fall out. + +If you try to put an oversized thing in it, then it'll fall out right +away. + +## Options + +* `max` The maximum size of the cache, checked by applying the length + function to all values in the cache. Not setting this is kind of + silly, since that's the whole purpose of this lib, but it defaults + to `Infinity`. Setting it to a non-number or negative number will + throw a `TypeError`. Setting it to 0 makes it be `Infinity`. +* `maxAge` Maximum age in ms. Items are not pro-actively pruned out + as they age, but if you try to get an item that is too old, it'll + drop it and return undefined instead of giving it to you. + Setting this to a negative value will make everything seem old! + Setting it to a non-number will throw a `TypeError`. +* `length` Function that is used to calculate the length of stored + items. If you're storing strings or buffers, then you probably want + to do something like `function(n, key){return n.length}`. The default is + `function(){return 1}`, which is fine if you want to store `max` + like-sized things. The item is passed as the first argument, and + the key is passed as the second argumnet. +* `dispose` Function that is called on items when they are dropped + from the cache. This can be handy if you want to close file + descriptors or do other cleanup tasks when items are no longer + accessible. Called with `key, value`. It's called *before* + actually removing the item from the internal cache, so if you want + to immediately put it back in, you'll have to do that in a + `nextTick` or `setTimeout` callback or it won't do anything. +* `stale` By default, if you set a `maxAge`, it'll only actually pull + stale items out of the cache when you `get(key)`. (That is, it's + not pre-emptively doing a `setTimeout` or anything.) If you set + `stale:true`, it'll return the stale value before deleting it. If + you don't set this, then it'll return `undefined` when you try to + get a stale entry, as if it had already been deleted. +* `noDisposeOnSet` By default, if you set a `dispose()` method, then + it'll be called whenever a `set()` operation overwrites an existing + key. If you set this option, `dispose()` will only be called when a + key falls out of the cache, not when it is overwritten. 
+* `updateAgeOnGet` When using time-expiring entries with `maxAge`, + setting this to `true` will make each item's effective time update + to the current time whenever it is retrieved from cache, causing it + to not expire. (It can still fall out of cache based on recency of + use, of course.) + +## API + +* `set(key, value, maxAge)` +* `get(key) => value` + + Both of these will update the "recently used"-ness of the key. + They do what you think. `maxAge` is optional and overrides the + cache `maxAge` option if provided. + + If the key is not found, `get()` will return `undefined`. + + The key and val can be any value. + +* `peek(key)` + + Returns the key value (or `undefined` if not found) without + updating the "recently used"-ness of the key. + + (If you find yourself using this a lot, you *might* be using the + wrong sort of data structure, but there are some use cases where + it's handy.) + +* `del(key)` + + Deletes a key out of the cache. + +* `reset()` + + Clear the cache entirely, throwing away all values. + +* `has(key)` + + Check if a key is in the cache, without updating the recent-ness + or deleting it for being stale. + +* `forEach(function(value,key,cache), [thisp])` + + Just like `Array.prototype.forEach`. Iterates over all the keys + in the cache, in order of recent-ness. (Ie, more recently used + items are iterated over first.) + +* `rforEach(function(value,key,cache), [thisp])` + + The same as `cache.forEach(...)` but items are iterated over in + reverse order. (ie, less recently used items are iterated over + first.) + +* `keys()` + + Return an array of the keys in the cache. + +* `values()` + + Return an array of the values in the cache. + +* `length` + + Return total length of objects in cache taking into account + `length` options function. + +* `itemCount` + + Return total quantity of objects currently in cache. Note, that + `stale` (see options) items are returned as part of this item + count. + +* `dump()` + + Return an array of the cache entries ready for serialization and usage + with 'destinationCache.load(arr)`. + +* `load(cacheEntriesArray)` + + Loads another cache entries array, obtained with `sourceCache.dump()`, + into the cache. The destination cache is reset before loading new entries + +* `prune()` + + Manually iterates over the entire cache proactively pruning old entries diff --git a/mybulma/node_modules/semver/node_modules/lru-cache/index.js b/mybulma/node_modules/semver/node_modules/lru-cache/index.js new file mode 100644 index 0000000..573b6b8 --- /dev/null +++ b/mybulma/node_modules/semver/node_modules/lru-cache/index.js @@ -0,0 +1,334 @@ +'use strict' + +// A linked list to keep track of recently-used-ness +const Yallist = require('yallist') + +const MAX = Symbol('max') +const LENGTH = Symbol('length') +const LENGTH_CALCULATOR = Symbol('lengthCalculator') +const ALLOW_STALE = Symbol('allowStale') +const MAX_AGE = Symbol('maxAge') +const DISPOSE = Symbol('dispose') +const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') +const LRU_LIST = Symbol('lruList') +const CACHE = Symbol('cache') +const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') + +const naiveLength = () => 1 + +// lruList is a yallist where the head is the youngest +// item, and the tail is the oldest. the list contains the Hit +// objects as the entries. +// Each Hit object has a reference to its Yallist.Node. This +// never changes. +// +// cache is a Map (or PseudoMap) that matches the keys to +// the Yallist.Node object. 
+class LRUCache { + constructor (options) { + if (typeof options === 'number') + options = { max: options } + + if (!options) + options = {} + + if (options.max && (typeof options.max !== 'number' || options.max < 0)) + throw new TypeError('max must be a non-negative number') + // Kind of weird to have a default max of Infinity, but oh well. + const max = this[MAX] = options.max || Infinity + + const lc = options.length || naiveLength + this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc + this[ALLOW_STALE] = options.stale || false + if (options.maxAge && typeof options.maxAge !== 'number') + throw new TypeError('maxAge must be a number') + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false + this.reset() + } + + // resize the cache when the max changes. + set max (mL) { + if (typeof mL !== 'number' || mL < 0) + throw new TypeError('max must be a non-negative number') + + this[MAX] = mL || Infinity + trim(this) + } + get max () { + return this[MAX] + } + + set allowStale (allowStale) { + this[ALLOW_STALE] = !!allowStale + } + get allowStale () { + return this[ALLOW_STALE] + } + + set maxAge (mA) { + if (typeof mA !== 'number') + throw new TypeError('maxAge must be a non-negative number') + + this[MAX_AGE] = mA + trim(this) + } + get maxAge () { + return this[MAX_AGE] + } + + // resize the cache when the lengthCalculator changes. + set lengthCalculator (lC) { + if (typeof lC !== 'function') + lC = naiveLength + + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(hit => { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }) + } + trim(this) + } + get lengthCalculator () { return this[LENGTH_CALCULATOR] } + + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } + } + + forEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } + } + + keys () { + return this[LRU_LIST].toArray().map(k => k.key) + } + + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } + + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list + } + + dump () { + return this[LRU_LIST].map(hit => + isStale(this, hit) ? false : { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + }).toArray().filter(h => h) + } + + dumpLru () { + return this[LRU_LIST] + } + + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') + + const now = maxAge ? 
Date.now() : 0 + const len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + const node = this[CACHE].get(key) + const item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value) + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + const hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. + if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value) + + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true + } + + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value + return !isStale(this, hit) + } + + get (key) { + return get(this, key, true) + } + + peek (key) { + return get(this, key, false) + } + + pop () { + const node = this[LRU_LIST].tail + if (!node) + return null + + del(this, node) + return node.value + } + + del (key) { + del(this, this[CACHE].get(key)) + } + + load (arr) { + // reset the cache + this.reset() + + const now = Date.now() + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l] + const expiresAt = hit.e || 0 + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + else { + const maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } + } + + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)) + } +} + +const get = (self, key, doUse) => { + const node = self[CACHE].get(key) + if (node) { + const hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + return undefined + } else { + if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now() + self[LRU_LIST].unshiftNode(node) + } + } + return hit.value + } +} + +const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) + return false + + const diff = Date.now() - hit.now + return hit.maxAge ? diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) +} + +const trim = self => { + if (self[LENGTH] > self[MAX]) { + for (let walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. 
+ const prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +const del = (self, node) => { + if (node) { + const hit = node.value + if (self[DISPOSE]) + self[DISPOSE](hit.key, hit.value) + + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 + } +} + +const forEachStep = (self, fn, node, thisp) => { + let hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + hit = undefined + } + if (hit) + fn.call(thisp, hit.value, hit.key, self) +} + +module.exports = LRUCache diff --git a/mybulma/node_modules/semver/node_modules/lru-cache/package.json b/mybulma/node_modules/semver/node_modules/lru-cache/package.json new file mode 100644 index 0000000..43b7502 --- /dev/null +++ b/mybulma/node_modules/semver/node_modules/lru-cache/package.json @@ -0,0 +1,34 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "6.0.0", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "main": "index.js", + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "benchmark": "^2.1.4", + "tap": "^14.10.7" + }, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "files": [ + "index.js" + ], + "engines": { + "node": ">=10" + } +} diff --git a/mybulma/node_modules/semver/ranges/gtr.js b/mybulma/node_modules/semver/ranges/gtr.js new file mode 100644 index 0000000..db7e355 --- /dev/null +++ b/mybulma/node_modules/semver/ranges/gtr.js @@ -0,0 +1,4 @@ +// Determine if version is greater than all the versions possible in the range. 
+const outside = require('./outside') +const gtr = (version, range, options) => outside(version, range, '>', options) +module.exports = gtr diff --git a/mybulma/node_modules/semver/ranges/intersects.js b/mybulma/node_modules/semver/ranges/intersects.js new file mode 100644 index 0000000..3d1a6f3 --- /dev/null +++ b/mybulma/node_modules/semver/ranges/intersects.js @@ -0,0 +1,7 @@ +const Range = require('../classes/range') +const intersects = (r1, r2, options) => { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} +module.exports = intersects diff --git a/mybulma/node_modules/semver/ranges/ltr.js b/mybulma/node_modules/semver/ranges/ltr.js new file mode 100644 index 0000000..528a885 --- /dev/null +++ b/mybulma/node_modules/semver/ranges/ltr.js @@ -0,0 +1,4 @@ +const outside = require('./outside') +// Determine if version is less than all the versions possible in the range +const ltr = (version, range, options) => outside(version, range, '<', options) +module.exports = ltr diff --git a/mybulma/node_modules/semver/ranges/max-satisfying.js b/mybulma/node_modules/semver/ranges/max-satisfying.js new file mode 100644 index 0000000..6e3d993 --- /dev/null +++ b/mybulma/node_modules/semver/ranges/max-satisfying.js @@ -0,0 +1,25 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') + +const maxSatisfying = (versions, range, options) => { + let max = null + let maxSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} +module.exports = maxSatisfying diff --git a/mybulma/node_modules/semver/ranges/min-satisfying.js b/mybulma/node_modules/semver/ranges/min-satisfying.js new file mode 100644 index 0000000..9b60974 --- /dev/null +++ b/mybulma/node_modules/semver/ranges/min-satisfying.js @@ -0,0 +1,24 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const minSatisfying = (versions, range, options) => { + let min = null + let minSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} +module.exports = minSatisfying diff --git a/mybulma/node_modules/semver/ranges/min-version.js b/mybulma/node_modules/semver/ranges/min-version.js new file mode 100644 index 0000000..350e1f7 --- /dev/null +++ b/mybulma/node_modules/semver/ranges/min-version.js @@ -0,0 +1,61 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const gt = require('../functions/gt') + +const minVersion = (range, loose) => { + range = new Range(range, loose) + + let minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let setMin = null + comparators.forEach((comparator) => { + // Clone to avoid manipulating the comparator's semver object. 
+ const compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!setMin || gt(compver, setMin)) { + setMin = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error(`Unexpected operation: ${comparator.operator}`) + } + }) + if (setMin && (!minver || gt(minver, setMin))) { + minver = setMin + } + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} +module.exports = minVersion diff --git a/mybulma/node_modules/semver/ranges/outside.js b/mybulma/node_modules/semver/ranges/outside.js new file mode 100644 index 0000000..ae99b10 --- /dev/null +++ b/mybulma/node_modules/semver/ranges/outside.js @@ -0,0 +1,80 @@ +const SemVer = require('../classes/semver') +const Comparator = require('../classes/comparator') +const { ANY } = Comparator +const Range = require('../classes/range') +const satisfies = require('../functions/satisfies') +const gt = require('../functions/gt') +const lt = require('../functions/lt') +const lte = require('../functions/lte') +const gte = require('../functions/gte') + +const outside = (version, range, hilo, options) => { + version = new SemVer(version, options) + range = new Range(range, options) + + let gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisfies the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let high = null + let low = null + + comparators.forEach((comparator) => { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +module.exports = outside diff --git a/mybulma/node_modules/semver/ranges/simplify.js b/mybulma/node_modules/semver/ranges/simplify.js new file mode 100644 index 0000000..618d5b6 --- /dev/null +++ b/mybulma/node_modules/semver/ranges/simplify.js @@ -0,0 +1,47 @@ +// given a set of versions and a range, create a "simplified" range +// that includes the same versions that the original range does +// If the original range is shorter than the simplified one, return that. 
+const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') +module.exports = (versions, range, options) => { + const set = [] + let first = null + let prev = null + const v = versions.sort((a, b) => compare(a, b, options)) + for (const version of v) { + const included = satisfies(version, range, options) + if (included) { + prev = version + if (!first) { + first = version + } + } else { + if (prev) { + set.push([first, prev]) + } + prev = null + first = null + } + } + if (first) { + set.push([first, null]) + } + + const ranges = [] + for (const [min, max] of set) { + if (min === max) { + ranges.push(min) + } else if (!max && min === v[0]) { + ranges.push('*') + } else if (!max) { + ranges.push(`>=${min}`) + } else if (min === v[0]) { + ranges.push(`<=${max}`) + } else { + ranges.push(`${min} - ${max}`) + } + } + const simplified = ranges.join(' || ') + const original = typeof range.raw === 'string' ? range.raw : String(range) + return simplified.length < original.length ? simplified : range +} diff --git a/mybulma/node_modules/semver/ranges/subset.js b/mybulma/node_modules/semver/ranges/subset.js new file mode 100644 index 0000000..e0dea43 --- /dev/null +++ b/mybulma/node_modules/semver/ranges/subset.js @@ -0,0 +1,244 @@ +const Range = require('../classes/range.js') +const Comparator = require('../classes/comparator.js') +const { ANY } = Comparator +const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') + +// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: +// - Every simple range `r1, r2, ...` is a null set, OR +// - Every simple range `r1, r2, ...` which is not a null set is a subset of +// some `R1, R2, ...` +// +// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: +// - If c is only the ANY comparator +// - If C is only the ANY comparator, return true +// - Else if in prerelease mode, return false +// - else replace c with `[>=0.0.0]` +// - If C is only the ANY comparator +// - if in prerelease mode, return true +// - else replace C with `[>=0.0.0]` +// - Let EQ be the set of = comparators in c +// - If EQ is more than one, return true (null set) +// - Let GT be the highest > or >= comparator in c +// - Let LT be the lowest < or <= comparator in c +// - If GT and LT, and GT.semver > LT.semver, return true (null set) +// - If any C is a = range, and GT or LT are set, return false +// - If EQ +// - If GT, and EQ does not satisfy GT, return true (null set) +// - If LT, and EQ does not satisfy LT, return true (null set) +// - If EQ satisfies every C, return true +// - Else return false +// - If GT +// - If GT.semver is lower than any > or >= comp in C, return false +// - If GT is >=, and GT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the GT.semver tuple, return false +// - If LT +// - If LT.semver is greater than any < or <= comp in C, return false +// - If LT is <=, and LT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the LT.semver tuple, return false +// - Else return true + +const subset = (sub, dom, options = {}) => { + if (sub === dom) { + return true + } + + sub = new Range(sub, options) + dom = new Range(dom, options) + let sawNonNull = false + + OUTER: for (const simpleSub of sub.set) { + for (const simpleDom of dom.set) { + const isSub = 
simpleSubset(simpleSub, simpleDom, options) + sawNonNull = sawNonNull || isSub !== null + if (isSub) { + continue OUTER + } + } + // the null set is a subset of everything, but null simple ranges in + // a complex range should be ignored. so if we saw a non-null range, + // then we know this isn't a subset, but if EVERY simple range was null, + // then it is a subset. + if (sawNonNull) { + return false + } + } + return true +} + +const simpleSubset = (sub, dom, options) => { + if (sub === dom) { + return true + } + + if (sub.length === 1 && sub[0].semver === ANY) { + if (dom.length === 1 && dom[0].semver === ANY) { + return true + } else if (options.includePrerelease) { + sub = [new Comparator('>=0.0.0-0')] + } else { + sub = [new Comparator('>=0.0.0')] + } + } + + if (dom.length === 1 && dom[0].semver === ANY) { + if (options.includePrerelease) { + return true + } else { + dom = [new Comparator('>=0.0.0')] + } + } + + const eqSet = new Set() + let gt, lt + for (const c of sub) { + if (c.operator === '>' || c.operator === '>=') { + gt = higherGT(gt, c, options) + } else if (c.operator === '<' || c.operator === '<=') { + lt = lowerLT(lt, c, options) + } else { + eqSet.add(c.semver) + } + } + + if (eqSet.size > 1) { + return null + } + + let gtltComp + if (gt && lt) { + gtltComp = compare(gt.semver, lt.semver, options) + if (gtltComp > 0) { + return null + } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { + return null + } + } + + // will iterate one or zero times + for (const eq of eqSet) { + if (gt && !satisfies(eq, String(gt), options)) { + return null + } + + if (lt && !satisfies(eq, String(lt), options)) { + return null + } + + for (const c of dom) { + if (!satisfies(eq, String(c), options)) { + return false + } + } + + return true + } + + let higher, lower + let hasDomLT, hasDomGT + // if the subset has a prerelease, we need a comparator in the superset + // with the same tuple and a prerelease, or it's not a subset + let needDomLTPre = lt && + !options.includePrerelease && + lt.semver.prerelease.length ? lt.semver : false + let needDomGTPre = gt && + !options.includePrerelease && + gt.semver.prerelease.length ? 
gt.semver : false + // exception: <1.2.3-0 is the same as <1.2.3 + if (needDomLTPre && needDomLTPre.prerelease.length === 1 && + lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { + needDomLTPre = false + } + + for (const c of dom) { + hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' + hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' + if (gt) { + if (needDomGTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomGTPre.major && + c.semver.minor === needDomGTPre.minor && + c.semver.patch === needDomGTPre.patch) { + needDomGTPre = false + } + } + if (c.operator === '>' || c.operator === '>=') { + higher = higherGT(gt, c, options) + if (higher === c && higher !== gt) { + return false + } + } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { + return false + } + } + if (lt) { + if (needDomLTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomLTPre.major && + c.semver.minor === needDomLTPre.minor && + c.semver.patch === needDomLTPre.patch) { + needDomLTPre = false + } + } + if (c.operator === '<' || c.operator === '<=') { + lower = lowerLT(lt, c, options) + if (lower === c && lower !== lt) { + return false + } + } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { + return false + } + } + if (!c.operator && (lt || gt) && gtltComp !== 0) { + return false + } + } + + // if there was a < or >, and nothing in the dom, then must be false + // UNLESS it was limited by another range in the other direction. + // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 + if (gt && hasDomLT && !lt && gtltComp !== 0) { + return false + } + + if (lt && hasDomGT && !gt && gtltComp !== 0) { + return false + } + + // we needed a prerelease range in a specific tuple, but didn't get one + // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, + // because it includes prereleases in the 1.2.3 tuple + if (needDomGTPre || needDomLTPre) { + return false + } + + return true +} + +// >=1.2.3 is lower than >1.2.3 +const higherGT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp > 0 ? a + : comp < 0 ? b + : b.operator === '>' && a.operator === '>=' ? b + : a +} + +// <=1.2.3 is higher than <1.2.3 +const lowerLT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp < 0 ? a + : comp > 0 ? b + : b.operator === '<' && a.operator === '<=' ? b + : a +} + +module.exports = subset diff --git a/mybulma/node_modules/semver/ranges/to-comparators.js b/mybulma/node_modules/semver/ranges/to-comparators.js new file mode 100644 index 0000000..6c8bc7e --- /dev/null +++ b/mybulma/node_modules/semver/ranges/to-comparators.js @@ -0,0 +1,8 @@ +const Range = require('../classes/range') + +// Mostly just for testing and legacy API reasons +const toComparators = (range, options) => + new Range(range, options).set + .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) + +module.exports = toComparators diff --git a/mybulma/node_modules/semver/ranges/valid.js b/mybulma/node_modules/semver/ranges/valid.js new file mode 100644 index 0000000..365f356 --- /dev/null +++ b/mybulma/node_modules/semver/ranges/valid.js @@ -0,0 +1,11 @@ +const Range = require('../classes/range') +const validRange = (range, options) => { + try { + // Return '*' instead of '' so that truthiness works. 
+ // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} +module.exports = validRange diff --git a/mybulma/node_modules/set-blocking/CHANGELOG.md b/mybulma/node_modules/set-blocking/CHANGELOG.md new file mode 100644 index 0000000..03bf591 --- /dev/null +++ b/mybulma/node_modules/set-blocking/CHANGELOG.md @@ -0,0 +1,26 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + + +# [2.0.0](https://github.com/yargs/set-blocking/compare/v1.0.0...v2.0.0) (2016-05-17) + + +### Features + +* add an isTTY check ([#3](https://github.com/yargs/set-blocking/issues/3)) ([66ce277](https://github.com/yargs/set-blocking/commit/66ce277)) + + +### BREAKING CHANGES + +* stdio/stderr will not be set to blocking if isTTY === false + + + + +# 1.0.0 (2016-05-14) + + +### Features + +* implemented shim for stream._handle.setBlocking ([6bde0c0](https://github.com/yargs/set-blocking/commit/6bde0c0)) diff --git a/mybulma/node_modules/set-blocking/LICENSE.txt b/mybulma/node_modules/set-blocking/LICENSE.txt new file mode 100644 index 0000000..836440b --- /dev/null +++ b/mybulma/node_modules/set-blocking/LICENSE.txt @@ -0,0 +1,14 @@ +Copyright (c) 2016, Contributors + +Permission to use, copy, modify, and/or distribute this software +for any purpose with or without fee is hereby granted, provided +that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE +LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/mybulma/node_modules/set-blocking/README.md b/mybulma/node_modules/set-blocking/README.md new file mode 100644 index 0000000..e93b420 --- /dev/null +++ b/mybulma/node_modules/set-blocking/README.md @@ -0,0 +1,31 @@ +# set-blocking + +[![Build Status](https://travis-ci.org/yargs/set-blocking.svg)](https://travis-ci.org/yargs/set-blocking) +[![NPM version](https://img.shields.io/npm/v/set-blocking.svg)](https://www.npmjs.com/package/set-blocking) +[![Coverage Status](https://coveralls.io/repos/yargs/set-blocking/badge.svg?branch=)](https://coveralls.io/r/yargs/set-blocking?branch=master) +[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version) + +set blocking `stdio` and `stderr` ensuring that terminal output does not truncate. + +```js +const setBlocking = require('set-blocking') +setBlocking(true) +console.log(someLargeStringToOutput) +``` + +## Historical Context/Word of Warning + +This was created as a shim to address the bug discussed in [node #6456](https://github.com/nodejs/node/issues/6456). This bug crops up on +newer versions of Node.js (`0.12+`), truncating terminal output. + +You should be mindful of the side-effects caused by using `set-blocking`: + +* if your module sets blocking to `true`, it will effect other modules + consuming your library. 
In [yargs](https://github.com/yargs/yargs/blob/master/yargs.js#L653) we only call + `setBlocking(true)` once we already know we are about to call `process.exit(code)`. +* this patch will not apply to subprocesses spawned with `isTTY = true`, this is + the [default `spawn()` behavior](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options). + +## License + +ISC diff --git a/mybulma/node_modules/set-blocking/index.js b/mybulma/node_modules/set-blocking/index.js new file mode 100644 index 0000000..6f78774 --- /dev/null +++ b/mybulma/node_modules/set-blocking/index.js @@ -0,0 +1,7 @@ +module.exports = function (blocking) { + [process.stdout, process.stderr].forEach(function (stream) { + if (stream._handle && stream.isTTY && typeof stream._handle.setBlocking === 'function') { + stream._handle.setBlocking(blocking) + } + }) +} diff --git a/mybulma/node_modules/set-blocking/package.json b/mybulma/node_modules/set-blocking/package.json new file mode 100644 index 0000000..c082db7 --- /dev/null +++ b/mybulma/node_modules/set-blocking/package.json @@ -0,0 +1,42 @@ +{ + "name": "set-blocking", + "version": "2.0.0", + "description": "set blocking stdio and stderr ensuring that terminal output does not truncate", + "main": "index.js", + "scripts": { + "pretest": "standard", + "test": "nyc mocha ./test/*.js", + "coverage": "nyc report --reporter=text-lcov | coveralls", + "version": "standard-version" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/yargs/set-blocking.git" + }, + "keywords": [ + "flush", + "terminal", + "blocking", + "shim", + "stdio", + "stderr" + ], + "author": "Ben Coe ", + "license": "ISC", + "bugs": { + "url": "https://github.com/yargs/set-blocking/issues" + }, + "homepage": "https://github.com/yargs/set-blocking#readme", + "devDependencies": { + "chai": "^3.5.0", + "coveralls": "^2.11.9", + "mocha": "^2.4.5", + "nyc": "^6.4.4", + "standard": "^7.0.1", + "standard-version": "^2.2.1" + }, + "files": [ + "index.js", + "LICENSE.txt" + ] +} \ No newline at end of file diff --git a/mybulma/node_modules/shebang-command/index.js b/mybulma/node_modules/shebang-command/index.js new file mode 100644 index 0000000..f35db30 --- /dev/null +++ b/mybulma/node_modules/shebang-command/index.js @@ -0,0 +1,19 @@ +'use strict'; +const shebangRegex = require('shebang-regex'); + +module.exports = (string = '') => { + const match = string.match(shebangRegex); + + if (!match) { + return null; + } + + const [path, argument] = match[0].replace(/#! ?/, '').split(' '); + const binary = path.split('/').pop(); + + if (binary === 'env') { + return argument; + } + + return argument ? 
`${binary} ${argument}` : binary; +}; diff --git a/mybulma/node_modules/shebang-command/license b/mybulma/node_modules/shebang-command/license new file mode 100644 index 0000000..db6bc32 --- /dev/null +++ b/mybulma/node_modules/shebang-command/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Kevin Mårtensson (github.com/kevva) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/mybulma/node_modules/shebang-command/package.json b/mybulma/node_modules/shebang-command/package.json new file mode 100644 index 0000000..18e3c04 --- /dev/null +++ b/mybulma/node_modules/shebang-command/package.json @@ -0,0 +1,34 @@ +{ + "name": "shebang-command", + "version": "2.0.0", + "description": "Get the command from a shebang", + "license": "MIT", + "repository": "kevva/shebang-command", + "author": { + "name": "Kevin Mårtensson", + "email": "kevinmartensson@gmail.com", + "url": "github.com/kevva" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "cmd", + "command", + "parse", + "shebang" + ], + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "devDependencies": { + "ava": "^2.3.0", + "xo": "^0.24.0" + } +} diff --git a/mybulma/node_modules/shebang-command/readme.md b/mybulma/node_modules/shebang-command/readme.md new file mode 100644 index 0000000..84feb44 --- /dev/null +++ b/mybulma/node_modules/shebang-command/readme.md @@ -0,0 +1,34 @@ +# shebang-command [![Build Status](https://travis-ci.org/kevva/shebang-command.svg?branch=master)](https://travis-ci.org/kevva/shebang-command) + +> Get the command from a shebang + + +## Install + +``` +$ npm install shebang-command +``` + + +## Usage + +```js +const shebangCommand = require('shebang-command'); + +shebangCommand('#!/usr/bin/env node'); +//=> 'node' + +shebangCommand('#!/bin/bash'); +//=> 'bash' +``` + + +## API + +### shebangCommand(string) + +#### string + +Type: `string` + +String containing a shebang. diff --git a/mybulma/node_modules/shebang-regex/index.d.ts b/mybulma/node_modules/shebang-regex/index.d.ts new file mode 100644 index 0000000..61d034b --- /dev/null +++ b/mybulma/node_modules/shebang-regex/index.d.ts @@ -0,0 +1,22 @@ +/** +Regular expression for matching a [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) line. 
+ +@example +``` +import shebangRegex = require('shebang-regex'); + +const string = '#!/usr/bin/env node\nconsole.log("unicorns");'; + +shebangRegex.test(string); +//=> true + +shebangRegex.exec(string)[0]; +//=> '#!/usr/bin/env node' + +shebangRegex.exec(string)[1]; +//=> '/usr/bin/env node' +``` +*/ +declare const shebangRegex: RegExp; + +export = shebangRegex; diff --git a/mybulma/node_modules/shebang-regex/index.js b/mybulma/node_modules/shebang-regex/index.js new file mode 100644 index 0000000..63fc4a0 --- /dev/null +++ b/mybulma/node_modules/shebang-regex/index.js @@ -0,0 +1,2 @@ +'use strict'; +module.exports = /^#!(.*)/; diff --git a/mybulma/node_modules/shebang-regex/license b/mybulma/node_modules/shebang-regex/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/mybulma/node_modules/shebang-regex/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/mybulma/node_modules/shebang-regex/package.json b/mybulma/node_modules/shebang-regex/package.json new file mode 100644 index 0000000..00ab30f --- /dev/null +++ b/mybulma/node_modules/shebang-regex/package.json @@ -0,0 +1,35 @@ +{ + "name": "shebang-regex", + "version": "3.0.0", + "description": "Regular expression for matching a shebang line", + "license": "MIT", + "repository": "sindresorhus/shebang-regex", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "regex", + "regexp", + "shebang", + "match", + "test", + "line" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/mybulma/node_modules/shebang-regex/readme.md b/mybulma/node_modules/shebang-regex/readme.md new file mode 100644 index 0000000..5ecf863 --- /dev/null +++ b/mybulma/node_modules/shebang-regex/readme.md @@ -0,0 +1,33 @@ +# shebang-regex [![Build Status](https://travis-ci.org/sindresorhus/shebang-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/shebang-regex) + +> Regular expression for matching a [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) line + + +## Install + +``` +$ npm install shebang-regex +``` + + +## Usage + +```js +const shebangRegex = require('shebang-regex'); + +const string = '#!/usr/bin/env node\nconsole.log("unicorns");'; + +shebangRegex.test(string); +//=> true + +shebangRegex.exec(string)[0]; +//=> '#!/usr/bin/env node' + +shebangRegex.exec(string)[1]; +//=> '/usr/bin/env node' +``` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/mybulma/node_modules/smart-buffer/.prettierrc.yaml b/mybulma/node_modules/smart-buffer/.prettierrc.yaml new file mode 100644 index 0000000..9a4f5ed --- /dev/null +++ b/mybulma/node_modules/smart-buffer/.prettierrc.yaml @@ -0,0 +1,5 @@ +parser: typescript +printWidth: 120 +tabWidth: 2 +singleQuote: true +trailingComma: none \ No newline at end of file diff --git a/mybulma/node_modules/smart-buffer/.travis.yml b/mybulma/node_modules/smart-buffer/.travis.yml new file mode 100644 index 0000000..eec71ce --- /dev/null +++ b/mybulma/node_modules/smart-buffer/.travis.yml @@ -0,0 +1,13 @@ +language: node_js +node_js: + - 6 + - 8 + - 10 + - 12 + - stable + +before_script: + - npm install -g typescript + - tsc -p ./ + +script: "npm run coveralls" \ No newline at end of file diff --git a/mybulma/node_modules/smart-buffer/LICENSE b/mybulma/node_modules/smart-buffer/LICENSE new file mode 100644 index 0000000..aab5771 --- /dev/null +++ b/mybulma/node_modules/smart-buffer/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2013-2017 Josh Glazebrook + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/mybulma/node_modules/smart-buffer/README.md b/mybulma/node_modules/smart-buffer/README.md new file mode 100644 index 0000000..6e49828 --- /dev/null +++ b/mybulma/node_modules/smart-buffer/README.md @@ -0,0 +1,633 @@ +smart-buffer [![Build Status](https://travis-ci.org/JoshGlazebrook/smart-buffer.svg?branch=master)](https://travis-ci.org/JoshGlazebrook/smart-buffer) [![Coverage Status](https://coveralls.io/repos/github/JoshGlazebrook/smart-buffer/badge.svg?branch=master)](https://coveralls.io/github/JoshGlazebrook/smart-buffer?branch=master) +============= + +smart-buffer is a Buffer wrapper that adds automatic read & write offset tracking, string operations, data insertions, and more. + +![stats](https://nodei.co/npm/smart-buffer.png?downloads=true&downloadRank=true&stars=true "stats") + +**Key Features**: +* Proxies all of the Buffer write and read functions +* Keeps track of read and write offsets automatically +* Grows the internal Buffer as needed +* Useful string operations. (Null terminating strings) +* Allows for inserting values at specific points in the Buffer +* Built in TypeScript +* Type Definitions Provided +* Browser Support (using Webpack/Browserify) +* Full test coverage + +**Requirements**: +* Node v4.0+ is supported at this time. (Versions prior to 2.0 will work on node 0.10) + + + +## Breaking Changes in v4.0 + +* Old constructor patterns have been completely removed. It's now required to use the SmartBuffer.fromXXX() factory constructors. +* rewind(), skip(), moveTo() have been removed. (see [offsets](#offsets)) +* Internal private properties are now prefixed with underscores (_) +* **All** writeXXX() methods that are given an offset will now **overwrite data** instead of insert. (see [write vs insert](#write-vs-insert)) +* insertXXX() methods have been added for when you want to insert data at a specific offset (this replaces the old behavior of writeXXX() when an offset was provided) + + +## Looking for v3 docs? + +Legacy documentation for version 3 and prior can be found [here](https://github.com/JoshGlazebrook/smart-buffer/blob/master/docs/README_v3.md). + +## Installing: + +`yarn add smart-buffer` + +or + +`npm install smart-buffer` + +Note: The published NPM package includes the built javascript library. +If you cloned this repo and wish to build the library manually use: + +`npm run build` + +## Using smart-buffer + +```javascript +// Javascript +const SmartBuffer = require('smart-buffer').SmartBuffer; + +// Typescript +import { SmartBuffer, SmartBufferOptions} from 'smart-buffer'; +``` + +### Simple Example + +Building a packet that uses the following protocol specification: + +`[PacketType:2][PacketLength:2][Data:XX]` + +To build this packet using the vanilla Buffer class, you would have to count up the length of the data payload beforehand. You would also need to keep track of the current "cursor" position in your Buffer so you write everything in the right places. With smart-buffer you don't have to do either of those things. 
+ +```javascript +function createLoginPacket(username, password, age, country) { + const packet = new SmartBuffer(); + packet.writeUInt16LE(0x0060); // Some packet type + packet.writeStringNT(username); + packet.writeStringNT(password); + packet.writeUInt8(age); + packet.writeStringNT(country); + packet.insertUInt16LE(packet.length - 2, 2); + + return packet.toBuffer(); +} +``` +With the above function, you now can do this: +```javascript +const login = createLoginPacket("Josh", "secret123", 22, "United States"); + +// <Buffer 60 00 1e 00 4a 6f 73 68 00 73 65 63 72 65 74 31 32 33 00 16 55 6e 69 74 65 64 20 53 74 61 74 65 73 00> +``` +Notice that the `[PacketLength:2]` value (1e 00) was inserted at position 2. + +Reading back the packet we created above is just as easy: +```javascript + +const reader = SmartBuffer.fromBuffer(login); + +const logininfo = { + packetType: reader.readUInt16LE(), + packetLength: reader.readUInt16LE(), + username: reader.readStringNT(), + password: reader.readStringNT(), + age: reader.readUInt8(), + country: reader.readStringNT() +}; + +/* +{ + packetType: 96, (0x0060) + packetLength: 30, + username: 'Josh', + password: 'secret123', + age: 22, + country: 'United States' +} +*/ +``` + + +## Write vs Insert +In prior versions of SmartBuffer, .writeXXX(value, offset) calls would insert data when an offset was provided. In version 4, this will now overwrite the data at the offset position. To insert data there are now corresponding .insertXXX(value, offset) methods. + +**SmartBuffer v3**: +```javascript +const buff = SmartBuffer.fromBuffer(new Buffer([1,2,3,4,5,6])); +buff.writeInt8(7, 2); +console.log(buff.toBuffer()); + +// <Buffer 01 02 07 03 04 05 06> +``` + +**SmartBuffer v4**: +```javascript +const buff = SmartBuffer.fromBuffer(new Buffer([1,2,3,4,5,6])); +buff.writeInt8(7, 2); +console.log(buff.toBuffer()); + +// <Buffer 01 02 07 04 05 06> +``` + +To insert you instead should use: +```javascript +const buff = SmartBuffer.fromBuffer(new Buffer([1,2,3,4,5,6])); +buff.insertInt8(7, 2); +console.log(buff.toBuffer()); + +// <Buffer 01 02 07 03 04 05 06> +``` + +**Note:** Insert/Writing to a position beyond the currently tracked internal Buffer will zero pad to your offset. + +## Constructing a smart-buffer + +There are a few different ways to construct a SmartBuffer instance. + +```javascript +// Creating SmartBuffer from existing Buffer +const buff = SmartBuffer.fromBuffer(buffer); // Creates instance from buffer. (Uses default utf8 encoding) +const buff = SmartBuffer.fromBuffer(buffer, 'ascii'); // Creates instance from buffer with ascii encoding for strings. + +// Creating SmartBuffer with specified internal Buffer size. (Note: this is not a hard cap, the internal buffer will grow as needed). +const buff = SmartBuffer.fromSize(1024); // Creates instance with internal Buffer size of 1024. +const buff = SmartBuffer.fromSize(1024, 'utf8'); // Creates instance with internal Buffer size of 1024, and utf8 encoding for strings. + +// Creating SmartBuffer with options object. This one specifies size and encoding. +const buff = SmartBuffer.fromOptions({ + size: 1024, + encoding: 'ascii' +}); + +// Creating SmartBuffer with options object. This one specifies an existing Buffer. +const buff = SmartBuffer.fromOptions({ + buff: buffer +}); + +// Creating SmartBuffer from a string. +const buff = SmartBuffer.fromBuffer(Buffer.from('some string', 'utf8')); + +// Just want a regular SmartBuffer with all default options? +const buff = new SmartBuffer(); +``` + +# Api Reference: + +**Note:** SmartBuffer is fully documented with Typescript definitions as well as jsdocs so your favorite editor/IDE will have intellisense. + +**Table of Contents** + +1.
[Constructing](#constructing) +2. **Numbers** + 1. [Integers](#integers) + 2. [Floating Points](#floating-point-numbers) +3. **Strings** + 1. [Strings](#strings) + 2. [Null Terminated Strings](#null-terminated-strings) +4. [Buffers](#buffers) +5. [Offsets](#offsets) +6. [Other](#other) + + +## Constructing + +### constructor() +### constructor([options]) +- ```options``` *{SmartBufferOptions}* An optional options object to construct a SmartBuffer with. + +Examples: +```javascript +const buff = new SmartBuffer(); +const buff = new SmartBuffer({ + size: 1024, + encoding: 'ascii' +}); +``` + +### Class Method: fromBuffer(buffer[, encoding]) +- ```buffer``` *{Buffer}* The Buffer instance to wrap. +- ```encoding``` *{string}* The string encoding to use. ```Default: 'utf8'``` + +Examples: +```javascript +const someBuffer = Buffer.from('some string'); +const buff = SmartBuffer.fromBuffer(someBuffer); // Defaults to utf8 +const buff = SmartBuffer.fromBuffer(someBuffer, 'ascii'); +``` + +### Class Method: fromSize(size[, encoding]) +- ```size``` *{number}* The size to initialize the internal Buffer. +- ```encoding``` *{string}* The string encoding to use. ```Default: 'utf8'``` + +Examples: +```javascript +const buff = SmartBuffer.fromSize(1024); // Defaults to utf8 +const buff = SmartBuffer.fromSize(1024, 'ascii'); +``` + +### Class Method: fromOptions(options) +- ```options``` *{SmartBufferOptions}* The Buffer instance to wrap. + +```typescript +interface SmartBufferOptions { + encoding?: BufferEncoding; // Defaults to utf8 + size?: number; // Defaults to 4096 + buff?: Buffer; +} +``` + +Examples: +```javascript +const buff = SmartBuffer.fromOptions({ + size: 1024 +}; +const buff = SmartBuffer.fromOptions({ + size: 1024, + encoding: 'utf8' +}); +const buff = SmartBuffer.fromOptions({ + encoding: 'utf8' +}); + +const someBuff = Buffer.from('some string', 'utf8'); +const buff = SmartBuffer.fromOptions({ + buffer: someBuff, + encoding: 'utf8' +}); +``` + +## Integers + +### buff.readInt8([offset]) +### buff.readUInt8([offset]) +- ```offset``` *{number}* Optional position to start reading data from. **Default**: ```Auto managed offset``` +- Returns *{number}* + +Read a Int8 value. + +### buff.readInt16BE([offset]) +### buff.readInt16LE([offset]) +### buff.readUInt16BE([offset]) +### buff.readUInt16LE([offset]) +- ```offset``` *{number}* Optional position to start reading data from. **Default**: ```Auto managed offset``` +- Returns *{number}* + +Read a 16 bit integer value. + +### buff.readInt32BE([offset]) +### buff.readInt32LE([offset]) +### buff.readUInt32BE([offset]) +### buff.readUInt32LE([offset]) +- ```offset``` *{number}* Optional position to start reading data from. **Default**: ```Auto managed offset``` +- Returns *{number}* + +Read a 32 bit integer value. + + +### buff.writeInt8(value[, offset]) +### buff.writeUInt8(value[, offset]) +- ```value``` *{number}* The value to write. +- ```offset``` *{number}* An optional offset to write this value to. **Default:** ```Auto managed offset``` +- Returns *{this}* + +Write a Int8 value. + +### buff.insertInt8(value, offset) +### buff.insertUInt8(value, offset) +- ```value``` *{number}* The value to insert. +- ```offset``` *{number}* The offset to insert this data at. +- Returns *{this}* + +Insert a Int8 value. + + +### buff.writeInt16BE(value[, offset]) +### buff.writeInt16LE(value[, offset]) +### buff.writeUInt16BE(value[, offset]) +### buff.writeUInt16LE(value[, offset]) +- ```value``` *{number}* The value to write. 
+- ```offset``` *{number}* An optional offset to write this value to. **Default:** ```Auto managed offset``` +- Returns *{this}* + +Write a 16 bit integer value. + +### buff.insertInt16BE(value, offset) +### buff.insertInt16LE(value, offset) +### buff.insertUInt16BE(value, offset) +### buff.insertUInt16LE(value, offset) +- ```value``` *{number}* The value to insert. +- ```offset``` *{number}* The offset to insert this data at. +- Returns *{this}* + +Insert a 16 bit integer value. + + +### buff.writeInt32BE(value[, offset]) +### buff.writeInt32LE(value[, offset]) +### buff.writeUInt32BE(value[, offset]) +### buff.writeUInt32LE(value[, offset]) +- ```value``` *{number}* The value to write. +- ```offset``` *{number}* An optional offset to write this value to. **Default:** ```Auto managed offset``` +- Returns *{this}* + +Write a 32 bit integer value. + +### buff.insertInt32BE(value, offset) +### buff.insertInt32LE(value, offset) +### buff.insertUInt32BE(value, offset) +### buff.nsertUInt32LE(value, offset) +- ```value``` *{number}* The value to insert. +- ```offset``` *{number}* The offset to insert this data at. +- Returns *{this}* + +Insert a 32 bit integer value. + + +## Floating Point Numbers + +### buff.readFloatBE([offset]) +### buff.readFloatLE([offset]) +- ```offset``` *{number}* Optional position to start reading data from. **Default**: ```Auto managed offset``` +- Returns *{number}* + +Read a Float value. + +### buff.readDoubleBE([offset]) +### buff.readDoubleLE([offset]) +- ```offset``` *{number}* Optional position to start reading data from. **Default**: ```Auto managed offset``` +- Returns *{number}* + +Read a Double value. + + +### buff.writeFloatBE(value[, offset]) +### buff.writeFloatLE(value[, offset]) +- ```value``` *{number}* The value to write. +- ```offset``` *{number}* An optional offset to write this value to. **Default:** ```Auto managed offset``` +- Returns *{this}* + +Write a Float value. + +### buff.insertFloatBE(value, offset) +### buff.insertFloatLE(value, offset) +- ```value``` *{number}* The value to insert. +- ```offset``` *{number}* The offset to insert this data at. +- Returns *{this}* + +Insert a Float value. + + +### buff.writeDoubleBE(value[, offset]) +### buff.writeDoubleLE(value[, offset]) +- ```value``` *{number}* The value to write. +- ```offset``` *{number}* An optional offset to write this value to. **Default:** ```Auto managed offset``` +- Returns *{this}* + +Write a Double value. + +### buff.insertDoubleBE(value, offset) +### buff.insertDoubleLE(value, offset) +- ```value``` *{number}* The value to insert. +- ```offset``` *{number}* The offset to insert this data at. +- Returns *{this}* + +Insert a Double value. + +## Strings + +### buff.readString() +### buff.readString(size[, encoding]) +### buff.readString(encoding) +- ```size``` *{number}* The number of bytes to read. **Default:** ```Reads to the end of the Buffer.``` +- ```encoding``` *{string}* The string encoding to use. **Default:** ```utf8```. + +Read a string value. + +Examples: +```javascript +const buff = SmartBuffer.fromBuffer(Buffer.from('hello there', 'utf8')); +buff.readString(); // 'hello there' +buff.readString(2); // 'he' +buff.readString(2, 'utf8'); // 'he' +buff.readString('utf8'); // 'hello there' +``` + +### buff.writeString(value) +### buff.writeString(value[, offset]) +### buff.writeString(value[, encoding]) +### buff.writeString(value[, offset[, encoding]]) +- ```value``` *{string}* The string value to write. +- ```offset``` *{number}* The offset to write this value to. 
**Default:** ```Auto managed offset``` +- ```encoding``` *{string}* An optional string encoding to use. **Default:** ```utf8``` + +Write a string value. + +Examples: +```javascript +buff.writeString('hello'); // Auto managed offset +buff.writeString('hello', 2); +buff.writeString('hello', 'utf8') // Auto managed offset +buff.writeString('hello', 2, 'utf8'); +``` + +### buff.insertString(value, offset[, encoding]) +- ```value``` *{string}* The string value to write. +- ```offset``` *{number}* The offset to write this value to. +- ```encoding``` *{string}* An optional string encoding to use. **Default:** ```utf8``` + +Insert a string value. + +Examples: +```javascript +buff.insertString('hello', 2); +buff.insertString('hello', 2, 'utf8'); +``` + +## Null Terminated Strings + +### buff.readStringNT() +### buff.readStringNT(encoding) +- ```encoding``` *{string}* The string encoding to use. **Default:** ```utf8```. + +Read a null terminated string value. (If a null is not found, it will read to the end of the Buffer). + +Examples: +```javascript +const buff = SmartBuffer.fromBuffer(Buffer.from('hello\0 there', 'utf8')); +buff.readStringNT(); // 'hello' + +// If we called this again: +buff.readStringNT(); // ' there' +``` + +### buff.writeStringNT(value) +### buff.writeStringNT(value[, offset]) +### buff.writeStringNT(value[, encoding]) +### buff.writeStringNT(value[, offset[, encoding]]) +- ```value``` *{string}* The string value to write. +- ```offset``` *{number}* The offset to write this value to. **Default:** ```Auto managed offset``` +- ```encoding``` *{string}* An optional string encoding to use. **Default:** ```utf8``` + +Write a null terminated string value. + +Examples: +```javascript +buff.writeStringNT('hello'); // Auto managed offset +buff.writeStringNT('hello', 2); // +buff.writeStringNT('hello', 'utf8') // Auto managed offset +buff.writeStringNT('hello', 2, 'utf8'); +``` + +### buff.insertStringNT(value, offset[, encoding]) +- ```value``` *{string}* The string value to write. +- ```offset``` *{number}* The offset to write this value to. +- ```encoding``` *{string}* An optional string encoding to use. **Default:** ```utf8``` + +Insert a null terminated string value. + +Examples: +```javascript +buff.insertStringNT('hello', 2); +buff.insertStringNT('hello', 2, 'utf8'); +``` + +## Buffers + +### buff.readBuffer([length]) +- ```length``` *{number}* The number of bytes to read into a Buffer. **Default:** ```Reads to the end of the Buffer``` + +Read a Buffer of a specified size. + +### buff.writeBuffer(value[, offset]) +- ```value``` *{Buffer}* The buffer value to write. +- ```offset``` *{number}* An optional offset to write the value to. **Default:** ```Auto managed offset``` + +### buff.insertBuffer(value, offset) +- ```value``` *{Buffer}* The buffer value to write. +- ```offset``` *{number}* The offset to write the value to. + + +### buff.readBufferNT() + +Read a null terminated Buffer. + +### buff.writeBufferNT(value[, offset]) +- ```value``` *{Buffer}* The buffer value to write. +- ```offset``` *{number}* An optional offset to write the value to. **Default:** ```Auto managed offset``` + +Write a null terminated Buffer. + + +### buff.insertBufferNT(value, offset) +- ```value``` *{Buffer}* The buffer value to write. +- ```offset``` *{number}* The offset to write the value to. + +Insert a null terminated Buffer. + + +## Offsets + +### buff.readOffset +### buff.readOffset(offset) +- ```offset``` *{number}* The new read offset value to set. 
+- Returns: ```The current read offset``` + +Gets or sets the current read offset. + +Examples: +```javascript +const currentOffset = buff.readOffset; // 5 + +buff.readOffset = 10; + +console.log(buff.readOffset) // 10 +``` + +### buff.writeOffset +### buff.writeOffset(offset) +- ```offset``` *{number}* The new write offset value to set. +- Returns: ```The current write offset``` + +Gets or sets the current write offset. + +Examples: +```javascript +const currentOffset = buff.writeOffset; // 5 + +buff.writeOffset = 10; + +console.log(buff.writeOffset) // 10 +``` + +### buff.encoding +### buff.encoding(encoding) +- ```encoding``` *{string}* The new string encoding to set. +- Returns: ```The current string encoding``` + +Gets or sets the current string encoding. + +Examples: +```javascript +const currentEncoding = buff.encoding; // 'utf8' + +buff.encoding = 'ascii'; + +console.log(buff.encoding) // 'ascii' +``` + +## Other + +### buff.clear() + +Clear and resets the SmartBuffer instance. + +### buff.remaining() +- Returns ```Remaining data left to be read``` + +Gets the number of remaining bytes to be read. + + +### buff.internalBuffer +- Returns: *{Buffer}* + +Gets the internally managed Buffer (Includes unmanaged data). + +Examples: +```javascript +const buff = SmartBuffer.fromSize(16); +buff.writeString('hello'); +console.log(buff.InternalBuffer); // +``` + +### buff.toBuffer() +- Returns: *{Buffer}* + +Gets a sliced Buffer instance of the internally managed Buffer. (Only includes managed data) + +Examples: +```javascript +const buff = SmartBuffer.fromSize(16); +buff.writeString('hello'); +console.log(buff.toBuffer()); // +``` + +### buff.toString([encoding]) +- ```encoding``` *{string}* The string encoding to use when converting to a string. **Default:** ```utf8``` +- Returns *{string}* + +Gets a string representation of all data in the SmartBuffer. + +### buff.destroy() + +Destroys the SmartBuffer instance. + + + +## License + +This work is licensed under the [MIT license](http://en.wikipedia.org/wiki/MIT_License). diff --git a/mybulma/node_modules/smart-buffer/build/smartbuffer.js b/mybulma/node_modules/smart-buffer/build/smartbuffer.js new file mode 100644 index 0000000..5353ae1 --- /dev/null +++ b/mybulma/node_modules/smart-buffer/build/smartbuffer.js @@ -0,0 +1,1233 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils_1 = require("./utils"); +// The default Buffer size if one is not provided. +const DEFAULT_SMARTBUFFER_SIZE = 4096; +// The default string encoding to use for reading/writing strings. +const DEFAULT_SMARTBUFFER_ENCODING = 'utf8'; +class SmartBuffer { + /** + * Creates a new SmartBuffer instance. + * + * @param options { SmartBufferOptions } The SmartBufferOptions to apply to this instance. 
+ */ + constructor(options) { + this.length = 0; + this._encoding = DEFAULT_SMARTBUFFER_ENCODING; + this._writeOffset = 0; + this._readOffset = 0; + if (SmartBuffer.isSmartBufferOptions(options)) { + // Checks for encoding + if (options.encoding) { + utils_1.checkEncoding(options.encoding); + this._encoding = options.encoding; + } + // Checks for initial size length + if (options.size) { + if (utils_1.isFiniteInteger(options.size) && options.size > 0) { + this._buff = Buffer.allocUnsafe(options.size); + } + else { + throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_SIZE); + } + // Check for initial Buffer + } + else if (options.buff) { + if (Buffer.isBuffer(options.buff)) { + this._buff = options.buff; + this.length = options.buff.length; + } + else { + throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_BUFFER); + } + } + else { + this._buff = Buffer.allocUnsafe(DEFAULT_SMARTBUFFER_SIZE); + } + } + else { + // If something was passed but it's not a SmartBufferOptions object + if (typeof options !== 'undefined') { + throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_OBJECT); + } + // Otherwise default to sane options + this._buff = Buffer.allocUnsafe(DEFAULT_SMARTBUFFER_SIZE); + } + } + /** + * Creates a new SmartBuffer instance with the provided internal Buffer size and optional encoding. + * + * @param size { Number } The size of the internal Buffer. + * @param encoding { String } The BufferEncoding to use for strings. + * + * @return { SmartBuffer } + */ + static fromSize(size, encoding) { + return new this({ + size: size, + encoding: encoding + }); + } + /** + * Creates a new SmartBuffer instance with the provided Buffer and optional encoding. + * + * @param buffer { Buffer } The Buffer to use as the internal Buffer value. + * @param encoding { String } The BufferEncoding to use for strings. + * + * @return { SmartBuffer } + */ + static fromBuffer(buff, encoding) { + return new this({ + buff: buff, + encoding: encoding + }); + } + /** + * Creates a new SmartBuffer instance with the provided SmartBufferOptions options. + * + * @param options { SmartBufferOptions } The options to use when creating the SmartBuffer instance. + */ + static fromOptions(options) { + return new this(options); + } + /** + * Type checking function that determines if an object is a SmartBufferOptions object. + */ + static isSmartBufferOptions(options) { + const castOptions = options; + return (castOptions && + (castOptions.encoding !== undefined || castOptions.size !== undefined || castOptions.buff !== undefined)); + } + // Signed integers + /** + * Reads an Int8 value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readInt8(offset) { + return this._readNumberValue(Buffer.prototype.readInt8, 1, offset); + } + /** + * Reads an Int16BE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readInt16BE(offset) { + return this._readNumberValue(Buffer.prototype.readInt16BE, 2, offset); + } + /** + * Reads an Int16LE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readInt16LE(offset) { + return this._readNumberValue(Buffer.prototype.readInt16LE, 2, offset); + } + /** + * Reads an Int32BE value from the current read position or an optionally provided offset. 
+ * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readInt32BE(offset) { + return this._readNumberValue(Buffer.prototype.readInt32BE, 4, offset); + } + /** + * Reads an Int32LE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readInt32LE(offset) { + return this._readNumberValue(Buffer.prototype.readInt32LE, 4, offset); + } + /** + * Reads a BigInt64BE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { BigInt } + */ + readBigInt64BE(offset) { + utils_1.bigIntAndBufferInt64Check('readBigInt64BE'); + return this._readNumberValue(Buffer.prototype.readBigInt64BE, 8, offset); + } + /** + * Reads a BigInt64LE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { BigInt } + */ + readBigInt64LE(offset) { + utils_1.bigIntAndBufferInt64Check('readBigInt64LE'); + return this._readNumberValue(Buffer.prototype.readBigInt64LE, 8, offset); + } + /** + * Writes an Int8 value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeInt8(value, offset) { + this._writeNumberValue(Buffer.prototype.writeInt8, 1, value, offset); + return this; + } + /** + * Inserts an Int8 value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertInt8(value, offset) { + return this._insertNumberValue(Buffer.prototype.writeInt8, 1, value, offset); + } + /** + * Writes an Int16BE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeInt16BE(value, offset) { + return this._writeNumberValue(Buffer.prototype.writeInt16BE, 2, value, offset); + } + /** + * Inserts an Int16BE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertInt16BE(value, offset) { + return this._insertNumberValue(Buffer.prototype.writeInt16BE, 2, value, offset); + } + /** + * Writes an Int16LE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeInt16LE(value, offset) { + return this._writeNumberValue(Buffer.prototype.writeInt16LE, 2, value, offset); + } + /** + * Inserts an Int16LE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertInt16LE(value, offset) { + return this._insertNumberValue(Buffer.prototype.writeInt16LE, 2, value, offset); + } + /** + * Writes an Int32BE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. 
+ * + * @return this + */ + writeInt32BE(value, offset) { + return this._writeNumberValue(Buffer.prototype.writeInt32BE, 4, value, offset); + } + /** + * Inserts an Int32BE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertInt32BE(value, offset) { + return this._insertNumberValue(Buffer.prototype.writeInt32BE, 4, value, offset); + } + /** + * Writes an Int32LE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeInt32LE(value, offset) { + return this._writeNumberValue(Buffer.prototype.writeInt32LE, 4, value, offset); + } + /** + * Inserts an Int32LE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertInt32LE(value, offset) { + return this._insertNumberValue(Buffer.prototype.writeInt32LE, 4, value, offset); + } + /** + * Writes a BigInt64BE value to the current write position (or at optional offset). + * + * @param value { BigInt } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeBigInt64BE(value, offset) { + utils_1.bigIntAndBufferInt64Check('writeBigInt64BE'); + return this._writeNumberValue(Buffer.prototype.writeBigInt64BE, 8, value, offset); + } + /** + * Inserts a BigInt64BE value at the given offset value. + * + * @param value { BigInt } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertBigInt64BE(value, offset) { + utils_1.bigIntAndBufferInt64Check('writeBigInt64BE'); + return this._insertNumberValue(Buffer.prototype.writeBigInt64BE, 8, value, offset); + } + /** + * Writes a BigInt64LE value to the current write position (or at optional offset). + * + * @param value { BigInt } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeBigInt64LE(value, offset) { + utils_1.bigIntAndBufferInt64Check('writeBigInt64LE'); + return this._writeNumberValue(Buffer.prototype.writeBigInt64LE, 8, value, offset); + } + /** + * Inserts a Int64LE value at the given offset value. + * + * @param value { BigInt } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertBigInt64LE(value, offset) { + utils_1.bigIntAndBufferInt64Check('writeBigInt64LE'); + return this._insertNumberValue(Buffer.prototype.writeBigInt64LE, 8, value, offset); + } + // Unsigned Integers + /** + * Reads an UInt8 value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readUInt8(offset) { + return this._readNumberValue(Buffer.prototype.readUInt8, 1, offset); + } + /** + * Reads an UInt16BE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readUInt16BE(offset) { + return this._readNumberValue(Buffer.prototype.readUInt16BE, 2, offset); + } + /** + * Reads an UInt16LE value from the current read position or an optionally provided offset. 
+ * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readUInt16LE(offset) { + return this._readNumberValue(Buffer.prototype.readUInt16LE, 2, offset); + } + /** + * Reads an UInt32BE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readUInt32BE(offset) { + return this._readNumberValue(Buffer.prototype.readUInt32BE, 4, offset); + } + /** + * Reads an UInt32LE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readUInt32LE(offset) { + return this._readNumberValue(Buffer.prototype.readUInt32LE, 4, offset); + } + /** + * Reads a BigUInt64BE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { BigInt } + */ + readBigUInt64BE(offset) { + utils_1.bigIntAndBufferInt64Check('readBigUInt64BE'); + return this._readNumberValue(Buffer.prototype.readBigUInt64BE, 8, offset); + } + /** + * Reads a BigUInt64LE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { BigInt } + */ + readBigUInt64LE(offset) { + utils_1.bigIntAndBufferInt64Check('readBigUInt64LE'); + return this._readNumberValue(Buffer.prototype.readBigUInt64LE, 8, offset); + } + /** + * Writes an UInt8 value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeUInt8(value, offset) { + return this._writeNumberValue(Buffer.prototype.writeUInt8, 1, value, offset); + } + /** + * Inserts an UInt8 value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertUInt8(value, offset) { + return this._insertNumberValue(Buffer.prototype.writeUInt8, 1, value, offset); + } + /** + * Writes an UInt16BE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeUInt16BE(value, offset) { + return this._writeNumberValue(Buffer.prototype.writeUInt16BE, 2, value, offset); + } + /** + * Inserts an UInt16BE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertUInt16BE(value, offset) { + return this._insertNumberValue(Buffer.prototype.writeUInt16BE, 2, value, offset); + } + /** + * Writes an UInt16LE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeUInt16LE(value, offset) { + return this._writeNumberValue(Buffer.prototype.writeUInt16LE, 2, value, offset); + } + /** + * Inserts an UInt16LE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. 
+ * + * @return this + */ + insertUInt16LE(value, offset) { + return this._insertNumberValue(Buffer.prototype.writeUInt16LE, 2, value, offset); + } + /** + * Writes an UInt32BE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeUInt32BE(value, offset) { + return this._writeNumberValue(Buffer.prototype.writeUInt32BE, 4, value, offset); + } + /** + * Inserts an UInt32BE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertUInt32BE(value, offset) { + return this._insertNumberValue(Buffer.prototype.writeUInt32BE, 4, value, offset); + } + /** + * Writes an UInt32LE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeUInt32LE(value, offset) { + return this._writeNumberValue(Buffer.prototype.writeUInt32LE, 4, value, offset); + } + /** + * Inserts an UInt32LE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertUInt32LE(value, offset) { + return this._insertNumberValue(Buffer.prototype.writeUInt32LE, 4, value, offset); + } + /** + * Writes a BigUInt64BE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeBigUInt64BE(value, offset) { + utils_1.bigIntAndBufferInt64Check('writeBigUInt64BE'); + return this._writeNumberValue(Buffer.prototype.writeBigUInt64BE, 8, value, offset); + } + /** + * Inserts a BigUInt64BE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertBigUInt64BE(value, offset) { + utils_1.bigIntAndBufferInt64Check('writeBigUInt64BE'); + return this._insertNumberValue(Buffer.prototype.writeBigUInt64BE, 8, value, offset); + } + /** + * Writes a BigUInt64LE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeBigUInt64LE(value, offset) { + utils_1.bigIntAndBufferInt64Check('writeBigUInt64LE'); + return this._writeNumberValue(Buffer.prototype.writeBigUInt64LE, 8, value, offset); + } + /** + * Inserts a BigUInt64LE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertBigUInt64LE(value, offset) { + utils_1.bigIntAndBufferInt64Check('writeBigUInt64LE'); + return this._insertNumberValue(Buffer.prototype.writeBigUInt64LE, 8, value, offset); + } + // Floating Point + /** + * Reads an FloatBE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readFloatBE(offset) { + return this._readNumberValue(Buffer.prototype.readFloatBE, 4, offset); + } + /** + * Reads an FloatLE value from the current read position or an optionally provided offset. 
+ * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readFloatLE(offset) { + return this._readNumberValue(Buffer.prototype.readFloatLE, 4, offset); + } + /** + * Writes a FloatBE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeFloatBE(value, offset) { + return this._writeNumberValue(Buffer.prototype.writeFloatBE, 4, value, offset); + } + /** + * Inserts a FloatBE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertFloatBE(value, offset) { + return this._insertNumberValue(Buffer.prototype.writeFloatBE, 4, value, offset); + } + /** + * Writes a FloatLE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeFloatLE(value, offset) { + return this._writeNumberValue(Buffer.prototype.writeFloatLE, 4, value, offset); + } + /** + * Inserts a FloatLE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertFloatLE(value, offset) { + return this._insertNumberValue(Buffer.prototype.writeFloatLE, 4, value, offset); + } + // Double Floating Point + /** + * Reads an DoublEBE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readDoubleBE(offset) { + return this._readNumberValue(Buffer.prototype.readDoubleBE, 8, offset); + } + /** + * Reads an DoubleLE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readDoubleLE(offset) { + return this._readNumberValue(Buffer.prototype.readDoubleLE, 8, offset); + } + /** + * Writes a DoubleBE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeDoubleBE(value, offset) { + return this._writeNumberValue(Buffer.prototype.writeDoubleBE, 8, value, offset); + } + /** + * Inserts a DoubleBE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertDoubleBE(value, offset) { + return this._insertNumberValue(Buffer.prototype.writeDoubleBE, 8, value, offset); + } + /** + * Writes a DoubleLE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeDoubleLE(value, offset) { + return this._writeNumberValue(Buffer.prototype.writeDoubleLE, 8, value, offset); + } + /** + * Inserts a DoubleLE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. 
+ * + * @return this + */ + insertDoubleLE(value, offset) { + return this._insertNumberValue(Buffer.prototype.writeDoubleLE, 8, value, offset); + } + // Strings + /** + * Reads a String from the current read position. + * + * @param arg1 { Number | String } The number of bytes to read as a String, or the BufferEncoding to use for + * the string (Defaults to instance level encoding). + * @param encoding { String } The BufferEncoding to use for the string (Defaults to instance level encoding). + * + * @return { String } + */ + readString(arg1, encoding) { + let lengthVal; + // Length provided + if (typeof arg1 === 'number') { + utils_1.checkLengthValue(arg1); + lengthVal = Math.min(arg1, this.length - this._readOffset); + } + else { + encoding = arg1; + lengthVal = this.length - this._readOffset; + } + // Check encoding + if (typeof encoding !== 'undefined') { + utils_1.checkEncoding(encoding); + } + const value = this._buff.slice(this._readOffset, this._readOffset + lengthVal).toString(encoding || this._encoding); + this._readOffset += lengthVal; + return value; + } + /** + * Inserts a String + * + * @param value { String } The String value to insert. + * @param offset { Number } The offset to insert the string at. + * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding). + * + * @return this + */ + insertString(value, offset, encoding) { + utils_1.checkOffsetValue(offset); + return this._handleString(value, true, offset, encoding); + } + /** + * Writes a String + * + * @param value { String } The String value to write. + * @param arg2 { Number | String } The offset to write the string at, or the BufferEncoding to use. + * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding). + * + * @return this + */ + writeString(value, arg2, encoding) { + return this._handleString(value, false, arg2, encoding); + } + /** + * Reads a null-terminated String from the current read position. + * + * @param encoding { String } The BufferEncoding to use for the string (Defaults to instance level encoding). + * + * @return { String } + */ + readStringNT(encoding) { + if (typeof encoding !== 'undefined') { + utils_1.checkEncoding(encoding); + } + // Set null character position to the end SmartBuffer instance. + let nullPos = this.length; + // Find next null character (if one is not found, default from above is used) + for (let i = this._readOffset; i < this.length; i++) { + if (this._buff[i] === 0x00) { + nullPos = i; + break; + } + } + // Read string value + const value = this._buff.slice(this._readOffset, nullPos); + // Increment internal Buffer read offset + this._readOffset = nullPos + 1; + return value.toString(encoding || this._encoding); + } + /** + * Inserts a null-terminated String. + * + * @param value { String } The String value to write. + * @param arg2 { Number | String } The offset to write the string to, or the BufferEncoding to use. + * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding). + * + * @return this + */ + insertStringNT(value, offset, encoding) { + utils_1.checkOffsetValue(offset); + // Write Values + this.insertString(value, offset, encoding); + this.insertUInt8(0x00, offset + value.length); + return this; + } + /** + * Writes a null-terminated String. + * + * @param value { String } The String value to write. + * @param arg2 { Number | String } The offset to write the string to, or the BufferEncoding to use. 
+ * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding). + * + * @return this + */ + writeStringNT(value, arg2, encoding) { + // Write Values + this.writeString(value, arg2, encoding); + this.writeUInt8(0x00, typeof arg2 === 'number' ? arg2 + value.length : this.writeOffset); + return this; + } + // Buffers + /** + * Reads a Buffer from the internal read position. + * + * @param length { Number } The length of data to read as a Buffer. + * + * @return { Buffer } + */ + readBuffer(length) { + if (typeof length !== 'undefined') { + utils_1.checkLengthValue(length); + } + const lengthVal = typeof length === 'number' ? length : this.length; + const endPoint = Math.min(this.length, this._readOffset + lengthVal); + // Read buffer value + const value = this._buff.slice(this._readOffset, endPoint); + // Increment internal Buffer read offset + this._readOffset = endPoint; + return value; + } + /** + * Writes a Buffer to the current write position. + * + * @param value { Buffer } The Buffer to write. + * @param offset { Number } The offset to write the Buffer to. + * + * @return this + */ + insertBuffer(value, offset) { + utils_1.checkOffsetValue(offset); + return this._handleBuffer(value, true, offset); + } + /** + * Writes a Buffer to the current write position. + * + * @param value { Buffer } The Buffer to write. + * @param offset { Number } The offset to write the Buffer to. + * + * @return this + */ + writeBuffer(value, offset) { + return this._handleBuffer(value, false, offset); + } + /** + * Reads a null-terminated Buffer from the current read poisiton. + * + * @return { Buffer } + */ + readBufferNT() { + // Set null character position to the end SmartBuffer instance. + let nullPos = this.length; + // Find next null character (if one is not found, default from above is used) + for (let i = this._readOffset; i < this.length; i++) { + if (this._buff[i] === 0x00) { + nullPos = i; + break; + } + } + // Read value + const value = this._buff.slice(this._readOffset, nullPos); + // Increment internal Buffer read offset + this._readOffset = nullPos + 1; + return value; + } + /** + * Inserts a null-terminated Buffer. + * + * @param value { Buffer } The Buffer to write. + * @param offset { Number } The offset to write the Buffer to. + * + * @return this + */ + insertBufferNT(value, offset) { + utils_1.checkOffsetValue(offset); + // Write Values + this.insertBuffer(value, offset); + this.insertUInt8(0x00, offset + value.length); + return this; + } + /** + * Writes a null-terminated Buffer. + * + * @param value { Buffer } The Buffer to write. + * @param offset { Number } The offset to write the Buffer to. + * + * @return this + */ + writeBufferNT(value, offset) { + // Checks for valid numberic value; + if (typeof offset !== 'undefined') { + utils_1.checkOffsetValue(offset); + } + // Write Values + this.writeBuffer(value, offset); + this.writeUInt8(0x00, typeof offset === 'number' ? offset + value.length : this._writeOffset); + return this; + } + /** + * Clears the SmartBuffer instance to its original empty state. + */ + clear() { + this._writeOffset = 0; + this._readOffset = 0; + this.length = 0; + return this; + } + /** + * Gets the remaining data left to be read from the SmartBuffer instance. + * + * @return { Number } + */ + remaining() { + return this.length - this._readOffset; + } + /** + * Gets the current read offset value of the SmartBuffer instance. 
+ * + * @return { Number } + */ + get readOffset() { + return this._readOffset; + } + /** + * Sets the read offset value of the SmartBuffer instance. + * + * @param offset { Number } - The offset value to set. + */ + set readOffset(offset) { + utils_1.checkOffsetValue(offset); + // Check for bounds. + utils_1.checkTargetOffset(offset, this); + this._readOffset = offset; + } + /** + * Gets the current write offset value of the SmartBuffer instance. + * + * @return { Number } + */ + get writeOffset() { + return this._writeOffset; + } + /** + * Sets the write offset value of the SmartBuffer instance. + * + * @param offset { Number } - The offset value to set. + */ + set writeOffset(offset) { + utils_1.checkOffsetValue(offset); + // Check for bounds. + utils_1.checkTargetOffset(offset, this); + this._writeOffset = offset; + } + /** + * Gets the currently set string encoding of the SmartBuffer instance. + * + * @return { BufferEncoding } The string Buffer encoding currently set. + */ + get encoding() { + return this._encoding; + } + /** + * Sets the string encoding of the SmartBuffer instance. + * + * @param encoding { BufferEncoding } The string Buffer encoding to set. + */ + set encoding(encoding) { + utils_1.checkEncoding(encoding); + this._encoding = encoding; + } + /** + * Gets the underlying internal Buffer. (This includes unmanaged data in the Buffer) + * + * @return { Buffer } The Buffer value. + */ + get internalBuffer() { + return this._buff; + } + /** + * Gets the value of the internal managed Buffer (Includes managed data only) + * + * @param { Buffer } + */ + toBuffer() { + return this._buff.slice(0, this.length); + } + /** + * Gets the String value of the internal managed Buffer + * + * @param encoding { String } The BufferEncoding to display the Buffer as (defaults to instance level encoding). + */ + toString(encoding) { + const encodingVal = typeof encoding === 'string' ? encoding : this._encoding; + // Check for invalid encoding. + utils_1.checkEncoding(encodingVal); + return this._buff.toString(encodingVal, 0, this.length); + } + /** + * Destroys the SmartBuffer instance. + */ + destroy() { + this.clear(); + return this; + } + /** + * Handles inserting and writing strings. + * + * @param value { String } The String value to insert. + * @param isInsert { Boolean } True if inserting a string, false if writing. + * @param arg2 { Number | String } The offset to insert the string at, or the BufferEncoding to use. + * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding). + */ + _handleString(value, isInsert, arg3, encoding) { + let offsetVal = this._writeOffset; + let encodingVal = this._encoding; + // Check for offset + if (typeof arg3 === 'number') { + offsetVal = arg3; + // Check for encoding + } + else if (typeof arg3 === 'string') { + utils_1.checkEncoding(arg3); + encodingVal = arg3; + } + // Check for encoding (third param) + if (typeof encoding === 'string') { + utils_1.checkEncoding(encoding); + encodingVal = encoding; + } + // Calculate bytelength of string. + const byteLength = Buffer.byteLength(value, encodingVal); + // Ensure there is enough internal Buffer capacity. 
+ if (isInsert) { + this.ensureInsertable(byteLength, offsetVal); + } + else { + this._ensureWriteable(byteLength, offsetVal); + } + // Write value + this._buff.write(value, offsetVal, byteLength, encodingVal); + // Increment internal Buffer write offset; + if (isInsert) { + this._writeOffset += byteLength; + } + else { + // If an offset was given, check to see if we wrote beyond the current writeOffset. + if (typeof arg3 === 'number') { + this._writeOffset = Math.max(this._writeOffset, offsetVal + byteLength); + } + else { + // If no offset was given, we wrote to the end of the SmartBuffer so increment writeOffset. + this._writeOffset += byteLength; + } + } + return this; + } + /** + * Handles writing or insert of a Buffer. + * + * @param value { Buffer } The Buffer to write. + * @param offset { Number } The offset to write the Buffer to. + */ + _handleBuffer(value, isInsert, offset) { + const offsetVal = typeof offset === 'number' ? offset : this._writeOffset; + // Ensure there is enough internal Buffer capacity. + if (isInsert) { + this.ensureInsertable(value.length, offsetVal); + } + else { + this._ensureWriteable(value.length, offsetVal); + } + // Write buffer value + value.copy(this._buff, offsetVal); + // Increment internal Buffer write offset; + if (isInsert) { + this._writeOffset += value.length; + } + else { + // If an offset was given, check to see if we wrote beyond the current writeOffset. + if (typeof offset === 'number') { + this._writeOffset = Math.max(this._writeOffset, offsetVal + value.length); + } + else { + // If no offset was given, we wrote to the end of the SmartBuffer so increment writeOffset. + this._writeOffset += value.length; + } + } + return this; + } + /** + * Ensures that the internal Buffer is large enough to read data. + * + * @param length { Number } The length of the data that needs to be read. + * @param offset { Number } The offset of the data that needs to be read. + */ + ensureReadable(length, offset) { + // Offset value defaults to managed read offset. + let offsetVal = this._readOffset; + // If an offset was provided, use it. + if (typeof offset !== 'undefined') { + // Checks for valid numberic value; + utils_1.checkOffsetValue(offset); + // Overide with custom offset. + offsetVal = offset; + } + // Checks if offset is below zero, or the offset+length offset is beyond the total length of the managed data. + if (offsetVal < 0 || offsetVal + length > this.length) { + throw new Error(utils_1.ERRORS.INVALID_READ_BEYOND_BOUNDS); + } + } + /** + * Ensures that the internal Buffer is large enough to insert data. + * + * @param dataLength { Number } The length of the data that needs to be written. + * @param offset { Number } The offset of the data to be written. + */ + ensureInsertable(dataLength, offset) { + // Checks for valid numberic value; + utils_1.checkOffsetValue(offset); + // Ensure there is enough internal Buffer capacity. + this._ensureCapacity(this.length + dataLength); + // If an offset was provided and its not the very end of the buffer, copy data into appropriate location in regards to the offset. + if (offset < this.length) { + this._buff.copy(this._buff, offset + dataLength, offset, this._buff.length); + } + // Adjust tracked smart buffer length + if (offset + dataLength > this.length) { + this.length = offset + dataLength; + } + else { + this.length += dataLength; + } + } + /** + * Ensures that the internal Buffer is large enough to write data. + * + * @param dataLength { Number } The length of the data that needs to be written. 
+ * @param offset { Number } The offset of the data to be written (defaults to writeOffset). + */ + _ensureWriteable(dataLength, offset) { + const offsetVal = typeof offset === 'number' ? offset : this._writeOffset; + // Ensure enough capacity to write data. + this._ensureCapacity(offsetVal + dataLength); + // Adjust SmartBuffer length (if offset + length is larger than managed length, adjust length) + if (offsetVal + dataLength > this.length) { + this.length = offsetVal + dataLength; + } + } + /** + * Ensures that the internal Buffer is large enough to write at least the given amount of data. + * + * @param minLength { Number } The minimum length of the data needs to be written. + */ + _ensureCapacity(minLength) { + const oldLength = this._buff.length; + if (minLength > oldLength) { + let data = this._buff; + let newLength = (oldLength * 3) / 2 + 1; + if (newLength < minLength) { + newLength = minLength; + } + this._buff = Buffer.allocUnsafe(newLength); + data.copy(this._buff, 0, 0, oldLength); + } + } + /** + * Reads a numeric number value using the provided function. + * + * @typeparam T { number | bigint } The type of the value to be read + * + * @param func { Function(offset: number) => number } The function to read data on the internal Buffer with. + * @param byteSize { Number } The number of bytes read. + * @param offset { Number } The offset to read from (optional). When this is not provided, the managed readOffset is used instead. + * + * @returns { T } the number value + */ + _readNumberValue(func, byteSize, offset) { + this.ensureReadable(byteSize, offset); + // Call Buffer.readXXXX(); + const value = func.call(this._buff, typeof offset === 'number' ? offset : this._readOffset); + // Adjust internal read offset if an optional read offset was not provided. + if (typeof offset === 'undefined') { + this._readOffset += byteSize; + } + return value; + } + /** + * Inserts a numeric number value based on the given offset and value. + * + * @typeparam T { number | bigint } The type of the value to be written + * + * @param func { Function(offset: T, offset?) => number} The function to write data on the internal Buffer with. + * @param byteSize { Number } The number of bytes written. + * @param value { T } The number value to write. + * @param offset { Number } the offset to write the number at (REQUIRED). + * + * @returns SmartBuffer this buffer + */ + _insertNumberValue(func, byteSize, value, offset) { + // Check for invalid offset values. + utils_1.checkOffsetValue(offset); + // Ensure there is enough internal Buffer capacity. (raw offset is passed) + this.ensureInsertable(byteSize, offset); + // Call buffer.writeXXXX(); + func.call(this._buff, value, offset); + // Adjusts internally managed write offset. + this._writeOffset += byteSize; + return this; + } + /** + * Writes a numeric number value based on the given offset and value. + * + * @typeparam T { number | bigint } The type of the value to be written + * + * @param func { Function(offset: T, offset?) => number} The function to write data on the internal Buffer with. + * @param byteSize { Number } The number of bytes written. + * @param value { T } The number value to write. + * @param offset { Number } the offset to write the number at (REQUIRED). + * + * @returns SmartBuffer this buffer + */ + _writeNumberValue(func, byteSize, value, offset) { + // If an offset was provided, validate it. + if (typeof offset === 'number') { + // Check if we're writing beyond the bounds of the managed data. 
+ if (offset < 0) { + throw new Error(utils_1.ERRORS.INVALID_WRITE_BEYOND_BOUNDS); + } + utils_1.checkOffsetValue(offset); + } + // Default to writeOffset if no offset value was given. + const offsetVal = typeof offset === 'number' ? offset : this._writeOffset; + // Ensure there is enough internal Buffer capacity. (raw offset is passed) + this._ensureWriteable(byteSize, offsetVal); + func.call(this._buff, value, offsetVal); + // If an offset was given, check to see if we wrote beyond the current writeOffset. + if (typeof offset === 'number') { + this._writeOffset = Math.max(this._writeOffset, offsetVal + byteSize); + } + else { + // If no numeric offset was given, we wrote to the end of the SmartBuffer so increment writeOffset. + this._writeOffset += byteSize; + } + return this; + } +} +exports.SmartBuffer = SmartBuffer; +//# sourceMappingURL=smartbuffer.js.map \ No newline at end of file diff --git a/mybulma/node_modules/smart-buffer/build/smartbuffer.js.map b/mybulma/node_modules/smart-buffer/build/smartbuffer.js.map new file mode 100644 index 0000000..37f0d6e --- /dev/null +++ b/mybulma/node_modules/smart-buffer/build/smartbuffer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"smartbuffer.js","sourceRoot":"","sources":["../src/smartbuffer.ts"],"names":[],"mappings":";;AAAA,mCAGiB;AAcjB,kDAAkD;AAClD,MAAM,wBAAwB,GAAW,IAAI,CAAC;AAE9C,kEAAkE;AAClE,MAAM,4BAA4B,GAAmB,MAAM,CAAC;AAE5D,MAAM,WAAW;IAQf;;;;OAIG;IACH,YAAY,OAA4B;QAZjC,WAAM,GAAW,CAAC,CAAC;QAElB,cAAS,GAAmB,4BAA4B,CAAC;QAEzD,iBAAY,GAAW,CAAC,CAAC;QACzB,gBAAW,GAAW,CAAC,CAAC;QAQ9B,IAAI,WAAW,CAAC,oBAAoB,CAAC,OAAO,CAAC,EAAE;YAC7C,sBAAsB;YACtB,IAAI,OAAO,CAAC,QAAQ,EAAE;gBACpB,qBAAa,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;gBAChC,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,QAAQ,CAAC;aACnC;YAED,iCAAiC;YACjC,IAAI,OAAO,CAAC,IAAI,EAAE;gBAChB,IAAI,uBAAe,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,OAAO,CAAC,IAAI,GAAG,CAAC,EAAE;oBACrD,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,WAAW,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;iBAC/C;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,cAAM,CAAC,wBAAwB,CAAC,CAAC;iBAClD;gBACD,2BAA2B;aAC5B;iBAAM,IAAI,OAAO,CAAC,IAAI,EAAE;gBACvB,IAAI,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;oBACjC,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC,IAAI,CAAC;oBAC1B,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC;iBACnC;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,cAAM,CAAC,0BAA0B,CAAC,CAAC;iBACpD;aACF;iBAAM;gBACL,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,WAAW,CAAC,wBAAwB,CAAC,CAAC;aAC3D;SACF;aAAM;YACL,mEAAmE;YACnE,IAAI,OAAO,OAAO,KAAK,WAAW,EAAE;gBAClC,MAAM,IAAI,KAAK,CAAC,cAAM,CAAC,0BAA0B,CAAC,CAAC;aACpD;YAED,oCAAoC;YACpC,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,WAAW,CAAC,wBAAwB,CAAC,CAAC;SAC3D;IACH,CAAC;IAED;;;;;;;OAOG;IACI,MAAM,CAAC,QAAQ,CAAC,IAAY,EAAE,QAAyB;QAC5D,OAAO,IAAI,IAAI,CAAC;YACd,IAAI,EAAE,IAAI;YACV,QAAQ,EAAE,QAAQ;SACnB,CAAC,CAAC;IACL,CAAC;IAED;;;;;;;OAOG;IACI,MAAM,CAAC,UAAU,CAAC,IAAY,EAAE,QAAyB;QAC9D,OAAO,IAAI,IAAI,CAAC;YACd,IAAI,EAAE,IAAI;YACV,QAAQ,EAAE,QAAQ;SACnB,CAAC,CAAC;IACL,CAAC;IAED;;;;OAIG;IACI,MAAM,CAAC,WAAW,CAAC,OAA2B;QACnD,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,CAAC;IAC3B,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,oBAAoB,CAAC,OAA2B;QACrD,MAAM,WAAW,GAAuB,OAAO,CAAC;QAEhD,OAAO,CACL,WAAW;YACX,CAAC,WAAW,CAAC,QAAQ,KAAK,SAAS,IAAI,WAAW,CAAC,IAAI,KAAK,SAAS,IAAI,WAAW,CAAC,IAAI,KAAK,SAAS,CAAC,CACzG,CAAC;IACJ,CAAC;IAED,kBAAkB;IAElB;;;;;OAKG;IACH,QAAQ,CAAC,MAAe;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACrE,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,MAAe;QACzB,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,WAAW,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACxE,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,MAAe;QACzB,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,WAAW
,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACxE,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,MAAe;QACzB,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,WAAW,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACxE,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,MAAe;QACzB,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,WAAW,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACxE,CAAC;IAED;;;;;OAKG;IACH,cAAc,CAAC,MAAe;QAC5B,iCAAyB,CAAC,gBAAgB,CAAC,CAAC;QAC5C,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,cAAc,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IAC3E,CAAC;IAED;;;;;OAKG;IACH,cAAc,CAAC,MAAe;QAC5B,iCAAyB,CAAC,gBAAgB,CAAC,CAAC;QAC5C,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,cAAc,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IAC3E,CAAC;IAED;;;;;;;OAOG;IACH,SAAS,CAAC,KAAa,EAAE,MAAe;QACtC,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACrE,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;;;;OAOG;IACH,UAAU,CAAC,KAAa,EAAE,MAAc;QACtC,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAC/E,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,KAAa,EAAE,MAAe;QACzC,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACjF,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAc;QACzC,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAClF,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,KAAa,EAAE,MAAe;QACzC,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACjF,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAc;QACzC,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAClF,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,KAAa,EAAE,MAAe;QACzC,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACjF,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAc;QACzC,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAClF,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,KAAa,EAAE,MAAe;QACzC,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACjF,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAc;QACzC,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAClF,CAAC;IAED;;;;;;;OAOG;IACH,eAAe,CAAC,KAAa,EAAE,MAAe;QAC5C,iCAAyB,CAAC,iBAAiB,CAAC,CAAC;QAC7C,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,eAAe,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACpF,CAAC;IAED;;;;;;;OAOG;IACH,gBAAgB,CAAC,KAAa,EAAE,MAAc;QAC5C,iCAAyB,CAAC,iBAAiB,CAAC,CAAC;QAC7C,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,eAAe,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACrF,CAAC;IAED;;;;;;;OAOG;IACH,eAAe,CAAC,KAAa,EAAE,MAAe;QAC5C,iCAAyB,CAAC,iBAAiB,CAAC,CAAC;QAC7C,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,eAAe,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACpF,CAAC;IAED;;;;;;;OAOG;IACH,gBAAgB,CAAC,KAAa,EAAE,MAAc;QAC5C,iCAAyB,CAAC,iBAAiB,CAAC,CAAC;QAC7C,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,eAAe,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACrF,CAAC;IAED,oBAAoB;IAEpB;;;;;OAKG;IACH,SAAS,CAAC,MAAe;QACvB,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACtE,CAAC;IAED;;;;;OAKG;IACH,YAAY,CAAC,MAAe;QAC1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACzE,CAAC;IAED;;;;;OAKG;IACH,YAAY,CAAC,MAAe;QAC1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACzE,CAAC;IAED;;;;;OAKG;IACH,YAAY,CAAC,MAAe;QAC1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACzE,CAAC;IAED;;;;;OAKG;IACH,YAAY,CAAC,MAAe;QAC
1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACzE,CAAC;IAED;;;;;OAKG;IACH,eAAe,CAAC,MAAe;QAC7B,iCAAyB,CAAC,iBAAiB,CAAC,CAAC;QAC7C,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,eAAe,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IAC5E,CAAC;IAED;;;;;OAKG;IACH,eAAe,CAAC,MAAe;QAC7B,iCAAyB,CAAC,iBAAiB,CAAC,CAAC;QAC7C,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,eAAe,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IAC5E,CAAC;IAED;;;;;;;OAOG;IACH,UAAU,CAAC,KAAa,EAAE,MAAe;QACvC,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAC/E,CAAC;IAED;;;;;;;OAOG;IACH,WAAW,CAAC,KAAa,EAAE,MAAc;QACvC,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAChF,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAe;QAC1C,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAClF,CAAC;IAED;;;;;;;OAOG;IACH,cAAc,CAAC,KAAa,EAAE,MAAc;QAC1C,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACnF,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAe;QAC1C,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAClF,CAAC;IAED;;;;;;;OAOG;IACH,cAAc,CAAC,KAAa,EAAE,MAAc;QAC1C,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACnF,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAe;QAC1C,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAClF,CAAC;IAED;;;;;;;OAOG;IACH,cAAc,CAAC,KAAa,EAAE,MAAc;QAC1C,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACnF,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAe;QAC1C,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAClF,CAAC;IAED;;;;;;;OAOG;IACH,cAAc,CAAC,KAAa,EAAE,MAAc;QAC1C,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACnF,CAAC;IAED;;;;;;;OAOG;IACH,gBAAgB,CAAC,KAAa,EAAE,MAAe;QAC7C,iCAAyB,CAAC,kBAAkB,CAAC,CAAC;QAC9C,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,gBAAgB,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACrF,CAAC;IAED;;;;;;;OAOG;IACH,iBAAiB,CAAC,KAAa,EAAE,MAAc;QAC7C,iCAAyB,CAAC,kBAAkB,CAAC,CAAC;QAC9C,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,gBAAgB,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACtF,CAAC;IAED;;;;;;;OAOG;IACH,gBAAgB,CAAC,KAAa,EAAE,MAAe;QAC7C,iCAAyB,CAAC,kBAAkB,CAAC,CAAC;QAC9C,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,gBAAgB,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACrF,CAAC;IAED;;;;;;;OAOG;IACH,iBAAiB,CAAC,KAAa,EAAE,MAAc;QAC7C,iCAAyB,CAAC,kBAAkB,CAAC,CAAC;QAC9C,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,gBAAgB,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACtF,CAAC;IAED,iBAAiB;IAEjB;;;;;OAKG;IACH,WAAW,CAAC,MAAe;QACzB,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,WAAW,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACxE,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,MAAe;QACzB,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,WAAW,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACxE,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,KAAa,EAAE,MAAe;QACzC,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACjF,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAc;QACzC,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAClF,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,KAAa,EAAE,MAAe;QACzC,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACjF,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAc;QACzC,O
AAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAClF,CAAC;IAED,wBAAwB;IAExB;;;;;OAKG;IACH,YAAY,CAAC,MAAe;QAC1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACzE,CAAC;IAED;;;;;OAKG;IACH,YAAY,CAAC,MAAe;QAC1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,MAAM,CAAC,CAAC;IACzE,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAe;QAC1C,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAClF,CAAC;IAED;;;;;;;OAOG;IACH,cAAc,CAAC,KAAa,EAAE,MAAc;QAC1C,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACnF,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAe;QAC1C,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAClF,CAAC;IAED;;;;;;;OAOG;IACH,cAAc,CAAC,KAAa,EAAE,MAAc;QAC1C,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IACnF,CAAC;IAED,UAAU;IAEV;;;;;;;;OAQG;IACH,UAAU,CAAC,IAA8B,EAAE,QAAyB;QAClE,IAAI,SAAS,CAAC;QAEd,kBAAkB;QAClB,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,wBAAgB,CAAC,IAAI,CAAC,CAAC;YACvB,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC;SAC5D;aAAM;YACL,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC;SAC5C;QAED,iBAAiB;QACjB,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACnC,qBAAa,CAAC,QAAQ,CAAC,CAAC;SACzB;QAED,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,IAAI,CAAC,WAAW,GAAG,SAAS,CAAC,CAAC,QAAQ,CAAC,QAAQ,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC;QAEpH,IAAI,CAAC,WAAW,IAAI,SAAS,CAAC;QAC9B,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;;;;OAQG;IACH,YAAY,CAAC,KAAa,EAAE,MAAc,EAAE,QAAyB;QACnE,wBAAgB,CAAC,MAAM,CAAC,CAAC;QAEzB,OAAO,IAAI,CAAC,aAAa,CAAC,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC3D,CAAC;IAED;;;;;;;;OAQG;IACH,WAAW,CAAC,KAAa,EAAE,IAA8B,EAAE,QAAyB;QAClF,OAAO,IAAI,CAAC,aAAa,CAAC,KAAK,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC;IAC1D,CAAC;IAED;;;;;;OAMG;IACH,YAAY,CAAC,QAAyB;QACpC,IAAI,OAAO,QAAQ,KAAK,WAAW,EAAE;YACnC,qBAAa,CAAC,QAAQ,CAAC,CAAC;SACzB;QAED,+DAA+D;QAC/D,IAAI,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC;QAE1B,6EAA6E;QAC7E,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACnD,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI,EAAE;gBAC1B,OAAO,GAAG,CAAC,CAAC;gBACZ,MAAM;aACP;SACF;QAED,oBAAoB;QACpB,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,OAAO,CAAC,CAAC;QAE1D,wCAAwC;QACxC,IAAI,CAAC,WAAW,GAAG,OAAO,GAAG,CAAC,CAAC;QAE/B,OAAO,KAAK,CAAC,QAAQ,CAAC,QAAQ,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC;IACpD,CAAC;IAED;;;;;;;;OAQG;IACH,cAAc,CAAC,KAAa,EAAE,MAAc,EAAE,QAAyB;QACrE,wBAAgB,CAAC,MAAM,CAAC,CAAC;QAEzB,eAAe;QACf,IAAI,CAAC,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;QAC3C,IAAI,CAAC,WAAW,CAAC,IAAI,EAAE,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;QAC9C,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;;;;;OAQG;IACH,aAAa,CAAC,KAAa,EAAE,IAA8B,EAAE,QAAyB;QACpF,eAAe;QACf,IAAI,CAAC,WAAW,CAAC,KAAK,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC;QACxC,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QACzF,OAAO,IAAI,CAAC;IACd,CAAC;IAED,UAAU;IAEV;;;;;;OAMG;IACH,UAAU,CAAC,MAAe;QACxB,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;YACjC,wBAAgB,CAAC,MAAM,CAAC,CAAC;SAC1B;QAED,MAAM,SAAS,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC;QACpE,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,WAAW,GAAG,SAAS,CAAC,CAAC;QAErE,oBAAoB;QACpB,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;Q
AE3D,wCAAwC;QACxC,IAAI,CAAC,WAAW,GAAG,QAAQ,CAAC;QAC5B,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,KAAa,EAAE,MAAc;QACxC,wBAAgB,CAAC,MAAM,CAAC,CAAC;QAEzB,OAAO,IAAI,CAAC,aAAa,CAAC,KAAK,EAAE,IAAI,EAAE,MAAM,CAAC,CAAC;IACjD,CAAC;IAED;;;;;;;OAOG;IACH,WAAW,CAAC,KAAa,EAAE,MAAe;QACxC,OAAO,IAAI,CAAC,aAAa,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;IAClD,CAAC;IAED;;;;OAIG;IACH,YAAY;QACV,+DAA+D;QAC/D,IAAI,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC;QAE1B,6EAA6E;QAC7E,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACnD,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI,EAAE;gBAC1B,OAAO,GAAG,CAAC,CAAC;gBACZ,MAAM;aACP;SACF;QAED,aAAa;QACb,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,OAAO,CAAC,CAAC;QAE1D,wCAAwC;QACxC,IAAI,CAAC,WAAW,GAAG,OAAO,GAAG,CAAC,CAAC;QAC/B,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;;;OAOG;IACH,cAAc,CAAC,KAAa,EAAE,MAAc;QAC1C,wBAAgB,CAAC,MAAM,CAAC,CAAC;QAEzB,eAAe;QACf,IAAI,CAAC,YAAY,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;QACjC,IAAI,CAAC,WAAW,CAAC,IAAI,EAAE,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;QAE9C,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAe;QAC1C,mCAAmC;QACnC,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;YACjC,wBAAgB,CAAC,MAAM,CAAC,CAAC;SAC1B;QAED,eAAe;QACf,IAAI,CAAC,WAAW,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;QAChC,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QAE9F,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;OAEG;IACH,KAAK;QACH,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;QACtB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;QACrB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC;QAChB,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;OAIG;IACH,SAAS;QACP,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,IAAI,UAAU;QACZ,OAAO,IAAI,CAAC,WAAW,CAAC;IAC1B,CAAC;IAED;;;;OAIG;IACH,IAAI,UAAU,CAAC,MAAc;QAC3B,wBAAgB,CAAC,MAAM,CAAC,CAAC;QAEzB,oBAAoB;QACpB,yBAAiB,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;QAEhC,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC;IAC5B,CAAC;IAED;;;;OAIG;IACH,IAAI,WAAW;QACb,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAED;;;;OAIG;IACH,IAAI,WAAW,CAAC,MAAc;QAC5B,wBAAgB,CAAC,MAAM,CAAC,CAAC;QAEzB,oBAAoB;QACpB,yBAAiB,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;QAEhC,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC;IAC7B,CAAC;IAED;;;;OAIG;IACH,IAAI,QAAQ;QACV,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACH,IAAI,QAAQ,CAAC,QAAwB;QACnC,qBAAa,CAAC,QAAQ,CAAC,CAAC;QAExB,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;IAC5B,CAAC;IAED;;;;OAIG;IACH,IAAI,cAAc;QAChB,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;;OAIG;IACH,QAAQ;QACN,OAAO,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;IAC1C,CAAC;IAED;;;;OAIG;IACH,QAAQ,CAAC,QAAyB;QAChC,MAAM,WAAW,GAAG,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC;QAE7E,8BAA8B;QAC9B,qBAAa,CAAC,WAAW,CAAC,CAAC;QAE3B,OAAO,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;IAC1D,CAAC;IAED;;OAEG;IACH,OAAO;QACL,IAAI,CAAC,KAAK,EAAE,CAAC;QACb,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;;;;OAOG;IACK,aAAa,CACnB,KAAa,EACb,QAAiB,EACjB,IAA8B,EAC9B,QAAyB;QAEzB,IAAI,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC;QAClC,IAAI,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC;QAEjC,mBAAmB;QACnB,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,SAAS,GAAG,IAAI,CAAC;YACjB,qBAAqB;SACtB;aAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YACnC,qBAAa,CAAC,IAAI,CAAC,CAAC;YACpB,WAAW,GAAG,IAAI,CAAC;SACpB;QAED,mCAAmC;QACnC,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;YAChC,qBAAa,CAAC,QAAQ,CAAC,CAAC;YACxB,WAAW,GAAG,QAAQ,CAAC;SACxB;QAED,kCAAkC;QAClC,MAAM,UAAU,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC;QAEzD,mDAAmD;QACnD,IAAI,QAAQ,EAAE;YACZ,IAAI,CAAC,gBAAgB,CAAC,UAAU,EAAE,SAAS,CAAC,CAAC;SAC9C;aAAM;YACL,IAAI,CAAC,gBAAgB,CAAC,UAAU,EAAE,SAAS,C
AAC,CAAC;SAC9C;QAED,cAAc;QACd,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,WAAW,CAAC,CAAC;QAE5D,0CAA0C;QAC1C,IAAI,QAAQ,EAAE;YACZ,IAAI,CAAC,YAAY,IAAI,UAAU,CAAC;SACjC;aAAM;YACL,mFAAmF;YACnF,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;gBAC5B,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,GAAG,UAAU,CAAC,CAAC;aACzE;iBAAM;gBACL,2FAA2F;gBAC3F,IAAI,CAAC,YAAY,IAAI,UAAU,CAAC;aACjC;SACF;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;;OAKG;IACK,aAAa,CAAC,KAAa,EAAE,QAAiB,EAAE,MAAe;QACrE,MAAM,SAAS,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC;QAE1E,mDAAmD;QACnD,IAAI,QAAQ,EAAE;YACZ,IAAI,CAAC,gBAAgB,CAAC,KAAK,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;SAChD;aAAM;YACL,IAAI,CAAC,gBAAgB,CAAC,KAAK,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;SAChD;QAED,qBAAqB;QACrB,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,SAAS,CAAC,CAAC;QAElC,0CAA0C;QAC1C,IAAI,QAAQ,EAAE;YACZ,IAAI,CAAC,YAAY,IAAI,KAAK,CAAC,MAAM,CAAC;SACnC;aAAM;YACL,mFAAmF;YACnF,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;gBAC9B,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;aAC3E;iBAAM;gBACL,2FAA2F;gBAC3F,IAAI,CAAC,YAAY,IAAI,KAAK,CAAC,MAAM,CAAC;aACnC;SACF;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;;OAKG;IACK,cAAc,CAAC,MAAc,EAAE,MAAe;QACpD,gDAAgD;QAChD,IAAI,SAAS,GAAG,IAAI,CAAC,WAAW,CAAC;QAEjC,qCAAqC;QACrC,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;YACjC,mCAAmC;YACnC,wBAAgB,CAAC,MAAM,CAAC,CAAC;YAEzB,8BAA8B;YAC9B,SAAS,GAAG,MAAM,CAAC;SACpB;QAED,8GAA8G;QAC9G,IAAI,SAAS,GAAG,CAAC,IAAI,SAAS,GAAG,MAAM,GAAG,IAAI,CAAC,MAAM,EAAE;YACrD,MAAM,IAAI,KAAK,CAAC,cAAM,CAAC,0BAA0B,CAAC,CAAC;SACpD;IACH,CAAC;IAED;;;;;OAKG;IACK,gBAAgB,CAAC,UAAkB,EAAE,MAAc;QACzD,mCAAmC;QACnC,wBAAgB,CAAC,MAAM,CAAC,CAAC;QAEzB,mDAAmD;QACnD,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC,CAAC;QAE/C,kIAAkI;QAClI,IAAI,MAAM,GAAG,IAAI,CAAC,MAAM,EAAE;YACxB,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;SAC7E;QAED,qCAAqC;QACrC,IAAI,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE;YACrC,IAAI,CAAC,MAAM,GAAG,MAAM,GAAG,UAAU,CAAC;SACnC;aAAM;YACL,IAAI,CAAC,MAAM,IAAI,UAAU,CAAC;SAC3B;IACH,CAAC;IAED;;;;;OAKG;IACK,gBAAgB,CAAC,UAAkB,EAAE,MAAe;QAC1D,MAAM,SAAS,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC;QAE1E,wCAAwC;QACxC,IAAI,CAAC,eAAe,CAAC,SAAS,GAAG,UAAU,CAAC,CAAC;QAE7C,8FAA8F;QAC9F,IAAI,SAAS,GAAG,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE;YACxC,IAAI,CAAC,MAAM,GAAG,SAAS,GAAG,UAAU,CAAC;SACtC;IACH,CAAC;IAED;;;;OAIG;IACK,eAAe,CAAC,SAAiB;QACvC,MAAM,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC;QAEpC,IAAI,SAAS,GAAG,SAAS,EAAE;YACzB,IAAI,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC;YACtB,IAAI,SAAS,GAAG,CAAC,SAAS,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YACxC,IAAI,SAAS,GAAG,SAAS,EAAE;gBACzB,SAAS,GAAG,SAAS,CAAC;aACvB;YACD,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;YAE3C,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC,CAAC;SACxC;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACK,gBAAgB,CAAI,IAA2B,EAAE,QAAgB,EAAE,MAAe;QACxF,IAAI,CAAC,cAAc,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAEtC,0BAA0B;QAC1B,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QAE5F,2EAA2E;QAC3E,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;YACjC,IAAI,CAAC,WAAW,IAAI,QAAQ,CAAC;SAC9B;QAED,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;;;;;;;OAWG;IACK,kBAAkB,CACxB,IAA2C,EAC3C,QAAgB,EAChB,KAAQ,EACR,MAAc;QAEd,mCAAmC;QACnC,wBAAgB,CAAC,MAAM,CAAC,CAAC;QAEzB,0EAA0E;QAC1E,IAAI,CAAC,gBAAgB,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAExC,2BAA2B;QAC3B,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QAErC,2CAA2C;QAC3C,IAAI
,CAAC,YAAY,IAAI,QAAQ,CAAC;QAC9B,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;;;;;;;;OAWG;IACK,iBAAiB,CACvB,IAA2C,EAC3C,QAAgB,EAChB,KAAQ,EACR,MAAe;QAEf,0CAA0C;QAC1C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC9B,gEAAgE;YAChE,IAAI,MAAM,GAAG,CAAC,EAAE;gBACd,MAAM,IAAI,KAAK,CAAC,cAAM,CAAC,2BAA2B,CAAC,CAAC;aACrD;YAED,wBAAgB,CAAC,MAAM,CAAC,CAAC;SAC1B;QAED,uDAAuD;QACvD,MAAM,SAAS,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC;QAE1E,0EAA0E;QAC1E,IAAI,CAAC,gBAAgB,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC;QAE3C,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,KAAK,EAAE,SAAS,CAAC,CAAC;QAExC,mFAAmF;QACnF,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC9B,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,GAAG,QAAQ,CAAC,CAAC;SACvE;aAAM;YACL,mGAAmG;YACnG,IAAI,CAAC,YAAY,IAAI,QAAQ,CAAC;SAC/B;QAED,OAAO,IAAI,CAAC;IACd,CAAC;CACF;AAE4B,kCAAW"} \ No newline at end of file diff --git a/mybulma/node_modules/smart-buffer/build/utils.js b/mybulma/node_modules/smart-buffer/build/utils.js new file mode 100644 index 0000000..6d55981 --- /dev/null +++ b/mybulma/node_modules/smart-buffer/build/utils.js @@ -0,0 +1,108 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const buffer_1 = require("buffer"); +/** + * Error strings + */ +const ERRORS = { + INVALID_ENCODING: 'Invalid encoding provided. Please specify a valid encoding the internal Node.js Buffer supports.', + INVALID_SMARTBUFFER_SIZE: 'Invalid size provided. Size must be a valid integer greater than zero.', + INVALID_SMARTBUFFER_BUFFER: 'Invalid Buffer provided in SmartBufferOptions.', + INVALID_SMARTBUFFER_OBJECT: 'Invalid SmartBufferOptions object supplied to SmartBuffer constructor or factory methods.', + INVALID_OFFSET: 'An invalid offset value was provided.', + INVALID_OFFSET_NON_NUMBER: 'An invalid offset value was provided. A numeric value is required.', + INVALID_LENGTH: 'An invalid length value was provided.', + INVALID_LENGTH_NON_NUMBER: 'An invalid length value was provived. A numeric value is required.', + INVALID_TARGET_OFFSET: 'Target offset is beyond the bounds of the internal SmartBuffer data.', + INVALID_TARGET_LENGTH: 'Specified length value moves cursor beyong the bounds of the internal SmartBuffer data.', + INVALID_READ_BEYOND_BOUNDS: 'Attempted to read beyond the bounds of the managed data.', + INVALID_WRITE_BEYOND_BOUNDS: 'Attempted to write beyond the bounds of the managed data.' +}; +exports.ERRORS = ERRORS; +/** + * Checks if a given encoding is a valid Buffer encoding. (Throws an exception if check fails) + * + * @param { String } encoding The encoding string to check. + */ +function checkEncoding(encoding) { + if (!buffer_1.Buffer.isEncoding(encoding)) { + throw new Error(ERRORS.INVALID_ENCODING); + } +} +exports.checkEncoding = checkEncoding; +/** + * Checks if a given number is a finite integer. (Throws an exception if check fails) + * + * @param { Number } value The number value to check. + */ +function isFiniteInteger(value) { + return typeof value === 'number' && isFinite(value) && isInteger(value); +} +exports.isFiniteInteger = isFiniteInteger; +/** + * Checks if an offset/length value is valid. (Throws an exception if check fails) + * + * @param value The value to check. + * @param offset True if checking an offset, false if checking a length. + */ +function checkOffsetOrLengthValue(value, offset) { + if (typeof value === 'number') { + // Check for non finite/non integers + if (!isFiniteInteger(value) || value < 0) { + throw new Error(offset ? 
ERRORS.INVALID_OFFSET : ERRORS.INVALID_LENGTH); + } + } + else { + throw new Error(offset ? ERRORS.INVALID_OFFSET_NON_NUMBER : ERRORS.INVALID_LENGTH_NON_NUMBER); + } +} +/** + * Checks if a length value is valid. (Throws an exception if check fails) + * + * @param { Number } length The value to check. + */ +function checkLengthValue(length) { + checkOffsetOrLengthValue(length, false); +} +exports.checkLengthValue = checkLengthValue; +/** + * Checks if an offset value is valid. (Throws an exception if check fails) + * + * @param { Number } offset The value to check. + */ +function checkOffsetValue(offset) { + checkOffsetOrLengthValue(offset, true); +} +exports.checkOffsetValue = checkOffsetValue; +/** + * Checks if a target offset value is out of bounds. (Throws an exception if check fails) + * + * @param { Number } offset The offset value to check. + * @param { SmartBuffer } buff The SmartBuffer instance to check against. + */ +function checkTargetOffset(offset, buff) { + if (offset < 0 || offset > buff.length) { + throw new Error(ERRORS.INVALID_TARGET_OFFSET); + } +} +exports.checkTargetOffset = checkTargetOffset; +/** + * Determines whether a given number is an integer. + * @param value The number to check. + */ +function isInteger(value) { + return typeof value === 'number' && isFinite(value) && Math.floor(value) === value; +} +/** + * Throws if Node.js version is too low to support bigint + */ +function bigIntAndBufferInt64Check(bufferMethod) { + if (typeof BigInt === 'undefined') { + throw new Error('Platform does not support JS BigInt type.'); + } + if (typeof buffer_1.Buffer.prototype[bufferMethod] === 'undefined') { + throw new Error(`Platform does not support Buffer.prototype.${bufferMethod}.`); + } +} +exports.bigIntAndBufferInt64Check = bigIntAndBufferInt64Check; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/mybulma/node_modules/smart-buffer/build/utils.js.map b/mybulma/node_modules/smart-buffer/build/utils.js.map new file mode 100644 index 0000000..fc7388d --- /dev/null +++ b/mybulma/node_modules/smart-buffer/build/utils.js.map @@ -0,0 +1 @@
+{"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":";;AACA,mCAAgC;AAEhC;;GAEG;AACH,MAAM,MAAM,GAAG;IACb,gBAAgB,EAAE,kGAAkG;IACpH,wBAAwB,EAAE,wEAAwE;IAClG,0BAA0B,EAAE,gDAAgD;IAC5E,0BAA0B,EAAE,2FAA2F;IACvH,cAAc,EAAE,uCAAuC;IACvD,yBAAyB,EAAE,oEAAoE;IAC/F,cAAc,EAAE,uCAAuC;IACvD,yBAAyB,EAAE,oEAAoE;IAC/F,qBAAqB,EAAE,sEAAsE;IAC7F,qBAAqB,EAAE,yFAAyF;IAChH,0BAA0B,EAAE,0DAA0D;IACtF,2BAA2B,EAAE,2DAA2D;CACzF,CAAC;AAuGA,wBAAM;AArGR;;;;GAIG;AACH,SAAS,aAAa,CAAC,QAAwB;IAC7C,IAAI,CAAC,eAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;QAChC,MAAM,IAAI,KAAK,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC;KAC1C;AACH,CAAC;AA4F0B,sCAAa;AA1FxC;;;;GAIG;AACH,SAAS,eAAe,CAAC,KAAa;IACpC,OAAO,OAAO,KAAK,KAAK,QAAQ,IAAI,QAAQ,CAAC,KAAK,CAAC,IAAI,SAAS,CAAC,KAAK,CAAC,CAAC;AAC1E,CAAC;AAmFS,0CAAe;AAjFzB;;;;;GAKG;AACH,SAAS,wBAAwB,CAAC,KAAU,EAAE,MAAe;IAC3D,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC7B,oCAAoC;QACpC,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,IAAI,KAAK,GAAG,CAAC,EAAE;YACxC,MAAM,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;SACzE;KACF;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,yBAAyB,CAAC,CAAC,CAAC,MAAM,CAAC,yBAAyB,CAAC,CAAC;KAC/F;AACH,CAAC;AAED;;;;GAIG;AACH,SAAS,gBAAgB,CAAC,MAAW;IACnC,wBAAwB,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;AAC1C,CAAC;AA0DC,4CAAgB;AAxDlB;;;;GAIG;AACH,SAAS,gBAAgB,CAAC,MAAW;IACnC,wBAAwB,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;AACzC,CAAC;AAgDyC,4CAAgB;AA9C1D;;;;;GAKG;AACH,SAAS,iBAAiB,CAAC,MAAc,EAAE,IAAiB;IAC1D,IAAI,MAAM,GAAG,CAAC,IAAI,MAAM,GAAG,IAAI,CAAC,MAAM,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,MAAM,CAAC,qBAAqB,CAAC,CAAC;KAC/C;AACH,CAAC;AAqCmB,8CAAiB;AAnCrC;;;GAGG;AACH,SAAS,SAAS,CAAC,KAAa;IAC9B,OAAO,OAAO,KAAK,KAAK,QAAQ,IAAI,QAAQ,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,KAAK,KAAK,CAAC;AACrF,CAAC;AAcD;;GAEG;AACH,SAAS,yBAAyB,CAAC,YAA0B;IAC3D,IAAI,OAAO,MAAM,KAAK,WAAW,EAAE;QACjC,MAAM,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC;KAC9D;IAED,IAAI,OAAO,eAAM,CAAC,SAAS,CAAC,YAAY,CAAC,KAAK,WAAW,EAAE;QACzD,MAAM,IAAI,KAAK,CAAC,8CAA8C,YAAY,GAAG,CAAC,CAAC;KAChF;AACH,CAAC;AAIsC,8DAAyB"} \ No newline at end of file diff --git a/mybulma/node_modules/smart-buffer/docs/CHANGELOG.md b/mybulma/node_modules/smart-buffer/docs/CHANGELOG.md new file mode 100644 index 0000000..1199a4d --- /dev/null +++ b/mybulma/node_modules/smart-buffer/docs/CHANGELOG.md @@ -0,0 +1,70 @@ +# Change Log +## 4.1.0 +> Released 07/24/2019 +* Adds int64 support for node v12+ +* Drops support for node v4 + +## 4.0 +> Released 10/21/2017 +* Major breaking changes arriving in v4. + +### New Features +* Ability to read data from a specific offset. ex: readInt8(5) +* Ability to write over data when an offset is given (see breaking changes) ex: writeInt8(5, 0); +* Ability to set internal read and write offsets. + + + +### Breaking Changes + +* Old constructor patterns have been completely removed. It's now required to use the SmartBuffer.fromXXX() factory constructors. Read more on the v4 docs. +* rewind(), skip(), moveTo() have been removed. +* Internal private properties are now prefixed with underscores (_). +* **All** writeXXX() methods that are given an offset will now **overwrite data** instead of insert +* insertXXX() methods have been added for when you want to insert data at a specific offset (this replaces the old behavior of writeXXX() when an offset was provided) + + +### Other Changes +* Standardizd error messaging +* Standardized offset/length bounds and sanity checking +* General overall cleanup of code. + +## 3.0.3 +> Released 02/19/2017 +* Adds missing type definitions for some internal functions. 
+ +## 3.0.2 +> Released 02/17/2017 + +### Bug Fixes +* Fixes a bug where using readString with a length of zero resulted in reading the remaining data instead of returning an empty string. (Fixed by Seldszar) + +## 3.0.1 +> Released 02/15/2017 + +### Bug Fixes +* Fixes a bug leftover from the TypeScript refactor where .readIntXXX() resulted in .readUIntXXX() being called by mistake. + +## 3.0 +> Released 02/12/2017 + +### Bug Fixes +* readUIntXXXX() methods will now throw an exception if they attempt to read beyond the bounds of the valid buffer data available. + * **Note** This is technically a breaking change, so version is bumped to 3.x. + +## 2.0 +> Released 01/30/2017 + +### New Features: + +* Entire package re-written in TypeScript (2.1) +* Backwards compatibility is preserved for now +* New factory methods for creating SmartBuffer instances + * SmartBuffer.fromSize() + * SmartBuffer.fromBuffer() + * SmartBuffer.fromOptions() +* New SmartBufferOptions constructor options +* Added additional tests + +### Bug Fixes: +* Fixes a bug where reading null terminated strings may result in an exception. diff --git a/mybulma/node_modules/smart-buffer/docs/README_v3.md b/mybulma/node_modules/smart-buffer/docs/README_v3.md new file mode 100644 index 0000000..b7c48b8 --- /dev/null +++ b/mybulma/node_modules/smart-buffer/docs/README_v3.md @@ -0,0 +1,367 @@ +smart-buffer [![Build Status](https://travis-ci.org/JoshGlazebrook/smart-buffer.svg?branch=master)](https://travis-ci.org/JoshGlazebrook/smart-buffer) [![Coverage Status](https://coveralls.io/repos/github/JoshGlazebrook/smart-buffer/badge.svg?branch=master)](https://coveralls.io/github/JoshGlazebrook/smart-buffer?branch=master) +============= + +smart-buffer is a light Buffer wrapper that takes away the need to keep track of what position to read and write data to and from the underlying Buffer. It also adds null terminating string operations and **grows** as you add more data. + +![stats](https://nodei.co/npm/smart-buffer.png?downloads=true&downloadRank=true&stars=true "stats") + +### What it's useful for: + +I created smart-buffer because I wanted to simplify the process of using Buffer for building and reading network packets to send over a socket. Rather than having to keep track of which position I need to write a UInt16 to after adding a string of variable length, I simply don't have to. + +Key Features: +* Proxies all of the Buffer write and read functions. +* Keeps track of read and write positions for you. +* Grows the internal Buffer as you add data to it. +* Useful string operations. (Null terminating strings) +* Allows for inserting values at specific points in the internal Buffer. +* Built in TypeScript +* Type Definitions Provided + +Requirements: +* Node v4.0+ is supported at this time. (Versions prior to 2.0 will work on node 0.10) + + +#### Note: +smart-buffer can be used for writing to an underlying buffer as well as reading from it. However, it does not function correctly if you're mixing both read and write operations with each other. + +## Breaking Changes with 2.0 +The latest version (2.0+) is written in TypeScript and is compiled to ES6 JavaScript. This means the earliest Node.js it supports will be 4.x (in strict mode). If you're using version 6 and above it will work without any issues. From an API standpoint, 2.0 is backwards compatible. The only difference is SmartBuffer is not exported directly as the root module.
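+For example, a minimal sketch of that export difference (assuming a CommonJS environment):
+
+```javascript
+// 1.x exported the class directly as the root module.
+var SmartBufferV1 = require('smart-buffer');
+
+// 2.0+ exposes it as a named export on the module instead.
+const SmartBuffer = require('smart-buffer').SmartBuffer;
+
+let packet = new SmartBuffer();
+```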
+ +## Breaking Changes with 3.0 +Starting with 3.0, if any of the readIntXXXX() methods are called and the requested data is larger than the bounds of the internally managed valid buffer data, an exception will now be thrown. + +## Installing: + +`npm install smart-buffer` + +or + +`yarn add smart-buffer` + +Note: The published NPM package includes the built JavaScript library. +If you cloned this repo and wish to build the library manually, use: + +`tsc -p ./` + +## Using smart-buffer + +### Example + +Say you were building a packet that had to conform to the following protocol: + +`[PacketType:2][PacketLength:2][Data:XX]` + +To build this packet using the vanilla Buffer class, you would have to count up the length of the data payload beforehand. You would also need to keep track of the current "cursor" position in your Buffer so you write everything in the right places. With smart-buffer you don't have to do either of those things. + +```javascript +// 1.x (javascript) +var SmartBuffer = require('smart-buffer'); + +// 1.x (typescript) +import SmartBuffer = require('smart-buffer'); + +// 2.x+ (javascript) +const SmartBuffer = require('smart-buffer').SmartBuffer; + +// 2.x+ (typescript) +import { SmartBuffer, SmartBufferOptions} from 'smart-buffer'; + +function createLoginPacket(username, password, age, country) { + let packet = new SmartBuffer(); + packet.writeUInt16LE(0x0060); // Login Packet Type/ID + packet.writeStringNT(username); + packet.writeStringNT(password); + packet.writeUInt8(age); + packet.writeStringNT(country); + packet.writeUInt16LE(packet.length - 2, 2); + + return packet.toBuffer(); +} +``` +With the above function, you can now do this: +```javascript +let login = createLoginPacket("Josh", "secret123", 22, "United States"); + +// +``` +Notice that the `[PacketLength:2]` part of the packet was inserted after we had added everything else, and as shown in the Buffer dump above, is in the correct location along with everything else. + +Reading back the packet we created above is just as easy: +```javascript + +let reader = SmartBuffer.fromBuffer(login); + +let logininfo = { + packetType: reader.readUInt16LE(), + packetLength: reader.readUInt16LE(), + username: reader.readStringNT(), + password: reader.readStringNT(), + age: reader.readUInt8(), + country: reader.readStringNT() +}; + +/* +{ + packetType: 96, (0x0060) + packetLength: 30, + username: 'Josh', + password: 'secret123', + age: 22, + country: 'United States' +}; +*/ +``` + +# API Reference: + +### Constructing a smart-buffer + +smart-buffer has a few different ways to construct an instance. Starting with version 2.0, the following factory methods are preferred. + +```javascript +let SmartBuffer = require('smart-buffer'); + +// Creating SmartBuffer from existing Buffer +let buff = SmartBuffer.fromBuffer(buffer); // Creates instance from buffer. (Uses default utf8 encoding) +let buff = SmartBuffer.fromBuffer(buffer, 'ascii'); // Creates instance from buffer with ascii encoding for Strings. + +// Creating SmartBuffer with specified internal Buffer size. +let buff = SmartBuffer.fromSize(1024); // Creates instance with internal Buffer size of 1024. +let buff = SmartBuffer.fromSize(1024, 'utf8'); // Creates instance with internal Buffer size of 1024, and utf8 encoding. + +// Creating SmartBuffer with options object. This one specifies size and encoding. +let buff = SmartBuffer.fromOptions({ + size: 1024, + encoding: 'ascii' +}); + +// Creating SmartBuffer with options object. This one specifies an existing Buffer.
+let buff = SmartBuffer.fromOptions({ + buff: buffer +}); + +// Just want a regular SmartBuffer with all default options? +let buff = new SmartBuffer(); +``` + +## Backwards Compatibility: + +All constructors used prior to 2.0 are still supported. However, it's not recommended to use them. + +```javascript +let writer = new SmartBuffer(); // Defaults to utf8, 4096 length internal Buffer. +let writer = new SmartBuffer(1024); // Defaults to utf8, 1024 length internal Buffer. +let writer = new SmartBuffer('ascii'); // Sets to ascii encoding, 4096 length internal buffer. +let writer = new SmartBuffer(1024, 'ascii'); // Sets to ascii encoding, 1024 length internal buffer. +``` + +## Reading Data + +smart-buffer supports all of the common read functions you will find in the vanilla Buffer class. The only difference is that you do not need to specify which location to start reading from. This is possible because as you read data out of a smart-buffer, it automatically progresses an internal read offset/position to know where to pick up from on the next read. + +## Reading Numeric Values + +When reading numeric values, you simply call the function you want, and the data is returned. + +Supported Operations: +* readInt8 +* readInt16BE +* readInt16LE +* readInt32BE +* readInt32LE +* readBigInt64LE +* readBigInt64BE +* readUInt8 +* readUInt16BE +* readUInt16LE +* readUInt32BE +* readUInt32LE +* readBigUInt64LE +* readBigUInt64BE +* readFloatBE +* readFloatLE +* readDoubleBE +* readDoubleLE + +```javascript +let reader = new SmartBuffer(somebuffer); +let num = reader.readInt8(); +``` + +## Reading String Values + +When reading String values, you can either choose to read a null terminated string, or a string of a specified length. + +### SmartBuffer.readStringNT( [encoding] ) +> `String` **String encoding to use** - Defaults to the encoding set in the constructor. + +returns `String` + +> Note: When readStringNT is called and there is no null character found, smart-buffer will read to the end of the internal Buffer. + +### SmartBuffer.readString( [length] ) +### SmartBuffer.readString( [encoding] ) +### SmartBuffer.readString( [length], [encoding] ) +> `Number` **Length of the string to read** + +> `String` **String encoding to use** - Defaults to the encoding set in the constructor, or utf8. + +returns `String` + +> Note: When readString is called without a specified length, smart-buffer will read to the end of the internal Buffer. + + + +## Reading Buffer Values + +### SmartBuffer.readBuffer( length ) +> `Number` **Length of data to read into a Buffer** + +returns `Buffer` + +> Note: This function uses `slice` to retrieve the Buffer. + + +### SmartBuffer.readBufferNT() + +returns `Buffer` + +> Note: This reads the next sequence of bytes in the buffer until a null (0x00) value is found. (Null terminated buffer) +> Note: This function uses `slice` to retrieve the Buffer. + + +## Writing Data + +smart-buffer supports all of the common write functions you will find in the vanilla Buffer class. The only difference is that you do not need to specify which location to write to in your Buffer by default. You do, however, have the option of **inserting** a piece of data into your smart-buffer at a given location. + + +## Writing Numeric Values + + +For numeric values, you simply need to call the function you want, and the data is written at the end of the internal Buffer's current write position.
You can specify an offset/position to **insert** the given value at, but keep in mind this does not override data at the given position. This feature also does not work properly when inserting a value beyond the current internal length of the smart-buffer (length being the .length property of the smart-buffer instance you're writing to). + +Supported Operations: +* writeInt8 +* writeInt16BE +* writeInt16LE +* writeInt32BE +* writeInt32LE +* writeBigInt64BE +* writeBigInt64LE +* writeUInt8 +* writeUInt16BE +* writeUInt16LE +* writeUInt32BE +* writeUInt32LE +* writeBigUInt64BE +* writeBigUInt64LE +* writeFloatBE +* writeFloatLE +* writeDoubleBE +* writeDoubleLE + +The following signature is the same for all the above functions: + +### SmartBuffer.writeInt8( value, [offset] ) +> `Number` **A valid Int8 number** + +> `Number` **The position to insert this value at** + +returns this + +> Note: All write operations return `this` to allow for chaining. + +## Writing String Values + +When writing String values, you can choose to write either a null terminated string or a non null terminated string. + +### SmartBuffer.writeStringNT( value, [offset], [encoding] ) +### SmartBuffer.writeStringNT( value, [offset] ) +### SmartBuffer.writeStringNT( value, [encoding] ) +> `String` **String value to write** + +> `Number` **The position to insert this String at** + +> `String` **The String encoding to use.** - Defaults to the encoding set in the constructor, or utf8. + +returns this + +### SmartBuffer.writeString( value, [offset], [encoding] ) +### SmartBuffer.writeString( value, [offset] ) +### SmartBuffer.writeString( value, [encoding] ) +> `String` **String value to write** + +> `Number` **The position to insert this String at** + +> `String` **The String encoding to use** - Defaults to the encoding set in the constructor, or utf8. + +returns this + + +## Writing Buffer Values + +### SmartBuffer.writeBuffer( value, [offset] ) +> `Buffer` **Buffer value to write** + +> `Number` **The position to insert this Buffer's content at** + +returns this + +### SmartBuffer.writeBufferNT( value, [offset] ) +> `Buffer` **Buffer value to write** + +> `Number` **The position to insert this Buffer's content at** + +returns this + + +## Utility Functions + +### SmartBuffer.clear() +Resets the SmartBuffer to its default state where it can be reused for reading or writing. + +### SmartBuffer.remaining() + +returns `Number` The amount of data left to read based on the current read position. + +### SmartBuffer.skip( value ) +> `Number` **The amount of bytes to skip ahead** + +Skips the read position ahead by the given value. + +returns this + +### SmartBuffer.rewind( value ) +> `Number` **The amount of bytes to rewind backwards** + +Rewinds the read position backwards by the given value. + +returns this + +### SmartBuffer.moveTo( position ) +> `Number` **The point to skip the read position to** + +Moves the read position to the given point. +returns this + +### SmartBuffer.toBuffer() + +returns `Buffer` A Buffer containing the contents of the internal Buffer. + +> Note: This uses the slice function. + +### SmartBuffer.toString( [encoding] ) +> `String` **The String encoding to use** - Defaults to the encoding set in the constructor, or utf8. + +returns `String` The internal Buffer in String representation. + +## Properties + +### SmartBuffer.length + +returns `Number` **The length of the data that is being tracked in the internal Buffer** - Does NOT return the absolute length of the internal Buffer being written to.
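+As a rough sketch of how the utility functions and the `length` property above fit together (the example Buffer and the values shown in the comments are illustrative only):
+
+```javascript
+const SmartBuffer = require('smart-buffer').SmartBuffer;
+
+let reader = SmartBuffer.fromBuffer(Buffer.from([0x0a, 0x0b, 0x0c, 0x0d]));
+
+reader.skip(2);       // read position moves ahead to 2
+reader.remaining();   // 2 bytes left to read
+reader.rewind(1);     // read position moves back to 1
+reader.moveTo(0);     // read position jumps back to 0
+
+reader.readUInt8();   // 0x0a, read position advances to 1
+reader.length;        // 4 - length of the tracked data, not the internal Buffer
+reader.toBuffer();    // a Buffer containing the 4 tracked bytes
+```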
+ +## License + +This work is licensed under the [MIT license](http://en.wikipedia.org/wiki/MIT_License). \ No newline at end of file diff --git a/mybulma/node_modules/smart-buffer/docs/ROADMAP.md b/mybulma/node_modules/smart-buffer/docs/ROADMAP.md new file mode 100644 index 0000000..e69de29 diff --git a/mybulma/node_modules/smart-buffer/package.json b/mybulma/node_modules/smart-buffer/package.json new file mode 100644 index 0000000..2f326f2 --- /dev/null +++ b/mybulma/node_modules/smart-buffer/package.json @@ -0,0 +1,79 @@ +{ + "name": "smart-buffer", + "version": "4.2.0", + "description": "smart-buffer is a Buffer wrapper that adds automatic read & write offset tracking, string operations, data insertions, and more.", + "main": "build/smartbuffer.js", + "contributors": ["syvita"], + "homepage": "https://github.com/JoshGlazebrook/smart-buffer/", + "repository": { + "type": "git", + "url": "https://github.com/JoshGlazebrook/smart-buffer.git" + }, + "bugs": { + "url": "https://github.com/JoshGlazebrook/smart-buffer/issues" + }, + "keywords": [ + "buffer", + "smart", + "packet", + "serialize", + "network", + "cursor", + "simple" + ], + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + }, + "author": "Josh Glazebrook", + "license": "MIT", + "readmeFilename": "README.md", + "devDependencies": { + "@types/chai": "4.1.7", + "@types/mocha": "5.2.7", + "@types/node": "^12.0.0", + "chai": "4.2.0", + "coveralls": "3.0.5", + "istanbul": "^0.4.5", + "mocha": "6.2.0", + "mocha-lcov-reporter": "^1.3.0", + "nyc": "14.1.1", + "source-map-support": "0.5.12", + "ts-node": "8.3.0", + "tslint": "5.18.0", + "typescript": "^3.2.1" + }, + "typings": "typings/smartbuffer.d.ts", + "dependencies": {}, + "scripts": { + "prepublish": "npm install -g typescript && npm run build", + "test": "NODE_ENV=test mocha --recursive --require ts-node/register test/**/*.ts", + "coverage": "NODE_ENV=test nyc npm test", + "coveralls": "NODE_ENV=test nyc npm test && nyc report --reporter=text-lcov | coveralls", + "lint": "tslint --type-check --project tsconfig.json 'src/**/*.ts'", + "build": "tsc -p ./" + }, + "nyc": { + "extension": [ + ".ts", + ".tsx" + ], + "include": [ + "src/*.ts", + "src/**/*.ts" + ], + "exclude": [ + "**.*.d.ts", + "node_modules", + "typings" + ], + "require": [ + "ts-node/register" + ], + "reporter": [ + "json", + "html" + ], + "all": true + } +} diff --git a/mybulma/node_modules/smart-buffer/typings/smartbuffer.d.ts b/mybulma/node_modules/smart-buffer/typings/smartbuffer.d.ts new file mode 100644 index 0000000..d07379b --- /dev/null +++ b/mybulma/node_modules/smart-buffer/typings/smartbuffer.d.ts @@ -0,0 +1,755 @@ +/// +/** + * Object interface for constructing new SmartBuffer instances. + */ +interface SmartBufferOptions { + encoding?: BufferEncoding; + size?: number; + buff?: Buffer; +} +declare class SmartBuffer { + length: number; + private _encoding; + private _buff; + private _writeOffset; + private _readOffset; + /** + * Creates a new SmartBuffer instance. + * + * @param options { SmartBufferOptions } The SmartBufferOptions to apply to this instance. + */ + constructor(options?: SmartBufferOptions); + /** + * Creates a new SmartBuffer instance with the provided internal Buffer size and optional encoding. + * + * @param size { Number } The size of the internal Buffer. + * @param encoding { String } The BufferEncoding to use for strings. 
+ * + * @return { SmartBuffer } + */ + static fromSize(size: number, encoding?: BufferEncoding): SmartBuffer; + /** + * Creates a new SmartBuffer instance with the provided Buffer and optional encoding. + * + * @param buffer { Buffer } The Buffer to use as the internal Buffer value. + * @param encoding { String } The BufferEncoding to use for strings. + * + * @return { SmartBuffer } + */ + static fromBuffer(buff: Buffer, encoding?: BufferEncoding): SmartBuffer; + /** + * Creates a new SmartBuffer instance with the provided SmartBufferOptions options. + * + * @param options { SmartBufferOptions } The options to use when creating the SmartBuffer instance. + */ + static fromOptions(options: SmartBufferOptions): SmartBuffer; + /** + * Type checking function that determines if an object is a SmartBufferOptions object. + */ + static isSmartBufferOptions(options: SmartBufferOptions): options is SmartBufferOptions; + /** + * Reads an Int8 value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readInt8(offset?: number): number; + /** + * Reads an Int16BE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readInt16BE(offset?: number): number; + /** + * Reads an Int16LE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readInt16LE(offset?: number): number; + /** + * Reads an Int32BE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readInt32BE(offset?: number): number; + /** + * Reads an Int32LE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readInt32LE(offset?: number): number; + /** + * Reads a BigInt64BE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { BigInt } + */ + readBigInt64BE(offset?: number): bigint; + /** + * Reads a BigInt64LE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { BigInt } + */ + readBigInt64LE(offset?: number): bigint; + /** + * Writes an Int8 value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeInt8(value: number, offset?: number): SmartBuffer; + /** + * Inserts an Int8 value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertInt8(value: number, offset: number): SmartBuffer; + /** + * Writes an Int16BE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeInt16BE(value: number, offset?: number): SmartBuffer; + /** + * Inserts an Int16BE value at the given offset value. + * + * @param value { Number } The value to insert. 
+ * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertInt16BE(value: number, offset: number): SmartBuffer; + /** + * Writes an Int16LE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeInt16LE(value: number, offset?: number): SmartBuffer; + /** + * Inserts an Int16LE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertInt16LE(value: number, offset: number): SmartBuffer; + /** + * Writes an Int32BE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeInt32BE(value: number, offset?: number): SmartBuffer; + /** + * Inserts an Int32BE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertInt32BE(value: number, offset: number): SmartBuffer; + /** + * Writes an Int32LE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeInt32LE(value: number, offset?: number): SmartBuffer; + /** + * Inserts an Int32LE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertInt32LE(value: number, offset: number): SmartBuffer; + /** + * Writes a BigInt64BE value to the current write position (or at optional offset). + * + * @param value { BigInt } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeBigInt64BE(value: bigint, offset?: number): SmartBuffer; + /** + * Inserts a BigInt64BE value at the given offset value. + * + * @param value { BigInt } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertBigInt64BE(value: bigint, offset: number): SmartBuffer; + /** + * Writes a BigInt64LE value to the current write position (or at optional offset). + * + * @param value { BigInt } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeBigInt64LE(value: bigint, offset?: number): SmartBuffer; + /** + * Inserts a Int64LE value at the given offset value. + * + * @param value { BigInt } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertBigInt64LE(value: bigint, offset: number): SmartBuffer; + /** + * Reads an UInt8 value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readUInt8(offset?: number): number; + /** + * Reads an UInt16BE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readUInt16BE(offset?: number): number; + /** + * Reads an UInt16LE value from the current read position or an optionally provided offset. 
+ * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readUInt16LE(offset?: number): number; + /** + * Reads an UInt32BE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readUInt32BE(offset?: number): number; + /** + * Reads an UInt32LE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readUInt32LE(offset?: number): number; + /** + * Reads a BigUInt64BE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { BigInt } + */ + readBigUInt64BE(offset?: number): bigint; + /** + * Reads a BigUInt64LE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { BigInt } + */ + readBigUInt64LE(offset?: number): bigint; + /** + * Writes an UInt8 value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeUInt8(value: number, offset?: number): SmartBuffer; + /** + * Inserts an UInt8 value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertUInt8(value: number, offset: number): SmartBuffer; + /** + * Writes an UInt16BE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeUInt16BE(value: number, offset?: number): SmartBuffer; + /** + * Inserts an UInt16BE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertUInt16BE(value: number, offset: number): SmartBuffer; + /** + * Writes an UInt16LE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeUInt16LE(value: number, offset?: number): SmartBuffer; + /** + * Inserts an UInt16LE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertUInt16LE(value: number, offset: number): SmartBuffer; + /** + * Writes an UInt32BE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeUInt32BE(value: number, offset?: number): SmartBuffer; + /** + * Inserts an UInt32BE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertUInt32BE(value: number, offset: number): SmartBuffer; + /** + * Writes an UInt32LE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. 
+ * + * @return this + */ + writeUInt32LE(value: number, offset?: number): SmartBuffer; + /** + * Inserts an UInt32LE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertUInt32LE(value: number, offset: number): SmartBuffer; + /** + * Writes a BigUInt64BE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeBigUInt64BE(value: bigint, offset?: number): SmartBuffer; + /** + * Inserts a BigUInt64BE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertBigUInt64BE(value: bigint, offset: number): SmartBuffer; + /** + * Writes a BigUInt64LE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeBigUInt64LE(value: bigint, offset?: number): SmartBuffer; + /** + * Inserts a BigUInt64LE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertBigUInt64LE(value: bigint, offset: number): SmartBuffer; + /** + * Reads an FloatBE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readFloatBE(offset?: number): number; + /** + * Reads an FloatLE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readFloatLE(offset?: number): number; + /** + * Writes a FloatBE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeFloatBE(value: number, offset?: number): SmartBuffer; + /** + * Inserts a FloatBE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertFloatBE(value: number, offset: number): SmartBuffer; + /** + * Writes a FloatLE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeFloatLE(value: number, offset?: number): SmartBuffer; + /** + * Inserts a FloatLE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertFloatLE(value: number, offset: number): SmartBuffer; + /** + * Reads an DoublEBE value from the current read position or an optionally provided offset. + * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readDoubleBE(offset?: number): number; + /** + * Reads an DoubleLE value from the current read position or an optionally provided offset. 
+ * + * @param offset { Number } The offset to read data from (optional) + * @return { Number } + */ + readDoubleLE(offset?: number): number; + /** + * Writes a DoubleBE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeDoubleBE(value: number, offset?: number): SmartBuffer; + /** + * Inserts a DoubleBE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertDoubleBE(value: number, offset: number): SmartBuffer; + /** + * Writes a DoubleLE value to the current write position (or at optional offset). + * + * @param value { Number } The value to write. + * @param offset { Number } The offset to write the value at. + * + * @return this + */ + writeDoubleLE(value: number, offset?: number): SmartBuffer; + /** + * Inserts a DoubleLE value at the given offset value. + * + * @param value { Number } The value to insert. + * @param offset { Number } The offset to insert the value at. + * + * @return this + */ + insertDoubleLE(value: number, offset: number): SmartBuffer; + /** + * Reads a String from the current read position. + * + * @param arg1 { Number | String } The number of bytes to read as a String, or the BufferEncoding to use for + * the string (Defaults to instance level encoding). + * @param encoding { String } The BufferEncoding to use for the string (Defaults to instance level encoding). + * + * @return { String } + */ + readString(arg1?: number | BufferEncoding, encoding?: BufferEncoding): string; + /** + * Inserts a String + * + * @param value { String } The String value to insert. + * @param offset { Number } The offset to insert the string at. + * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding). + * + * @return this + */ + insertString(value: string, offset: number, encoding?: BufferEncoding): SmartBuffer; + /** + * Writes a String + * + * @param value { String } The String value to write. + * @param arg2 { Number | String } The offset to write the string at, or the BufferEncoding to use. + * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding). + * + * @return this + */ + writeString(value: string, arg2?: number | BufferEncoding, encoding?: BufferEncoding): SmartBuffer; + /** + * Reads a null-terminated String from the current read position. + * + * @param encoding { String } The BufferEncoding to use for the string (Defaults to instance level encoding). + * + * @return { String } + */ + readStringNT(encoding?: BufferEncoding): string; + /** + * Inserts a null-terminated String. + * + * @param value { String } The String value to write. + * @param arg2 { Number | String } The offset to write the string to, or the BufferEncoding to use. + * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding). + * + * @return this + */ + insertStringNT(value: string, offset: number, encoding?: BufferEncoding): SmartBuffer; + /** + * Writes a null-terminated String. + * + * @param value { String } The String value to write. + * @param arg2 { Number | String } The offset to write the string to, or the BufferEncoding to use. + * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding). 
+     *
+     * @return this
+     */
+    writeStringNT(value: string, arg2?: number | BufferEncoding, encoding?: BufferEncoding): SmartBuffer;
+    /**
+     * Reads a Buffer from the internal read position.
+     *
+     * @param length { Number } The length of data to read as a Buffer.
+     *
+     * @return { Buffer }
+     */
+    readBuffer(length?: number): Buffer;
+    /**
+     * Inserts a Buffer at the given offset.
+     *
+     * @param value { Buffer } The Buffer to insert.
+     * @param offset { Number } The offset to insert the Buffer at.
+     *
+     * @return this
+     */
+    insertBuffer(value: Buffer, offset: number): SmartBuffer;
+    /**
+     * Writes a Buffer to the current write position.
+     *
+     * @param value { Buffer } The Buffer to write.
+     * @param offset { Number } The offset to write the Buffer to.
+     *
+     * @return this
+     */
+    writeBuffer(value: Buffer, offset?: number): SmartBuffer;
+    /**
+     * Reads a null-terminated Buffer from the current read position.
+     *
+     * @return { Buffer }
+     */
+    readBufferNT(): Buffer;
+    /**
+     * Inserts a null-terminated Buffer.
+     *
+     * @param value { Buffer } The Buffer to insert.
+     * @param offset { Number } The offset to insert the Buffer at.
+     *
+     * @return this
+     */
+    insertBufferNT(value: Buffer, offset: number): SmartBuffer;
+    /**
+     * Writes a null-terminated Buffer.
+     *
+     * @param value { Buffer } The Buffer to write.
+     * @param offset { Number } The offset to write the Buffer to.
+     *
+     * @return this
+     */
+    writeBufferNT(value: Buffer, offset?: number): SmartBuffer;
+    /**
+     * Clears the SmartBuffer instance to its original empty state.
+     */
+    clear(): SmartBuffer;
+    /**
+     * Gets the remaining data left to be read from the SmartBuffer instance.
+     *
+     * @return { Number }
+     */
+    remaining(): number;
+    /**
+     * Gets the current read offset value of the SmartBuffer instance.
+     *
+     * @return { Number }
+     */
+    /**
+     * Sets the read offset value of the SmartBuffer instance.
+     *
+     * @param offset { Number } - The offset value to set.
+     */
+    readOffset: number;
+    /**
+     * Gets the current write offset value of the SmartBuffer instance.
+     *
+     * @return { Number }
+     */
+    /**
+     * Sets the write offset value of the SmartBuffer instance.
+     *
+     * @param offset { Number } - The offset value to set.
+     */
+    writeOffset: number;
+    /**
+     * Gets the currently set string encoding of the SmartBuffer instance.
+     *
+     * @return { BufferEncoding } The string Buffer encoding currently set.
+     */
+    /**
+     * Sets the string encoding of the SmartBuffer instance.
+     *
+     * @param encoding { BufferEncoding } The string Buffer encoding to set.
+     */
+    encoding: BufferEncoding;
+    /**
+     * Gets the underlying internal Buffer. (This includes unmanaged data in the Buffer)
+     *
+     * @return { Buffer } The Buffer value.
+     */
+    readonly internalBuffer: Buffer;
+    /**
+     * Gets the value of the internal managed Buffer (Includes managed data only)
+     *
+     * @return { Buffer }
+     */
+    toBuffer(): Buffer;
+    /**
+     * Gets the String value of the internal managed Buffer
+     *
+     * @param encoding { String } The BufferEncoding to display the Buffer as (defaults to instance level encoding).
+     */
+    toString(encoding?: BufferEncoding): string;
+    /**
+     * Destroys the SmartBuffer instance.
+     */
+    destroy(): SmartBuffer;
+    /**
+     * Handles inserting and writing strings.
+     *
+     * @param value { String } The String value to insert.
+     * @param isInsert { Boolean } True if inserting a string, false if writing.
+     * @param arg2 { Number | String } The offset to insert the string at, or the BufferEncoding to use.
+ * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding). + */ + private _handleString; + /** + * Handles writing or insert of a Buffer. + * + * @param value { Buffer } The Buffer to write. + * @param offset { Number } The offset to write the Buffer to. + */ + private _handleBuffer; + /** + * Ensures that the internal Buffer is large enough to read data. + * + * @param length { Number } The length of the data that needs to be read. + * @param offset { Number } The offset of the data that needs to be read. + */ + private ensureReadable; + /** + * Ensures that the internal Buffer is large enough to insert data. + * + * @param dataLength { Number } The length of the data that needs to be written. + * @param offset { Number } The offset of the data to be written. + */ + private ensureInsertable; + /** + * Ensures that the internal Buffer is large enough to write data. + * + * @param dataLength { Number } The length of the data that needs to be written. + * @param offset { Number } The offset of the data to be written (defaults to writeOffset). + */ + private _ensureWriteable; + /** + * Ensures that the internal Buffer is large enough to write at least the given amount of data. + * + * @param minLength { Number } The minimum length of the data needs to be written. + */ + private _ensureCapacity; + /** + * Reads a numeric number value using the provided function. + * + * @typeparam T { number | bigint } The type of the value to be read + * + * @param func { Function(offset: number) => number } The function to read data on the internal Buffer with. + * @param byteSize { Number } The number of bytes read. + * @param offset { Number } The offset to read from (optional). When this is not provided, the managed readOffset is used instead. + * + * @returns { T } the number value + */ + private _readNumberValue; + /** + * Inserts a numeric number value based on the given offset and value. + * + * @typeparam T { number | bigint } The type of the value to be written + * + * @param func { Function(offset: T, offset?) => number} The function to write data on the internal Buffer with. + * @param byteSize { Number } The number of bytes written. + * @param value { T } The number value to write. + * @param offset { Number } the offset to write the number at (REQUIRED). + * + * @returns SmartBuffer this buffer + */ + private _insertNumberValue; + /** + * Writes a numeric number value based on the given offset and value. + * + * @typeparam T { number | bigint } The type of the value to be written + * + * @param func { Function(offset: T, offset?) => number} The function to write data on the internal Buffer with. + * @param byteSize { Number } The number of bytes written. + * @param value { T } The number value to write. + * @param offset { Number } the offset to write the number at (REQUIRED). 
+ * + * @returns SmartBuffer this buffer + */ + private _writeNumberValue; +} +export { SmartBufferOptions, SmartBuffer }; diff --git a/mybulma/node_modules/smart-buffer/typings/utils.d.ts b/mybulma/node_modules/smart-buffer/typings/utils.d.ts new file mode 100644 index 0000000..b32b4d4 --- /dev/null +++ b/mybulma/node_modules/smart-buffer/typings/utils.d.ts @@ -0,0 +1,66 @@ +/// +import { SmartBuffer } from './smartbuffer'; +import { Buffer } from 'buffer'; +/** + * Error strings + */ +declare const ERRORS: { + INVALID_ENCODING: string; + INVALID_SMARTBUFFER_SIZE: string; + INVALID_SMARTBUFFER_BUFFER: string; + INVALID_SMARTBUFFER_OBJECT: string; + INVALID_OFFSET: string; + INVALID_OFFSET_NON_NUMBER: string; + INVALID_LENGTH: string; + INVALID_LENGTH_NON_NUMBER: string; + INVALID_TARGET_OFFSET: string; + INVALID_TARGET_LENGTH: string; + INVALID_READ_BEYOND_BOUNDS: string; + INVALID_WRITE_BEYOND_BOUNDS: string; +}; +/** + * Checks if a given encoding is a valid Buffer encoding. (Throws an exception if check fails) + * + * @param { String } encoding The encoding string to check. + */ +declare function checkEncoding(encoding: BufferEncoding): void; +/** + * Checks if a given number is a finite integer. (Throws an exception if check fails) + * + * @param { Number } value The number value to check. + */ +declare function isFiniteInteger(value: number): boolean; +/** + * Checks if a length value is valid. (Throws an exception if check fails) + * + * @param { Number } length The value to check. + */ +declare function checkLengthValue(length: any): void; +/** + * Checks if a offset value is valid. (Throws an exception if check fails) + * + * @param { Number } offset The value to check. + */ +declare function checkOffsetValue(offset: any): void; +/** + * Checks if a target offset value is out of bounds. (Throws an exception if check fails) + * + * @param { Number } offset The offset value to check. + * @param { SmartBuffer } buff The SmartBuffer instance to check against. 
+ */ +declare function checkTargetOffset(offset: number, buff: SmartBuffer): void; +interface Buffer { + readBigInt64BE(offset?: number): bigint; + readBigInt64LE(offset?: number): bigint; + readBigUInt64BE(offset?: number): bigint; + readBigUInt64LE(offset?: number): bigint; + writeBigInt64BE(value: bigint, offset?: number): number; + writeBigInt64LE(value: bigint, offset?: number): number; + writeBigUInt64BE(value: bigint, offset?: number): number; + writeBigUInt64LE(value: bigint, offset?: number): number; +} +/** + * Throws if Node.js version is too low to support bigint + */ +declare function bigIntAndBufferInt64Check(bufferMethod: keyof Buffer): void; +export { ERRORS, isFiniteInteger, checkEncoding, checkOffsetValue, checkLengthValue, checkTargetOffset, bigIntAndBufferInt64Check }; diff --git a/mybulma/node_modules/socks-proxy-agent/dist/index.d.ts b/mybulma/node_modules/socks-proxy-agent/dist/index.d.ts new file mode 100644 index 0000000..4de33b1 --- /dev/null +++ b/mybulma/node_modules/socks-proxy-agent/dist/index.d.ts @@ -0,0 +1,33 @@ +/// +import { SocksProxy } from 'socks'; +import { Agent, ClientRequest, RequestOptions } from 'agent-base'; +import { AgentOptions } from 'agent-base'; +import { Url } from 'url'; +import net from 'net'; +import tls from 'tls'; +interface BaseSocksProxyAgentOptions { + host?: string | null; + port?: string | number | null; + username?: string | null; + tls?: tls.ConnectionOptions | null; +} +interface SocksProxyAgentOptionsExtra { + timeout?: number; +} +export interface SocksProxyAgentOptions extends AgentOptions, BaseSocksProxyAgentOptions, Partial> { +} +export declare class SocksProxyAgent extends Agent { + private readonly shouldLookup; + private readonly proxy; + private readonly tlsConnectionOptions; + timeout: number | null; + constructor(input: string | SocksProxyAgentOptions, options?: SocksProxyAgentOptionsExtra); + /** + * Initiates a SOCKS connection to the specified SOCKS proxy server, + * which in turn connects to the specified remote host and port. + * + * @api protected + */ + callback(req: ClientRequest, opts: RequestOptions): Promise; +} +export {}; diff --git a/mybulma/node_modules/socks-proxy-agent/dist/index.js b/mybulma/node_modules/socks-proxy-agent/dist/index.js new file mode 100644 index 0000000..55b598b --- /dev/null +++ b/mybulma/node_modules/socks-proxy-agent/dist/index.js @@ -0,0 +1,197 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SocksProxyAgent = void 0; +const socks_1 = require("socks"); +const agent_base_1 = require("agent-base"); +const debug_1 = __importDefault(require("debug")); +const dns_1 = __importDefault(require("dns")); +const tls_1 = __importDefault(require("tls")); +const debug = (0, debug_1.default)('socks-proxy-agent'); +function parseSocksProxy(opts) { + var _a; + let port = 0; + let lookup = false; + let type = 5; + const host = opts.hostname; + if (host == null) { + throw new TypeError('No "host"'); + } + if (typeof opts.port === 'number') { + port = opts.port; + } + else if (typeof opts.port === 'string') { + port = parseInt(opts.port, 10); + } + // From RFC 1928, Section 3: https://tools.ietf.org/html/rfc1928#section-3 + // "The SOCKS service is conventionally located on TCP port 1080" + if (port == null) { + port = 1080; + } + // figure out if we want socks v4 or v5, based on the "protocol" used. + // Defaults to 5. + if (opts.protocol != null) { + switch (opts.protocol.replace(':', '')) { + case 'socks4': + lookup = true; + // pass through + case 'socks4a': + type = 4; + break; + case 'socks5': + lookup = true; + // pass through + case 'socks': // no version specified, default to 5h + case 'socks5h': + type = 5; + break; + default: + throw new TypeError(`A "socks" protocol must be specified! Got: ${String(opts.protocol)}`); + } + } + if (typeof opts.type !== 'undefined') { + if (opts.type === 4 || opts.type === 5) { + type = opts.type; + } + else { + throw new TypeError(`"type" must be 4 or 5, got: ${String(opts.type)}`); + } + } + const proxy = { + host, + port, + type + }; + let userId = (_a = opts.userId) !== null && _a !== void 0 ? _a : opts.username; + let password = opts.password; + if (opts.auth != null) { + const auth = opts.auth.split(':'); + userId = auth[0]; + password = auth[1]; + } + if (userId != null) { + Object.defineProperty(proxy, 'userId', { + value: userId, + enumerable: false + }); + } + if (password != null) { + Object.defineProperty(proxy, 'password', { + value: password, + enumerable: false + }); + } + return { lookup, proxy }; +} +const normalizeProxyOptions = (input) => { + let proxyOptions; + if (typeof input === 'string') { + proxyOptions = new URL(input); + } + else { + proxyOptions = input; + } + if (proxyOptions == null) { + throw new TypeError('a SOCKS proxy server `host` and `port` must be specified!'); + } + return proxyOptions; +}; +class SocksProxyAgent extends agent_base_1.Agent { + constructor(input, options) { + var _a; + const proxyOptions = normalizeProxyOptions(input); + super(proxyOptions); + const parsedProxy = parseSocksProxy(proxyOptions); + this.shouldLookup = parsedProxy.lookup; + this.proxy = parsedProxy.proxy; + this.tlsConnectionOptions = proxyOptions.tls != null ? proxyOptions.tls : {}; + this.timeout = (_a = options === null || options === void 0 ? void 0 : options.timeout) !== null && _a !== void 0 ? _a : null; + } + /** + * Initiates a SOCKS connection to the specified SOCKS proxy server, + * which in turn connects to the specified remote host and port. + * + * @api protected + */ + callback(req, opts) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const { shouldLookup, proxy, timeout } = this; + let { host, port, lookup: lookupCallback } = opts; + if (host == null) { + throw new Error('No `host` defined!'); + } + if (shouldLookup) { + // Client-side DNS resolution for "4" and "5" socks proxy versions. 
+ host = yield new Promise((resolve, reject) => { + // Use the request's custom lookup, if one was configured: + const lookupFn = lookupCallback !== null && lookupCallback !== void 0 ? lookupCallback : dns_1.default.lookup; + lookupFn(host, {}, (err, res) => { + if (err) { + reject(err); + } + else { + resolve(res); + } + }); + }); + } + const socksOpts = { + proxy, + destination: { host, port }, + command: 'connect', + timeout: timeout !== null && timeout !== void 0 ? timeout : undefined + }; + const cleanup = (tlsSocket) => { + req.destroy(); + socket.destroy(); + if (tlsSocket) + tlsSocket.destroy(); + }; + debug('Creating socks proxy connection: %o', socksOpts); + const { socket } = yield socks_1.SocksClient.createConnection(socksOpts); + debug('Successfully created socks proxy connection'); + if (timeout !== null) { + socket.setTimeout(timeout); + socket.on('timeout', () => cleanup()); + } + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = (_a = opts.servername) !== null && _a !== void 0 ? _a : opts.host; + const tlsSocket = tls_1.default.connect(Object.assign(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, + servername }), this.tlsConnectionOptions)); + tlsSocket.once('error', (error) => { + debug('socket TLS error', error.message); + cleanup(tlsSocket); + }); + return tlsSocket; + } + return socket; + }); + } +} +exports.SocksProxyAgent = SocksProxyAgent; +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/mybulma/node_modules/socks-proxy-agent/dist/index.js.map b/mybulma/node_modules/socks-proxy-agent/dist/index.js.map new file mode 100644 index 0000000..e183e8e --- /dev/null +++ b/mybulma/node_modules/socks-proxy-agent/dist/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,iCAAmE;AACnE,2CAAiE;AAEjE,kDAA+B;AAE/B,8CAAqB;AAErB,8CAAqB;AAarB,MAAM,KAAK,GAAG,IAAA,eAAW,EAAC,mBAAmB,CAAC,CAAA;AAE9C,SAAS,eAAe,CAAE,IAA4B;;IACpD,IAAI,IAAI,GAAG,CAAC,CAAA;IACZ,IAAI,MAAM,GAAG,KAAK,CAAA;IAClB,IAAI,IAAI,GAAuB,CAAC,CAAA;IAEhC,MAAM,IAAI,GAAG,IAAI,CAAC,QAAQ,CAAA;IAE1B,IAAI,IAAI,IAAI,IAAI,EAAE;QAChB,MAAM,IAAI,SAAS,CAAC,WAAW,CAAC,CAAA;KACjC;IAED,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;QACjC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;KACjB;SAAM,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;QACxC,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,CAAC,CAAA;KAC/B;IAED,0EAA0E;IAC1E,iEAAiE;IACjE,IAAI,IAAI,IAAI,IAAI,EAAE;QAChB,IAAI,GAAG,IAAI,CAAA;KACZ;IAED,sEAAsE;IACtE,iBAAiB;IACjB,IAAI,IAAI,CAAC,QAAQ,IAAI,IAAI,EAAE;QACzB,QAAQ,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE;YACtC,KAAK,QAAQ;gBACX,MAAM,GAAG,IAAI,CAAA;YACf,eAAe;YACf,KAAK,SAAS;gBACZ,IAAI,GAAG,CAAC,CAAA;gBACR,MAAK;YACP,KAAK,QAAQ;gBACX,MAAM,GAAG,IAAI,CAAA;YACf,eAAe;YACf,KAAK,OAAO,CAAC,CAAC,sCAAsC;YACpD,KAAK,SAAS;gBACZ,IAAI,GAAG,CAAC,CAAA;gBACR,MAAK;YACP;gBACE,MAAM,IAAI,SAAS,CAAC,8CAA8C,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAA;SAC7F;KACF;IAED,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,WAAW,EAAE;QACpC,IAAI,IAAI,CAAC,IAAI,KAAK,CAAC,IAAI,IAAI,CAAC,IAAI,KAAK,CAAC,EAAE;YACtC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;SACjB;aAAM;YACL,MAAM,IAAI,SAAS,CAAC,+BAA+B,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;SACxE;KACF;IAED,MAAM,KAAK,GAAe;QACxB,IAAI;QACJ,IAAI;QACJ,IAAI;KACL,CAAA;IAED,IAAI,MAAM,GAAG,MAAA,IAAI,CAAC,MAAM,mCAAI,IAAI,CAAC,QAAQ,CAAA;IACzC,IAAI,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;IAC5B,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,EAAE;QACrB,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QACjC,MAAM,GAAG,IAAI,CAAC,CAAC,CAAC,CAAA;QAChB,QAAQ,GAAG,IAAI,CAAC,CAAC,CAAC,CAAA;KACnB;IACD,IAAI,MAAM,IAAI,IAAI,EAAE;QAClB,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,QAAQ,EAAE;YACrC,KAAK,EAAE,MAAM;YACb,UAAU,EAAE,KAAK;SAClB,CAAC,CAAA;KACH;IACD,IAAI,QAAQ,IAAI,IAAI,EAAE;QACpB,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,UAAU,EAAE;YACvC,KAAK,EAAE,QAAQ;YACf,UAAU,EAAE,KAAK;SAClB,CAAC,CAAA;KACH;IAED,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,CAAA;AAC1B,CAAC;AAED,MAAM,qBAAqB,GAAG,CAAC,KAAsC,EAA0B,EAAE;IAC/F,IAAI,YAAoC,CAAA;IACxC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC7B,YAAY,GAAG,IAAI,GAAG,CAAC,KAAK,CAAC,CAAA;KAC9B;SAAM;QACL,YAAY,GAAG,KAAK,CAAA;KACrB;IACD,IAAI,YAAY,IAAI,IAAI,EAAE;QACxB,MAAM,IAAI,SAAS,CAAC,2DAA2D,CAAC,CAAA;KACjF;IAED,OAAO,YAAY,CAAA;AACrB,CAAC,CAAA;AAID,MAAa,eAAgB,SAAQ,kBAAK;IAMxC,YAAa,KAAsC,EAAE,OAAqC;;QACxF,MAAM,YAAY,GAAG,qBAAqB,CAAC,KAAK,CAAC,CAAA;QACjD,KAAK,CAAC,YAAY,CAAC,CAAA;QAEnB,MAAM,WAAW,GAAG,eAAe,CAAC,YAAY,CAAC,CAAA;QAEjD,IAAI,CAAC,YAAY,GAAG,WAAW,CAAC,MAAM,CAAA;QACtC,IAAI,CAAC,KAAK,GAAG,WAAW,CAAC,KAAK,CAAA;QAC9B,IAAI,CAAC,oBAAoB,GAAG,YAAY,CAAC,GAAG,IAAI,IAAI,CAAC,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAA;QAC5E,IAAI,CAAC,OAAO,GAAG,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,mCAAI,IAAI,CAAA;IACzC,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CAAE,GAAkB,EAAE,IAAoB;;;YACtD,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,OAAO,EAAE,GAAG,IAAI,CAAA;YAE7C,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,EAAE,cAAc,EAAE,GAAG,IAAI,CAAA;YAEjD,IAAI,IAAI,IAAI,IAAI,EAAE;gBAChB,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC,CAAA;aACtC;YAED,IAAI,YAAY,EAAE;gBAChB,mEAAmE;gBACnE,IAAI,GAAG,MAAM,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;oBACnD,0DAA0D;oBAC1D,MAAM,QAAQ,GAAG,cAAc,aAAd,cAAc,cAAd,cAAc,GAAI,aAAG,CAAC,MAAM,CAAA;oBAC7C,QAAQ,CAAC,IAAK,EAAE,EAAE,EAAE,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;wBAC/B,IAAI,GAAG,EAAE;4BACP,MAAM,CAAC,GAAG,CAAC,CAAA;yBACZ;6BAAM;4BACL,OAAO,CAAC,GAAG,CAAC,CAA
A;yBACb;oBACH,CAAC,CAAC,CAAA;gBACJ,CAAC,CAAC,CAAA;aACH;YAED,MAAM,SAAS,GAAuB;gBACpC,KAAK;gBACL,WAAW,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE;gBAC3B,OAAO,EAAE,SAAS;gBAClB,OAAO,EAAE,OAAO,aAAP,OAAO,cAAP,OAAO,GAAI,SAAS;aAC9B,CAAA;YAED,MAAM,OAAO,GAAG,CAAC,SAAyB,EAAE,EAAE;gBAC5C,GAAG,CAAC,OAAO,EAAE,CAAA;gBACb,MAAM,CAAC,OAAO,EAAE,CAAA;gBAChB,IAAI,SAAS;oBAAE,SAAS,CAAC,OAAO,EAAE,CAAA;YACpC,CAAC,CAAA;YAED,KAAK,CAAC,qCAAqC,EAAE,SAAS,CAAC,CAAA;YACvD,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,mBAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,CAAA;YAChE,KAAK,CAAC,6CAA6C,CAAC,CAAA;YAEpD,IAAI,OAAO,KAAK,IAAI,EAAE;gBACpB,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;gBAC1B,MAAM,CAAC,EAAE,CAAC,SAAS,EAAE,GAAG,EAAE,CAAC,OAAO,EAAE,CAAC,CAAA;aACtC;YAED,IAAI,IAAI,CAAC,cAAc,EAAE;gBACvB,sDAAsD;gBACtD,8CAA8C;gBAC9C,KAAK,CAAC,oCAAoC,CAAC,CAAA;gBAC3C,MAAM,UAAU,GAAG,MAAA,IAAI,CAAC,UAAU,mCAAI,IAAI,CAAC,IAAI,CAAA;gBAE/C,MAAM,SAAS,GAAG,aAAG,CAAC,OAAO,+CACxB,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,CAAC,KACjD,MAAM;oBACN,UAAU,KACP,IAAI,CAAC,oBAAoB,EAC5B,CAAA;gBAEF,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,EAAE;oBAChC,KAAK,CAAC,kBAAkB,EAAE,KAAK,CAAC,OAAO,CAAC,CAAA;oBACxC,OAAO,CAAC,SAAS,CAAC,CAAA;gBACpB,CAAC,CAAC,CAAA;gBAEF,OAAO,SAAS,CAAA;aACjB;YAED,OAAO,MAAM,CAAA;;KACd;CACF;AA7FD,0CA6FC;AAED,SAAS,IAAI,CACX,GAAM,EACN,GAAG,IAAO;IAIV,MAAM,GAAG,GAAG,EAAgD,CAAA;IAC5D,IAAI,GAAqB,CAAA;IACzB,KAAK,GAAG,IAAI,GAAG,EAAE;QACf,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YACvB,GAAG,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,CAAA;SACpB;KACF;IACD,OAAO,GAAG,CAAA;AACZ,CAAC"} \ No newline at end of file diff --git a/mybulma/node_modules/socks/.eslintrc.cjs b/mybulma/node_modules/socks/.eslintrc.cjs new file mode 100644 index 0000000..cc5d089 --- /dev/null +++ b/mybulma/node_modules/socks/.eslintrc.cjs @@ -0,0 +1,11 @@ +module.exports = { + root: true, + parser: '@typescript-eslint/parser', + plugins: [ + '@typescript-eslint', + ], + extends: [ + 'eslint:recommended', + 'plugin:@typescript-eslint/recommended', + ], +}; \ No newline at end of file diff --git a/mybulma/node_modules/socks/.prettierrc.yaml b/mybulma/node_modules/socks/.prettierrc.yaml new file mode 100644 index 0000000..d7b7335 --- /dev/null +++ b/mybulma/node_modules/socks/.prettierrc.yaml @@ -0,0 +1,7 @@ +parser: typescript +printWidth: 80 +tabWidth: 2 +singleQuote: true +trailingComma: all +arrowParens: always +bracketSpacing: false \ No newline at end of file diff --git a/mybulma/node_modules/socks/LICENSE b/mybulma/node_modules/socks/LICENSE new file mode 100644 index 0000000..b2442a9 --- /dev/null +++ b/mybulma/node_modules/socks/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2013 Josh Glazebrook + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/mybulma/node_modules/socks/README.md b/mybulma/node_modules/socks/README.md new file mode 100644 index 0000000..b796220 --- /dev/null +++ b/mybulma/node_modules/socks/README.md @@ -0,0 +1,686 @@ +# socks [![Build Status](https://travis-ci.org/JoshGlazebrook/socks.svg?branch=master)](https://travis-ci.org/JoshGlazebrook/socks) [![Coverage Status](https://coveralls.io/repos/github/JoshGlazebrook/socks/badge.svg?branch=master)](https://coveralls.io/github/JoshGlazebrook/socks?branch=v2) + +Fully featured SOCKS proxy client supporting SOCKSv4, SOCKSv4a, and SOCKSv5. Includes Bind and Associate functionality. + +> Looking for Node.js agent? Check [node-socks-proxy-agent](https://github.com/TooTallNate/node-socks-proxy-agent). + +### Features + +* Supports SOCKS v4, v4a, v5, and v5h protocols. +* Supports the CONNECT, BIND, and ASSOCIATE commands. +* Supports callbacks, promises, and events for proxy connection creation async flow control. +* Supports proxy chaining (CONNECT only). +* Supports user/password authentication. +* Supports custom authentication. +* Built in UDP frame creation & parse functions. +* Created with TypeScript, type definitions are provided. + +### Requirements + +* Node.js v10.0+ (Please use [v1](https://github.com/JoshGlazebrook/socks/tree/82d83923ad960693d8b774cafe17443ded7ed584) for older versions of Node.js) + +### Looking for v1? +* Docs for v1 are available [here](https://github.com/JoshGlazebrook/socks/tree/82d83923ad960693d8b774cafe17443ded7ed584) + +## Installation + +`yarn add socks` + +or + +`npm install --save socks` + +## Usage + +```typescript +// TypeScript +import { SocksClient, SocksClientOptions, SocksClientChainOptions } from 'socks'; + +// ES6 JavaScript +import { SocksClient } from 'socks'; + +// Legacy JavaScript +const SocksClient = require('socks').SocksClient; +``` + +## Quick Start Example + +Connect to github.com (192.30.253.113) on port 80, using a SOCKS proxy. 
+ +```javascript +const options = { + proxy: { + host: '159.203.75.200', // ipv4 or ipv6 or hostname + port: 1080, + type: 5 // Proxy version (4 or 5) + }, + + command: 'connect', // SOCKS command (createConnection factory function only supports the connect command) + + destination: { + host: '192.30.253.113', // github.com (hostname lookups are supported with SOCKS v4a and 5) + port: 80 + } +}; + +// Async/Await +try { + const info = await SocksClient.createConnection(options); + + console.log(info.socket); + // (this is a raw net.Socket that is established to the destination host through the given proxy server) +} catch (err) { + // Handle errors +} + +// Promises +SocksClient.createConnection(options) +.then(info => { + console.log(info.socket); + // (this is a raw net.Socket that is established to the destination host through the given proxy server) +}) +.catch(err => { + // Handle errors +}); + +// Callbacks +SocksClient.createConnection(options, (err, info) => { + if (!err) { + console.log(info.socket); + // (this is a raw net.Socket that is established to the destination host through the given proxy server) + } else { + // Handle errors + } +}); +``` + +## Chaining Proxies + +**Note:** Chaining is only supported when using the SOCKS connect command, and chaining can only be done through the special factory chaining function. + +This example makes a proxy chain through two SOCKS proxies to ip-api.com. Once the connection to the destination is established it sends an HTTP request to get a JSON response that returns ip info for the requesting ip. + +```javascript +const options = { + destination: { + host: 'ip-api.com', // host names are supported with SOCKS v4a and SOCKS v5. + port: 80 + }, + command: 'connect', // Only the connect command is supported when chaining proxies. + proxies: [ // The chain order is the order in the proxies array, meaning the last proxy will establish a connection to the destination. + { + host: '159.203.75.235', // ipv4, ipv6, or hostname + port: 1081, + type: 5 + }, + { + host: '104.131.124.203', // ipv4, ipv6, or hostname + port: 1081, + type: 5 + } + ] +} + +// Async/Await +try { + const info = await SocksClient.createConnectionChain(options); + + console.log(info.socket); + // (this is a raw net.Socket that is established to the destination host through the given proxy servers) + + console.log(info.socket.remoteAddress) // The remote address of the returned socket is the first proxy in the chain. + // 159.203.75.235 + + info.socket.write('GET /json HTTP/1.1\nHost: ip-api.com\n\n'); + info.socket.on('data', (data) => { + console.log(data.toString()); // ip-api.com sees that the last proxy in the chain (104.131.124.203) is connected to it. 
+ /* + HTTP/1.1 200 OK + Access-Control-Allow-Origin: * + Content-Type: application/json; charset=utf-8 + Date: Sun, 24 Dec 2017 03:47:51 GMT + Content-Length: 300 + + { + "as":"AS14061 Digital Ocean, Inc.", + "city":"Clifton", + "country":"United States", + "countryCode":"US", + "isp":"Digital Ocean", + "lat":40.8326, + "lon":-74.1307, + "org":"Digital Ocean", + "query":"104.131.124.203", + "region":"NJ", + "regionName":"New Jersey", + "status":"success", + "timezone":"America/New_York", + "zip":"07014" + } + */ + }); +} catch (err) { + // Handle errors +} + +// Promises +SocksClient.createConnectionChain(options) +.then(info => { + console.log(info.socket); + // (this is a raw net.Socket that is established to the destination host through the given proxy server) + + console.log(info.socket.remoteAddress) // The remote address of the returned socket is the first proxy in the chain. + // 159.203.75.235 + + info.socket.write('GET /json HTTP/1.1\nHost: ip-api.com\n\n'); + info.socket.on('data', (data) => { + console.log(data.toString()); // ip-api.com sees that the last proxy in the chain (104.131.124.203) is connected to it. + /* + HTTP/1.1 200 OK + Access-Control-Allow-Origin: * + Content-Type: application/json; charset=utf-8 + Date: Sun, 24 Dec 2017 03:47:51 GMT + Content-Length: 300 + + { + "as":"AS14061 Digital Ocean, Inc.", + "city":"Clifton", + "country":"United States", + "countryCode":"US", + "isp":"Digital Ocean", + "lat":40.8326, + "lon":-74.1307, + "org":"Digital Ocean", + "query":"104.131.124.203", + "region":"NJ", + "regionName":"New Jersey", + "status":"success", + "timezone":"America/New_York", + "zip":"07014" + } + */ + }); +}) +.catch(err => { + // Handle errors +}); + +// Callbacks +SocksClient.createConnectionChain(options, (err, info) => { + if (!err) { + console.log(info.socket); + // (this is a raw net.Socket that is established to the destination host through the given proxy server) + + console.log(info.socket.remoteAddress) // The remote address of the returned socket is the first proxy in the chain. + // 159.203.75.235 + + info.socket.write('GET /json HTTP/1.1\nHost: ip-api.com\n\n'); + info.socket.on('data', (data) => { + console.log(data.toString()); // ip-api.com sees that the last proxy in the chain (104.131.124.203) is connected to it. + /* + HTTP/1.1 200 OK + Access-Control-Allow-Origin: * + Content-Type: application/json; charset=utf-8 + Date: Sun, 24 Dec 2017 03:47:51 GMT + Content-Length: 300 + + { + "as":"AS14061 Digital Ocean, Inc.", + "city":"Clifton", + "country":"United States", + "countryCode":"US", + "isp":"Digital Ocean", + "lat":40.8326, + "lon":-74.1307, + "org":"Digital Ocean", + "query":"104.131.124.203", + "region":"NJ", + "regionName":"New Jersey", + "status":"success", + "timezone":"America/New_York", + "zip":"07014" + } + */ + }); + } else { + // Handle errors + } +}); +``` + +## Bind Example (TCP Relay) + +When the bind command is sent to a SOCKS v4/v5 proxy server, the proxy server starts listening on a new TCP port and the proxy relays then remote host information back to the client. When another remote client connects to the proxy server on this port the SOCKS proxy sends a notification that an incoming connection has been accepted to the initial client and a full duplex stream is now established to the initial client and the client that connected to that special port. 
+ +```javascript +const options = { + proxy: { + host: '159.203.75.235', // ipv4, ipv6, or hostname + port: 1081, + type: 5 + }, + + command: 'bind', + + // When using BIND, the destination should be the remote client that is expected to connect to the SOCKS proxy. Using 0.0.0.0 makes the Proxy accept any incoming connection on that port. + destination: { + host: '0.0.0.0', + port: 0 + } +}; + +// Creates a new SocksClient instance. +const client = new SocksClient(options); + +// When the SOCKS proxy has bound a new port and started listening, this event is fired. +client.on('bound', info => { + console.log(info.remoteHost); + /* + { + host: "159.203.75.235", + port: 57362 + } + */ +}); + +// When a client connects to the newly bound port on the SOCKS proxy, this event is fired. +client.on('established', info => { + // info.remoteHost is the remote address of the client that connected to the SOCKS proxy. + console.log(info.remoteHost); + /* + host: 67.171.34.23, + port: 49823 + */ + + console.log(info.socket); + // (This is a raw net.Socket that is a connection between the initial client and the remote client that connected to the proxy) + + // Handle received data... + info.socket.on('data', data => { + console.log('recv', data); + }); +}); + +// An error occurred trying to establish this SOCKS connection. +client.on('error', err => { + console.error(err); +}); + +// Start connection to proxy +client.connect(); +``` + +## Associate Example (UDP Relay) + +When the associate command is sent to a SOCKS v5 proxy server, it sets up a UDP relay that allows the client to send UDP packets to a remote host through the proxy server, and also receive UDP packet responses back through the proxy server. + +```javascript +const options = { + proxy: { + host: '159.203.75.235', // ipv4, ipv6, or hostname + port: 1081, + type: 5 + }, + + command: 'associate', + + // When using associate, the destination should be the remote client that is expected to send UDP packets to the proxy server to be forwarded. This should be your local ip, or optionally the wildcard address (0.0.0.0) UDP Client <-> Proxy <-> UDP Client + destination: { + host: '0.0.0.0', + port: 0 + } +}; + +// Create a local UDP socket for sending packets to the proxy. +const udpSocket = dgram.createSocket('udp4'); +udpSocket.bind(); + +// Listen for incoming UDP packets from the proxy server. +udpSocket.on('message', (message, rinfo) => { + console.log(SocksClient.parseUDPFrame(message)); + /* + { frameNumber: 0, + remoteHost: { host: '165.227.108.231', port: 4444 }, // The remote host that replied with a UDP packet + data: // The data + } + */ +}); + +let client = new SocksClient(associateOptions); + +// When the UDP relay is established, this event is fired and includes the UDP relay port to send data to on the proxy server. +client.on('established', info => { + console.log(info.remoteHost); + /* + { + host: '159.203.75.235', + port: 44711 + } + */ + + // Send 'hello' to 165.227.108.231:4444 + const packet = SocksClient.createUDPFrame({ + remoteHost: { host: '165.227.108.231', port: 4444 }, + data: Buffer.from(line) + }); + udpSocket.send(packet, info.remoteHost.port, info.remoteHost.host); +}); + +// Start connection +client.connect(); +``` + +**Note:** The associate TCP connection to the proxy must remain open for the UDP relay to work. + +## Additional Examples + +[Documentation](docs/index.md) + + +## Migrating from v1 + +Looking for a guide to migrate from v1? 
Look [here](docs/migratingFromV1.md)
+
+## Api Reference:
+
+**Note:** socks includes full TypeScript definitions. These can even be used without using TypeScript, as most IDEs (such as VS Code) will use these type definition files for autocompletion (IntelliSense) even in JavaScript files.
+
+* Class: SocksClient
+  * [new SocksClient(options[, callback])](#new-socksclientoptions)
+  * [Class Method: SocksClient.createConnection(options[, callback])](#class-method-socksclientcreateconnectionoptions-callback)
+  * [Class Method: SocksClient.createConnectionChain(options[, callback])](#class-method-socksclientcreateconnectionchainoptions-callback)
+  * [Class Method: SocksClient.createUDPFrame(options)](#class-method-socksclientcreateudpframedetails)
+  * [Class Method: SocksClient.parseUDPFrame(data)](#class-method-socksclientparseudpframedata)
+  * [Event: 'error'](#event-error)
+  * [Event: 'bound'](#event-bound)
+  * [Event: 'established'](#event-established)
+  * [client.connect()](#clientconnect)
+  * [client.socksClientOptions](#clientconnect)
+
+### SocksClient
+
+SocksClient establishes SOCKS proxy connections to remote destination hosts. These proxy connections are fully transparent to the server and once established act as full duplex streams. SOCKS v4, v4a, v5, and v5h are supported, as well as the connect, bind, and associate commands.
+
+SocksClient supports creating connections with callbacks, promises, and async/await flow control via the two static factory functions createConnection and createConnectionChain. It also extends EventEmitter internally, which allows event-based async flow control.
+
+**SOCKS Compatibility Table**
+
+Note: When using 4a please specify type: 4, and when using 5h please specify type: 5.
+
+| Socks Version | TCP | UDP | IPv4 | IPv6 | Hostname |
+| --- | :---: | :---: | :---: | :---: | :---: |
+| SOCKS v4 | ✅ | ❌ | ✅ | ❌ | ❌ |
+| SOCKS v4a | ✅ | ❌ | ✅ | ❌ | ✅ |
+| SOCKS v5 (includes v5h) | ✅ | ✅ | ✅ | ✅ | ✅ |
+
+### new SocksClient(options)
+
+* ```options``` {SocksClientOptions} - An object describing the SOCKS proxy to use, the command to send and establish, and the destination host to connect to.
+
+### SocksClientOptions
+
+```typescript
+{
+  proxy: {
+    host: '159.203.75.200', // ipv4, ipv6, or hostname
+    port: 1080,
+    type: 5, // Proxy version (4 or 5). For v4a use 4, for v5h use 5.
+
+    // Optional fields
+    userId: 'some username', // Used for SOCKS4 userId auth, and SOCKS5 user/pass auth in conjunction with password.
+    password: 'some password', // Used in conjunction with userId for user/pass auth for SOCKS5 proxies.
+    custom_auth_method: 0x80, // If using a custom auth method, specify the type here. If this is set, ALL other custom_auth_*** options must be set as well.
+    custom_auth_request_handler: async () => {
+      // This will be called when it's time to send the custom auth handshake. You must return a Buffer containing the data to send as your authentication.
+      return Buffer.from([0x01,0x02,0x03]);
+    },
+    // This is the expected size (bytes) of the custom auth response from the proxy server.
+    custom_auth_response_size: 2,
+    // This is called when the auth response is received. The received packet is passed in as a Buffer, and you must return a boolean indicating whether the server reported your custom auth as successful or failed.
+    custom_auth_response_handler: async (data) => {
+      return data[1] === 0x00;
+    }
+  },
+
+  command: 'connect', // connect, bind, associate
+
+  destination: {
+    host: '192.30.253.113', // ipv4, ipv6, hostname. Hostnames work with v4a and v5.
+    port: 80
+  },
+
+  // Optional fields
+  timeout: 30000, // How long to wait to establish a proxy connection. (defaults to 30 seconds)
+
+  set_tcp_nodelay: true // If true, will turn on the underlying sockets TCP_NODELAY option.
+}
+```
+
+### Class Method: SocksClient.createConnection(options[, callback])
+* ```options``` { SocksClientOptions } - An object describing the SOCKS proxy to use, the command to send and establish, and the destination host to connect to.
+* ```callback``` { Function } - Optional callback function that is called when the proxy connection is established, or an error occurs.
+* ```returns``` { Promise } - A Promise is returned that is resolved when the proxy connection is established, or rejected when an error occurs.
+
+Creates a new proxy connection through the given proxy to the given destination host. This factory function supports callbacks and promises for async flow control.
+
+**Note:** If a callback function is provided, the promise will always resolve regardless of an error occurring. Please be sure to exclusively use either promises or callbacks when using this factory function.
+
+```typescript
+const options = {
+  proxy: {
+    host: '159.203.75.200', // ipv4, ipv6, or hostname
+    port: 1080,
+    type: 5 // Proxy version (4 or 5)
+  },
+
+  command: 'connect', // connect, bind, associate
+
+  destination: {
+    host: '192.30.253.113', // ipv4, ipv6, or hostname
+    port: 80
+  }
+}
+
+// Await/Async (uses a Promise)
+try {
+  const info = await SocksClient.createConnection(options);
+  console.log(info);
+  /*
+  {
+    socket: <Socket>, // Raw net.Socket
+  }
+  */
+  // (this is a raw net.Socket that is established to the destination host through the given proxy server)
+
+} catch (err) {
+  // Handle error...
+}
+
+// Promise
+SocksClient.createConnection(options)
+.then(info => {
+  console.log(info);
+  /*
+  {
+    socket: <Socket>, // Raw net.Socket
+  }
+  */
+})
+.catch(err => {
+  // Handle error...
+});
+
+// Callback
+SocksClient.createConnection(options, (err, info) => {
+  if (!err) {
+    console.log(info);
+    /*
+    {
+      socket: <Socket>, // Raw net.Socket
+    }
+    */
+  } else {
+    // Handle error...
+  }
+});
+```
+
+### Class Method: SocksClient.createConnectionChain(options[, callback])
+* ```options``` { SocksClientChainOptions } - An object describing a list of SOCKS proxies to use, the command to send and establish, and the destination host to connect to.
+* ```callback``` { Function } - Optional callback function that is called when the proxy connection chain is established, or an error occurs.
+* ```returns``` { Promise } - A Promise is returned that is resolved when the proxy connection chain is established, or rejected when an error occurs.
+
+Creates a new proxy connection chain through a list of at least two SOCKS proxies to the given destination host. This factory method supports callbacks and promises for async flow control.
+
+**Note:** If a callback function is provided, the promise will always resolve regardless of an error occurring. Please be sure to exclusively use either promises or callbacks when using this factory function.
+
+**Note:** At least two proxies must be provided for the chain to be established.
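+
+A chain connection is requested the same way as a single connection. As a minimal sketch (assuming the placeholder proxies in the options object shown below are reachable SOCKS v5 servers):
+
+```javascript
+// Sketch only: awaits a tunnel through the proxy chain defined in the `options` object below.
+try {
+  const { socket } = await SocksClient.createConnectionChain(options);
+  // socket is a raw net.Socket tunneled through both proxies to the destination.
+  console.log(socket.remoteAddress); // The remote address is the first proxy in the chain.
+} catch (err) {
+  // Handle errors (invalid options, unreachable proxy, failed handshake, ...)
+}
+```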
+ +```typescript +const options = { + proxies: [ // The chain order is the order in the proxies array, meaning the last proxy will establish a connection to the destination. + { + host: '159.203.75.235', // ipv4, ipv6, or hostname + port: 1081, + type: 5 + }, + { + host: '104.131.124.203', // ipv4, ipv6, or hostname + port: 1081, + type: 5 + } + ] + + command: 'connect', // Only connect is supported in chaining mode. + + destination: { + host: '192.30.253.113', // ipv4, ipv6, hostname + port: 80 + } +} +``` + +### Class Method: SocksClient.createUDPFrame(details) +* ```details``` { SocksUDPFrameDetails } - An object containing the remote host, frame number, and frame data to use when creating a SOCKS UDP frame packet. +* ```returns``` { Buffer } - A Buffer containing all of the UDP frame data. + +Creates a SOCKS UDP frame relay packet that is sent and received via a SOCKS proxy when using the associate command for UDP packet forwarding. + +**SocksUDPFrameDetails** + +```typescript +{ + frameNumber: 0, // The frame number (used for breaking up larger packets) + + remoteHost: { // The remote host to have the proxy send data to, or the remote host that send this data. + host: '1.2.3.4', + port: 1234 + }, + + data: // A Buffer instance of data to include in the packet (actual data sent to the remote host) +} +interface SocksUDPFrameDetails { + // The frame number of the packet. + frameNumber?: number; + + // The remote host. + remoteHost: SocksRemoteHost; + + // The packet data. + data: Buffer; +} +``` + +### Class Method: SocksClient.parseUDPFrame(data) +* ```data``` { Buffer } - A Buffer instance containing SOCKS UDP frame data to parse. +* ```returns``` { SocksUDPFrameDetails } - An object containing the remote host, frame number, and frame data of the SOCKS UDP frame. + +```typescript +const frame = SocksClient.parseUDPFrame(data); +console.log(frame); +/* +{ + frameNumber: 0, + remoteHost: { + host: '1.2.3.4', + port: 1234 + }, + data: +} +*/ +``` + +Parses a Buffer instance and returns the parsed SocksUDPFrameDetails object. + +## Event: 'error' +* ```err``` { SocksClientError } - An Error object containing an error message and the original SocksClientOptions. + +This event is emitted if an error occurs when trying to establish the proxy connection. + +## Event: 'bound' +* ```info``` { SocksClientBoundEvent } An object containing a Socket and SocksRemoteHost info. + +This event is emitted when using the BIND command on a remote SOCKS proxy server. This event indicates the proxy server is now listening for incoming connections on a specified port. + +**SocksClientBoundEvent** +```typescript +{ + socket: net.Socket, // The underlying raw Socket + remoteHost: { + host: '1.2.3.4', // The remote host that is listening (usually the proxy itself) + port: 4444 // The remote port the proxy is listening on for incoming connections (when using BIND). + } +} +``` + +## Event: 'established' +* ```info``` { SocksClientEstablishedEvent } An object containing a Socket and SocksRemoteHost info. + +This event is emitted when the following conditions are met: +1. When using the CONNECT command, and a proxy connection has been established to the remote host. +2. When using the BIND command, and an incoming connection has been accepted by the proxy and a TCP relay has been established. +3. When using the ASSOCIATE command, and a UDP relay has been established. 
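+
+For the CONNECT case, this event can be used instead of the createConnection factory shown earlier. A minimal sketch (assuming a reachable SOCKS v5 proxy at the placeholder address used throughout this README):
+
+```javascript
+const client = new SocksClient({
+  proxy: { host: '159.203.75.200', port: 1080, type: 5 }, // placeholder proxy
+  command: 'connect',
+  destination: { host: '192.30.253.113', port: 80 } // github.com
+});
+
+// Fired once the proxy has connected to the destination; info.socket is the tunneled net.Socket.
+client.on('established', info => {
+  info.socket.write('GET / HTTP/1.1\nHost: github.com\n\n');
+});
+
+// An error occurred trying to establish the SOCKS connection.
+client.on('error', err => {
+  console.error(err);
+});
+
+// Start the connection to the proxy.
+client.connect();
+```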
+ +When using BIND, 'bound' is first emitted to indicate the SOCKS server is waiting for an incoming connection, and provides the remote port the SOCKS server is listening on. + +When using ASSOCIATE, 'established' is emitted with the remote UDP port the SOCKS server is accepting UDP frame packets on. + +**SocksClientEstablishedEvent** +```typescript +{ + socket: net.Socket, // The underlying raw Socket + remoteHost: { + host: '1.2.3.4', // The remote host that is listening (usually the proxy itself) + port: 52738 // The remote port the proxy is listening on for incoming connections (when using BIND). + } +} +``` + +## client.connect() + +Starts connecting to the remote SOCKS proxy server to establish a proxy connection to the destination host. + +## client.socksClientOptions +* ```returns``` { SocksClientOptions } The options that were passed to the SocksClient. + +Gets the options that were passed to the SocksClient when it was created. + + +**SocksClientError** +```typescript +{ // Subclassed from Error. + message: 'An error has occurred', + options: { + // SocksClientOptions + } +} +``` + +# Further Reading: + +Please read the SOCKS 5 specifications for more information on how to use BIND and Associate. +http://www.ietf.org/rfc/rfc1928.txt + +# License + +This work is licensed under the [MIT license](http://en.wikipedia.org/wiki/MIT_License). diff --git a/mybulma/node_modules/socks/build/client/socksclient.js b/mybulma/node_modules/socks/build/client/socksclient.js new file mode 100644 index 0000000..c343916 --- /dev/null +++ b/mybulma/node_modules/socks/build/client/socksclient.js @@ -0,0 +1,793 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SocksClientError = exports.SocksClient = void 0; +const events_1 = require("events"); +const net = require("net"); +const ip = require("ip"); +const smart_buffer_1 = require("smart-buffer"); +const constants_1 = require("../common/constants"); +const helpers_1 = require("../common/helpers"); +const receivebuffer_1 = require("../common/receivebuffer"); +const util_1 = require("../common/util"); +Object.defineProperty(exports, "SocksClientError", { enumerable: true, get: function () { return util_1.SocksClientError; } }); +class SocksClient extends events_1.EventEmitter { + constructor(options) { + super(); + this.options = Object.assign({}, options); + // Validate SocksClientOptions + (0, helpers_1.validateSocksClientOptions)(options); + // Default state + this.setState(constants_1.SocksClientState.Created); + } + /** + * Creates a new SOCKS connection. + * + * Note: Supports callbacks and promises. Only supports the connect command. + * @param options { SocksClientOptions } Options. + * @param callback { Function } An optional callback function. 
+ * @returns { Promise } + */ + static createConnection(options, callback) { + return new Promise((resolve, reject) => { + // Validate SocksClientOptions + try { + (0, helpers_1.validateSocksClientOptions)(options, ['connect']); + } + catch (err) { + if (typeof callback === 'function') { + callback(err); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return resolve(err); // Resolves pending promise (prevents memory leaks). + } + else { + return reject(err); + } + } + const client = new SocksClient(options); + client.connect(options.existing_socket); + client.once('established', (info) => { + client.removeAllListeners(); + if (typeof callback === 'function') { + callback(null, info); + resolve(info); // Resolves pending promise (prevents memory leaks). + } + else { + resolve(info); + } + }); + // Error occurred, failed to establish connection. + client.once('error', (err) => { + client.removeAllListeners(); + if (typeof callback === 'function') { + callback(err); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + resolve(err); // Resolves pending promise (prevents memory leaks). + } + else { + reject(err); + } + }); + }); + } + /** + * Creates a new SOCKS connection chain to a destination host through 2 or more SOCKS proxies. + * + * Note: Supports callbacks and promises. Only supports the connect method. + * Note: Implemented via createConnection() factory function. + * @param options { SocksClientChainOptions } Options + * @param callback { Function } An optional callback function. + * @returns { Promise } + */ + static createConnectionChain(options, callback) { + // eslint-disable-next-line no-async-promise-executor + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + // Validate SocksClientChainOptions + try { + (0, helpers_1.validateSocksClientChainOptions)(options); + } + catch (err) { + if (typeof callback === 'function') { + callback(err); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return resolve(err); // Resolves pending promise (prevents memory leaks). + } + else { + return reject(err); + } + } + // Shuffle proxies + if (options.randomizeChain) { + (0, util_1.shuffleArray)(options.proxies); + } + try { + let sock; + for (let i = 0; i < options.proxies.length; i++) { + const nextProxy = options.proxies[i]; + // If we've reached the last proxy in the chain, the destination is the actual destination, otherwise it's the next proxy. + const nextDestination = i === options.proxies.length - 1 + ? options.destination + : { + host: options.proxies[i + 1].host || + options.proxies[i + 1].ipaddress, + port: options.proxies[i + 1].port, + }; + // Creates the next connection in the chain. + const result = yield SocksClient.createConnection({ + command: 'connect', + proxy: nextProxy, + destination: nextDestination, + existing_socket: sock, + }); + // If sock is undefined, assign it here. + sock = sock || result.socket; + } + if (typeof callback === 'function') { + callback(null, { socket: sock }); + resolve({ socket: sock }); // Resolves pending promise (prevents memory leaks). + } + else { + resolve({ socket: sock }); + } + } + catch (err) { + if (typeof callback === 'function') { + callback(err); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + resolve(err); // Resolves pending promise (prevents memory leaks). + } + else { + reject(err); + } + } + })); + } + /** + * Creates a SOCKS UDP Frame. 
+ * @param options + */ + static createUDPFrame(options) { + const buff = new smart_buffer_1.SmartBuffer(); + buff.writeUInt16BE(0); + buff.writeUInt8(options.frameNumber || 0); + // IPv4/IPv6/Hostname + if (net.isIPv4(options.remoteHost.host)) { + buff.writeUInt8(constants_1.Socks5HostType.IPv4); + buff.writeUInt32BE(ip.toLong(options.remoteHost.host)); + } + else if (net.isIPv6(options.remoteHost.host)) { + buff.writeUInt8(constants_1.Socks5HostType.IPv6); + buff.writeBuffer(ip.toBuffer(options.remoteHost.host)); + } + else { + buff.writeUInt8(constants_1.Socks5HostType.Hostname); + buff.writeUInt8(Buffer.byteLength(options.remoteHost.host)); + buff.writeString(options.remoteHost.host); + } + // Port + buff.writeUInt16BE(options.remoteHost.port); + // Data + buff.writeBuffer(options.data); + return buff.toBuffer(); + } + /** + * Parses a SOCKS UDP frame. + * @param data + */ + static parseUDPFrame(data) { + const buff = smart_buffer_1.SmartBuffer.fromBuffer(data); + buff.readOffset = 2; + const frameNumber = buff.readUInt8(); + const hostType = buff.readUInt8(); + let remoteHost; + if (hostType === constants_1.Socks5HostType.IPv4) { + remoteHost = ip.fromLong(buff.readUInt32BE()); + } + else if (hostType === constants_1.Socks5HostType.IPv6) { + remoteHost = ip.toString(buff.readBuffer(16)); + } + else { + remoteHost = buff.readString(buff.readUInt8()); + } + const remotePort = buff.readUInt16BE(); + return { + frameNumber, + remoteHost: { + host: remoteHost, + port: remotePort, + }, + data: buff.readBuffer(), + }; + } + /** + * Internal state setter. If the SocksClient is in an error state, it cannot be changed to a non error state. + */ + setState(newState) { + if (this.state !== constants_1.SocksClientState.Error) { + this.state = newState; + } + } + /** + * Starts the connection establishment to the proxy and destination. + * @param existingSocket Connected socket to use instead of creating a new one (internal use). + */ + connect(existingSocket) { + this.onDataReceived = (data) => this.onDataReceivedHandler(data); + this.onClose = () => this.onCloseHandler(); + this.onError = (err) => this.onErrorHandler(err); + this.onConnect = () => this.onConnectHandler(); + // Start timeout timer (defaults to 30 seconds) + const timer = setTimeout(() => this.onEstablishedTimeout(), this.options.timeout || constants_1.DEFAULT_TIMEOUT); + // check whether unref is available as it differs from browser to NodeJS (#33) + if (timer.unref && typeof timer.unref === 'function') { + timer.unref(); + } + // If an existing socket is provided, use it to negotiate SOCKS handshake. Otherwise create a new Socket. + if (existingSocket) { + this.socket = existingSocket; + } + else { + this.socket = new net.Socket(); + } + // Attach Socket error handlers. + this.socket.once('close', this.onClose); + this.socket.once('error', this.onError); + this.socket.once('connect', this.onConnect); + this.socket.on('data', this.onDataReceived); + this.setState(constants_1.SocksClientState.Connecting); + this.receiveBuffer = new receivebuffer_1.ReceiveBuffer(); + if (existingSocket) { + this.socket.emit('connect'); + } + else { + this.socket.connect(this.getSocketOptions()); + if (this.options.set_tcp_nodelay !== undefined && + this.options.set_tcp_nodelay !== null) { + this.socket.setNoDelay(!!this.options.set_tcp_nodelay); + } + } + // Listen for established event so we can re-emit any excess data received during handshakes. 
+ this.prependOnceListener('established', (info) => { + setImmediate(() => { + if (this.receiveBuffer.length > 0) { + const excessData = this.receiveBuffer.get(this.receiveBuffer.length); + info.socket.emit('data', excessData); + } + info.socket.resume(); + }); + }); + } + // Socket options (defaults host/port to options.proxy.host/options.proxy.port) + getSocketOptions() { + return Object.assign(Object.assign({}, this.options.socket_options), { host: this.options.proxy.host || this.options.proxy.ipaddress, port: this.options.proxy.port }); + } + /** + * Handles internal Socks timeout callback. + * Note: If the Socks client is not BoundWaitingForConnection or Established, the connection will be closed. + */ + onEstablishedTimeout() { + if (this.state !== constants_1.SocksClientState.Established && + this.state !== constants_1.SocksClientState.BoundWaitingForConnection) { + this.closeSocket(constants_1.ERRORS.ProxyConnectionTimedOut); + } + } + /** + * Handles Socket connect event. + */ + onConnectHandler() { + this.setState(constants_1.SocksClientState.Connected); + // Send initial handshake. + if (this.options.proxy.type === 4) { + this.sendSocks4InitialHandshake(); + } + else { + this.sendSocks5InitialHandshake(); + } + this.setState(constants_1.SocksClientState.SentInitialHandshake); + } + /** + * Handles Socket data event. + * @param data + */ + onDataReceivedHandler(data) { + /* + All received data is appended to a ReceiveBuffer. + This makes sure that all the data we need is received before we attempt to process it. + */ + this.receiveBuffer.append(data); + // Process data that we have. + this.processData(); + } + /** + * Handles processing of the data we have received. + */ + processData() { + // If we have enough data to process the next step in the SOCKS handshake, proceed. + while (this.state !== constants_1.SocksClientState.Established && + this.state !== constants_1.SocksClientState.Error && + this.receiveBuffer.length >= this.nextRequiredPacketBufferSize) { + // Sent initial handshake, waiting for response. + if (this.state === constants_1.SocksClientState.SentInitialHandshake) { + if (this.options.proxy.type === 4) { + // Socks v4 only has one handshake response. + this.handleSocks4FinalHandshakeResponse(); + } + else { + // Socks v5 has two handshakes, handle initial one here. + this.handleInitialSocks5HandshakeResponse(); + } + // Sent auth request for Socks v5, waiting for response. + } + else if (this.state === constants_1.SocksClientState.SentAuthentication) { + this.handleInitialSocks5AuthenticationHandshakeResponse(); + // Sent final Socks v5 handshake, waiting for final response. + } + else if (this.state === constants_1.SocksClientState.SentFinalHandshake) { + this.handleSocks5FinalHandshakeResponse(); + // Socks BIND established. Waiting for remote connection via proxy. + } + else if (this.state === constants_1.SocksClientState.BoundWaitingForConnection) { + if (this.options.proxy.type === 4) { + this.handleSocks4IncomingConnectionResponse(); + } + else { + this.handleSocks5IncomingConnectionResponse(); + } + } + else { + this.closeSocket(constants_1.ERRORS.InternalError); + break; + } + } + } + /** + * Handles Socket close event. + * @param had_error + */ + onCloseHandler() { + this.closeSocket(constants_1.ERRORS.SocketClosed); + } + /** + * Handles Socket error event. + * @param err + */ + onErrorHandler(err) { + this.closeSocket(err.message); + } + /** + * Removes internal event listeners on the underlying Socket. 
+ */ + removeInternalSocketHandlers() { + // Pauses data flow of the socket (this is internally resumed after 'established' is emitted) + this.socket.pause(); + this.socket.removeListener('data', this.onDataReceived); + this.socket.removeListener('close', this.onClose); + this.socket.removeListener('error', this.onError); + this.socket.removeListener('connect', this.onConnect); + } + /** + * Closes and destroys the underlying Socket. Emits an error event. + * @param err { String } An error string to include in error event. + */ + closeSocket(err) { + // Make sure only one 'error' event is fired for the lifetime of this SocksClient instance. + if (this.state !== constants_1.SocksClientState.Error) { + // Set internal state to Error. + this.setState(constants_1.SocksClientState.Error); + // Destroy Socket + this.socket.destroy(); + // Remove internal listeners + this.removeInternalSocketHandlers(); + // Fire 'error' event. + this.emit('error', new util_1.SocksClientError(err, this.options)); + } + } + /** + * Sends initial Socks v4 handshake request. + */ + sendSocks4InitialHandshake() { + const userId = this.options.proxy.userId || ''; + const buff = new smart_buffer_1.SmartBuffer(); + buff.writeUInt8(0x04); + buff.writeUInt8(constants_1.SocksCommand[this.options.command]); + buff.writeUInt16BE(this.options.destination.port); + // Socks 4 (IPv4) + if (net.isIPv4(this.options.destination.host)) { + buff.writeBuffer(ip.toBuffer(this.options.destination.host)); + buff.writeStringNT(userId); + // Socks 4a (hostname) + } + else { + buff.writeUInt8(0x00); + buff.writeUInt8(0x00); + buff.writeUInt8(0x00); + buff.writeUInt8(0x01); + buff.writeStringNT(userId); + buff.writeStringNT(this.options.destination.host); + } + this.nextRequiredPacketBufferSize = + constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks4Response; + this.socket.write(buff.toBuffer()); + } + /** + * Handles Socks v4 handshake response. + * @param data + */ + handleSocks4FinalHandshakeResponse() { + const data = this.receiveBuffer.get(8); + if (data[1] !== constants_1.Socks4Response.Granted) { + this.closeSocket(`${constants_1.ERRORS.Socks4ProxyRejectedConnection} - (${constants_1.Socks4Response[data[1]]})`); + } + else { + // Bind response + if (constants_1.SocksCommand[this.options.command] === constants_1.SocksCommand.bind) { + const buff = smart_buffer_1.SmartBuffer.fromBuffer(data); + buff.readOffset = 2; + const remoteHost = { + port: buff.readUInt16BE(), + host: ip.fromLong(buff.readUInt32BE()), + }; + // If host is 0.0.0.0, set to proxy host. 
+ if (remoteHost.host === '0.0.0.0') { + remoteHost.host = this.options.proxy.ipaddress; + } + this.setState(constants_1.SocksClientState.BoundWaitingForConnection); + this.emit('bound', { remoteHost, socket: this.socket }); + // Connect response + } + else { + this.setState(constants_1.SocksClientState.Established); + this.removeInternalSocketHandlers(); + this.emit('established', { socket: this.socket }); + } + } + } + /** + * Handles Socks v4 incoming connection request (BIND) + * @param data + */ + handleSocks4IncomingConnectionResponse() { + const data = this.receiveBuffer.get(8); + if (data[1] !== constants_1.Socks4Response.Granted) { + this.closeSocket(`${constants_1.ERRORS.Socks4ProxyRejectedIncomingBoundConnection} - (${constants_1.Socks4Response[data[1]]})`); + } + else { + const buff = smart_buffer_1.SmartBuffer.fromBuffer(data); + buff.readOffset = 2; + const remoteHost = { + port: buff.readUInt16BE(), + host: ip.fromLong(buff.readUInt32BE()), + }; + this.setState(constants_1.SocksClientState.Established); + this.removeInternalSocketHandlers(); + this.emit('established', { remoteHost, socket: this.socket }); + } + } + /** + * Sends initial Socks v5 handshake request. + */ + sendSocks5InitialHandshake() { + const buff = new smart_buffer_1.SmartBuffer(); + // By default we always support no auth. + const supportedAuthMethods = [constants_1.Socks5Auth.NoAuth]; + // We should only tell the proxy we support user/pass auth if auth info is actually provided. + // Note: As of Tor v0.3.5.7+, if user/pass auth is an option from the client, by default it will always take priority. + if (this.options.proxy.userId || this.options.proxy.password) { + supportedAuthMethods.push(constants_1.Socks5Auth.UserPass); + } + // Custom auth method? + if (this.options.proxy.custom_auth_method !== undefined) { + supportedAuthMethods.push(this.options.proxy.custom_auth_method); + } + // Build handshake packet + buff.writeUInt8(0x05); + buff.writeUInt8(supportedAuthMethods.length); + for (const authMethod of supportedAuthMethods) { + buff.writeUInt8(authMethod); + } + this.nextRequiredPacketBufferSize = + constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5InitialHandshakeResponse; + this.socket.write(buff.toBuffer()); + this.setState(constants_1.SocksClientState.SentInitialHandshake); + } + /** + * Handles initial Socks v5 handshake response. + * @param data + */ + handleInitialSocks5HandshakeResponse() { + const data = this.receiveBuffer.get(2); + if (data[0] !== 0x05) { + this.closeSocket(constants_1.ERRORS.InvalidSocks5IntiailHandshakeSocksVersion); + } + else if (data[1] === constants_1.SOCKS5_NO_ACCEPTABLE_AUTH) { + this.closeSocket(constants_1.ERRORS.InvalidSocks5InitialHandshakeNoAcceptedAuthType); + } + else { + // If selected Socks v5 auth method is no auth, send final handshake request. + if (data[1] === constants_1.Socks5Auth.NoAuth) { + this.socks5ChosenAuthType = constants_1.Socks5Auth.NoAuth; + this.sendSocks5CommandRequest(); + // If selected Socks v5 auth method is user/password, send auth handshake. + } + else if (data[1] === constants_1.Socks5Auth.UserPass) { + this.socks5ChosenAuthType = constants_1.Socks5Auth.UserPass; + this.sendSocks5UserPassAuthentication(); + // If selected Socks v5 auth method is the custom_auth_method, send custom handshake. 
+ } + else if (data[1] === this.options.proxy.custom_auth_method) { + this.socks5ChosenAuthType = this.options.proxy.custom_auth_method; + this.sendSocks5CustomAuthentication(); + } + else { + this.closeSocket(constants_1.ERRORS.InvalidSocks5InitialHandshakeUnknownAuthType); + } + } + } + /** + * Sends Socks v5 user & password auth handshake. + * + * Note: No auth and user/pass are currently supported. + */ + sendSocks5UserPassAuthentication() { + const userId = this.options.proxy.userId || ''; + const password = this.options.proxy.password || ''; + const buff = new smart_buffer_1.SmartBuffer(); + buff.writeUInt8(0x01); + buff.writeUInt8(Buffer.byteLength(userId)); + buff.writeString(userId); + buff.writeUInt8(Buffer.byteLength(password)); + buff.writeString(password); + this.nextRequiredPacketBufferSize = + constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5UserPassAuthenticationResponse; + this.socket.write(buff.toBuffer()); + this.setState(constants_1.SocksClientState.SentAuthentication); + } + sendSocks5CustomAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + this.nextRequiredPacketBufferSize = + this.options.proxy.custom_auth_response_size; + this.socket.write(yield this.options.proxy.custom_auth_request_handler()); + this.setState(constants_1.SocksClientState.SentAuthentication); + }); + } + handleSocks5CustomAuthHandshakeResponse(data) { + return __awaiter(this, void 0, void 0, function* () { + return yield this.options.proxy.custom_auth_response_handler(data); + }); + } + handleSocks5AuthenticationNoAuthHandshakeResponse(data) { + return __awaiter(this, void 0, void 0, function* () { + return data[1] === 0x00; + }); + } + handleSocks5AuthenticationUserPassHandshakeResponse(data) { + return __awaiter(this, void 0, void 0, function* () { + return data[1] === 0x00; + }); + } + /** + * Handles Socks v5 auth handshake response. + * @param data + */ + handleInitialSocks5AuthenticationHandshakeResponse() { + return __awaiter(this, void 0, void 0, function* () { + this.setState(constants_1.SocksClientState.ReceivedAuthenticationResponse); + let authResult = false; + if (this.socks5ChosenAuthType === constants_1.Socks5Auth.NoAuth) { + authResult = yield this.handleSocks5AuthenticationNoAuthHandshakeResponse(this.receiveBuffer.get(2)); + } + else if (this.socks5ChosenAuthType === constants_1.Socks5Auth.UserPass) { + authResult = + yield this.handleSocks5AuthenticationUserPassHandshakeResponse(this.receiveBuffer.get(2)); + } + else if (this.socks5ChosenAuthType === this.options.proxy.custom_auth_method) { + authResult = yield this.handleSocks5CustomAuthHandshakeResponse(this.receiveBuffer.get(this.options.proxy.custom_auth_response_size)); + } + if (!authResult) { + this.closeSocket(constants_1.ERRORS.Socks5AuthenticationFailed); + } + else { + this.sendSocks5CommandRequest(); + } + }); + } + /** + * Sends Socks v5 final handshake request. + */ + sendSocks5CommandRequest() { + const buff = new smart_buffer_1.SmartBuffer(); + buff.writeUInt8(0x05); + buff.writeUInt8(constants_1.SocksCommand[this.options.command]); + buff.writeUInt8(0x00); + // ipv4, ipv6, domain? 
+ if (net.isIPv4(this.options.destination.host)) { + buff.writeUInt8(constants_1.Socks5HostType.IPv4); + buff.writeBuffer(ip.toBuffer(this.options.destination.host)); + } + else if (net.isIPv6(this.options.destination.host)) { + buff.writeUInt8(constants_1.Socks5HostType.IPv6); + buff.writeBuffer(ip.toBuffer(this.options.destination.host)); + } + else { + buff.writeUInt8(constants_1.Socks5HostType.Hostname); + buff.writeUInt8(this.options.destination.host.length); + buff.writeString(this.options.destination.host); + } + buff.writeUInt16BE(this.options.destination.port); + this.nextRequiredPacketBufferSize = + constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHeader; + this.socket.write(buff.toBuffer()); + this.setState(constants_1.SocksClientState.SentFinalHandshake); + } + /** + * Handles Socks v5 final handshake response. + * @param data + */ + handleSocks5FinalHandshakeResponse() { + // Peek at available data (we need at least 5 bytes to get the hostname length) + const header = this.receiveBuffer.peek(5); + if (header[0] !== 0x05 || header[1] !== constants_1.Socks5Response.Granted) { + this.closeSocket(`${constants_1.ERRORS.InvalidSocks5FinalHandshakeRejected} - ${constants_1.Socks5Response[header[1]]}`); + } + else { + // Read address type + const addressType = header[3]; + let remoteHost; + let buff; + // IPv4 + if (addressType === constants_1.Socks5HostType.IPv4) { + // Check if data is available. + const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv4; + if (this.receiveBuffer.length < dataNeeded) { + this.nextRequiredPacketBufferSize = dataNeeded; + return; + } + buff = smart_buffer_1.SmartBuffer.fromBuffer(this.receiveBuffer.get(dataNeeded).slice(4)); + remoteHost = { + host: ip.fromLong(buff.readUInt32BE()), + port: buff.readUInt16BE(), + }; + // If given host is 0.0.0.0, assume remote proxy ip instead. + if (remoteHost.host === '0.0.0.0') { + remoteHost.host = this.options.proxy.ipaddress; + } + // Hostname + } + else if (addressType === constants_1.Socks5HostType.Hostname) { + const hostLength = header[4]; + const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHostname(hostLength); // header + host length + host + port + // Check if data is available. + if (this.receiveBuffer.length < dataNeeded) { + this.nextRequiredPacketBufferSize = dataNeeded; + return; + } + buff = smart_buffer_1.SmartBuffer.fromBuffer(this.receiveBuffer.get(dataNeeded).slice(5)); + remoteHost = { + host: buff.readString(hostLength), + port: buff.readUInt16BE(), + }; + // IPv6 + } + else if (addressType === constants_1.Socks5HostType.IPv6) { + // Check if data is available. + const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv6; + if (this.receiveBuffer.length < dataNeeded) { + this.nextRequiredPacketBufferSize = dataNeeded; + return; + } + buff = smart_buffer_1.SmartBuffer.fromBuffer(this.receiveBuffer.get(dataNeeded).slice(4)); + remoteHost = { + host: ip.toString(buff.readBuffer(16)), + port: buff.readUInt16BE(), + }; + } + // We have everything we need + this.setState(constants_1.SocksClientState.ReceivedFinalResponse); + // If using CONNECT, the client is now in the established state. 
+ if (constants_1.SocksCommand[this.options.command] === constants_1.SocksCommand.connect) { + this.setState(constants_1.SocksClientState.Established); + this.removeInternalSocketHandlers(); + this.emit('established', { remoteHost, socket: this.socket }); + } + else if (constants_1.SocksCommand[this.options.command] === constants_1.SocksCommand.bind) { + /* If using BIND, the Socks client is now in BoundWaitingForConnection state. + This means that the remote proxy server is waiting for a remote connection to the bound port. */ + this.setState(constants_1.SocksClientState.BoundWaitingForConnection); + this.nextRequiredPacketBufferSize = + constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHeader; + this.emit('bound', { remoteHost, socket: this.socket }); + /* + If using Associate, the Socks client is now Established. And the proxy server is now accepting UDP packets at the + given bound port. This initial Socks TCP connection must remain open for the UDP relay to continue to work. + */ + } + else if (constants_1.SocksCommand[this.options.command] === constants_1.SocksCommand.associate) { + this.setState(constants_1.SocksClientState.Established); + this.removeInternalSocketHandlers(); + this.emit('established', { + remoteHost, + socket: this.socket, + }); + } + } + } + /** + * Handles Socks v5 incoming connection request (BIND). + */ + handleSocks5IncomingConnectionResponse() { + // Peek at available data (we need at least 5 bytes to get the hostname length) + const header = this.receiveBuffer.peek(5); + if (header[0] !== 0x05 || header[1] !== constants_1.Socks5Response.Granted) { + this.closeSocket(`${constants_1.ERRORS.Socks5ProxyRejectedIncomingBoundConnection} - ${constants_1.Socks5Response[header[1]]}`); + } + else { + // Read address type + const addressType = header[3]; + let remoteHost; + let buff; + // IPv4 + if (addressType === constants_1.Socks5HostType.IPv4) { + // Check if data is available. + const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv4; + if (this.receiveBuffer.length < dataNeeded) { + this.nextRequiredPacketBufferSize = dataNeeded; + return; + } + buff = smart_buffer_1.SmartBuffer.fromBuffer(this.receiveBuffer.get(dataNeeded).slice(4)); + remoteHost = { + host: ip.fromLong(buff.readUInt32BE()), + port: buff.readUInt16BE(), + }; + // If given host is 0.0.0.0, assume remote proxy ip instead. + if (remoteHost.host === '0.0.0.0') { + remoteHost.host = this.options.proxy.ipaddress; + } + // Hostname + } + else if (addressType === constants_1.Socks5HostType.Hostname) { + const hostLength = header[4]; + const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHostname(hostLength); // header + host length + port + // Check if data is available. + if (this.receiveBuffer.length < dataNeeded) { + this.nextRequiredPacketBufferSize = dataNeeded; + return; + } + buff = smart_buffer_1.SmartBuffer.fromBuffer(this.receiveBuffer.get(dataNeeded).slice(5)); + remoteHost = { + host: buff.readString(hostLength), + port: buff.readUInt16BE(), + }; + // IPv6 + } + else if (addressType === constants_1.Socks5HostType.IPv6) { + // Check if data is available. 
+ const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv6; + if (this.receiveBuffer.length < dataNeeded) { + this.nextRequiredPacketBufferSize = dataNeeded; + return; + } + buff = smart_buffer_1.SmartBuffer.fromBuffer(this.receiveBuffer.get(dataNeeded).slice(4)); + remoteHost = { + host: ip.toString(buff.readBuffer(16)), + port: buff.readUInt16BE(), + }; + } + this.setState(constants_1.SocksClientState.Established); + this.removeInternalSocketHandlers(); + this.emit('established', { remoteHost, socket: this.socket }); + } + } + get socksClientOptions() { + return Object.assign({}, this.options); + } +} +exports.SocksClient = SocksClient; +//# sourceMappingURL=socksclient.js.map \ No newline at end of file diff --git a/mybulma/node_modules/socks/build/client/socksclient.js.map b/mybulma/node_modules/socks/build/client/socksclient.js.map new file mode 100644 index 0000000..f01f317 --- /dev/null +++ b/mybulma/node_modules/socks/build/client/socksclient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"socksclient.js","sourceRoot":"","sources":["../../src/client/socksclient.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,mCAAoC;AACpC,2BAA2B;AAC3B,yBAAyB;AACzB,+CAAyC;AACzC,mDAkB6B;AAC7B,+CAG2B;AAC3B,2DAAsD;AACtD,yCAA8D;AAw7B5D,iGAx7BM,uBAAgB,OAw7BN;AA95BlB,MAAM,WAAY,SAAQ,qBAAY;IAgBpC,YAAY,OAA2B;QACrC,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,OAAO,qBACP,OAAO,CACX,CAAC;QAEF,8BAA8B;QAC9B,IAAA,oCAA0B,EAAC,OAAO,CAAC,CAAC;QAEpC,gBAAgB;QAChB,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,OAAO,CAAC,CAAC;IAC1C,CAAC;IAED;;;;;;;OAOG;IACH,MAAM,CAAC,gBAAgB,CACrB,OAA2B,EAC3B,QAGS;QAET,OAAO,IAAI,OAAO,CAA8B,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAClE,8BAA8B;YAC9B,IAAI;gBACF,IAAA,oCAA0B,EAAC,OAAO,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC;aAClD;YAAC,OAAO,GAAG,EAAE;gBACZ,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,GAAG,CAAC,CAAC;oBACd,8DAA8D;oBAC9D,OAAO,OAAO,CAAC,GAAU,CAAC,CAAC,CAAC,oDAAoD;iBACjF;qBAAM;oBACL,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC;iBACpB;aACF;YAED,MAAM,MAAM,GAAG,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;YACxC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC;YACxC,MAAM,CAAC,IAAI,CAAC,aAAa,EAAE,CAAC,IAAiC,EAAE,EAAE;gBAC/D,MAAM,CAAC,kBAAkB,EAAE,CAAC;gBAC5B,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;oBACrB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,oDAAoD;iBACpE;qBAAM;oBACL,OAAO,CAAC,IAAI,CAAC,CAAC;iBACf;YACH,CAAC,CAAC,CAAC;YAEH,kDAAkD;YAClD,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,GAAU,EAAE,EAAE;gBAClC,MAAM,CAAC,kBAAkB,EAAE,CAAC;gBAC5B,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,GAAG,CAAC,CAAC;oBACd,8DAA8D;oBAC9D,OAAO,CAAC,GAAU,CAAC,CAAC,CAAC,oDAAoD;iBAC1E;qBAAM;oBACL,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;YACH,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;;;;;OAQG;IACH,MAAM,CAAC,qBAAqB,CAC1B,OAAgC,EAChC,QAGS;QAET,qDAAqD;QACrD,OAAO,IAAI,OAAO,CAA8B,CAAO,OAAO,EAAE,MAAM,EAAE,EAAE;YACxE,mCAAmC;YACnC,IAAI;gBACF,IAAA,yCAA+B,EAAC,OAAO,CAAC,CAAC;aAC1C;YAAC,OAAO,GAAG,EAAE;gBACZ,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,GAAG,CAAC,CAAC;oBACd,8DAA8D;oBAC9D,OAAO,OAAO,CAAC,GAAU,CAAC,CAAC,CAAC,oDAAoD;iBACjF;qBAAM;oBACL,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC;iBACpB;aACF;YAED,kBAAkB;YAClB,IAAI,OAAO,CAAC,cAAc,EAAE;gBAC1B,IAAA,mBAAY,EAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAC/B;YAED,IAAI;gBACF,IAAI,IAAgB,CAAC;gBAErB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;oBAC/C,MAAM,SAAS,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;oBAErC,0HAA0H;oBAC1H,MAAM,eAAe,GACnB,CAAC,KAAK,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC;wBAC9B,CAAC,CAAC,OAAO,CAAC,WAAW;wBACrB,CAAC,CAAC;4BACE,IAAI,EACF,OAAO,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI;gCAC3B,OAAO,CAAC,OAAO,CAAC,CA
AC,GAAG,CAAC,CAAC,CAAC,SAAS;4BAClC,IAAI,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI;yBAClC,CAAC;oBAER,4CAA4C;oBAC5C,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,gBAAgB,CAAC;wBAChD,OAAO,EAAE,SAAS;wBAClB,KAAK,EAAE,SAAS;wBAChB,WAAW,EAAE,eAAe;wBAC5B,eAAe,EAAE,IAAI;qBACtB,CAAC,CAAC;oBAEH,wCAAwC;oBACxC,IAAI,GAAG,IAAI,IAAI,MAAM,CAAC,MAAM,CAAC;iBAC9B;gBAED,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,IAAI,EAAE,EAAC,MAAM,EAAE,IAAI,EAAC,CAAC,CAAC;oBAC/B,OAAO,CAAC,EAAC,MAAM,EAAE,IAAI,EAAC,CAAC,CAAC,CAAC,oDAAoD;iBAC9E;qBAAM;oBACL,OAAO,CAAC,EAAC,MAAM,EAAE,IAAI,EAAC,CAAC,CAAC;iBACzB;aACF;YAAC,OAAO,GAAG,EAAE;gBACZ,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;oBAClC,QAAQ,CAAC,GAAG,CAAC,CAAC;oBACd,8DAA8D;oBAC9D,OAAO,CAAC,GAAU,CAAC,CAAC,CAAC,oDAAoD;iBAC1E;qBAAM;oBACL,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;aACF;QACH,CAAC,CAAA,CAAC,CAAC;IACL,CAAC;IAED;;;OAGG;IACH,MAAM,CAAC,cAAc,CAAC,OAA6B;QACjD,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAC/B,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;QACtB,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,WAAW,IAAI,CAAC,CAAC,CAAC;QAE1C,qBAAqB;QACrB,IAAI,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;YACvC,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,IAAI,CAAC,CAAC;YACrC,IAAI,CAAC,aAAa,CAAC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC;SACxD;aAAM,IAAI,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE;YAC9C,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,IAAI,CAAC,CAAC;YACrC,IAAI,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC;SACxD;aAAM;YACL,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,QAAQ,CAAC,CAAC;YACzC,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC;YAC5D,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;SAC3C;QAED,OAAO;QACP,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAE5C,OAAO;QACP,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAE/B,OAAO,IAAI,CAAC,QAAQ,EAAE,CAAC;IACzB,CAAC;IAED;;;OAGG;IACH,MAAM,CAAC,aAAa,CAAC,IAAY;QAC/B,MAAM,IAAI,GAAG,0BAAW,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAC1C,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;QAEpB,MAAM,WAAW,GAAG,IAAI,CAAC,SAAS,EAAE,CAAC;QACrC,MAAM,QAAQ,GAAmB,IAAI,CAAC,SAAS,EAAE,CAAC;QAClD,IAAI,UAAU,CAAC;QAEf,IAAI,QAAQ,KAAK,0BAAc,CAAC,IAAI,EAAE;YACpC,UAAU,GAAG,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,CAAC;SAC/C;aAAM,IAAI,QAAQ,KAAK,0BAAc,CAAC,IAAI,EAAE;YAC3C,UAAU,GAAG,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,CAAC;SAC/C;aAAM;YACL,UAAU,GAAG,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC;SAChD;QAED,MAAM,UAAU,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC;QAEvC,OAAO;YACL,WAAW;YACX,UAAU,EAAE;gBACV,IAAI,EAAE,UAAU;gBAChB,IAAI,EAAE,UAAU;aACjB;YACD,IAAI,EAAE,IAAI,CAAC,UAAU,EAAE;SACxB,CAAC;IACJ,CAAC;IAED;;OAEG;IACK,QAAQ,CAAC,QAA0B;QACzC,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,KAAK,EAAE;YACzC,IAAI,CAAC,KAAK,GAAG,QAAQ,CAAC;SACvB;IACH,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,cAAuB;QACpC,IAAI,CAAC,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,CAAC;QACzE,IAAI,CAAC,OAAO,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,cAAc,EAAE,CAAC;QAC3C,IAAI,CAAC,OAAO,GAAG,CAAC,GAAU,EAAE,EAAE,CAAC,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC;QACxD,IAAI,CAAC,SAAS,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC,gBAAgB,EAAE,CAAC;QAE/C,+CAA+C;QAC/C,MAAM,KAAK,GAAG,UAAU,CACtB,GAAG,EAAE,CAAC,IAAI,CAAC,oBAAoB,EAAE,EACjC,IAAI,CAAC,OAAO,CAAC,OAAO,IAAI,2BAAe,CACxC,CAAC;QAEF,8EAA8E;QAC9E,IAAI,KAAK,CAAC,KAAK,IAAI,OAAO,KAAK,CAAC,KAAK,KAAK,UAAU,EAAE;YACpD,KAAK,CAAC,KAAK,EAAE,CAAC;SACf;QAED,yGAAyG;QACzG,IAAI,cAAc,EAAE;YAClB,IAAI,CAAC,MAAM,GAAG,cAAc,CAAC;SAC9B;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,IAAI,GAAG,CAAC,MAAM,EAAE,CAAC;SAChC;QAED,gCAAgC;QAChC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QACxC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,O
AAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QACxC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;QAC5C,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;QAE5C,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,UAAU,CAAC,CAAC;QAC3C,IAAI,CAAC,aAAa,GAAG,IAAI,6BAAa,EAAE,CAAC;QAEzC,IAAI,cAAc,EAAE;YAClB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;SAC7B;aAAM;YACJ,IAAI,CAAC,MAAqB,CAAC,OAAO,CAAC,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;YAE7D,IACE,IAAI,CAAC,OAAO,CAAC,eAAe,KAAK,SAAS;gBAC1C,IAAI,CAAC,OAAO,CAAC,eAAe,KAAK,IAAI,EACrC;gBACC,IAAI,CAAC,MAAqB,CAAC,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC;aACxE;SACF;QAED,6FAA6F;QAC7F,IAAI,CAAC,mBAAmB,CAAC,aAAa,EAAE,CAAC,IAAI,EAAE,EAAE;YAC/C,YAAY,CAAC,GAAG,EAAE;gBAChB,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;oBACjC,MAAM,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC;oBAErE,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;iBACtC;gBACD,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;YACvB,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED,+EAA+E;IACvE,gBAAgB;QACtB,uCACK,IAAI,CAAC,OAAO,CAAC,cAAc,KAC9B,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,SAAS,EAC7D,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,IAC7B;IACJ,CAAC;IAED;;;OAGG;IACK,oBAAoB;QAC1B,IACE,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,WAAW;YAC3C,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,yBAAyB,EACzD;YACA,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,uBAAuB,CAAC,CAAC;SAClD;IACH,CAAC;IAED;;OAEG;IACK,gBAAgB;QACtB,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,SAAS,CAAC,CAAC;QAE1C,0BAA0B;QAC1B,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,EAAE;YACjC,IAAI,CAAC,0BAA0B,EAAE,CAAC;SACnC;aAAM;YACL,IAAI,CAAC,0BAA0B,EAAE,CAAC;SACnC;QAED,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,oBAAoB,CAAC,CAAC;IACvD,CAAC;IAED;;;OAGG;IACK,qBAAqB,CAAC,IAAY;QACxC;;;UAGE;QACF,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;QAEhC,6BAA6B;QAC7B,IAAI,CAAC,WAAW,EAAE,CAAC;IACrB,CAAC;IAED;;OAEG;IACK,WAAW;QACjB,mFAAmF;QACnF,OACE,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,WAAW;YAC3C,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,KAAK;YACrC,IAAI,CAAC,aAAa,CAAC,MAAM,IAAI,IAAI,CAAC,4BAA4B,EAC9D;YACA,gDAAgD;YAChD,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,oBAAoB,EAAE;gBACxD,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,EAAE;oBACjC,4CAA4C;oBAC5C,IAAI,CAAC,kCAAkC,EAAE,CAAC;iBAC3C;qBAAM;oBACL,wDAAwD;oBACxD,IAAI,CAAC,oCAAoC,EAAE,CAAC;iBAC7C;gBACD,wDAAwD;aACzD;iBAAM,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,kBAAkB,EAAE;gBAC7D,IAAI,CAAC,kDAAkD,EAAE,CAAC;gBAC1D,6DAA6D;aAC9D;iBAAM,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,kBAAkB,EAAE;gBAC7D,IAAI,CAAC,kCAAkC,EAAE,CAAC;gBAC1C,mEAAmE;aACpE;iBAAM,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,yBAAyB,EAAE;gBACpE,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,EAAE;oBACjC,IAAI,CAAC,sCAAsC,EAAE,CAAC;iBAC/C;qBAAM;oBACL,IAAI,CAAC,sCAAsC,EAAE,CAAC;iBAC/C;aACF;iBAAM;gBACL,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,aAAa,CAAC,CAAC;gBACvC,MAAM;aACP;SACF;IACH,CAAC;IAED;;;OAGG;IACK,cAAc;QACpB,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,YAAY,CAAC,CAAC;IACxC,CAAC;IAED;;;OAGG;IACK,cAAc,CAAC,GAAU;QAC/B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IAChC,CAAC;IAED;;OAEG;IACK,4BAA4B;QAClC,6FAA6F;QAC7F,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;QACpB,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;QACxD,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAClD,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAClD,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;IACxD,CAAC;IAED;;;OAGG;IACK,WAAW,CAAC,GAAW;QAC7B,2FAA2F;QAC3F,IAAI,IAAI,CAAC,KAAK,KAAK,4BAAgB,CAAC,KAAK,EAAE;YACzC,+BAA+B;YAC/B,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,KAAK,CAAC,CAAC;YAEtC
,iBAAiB;YACjB,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;YAEtB,4BAA4B;YAC5B,IAAI,CAAC,4BAA4B,EAAE,CAAC;YAEpC,sBAAsB;YACtB,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,uBAAgB,CAAC,GAAG,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC;SAC7D;IACH,CAAC;IAED;;OAEG;IACK,0BAA0B;QAChC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,IAAI,EAAE,CAAC;QAE/C,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtB,IAAI,CAAC,UAAU,CAAC,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;QACpD,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAElD,iBAAiB;QACjB,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE;YAC7C,IAAI,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC;YAC7D,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC;YAC3B,sBAAsB;SACvB;aAAM;YACL,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YACtB,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YACtB,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YACtB,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YACtB,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC;YAC3B,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;SACnD;QAED,IAAI,CAAC,4BAA4B;YAC/B,uCAA2B,CAAC,cAAc,CAAC;QAC7C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;IACrC,CAAC;IAED;;;OAGG;IACK,kCAAkC;QACxC,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,0BAAc,CAAC,OAAO,EAAE;YACtC,IAAI,CAAC,WAAW,CACd,GAAG,kBAAM,CAAC,6BAA6B,OACrC,0BAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CACxB,GAAG,CACJ,CAAC;SACH;aAAM;YACL,gBAAgB;YAChB,IAAI,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,wBAAY,CAAC,IAAI,EAAE;gBAC5D,MAAM,IAAI,GAAG,0BAAW,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;gBAC1C,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;gBAEpB,MAAM,UAAU,GAAoB;oBAClC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;oBACzB,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC;iBACvC,CAAC;gBAEF,yCAAyC;gBACzC,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;oBACjC,UAAU,CAAC,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,SAAS,CAAC;iBAChD;gBACD,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,yBAAyB,CAAC,CAAC;gBAC1D,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,EAAC,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;gBAEtD,mBAAmB;aACpB;iBAAM;gBACL,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;gBAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;gBACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;aACjD;SACF;IACH,CAAC;IAED;;;OAGG;IACK,sCAAsC;QAC5C,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,0BAAc,CAAC,OAAO,EAAE;YACtC,IAAI,CAAC,WAAW,CACd,GAAG,kBAAM,CAAC,0CAA0C,OAClD,0BAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CACxB,GAAG,CACJ,CAAC;SACH;aAAM;YACL,MAAM,IAAI,GAAG,0BAAW,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YAC1C,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;YAEpB,MAAM,UAAU,GAAoB;gBAClC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;gBACzB,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC;aACvC,CAAC;YAEF,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;YAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;YACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAC,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;SAC7D;IACH,CAAC;IAED;;OAEG;IACK,0BAA0B;QAChC,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAE/B,wCAAwC;QACxC,MAAM,oBAAoB,GAAG,CAAC,sBAAU,CAAC,MAAM,CAAC,CAAC;QAEjD,6FAA6F;QAC7F,sHAAsH;QACtH,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,EAAE;YAC5D,oBAAoB,CAAC,IAAI,CAAC,sBAAU,CAAC,QAAQ,CAAC,CAAC;SAChD;QAED,sBAAsB;QACtB,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,kBAAkB,KAAK,SAAS,EAAE;YACvD,oBAAoB,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;SAClE;QAED,yBAAyB;QACzB,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtB,IAAI,CAAC,UAAU,CAAC,oBAAoB,CAAC,MAAM,CAAC,CAAC;QAC7C,KAAK,MAAM,UAAU,IAAI,oBA
AoB,EAAE;YAC7C,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;SAC7B;QAED,IAAI,CAAC,4BAA4B;YAC/B,uCAA2B,CAAC,8BAA8B,CAAC;QAC7D,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;QACnC,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,oBAAoB,CAAC,CAAC;IACvD,CAAC;IAED;;;OAGG;IACK,oCAAoC;QAC1C,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,EAAE;YACpB,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,yCAAyC,CAAC,CAAC;SACpE;aAAM,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,qCAAyB,EAAE;YAChD,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,+CAA+C,CAAC,CAAC;SAC1E;aAAM;YACL,6EAA6E;YAC7E,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,sBAAU,CAAC,MAAM,EAAE;gBACjC,IAAI,CAAC,oBAAoB,GAAG,sBAAU,CAAC,MAAM,CAAC;gBAC9C,IAAI,CAAC,wBAAwB,EAAE,CAAC;gBAChC,0EAA0E;aAC3E;iBAAM,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,sBAAU,CAAC,QAAQ,EAAE;gBAC1C,IAAI,CAAC,oBAAoB,GAAG,sBAAU,CAAC,QAAQ,CAAC;gBAChD,IAAI,CAAC,gCAAgC,EAAE,CAAC;gBACxC,qFAAqF;aACtF;iBAAM,IAAI,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,kBAAkB,EAAE;gBAC5D,IAAI,CAAC,oBAAoB,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,kBAAkB,CAAC;gBAClE,IAAI,CAAC,8BAA8B,EAAE,CAAC;aACvC;iBAAM;gBACL,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,4CAA4C,CAAC,CAAC;aACvE;SACF;IACH,CAAC;IAED;;;;OAIG;IACK,gCAAgC;QACtC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,MAAM,IAAI,EAAE,CAAC;QAC/C,MAAM,QAAQ,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,IAAI,EAAE,CAAC;QAEnD,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAC/B,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtB,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC;QAC3C,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QACzB,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC;QAC7C,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;QAE3B,IAAI,CAAC,4BAA4B;YAC/B,uCAA2B,CAAC,oCAAoC,CAAC;QACnE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;QACnC,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,kBAAkB,CAAC,CAAC;IACrD,CAAC;IAEa,8BAA8B;;YAC1C,IAAI,CAAC,4BAA4B;gBAC/B,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,yBAAyB,CAAC;YAC/C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,2BAA2B,EAAE,CAAC,CAAC;YAC1E,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,kBAAkB,CAAC,CAAC;QACrD,CAAC;KAAA;IAEa,uCAAuC,CAAC,IAAY;;YAChE,OAAO,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,4BAA4B,CAAC,IAAI,CAAC,CAAC;QACrE,CAAC;KAAA;IAEa,iDAAiD,CAC7D,IAAY;;YAEZ,OAAO,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,CAAC;QAC1B,CAAC;KAAA;IAEa,mDAAmD,CAC/D,IAAY;;YAEZ,OAAO,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,CAAC;QAC1B,CAAC;KAAA;IAED;;;OAGG;IACW,kDAAkD;;YAC9D,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,8BAA8B,CAAC,CAAC;YAE/D,IAAI,UAAU,GAAG,KAAK,CAAC;YAEvB,IAAI,IAAI,CAAC,oBAAoB,KAAK,sBAAU,CAAC,MAAM,EAAE;gBACnD,UAAU,GAAG,MAAM,IAAI,CAAC,iDAAiD,CACvE,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAC1B,CAAC;aACH;iBAAM,IAAI,IAAI,CAAC,oBAAoB,KAAK,sBAAU,CAAC,QAAQ,EAAE;gBAC5D,UAAU;oBACR,MAAM,IAAI,CAAC,mDAAmD,CAC5D,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,CAC1B,CAAC;aACL;iBAAM,IACL,IAAI,CAAC,oBAAoB,KAAK,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,kBAAkB,EACnE;gBACA,UAAU,GAAG,MAAM,IAAI,CAAC,uCAAuC,CAC7D,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,yBAAyB,CAAC,CACrE,CAAC;aACH;YAED,IAAI,CAAC,UAAU,EAAE;gBACf,IAAI,CAAC,WAAW,CAAC,kBAAM,CAAC,0BAA0B,CAAC,CAAC;aACrD;iBAAM;gBACL,IAAI,CAAC,wBAAwB,EAAE,CAAC;aACjC;QACH,CAAC;KAAA;IAED;;OAEG;IACK,wBAAwB;QAC9B,MAAM,IAAI,GAAG,IAAI,0BAAW,EAAE,CAAC;QAE/B,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACtB,IAAI,CAAC,UAAU,CAAC,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;QACpD,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAEtB,sBAAsB;QACtB,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE;YAC7C,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,IAAI,CAAC,CAAC;YACrC,IAAI,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CA
AC,CAAC,CAAC;SAC9D;aAAM,IAAI,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE;YACpD,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,IAAI,CAAC,CAAC;YACrC,IAAI,CAAC,WAAW,CAAC,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC,CAAC;SAC9D;aAAM;YACL,IAAI,CAAC,UAAU,CAAC,0BAAc,CAAC,QAAQ,CAAC,CAAC;YACzC,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YACtD,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;SACjD;QACD,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QAElD,IAAI,CAAC,4BAA4B;YAC/B,uCAA2B,CAAC,oBAAoB,CAAC;QACnD,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC;QACnC,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,kBAAkB,CAAC,CAAC;IACrD,CAAC;IAED;;;OAGG;IACK,kCAAkC;QACxC,+EAA+E;QAC/E,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAE1C,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,0BAAc,CAAC,OAAO,EAAE;YAC9D,IAAI,CAAC,WAAW,CACd,GAAG,kBAAM,CAAC,mCAAmC,MAC3C,0BAAc,CAAC,MAAM,CAAC,CAAC,CAAC,CAC1B,EAAE,CACH,CAAC;SACH;aAAM;YACL,oBAAoB;YACpB,MAAM,WAAW,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;YAE9B,IAAI,UAA2B,CAAC;YAChC,IAAI,IAAiB,CAAC;YAEtB,OAAO;YACP,IAAI,WAAW,KAAK,0BAAc,CAAC,IAAI,EAAE;gBACvC,8BAA8B;gBAC9B,MAAM,UAAU,GAAG,uCAA2B,CAAC,kBAAkB,CAAC;gBAClE,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC;oBACtC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;gBAEF,4DAA4D;gBAC5D,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;oBACjC,UAAU,CAAC,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,SAAS,CAAC;iBAChD;gBAED,WAAW;aACZ;iBAAM,IAAI,WAAW,KAAK,0BAAc,CAAC,QAAQ,EAAE;gBAClD,MAAM,UAAU,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;gBAC7B,MAAM,UAAU,GACd,uCAA2B,CAAC,sBAAsB,CAAC,UAAU,CAAC,CAAC,CAAC,qCAAqC;gBAEvG,8BAA8B;gBAC9B,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC;oBACjC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;gBACF,OAAO;aACR;iBAAM,IAAI,WAAW,KAAK,0BAAc,CAAC,IAAI,EAAE;gBAC9C,8BAA8B;gBAC9B,MAAM,UAAU,GAAG,uCAA2B,CAAC,kBAAkB,CAAC;gBAClE,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;oBACtC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;aACH;YAED,6BAA6B;YAC7B,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,qBAAqB,CAAC,CAAC;YAEtD,gEAAgE;YAChE,IAAI,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,wBAAY,CAAC,OAAO,EAAE;gBAC/D,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;gBAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;gBACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAC,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;aAC7D;iBAAM,IAAI,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,wBAAY,CAAC,IAAI,EAAE;gBACnE;mHACmG;gBACnG,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,yBAAyB,CAAC,CAAC;gBAC1D,IAAI,CAAC,4BAA4B;oBAC/B,uCAA2B,CAAC,oBAAoB,CAAC;gBACnD,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,EAAC,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;gBACtD;;;kBAGE;aACH;iBAAM,IACL,wBAAY,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,wBAAY,CAAC,SAAS,EAC7D;gBACA,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;gBAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;g
BACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;oBACvB,UAAU;oBACV,MAAM,EAAE,IAAI,CAAC,MAAM;iBACpB,CAAC,CAAC;aACJ;SACF;IACH,CAAC;IAED;;OAEG;IACK,sCAAsC;QAC5C,+EAA+E;QAC/E,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAE1C,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,0BAAc,CAAC,OAAO,EAAE;YAC9D,IAAI,CAAC,WAAW,CACd,GAAG,kBAAM,CAAC,0CAA0C,MAClD,0BAAc,CAAC,MAAM,CAAC,CAAC,CAAC,CAC1B,EAAE,CACH,CAAC;SACH;aAAM;YACL,oBAAoB;YACpB,MAAM,WAAW,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;YAE9B,IAAI,UAA2B,CAAC;YAChC,IAAI,IAAiB,CAAC;YAEtB,OAAO;YACP,IAAI,WAAW,KAAK,0BAAc,CAAC,IAAI,EAAE;gBACvC,8BAA8B;gBAC9B,MAAM,UAAU,GAAG,uCAA2B,CAAC,kBAAkB,CAAC;gBAClE,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC;oBACtC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;gBAEF,4DAA4D;gBAC5D,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;oBACjC,UAAU,CAAC,IAAI,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,SAAS,CAAC;iBAChD;gBAED,WAAW;aACZ;iBAAM,IAAI,WAAW,KAAK,0BAAc,CAAC,QAAQ,EAAE;gBAClD,MAAM,UAAU,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;gBAC7B,MAAM,UAAU,GACd,uCAA2B,CAAC,sBAAsB,CAAC,UAAU,CAAC,CAAC,CAAC,8BAA8B;gBAEhG,8BAA8B;gBAC9B,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC;oBACjC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;gBACF,OAAO;aACR;iBAAM,IAAI,WAAW,KAAK,0BAAc,CAAC,IAAI,EAAE;gBAC9C,8BAA8B;gBAC9B,MAAM,UAAU,GAAG,uCAA2B,CAAC,kBAAkB,CAAC;gBAClE,IAAI,IAAI,CAAC,aAAa,CAAC,MAAM,GAAG,UAAU,EAAE;oBAC1C,IAAI,CAAC,4BAA4B,GAAG,UAAU,CAAC;oBAC/C,OAAO;iBACR;gBAED,IAAI,GAAG,0BAAW,CAAC,UAAU,CAC3B,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAC5C,CAAC;gBAEF,UAAU,GAAG;oBACX,IAAI,EAAE,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC;oBACtC,IAAI,EAAE,IAAI,CAAC,YAAY,EAAE;iBAC1B,CAAC;aACH;YAED,IAAI,CAAC,QAAQ,CAAC,4BAAgB,CAAC,WAAW,CAAC,CAAC;YAC5C,IAAI,CAAC,4BAA4B,EAAE,CAAC;YACpC,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,EAAC,UAAU,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,EAAC,CAAC,CAAC;SAC7D;IACH,CAAC;IAED,IAAI,kBAAkB;QACpB,yBACK,IAAI,CAAC,OAAO,EACf;IACJ,CAAC;CACF;AAGC,kCAAW"} \ No newline at end of file diff --git a/mybulma/node_modules/socks/build/common/constants.js b/mybulma/node_modules/socks/build/common/constants.js new file mode 100644 index 0000000..3c9ff90 --- /dev/null +++ b/mybulma/node_modules/socks/build/common/constants.js @@ -0,0 +1,114 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SOCKS5_NO_ACCEPTABLE_AUTH = exports.SOCKS5_CUSTOM_AUTH_END = exports.SOCKS5_CUSTOM_AUTH_START = exports.SOCKS_INCOMING_PACKET_SIZES = exports.SocksClientState = exports.Socks5Response = exports.Socks5HostType = exports.Socks5Auth = exports.Socks4Response = exports.SocksCommand = exports.ERRORS = exports.DEFAULT_TIMEOUT = void 0; +const DEFAULT_TIMEOUT = 30000; +exports.DEFAULT_TIMEOUT = DEFAULT_TIMEOUT; +// prettier-ignore +const ERRORS = { + InvalidSocksCommand: 'An invalid SOCKS command was provided. Valid options are connect, bind, and associate.', + InvalidSocksCommandForOperation: 'An invalid SOCKS command was provided. Only a subset of commands are supported for this operation.', + InvalidSocksCommandChain: 'An invalid SOCKS command was provided. 
Chaining currently only supports the connect command.', + InvalidSocksClientOptionsDestination: 'An invalid destination host was provided.', + InvalidSocksClientOptionsExistingSocket: 'An invalid existing socket was provided. This should be an instance of stream.Duplex.', + InvalidSocksClientOptionsProxy: 'Invalid SOCKS proxy details were provided.', + InvalidSocksClientOptionsTimeout: 'An invalid timeout value was provided. Please enter a value above 0 (in ms).', + InvalidSocksClientOptionsProxiesLength: 'At least two socks proxies must be provided for chaining.', + InvalidSocksClientOptionsCustomAuthRange: 'Custom auth must be a value between 0x80 and 0xFE.', + InvalidSocksClientOptionsCustomAuthOptions: 'When a custom_auth_method is provided, custom_auth_request_handler, custom_auth_response_size, and custom_auth_response_handler must also be provided and valid.', + NegotiationError: 'Negotiation error', + SocketClosed: 'Socket closed', + ProxyConnectionTimedOut: 'Proxy connection timed out', + InternalError: 'SocksClient internal error (this should not happen)', + InvalidSocks4HandshakeResponse: 'Received invalid Socks4 handshake response', + Socks4ProxyRejectedConnection: 'Socks4 Proxy rejected connection', + InvalidSocks4IncomingConnectionResponse: 'Socks4 invalid incoming connection response', + Socks4ProxyRejectedIncomingBoundConnection: 'Socks4 Proxy rejected incoming bound connection', + InvalidSocks5InitialHandshakeResponse: 'Received invalid Socks5 initial handshake response', + InvalidSocks5IntiailHandshakeSocksVersion: 'Received invalid Socks5 initial handshake (invalid socks version)', + InvalidSocks5InitialHandshakeNoAcceptedAuthType: 'Received invalid Socks5 initial handshake (no accepted authentication type)', + InvalidSocks5InitialHandshakeUnknownAuthType: 'Received invalid Socks5 initial handshake (unknown authentication type)', + Socks5AuthenticationFailed: 'Socks5 Authentication failed', + InvalidSocks5FinalHandshake: 'Received invalid Socks5 final handshake response', + InvalidSocks5FinalHandshakeRejected: 'Socks5 proxy rejected connection', + InvalidSocks5IncomingConnectionResponse: 'Received invalid Socks5 incoming connection response', + Socks5ProxyRejectedIncomingBoundConnection: 'Socks5 Proxy rejected incoming bound connection', +}; +exports.ERRORS = ERRORS; +const SOCKS_INCOMING_PACKET_SIZES = { + Socks5InitialHandshakeResponse: 2, + Socks5UserPassAuthenticationResponse: 2, + // Command response + incoming connection (bind) + Socks5ResponseHeader: 5, + Socks5ResponseIPv4: 10, + Socks5ResponseIPv6: 22, + Socks5ResponseHostname: (hostNameLength) => hostNameLength + 7, + // Command response + incoming connection (bind) + Socks4Response: 8, // 2 header + 2 port + 4 ip +}; +exports.SOCKS_INCOMING_PACKET_SIZES = SOCKS_INCOMING_PACKET_SIZES; +var SocksCommand; +(function (SocksCommand) { + SocksCommand[SocksCommand["connect"] = 1] = "connect"; + SocksCommand[SocksCommand["bind"] = 2] = "bind"; + SocksCommand[SocksCommand["associate"] = 3] = "associate"; +})(SocksCommand || (SocksCommand = {})); +exports.SocksCommand = SocksCommand; +var Socks4Response; +(function (Socks4Response) { + Socks4Response[Socks4Response["Granted"] = 90] = "Granted"; + Socks4Response[Socks4Response["Failed"] = 91] = "Failed"; + Socks4Response[Socks4Response["Rejected"] = 92] = "Rejected"; + Socks4Response[Socks4Response["RejectedIdent"] = 93] = "RejectedIdent"; +})(Socks4Response || (Socks4Response = {})); +exports.Socks4Response = Socks4Response; +var Socks5Auth; +(function (Socks5Auth) { + 
Socks5Auth[Socks5Auth["NoAuth"] = 0] = "NoAuth"; + Socks5Auth[Socks5Auth["GSSApi"] = 1] = "GSSApi"; + Socks5Auth[Socks5Auth["UserPass"] = 2] = "UserPass"; +})(Socks5Auth || (Socks5Auth = {})); +exports.Socks5Auth = Socks5Auth; +const SOCKS5_CUSTOM_AUTH_START = 0x80; +exports.SOCKS5_CUSTOM_AUTH_START = SOCKS5_CUSTOM_AUTH_START; +const SOCKS5_CUSTOM_AUTH_END = 0xfe; +exports.SOCKS5_CUSTOM_AUTH_END = SOCKS5_CUSTOM_AUTH_END; +const SOCKS5_NO_ACCEPTABLE_AUTH = 0xff; +exports.SOCKS5_NO_ACCEPTABLE_AUTH = SOCKS5_NO_ACCEPTABLE_AUTH; +var Socks5Response; +(function (Socks5Response) { + Socks5Response[Socks5Response["Granted"] = 0] = "Granted"; + Socks5Response[Socks5Response["Failure"] = 1] = "Failure"; + Socks5Response[Socks5Response["NotAllowed"] = 2] = "NotAllowed"; + Socks5Response[Socks5Response["NetworkUnreachable"] = 3] = "NetworkUnreachable"; + Socks5Response[Socks5Response["HostUnreachable"] = 4] = "HostUnreachable"; + Socks5Response[Socks5Response["ConnectionRefused"] = 5] = "ConnectionRefused"; + Socks5Response[Socks5Response["TTLExpired"] = 6] = "TTLExpired"; + Socks5Response[Socks5Response["CommandNotSupported"] = 7] = "CommandNotSupported"; + Socks5Response[Socks5Response["AddressNotSupported"] = 8] = "AddressNotSupported"; +})(Socks5Response || (Socks5Response = {})); +exports.Socks5Response = Socks5Response; +var Socks5HostType; +(function (Socks5HostType) { + Socks5HostType[Socks5HostType["IPv4"] = 1] = "IPv4"; + Socks5HostType[Socks5HostType["Hostname"] = 3] = "Hostname"; + Socks5HostType[Socks5HostType["IPv6"] = 4] = "IPv6"; +})(Socks5HostType || (Socks5HostType = {})); +exports.Socks5HostType = Socks5HostType; +var SocksClientState; +(function (SocksClientState) { + SocksClientState[SocksClientState["Created"] = 0] = "Created"; + SocksClientState[SocksClientState["Connecting"] = 1] = "Connecting"; + SocksClientState[SocksClientState["Connected"] = 2] = "Connected"; + SocksClientState[SocksClientState["SentInitialHandshake"] = 3] = "SentInitialHandshake"; + SocksClientState[SocksClientState["ReceivedInitialHandshakeResponse"] = 4] = "ReceivedInitialHandshakeResponse"; + SocksClientState[SocksClientState["SentAuthentication"] = 5] = "SentAuthentication"; + SocksClientState[SocksClientState["ReceivedAuthenticationResponse"] = 6] = "ReceivedAuthenticationResponse"; + SocksClientState[SocksClientState["SentFinalHandshake"] = 7] = "SentFinalHandshake"; + SocksClientState[SocksClientState["ReceivedFinalResponse"] = 8] = "ReceivedFinalResponse"; + SocksClientState[SocksClientState["BoundWaitingForConnection"] = 9] = "BoundWaitingForConnection"; + SocksClientState[SocksClientState["Established"] = 10] = "Established"; + SocksClientState[SocksClientState["Disconnected"] = 11] = "Disconnected"; + SocksClientState[SocksClientState["Error"] = 99] = "Error"; +})(SocksClientState || (SocksClientState = {})); +exports.SocksClientState = SocksClientState; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/mybulma/node_modules/socks/build/common/constants.js.map b/mybulma/node_modules/socks/build/common/constants.js.map new file mode 100644 index 0000000..c1e070d --- /dev/null +++ b/mybulma/node_modules/socks/build/common/constants.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/common/constants.ts"],"names":[],"mappings":";;;AAIA,MAAM,eAAe,GAAG,KAAK,CAAC;AA4M5B,0CAAe;AAxMjB,kBAAkB;AAClB,MAAM,MAAM,GAAG;IACb,mBAAmB,EAAE,wFAAwF;IAC7G,+BAA+B,EAAE,oGAAoG;IACrI,wBAAwB,EAAE,8FAA8F;IACxH,oCAAoC,EAAE,2CAA2C;IACjF,uCAAuC,EAAE,uFAAuF;IAChI,8BAA8B,EAAE,4CAA4C;IAC5E,gCAAgC,EAAE,8EAA8E;IAChH,sCAAsC,EAAE,2DAA2D;IACnG,wCAAwC,EAAE,oDAAoD;IAC9F,0CAA0C,EAAE,kKAAkK;IAC9M,gBAAgB,EAAE,mBAAmB;IACrC,YAAY,EAAE,eAAe;IAC7B,uBAAuB,EAAE,4BAA4B;IACrD,aAAa,EAAE,qDAAqD;IACpE,8BAA8B,EAAE,4CAA4C;IAC5E,6BAA6B,EAAE,kCAAkC;IACjE,uCAAuC,EAAE,6CAA6C;IACtF,0CAA0C,EAAE,iDAAiD;IAC7F,qCAAqC,EAAE,oDAAoD;IAC3F,yCAAyC,EAAE,mEAAmE;IAC9G,+CAA+C,EAAE,6EAA6E;IAC9H,4CAA4C,EAAE,yEAAyE;IACvH,0BAA0B,EAAE,8BAA8B;IAC1D,2BAA2B,EAAE,kDAAkD;IAC/E,mCAAmC,EAAE,kCAAkC;IACvE,uCAAuC,EAAE,sDAAsD;IAC/F,0CAA0C,EAAE,iDAAiD;CAC9F,CAAC;AA4KA,wBAAM;AA1KR,MAAM,2BAA2B,GAAG;IAClC,8BAA8B,EAAE,CAAC;IACjC,oCAAoC,EAAE,CAAC;IACvC,gDAAgD;IAChD,oBAAoB,EAAE,CAAC;IACvB,kBAAkB,EAAE,EAAE;IACtB,kBAAkB,EAAE,EAAE;IACtB,sBAAsB,EAAE,CAAC,cAAsB,EAAE,EAAE,CAAC,cAAc,GAAG,CAAC;IACtE,gDAAgD;IAChD,cAAc,EAAE,CAAC,EAAE,2BAA2B;CAC/C,CAAC;AAgLA,kEAA2B;AA5K7B,IAAK,YAIJ;AAJD,WAAK,YAAY;IACf,qDAAc,CAAA;IACd,+CAAW,CAAA;IACX,yDAAgB,CAAA;AAClB,CAAC,EAJI,YAAY,KAAZ,YAAY,QAIhB;AA0JC,oCAAY;AAxJd,IAAK,cAKJ;AALD,WAAK,cAAc;IACjB,0DAAc,CAAA;IACd,wDAAa,CAAA;IACb,4DAAe,CAAA;IACf,sEAAoB,CAAA;AACtB,CAAC,EALI,cAAc,KAAd,cAAc,QAKlB;AAoJC,wCAAc;AAlJhB,IAAK,UAIJ;AAJD,WAAK,UAAU;IACb,+CAAa,CAAA;IACb,+CAAa,CAAA;IACb,mDAAe,CAAA;AACjB,CAAC,EAJI,UAAU,KAAV,UAAU,QAId;AA+IC,gCAAU;AA7IZ,MAAM,wBAAwB,GAAG,IAAI,CAAC;AA0JpC,4DAAwB;AAzJ1B,MAAM,sBAAsB,GAAG,IAAI,CAAC;AA0JlC,wDAAsB;AAxJxB,MAAM,yBAAyB,GAAG,IAAI,CAAC;AAyJrC,8DAAyB;AAvJ3B,IAAK,cAUJ;AAVD,WAAK,cAAc;IACjB,yDAAc,CAAA;IACd,yDAAc,CAAA;IACd,+DAAiB,CAAA;IACjB,+EAAyB,CAAA;IACzB,yEAAsB,CAAA;IACtB,6EAAwB,CAAA;IACxB,+DAAiB,CAAA;IACjB,iFAA0B,CAAA;IAC1B,iFAA0B,CAAA;AAC5B,CAAC,EAVI,cAAc,KAAd,cAAc,QAUlB;AAgIC,wCAAc;AA9HhB,IAAK,cAIJ;AAJD,WAAK,cAAc;IACjB,mDAAW,CAAA;IACX,2DAAe,CAAA;IACf,mDAAW,CAAA;AACb,CAAC,EAJI,cAAc,KAAd,cAAc,QAIlB;AAyHC,wCAAc;AAvHhB,IAAK,gBAcJ;AAdD,WAAK,gBAAgB;IACnB,6DAAW,CAAA;IACX,mEAAc,CAAA;IACd,iEAAa,CAAA;IACb,uFAAwB,CAAA;IACxB,+GAAoC,CAAA;IACpC,mFAAsB,CAAA;IACtB,2GAAkC,CAAA;IAClC,mFAAsB,CAAA;IACtB,yFAAyB,CAAA;IACzB,iGAA6B,CAAA;IAC7B,sEAAgB,CAAA;IAChB,wEAAiB,CAAA;IACjB,0DAAU,CAAA;AACZ,CAAC,EAdI,gBAAgB,KAAhB,gBAAgB,QAcpB;AA2GC,4CAAgB"} \ No newline at end of file diff --git a/mybulma/node_modules/socks/build/common/helpers.js b/mybulma/node_modules/socks/build/common/helpers.js new file mode 100644 index 0000000..f84db8f --- /dev/null +++ b/mybulma/node_modules/socks/build/common/helpers.js @@ -0,0 +1,128 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.validateSocksClientChainOptions = exports.validateSocksClientOptions = void 0; +const util_1 = require("./util"); +const constants_1 = require("./constants"); +const stream = require("stream"); +/** + * Validates the provided SocksClientOptions + * @param options { SocksClientOptions } + * @param acceptedCommands { string[] } A list of accepted SocksProxy commands. + */ +function validateSocksClientOptions(options, acceptedCommands = ['connect', 'bind', 'associate']) { + // Check SOCKs command option. + if (!constants_1.SocksCommand[options.command]) { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksCommand, options); + } + // Check SocksCommand for acceptable command. 
+ if (acceptedCommands.indexOf(options.command) === -1) { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksCommandForOperation, options); + } + // Check destination + if (!isValidSocksRemoteHost(options.destination)) { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsDestination, options); + } + // Check SOCKS proxy to use + if (!isValidSocksProxy(options.proxy)) { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxy, options); + } + // Validate custom auth (if set) + validateCustomProxyAuth(options.proxy, options); + // Check timeout + if (options.timeout && !isValidTimeoutValue(options.timeout)) { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsTimeout, options); + } + // Check existing_socket (if provided) + if (options.existing_socket && + !(options.existing_socket instanceof stream.Duplex)) { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsExistingSocket, options); + } +} +exports.validateSocksClientOptions = validateSocksClientOptions; +/** + * Validates the SocksClientChainOptions + * @param options { SocksClientChainOptions } + */ +function validateSocksClientChainOptions(options) { + // Only connect is supported when chaining. + if (options.command !== 'connect') { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksCommandChain, options); + } + // Check destination + if (!isValidSocksRemoteHost(options.destination)) { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsDestination, options); + } + // Validate proxies (length) + if (!(options.proxies && + Array.isArray(options.proxies) && + options.proxies.length >= 2)) { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxiesLength, options); + } + // Validate proxies + options.proxies.forEach((proxy) => { + if (!isValidSocksProxy(proxy)) { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxy, options); + } + // Validate custom auth (if set) + validateCustomProxyAuth(proxy, options); + }); + // Check timeout + if (options.timeout && !isValidTimeoutValue(options.timeout)) { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsTimeout, options); + } +} +exports.validateSocksClientChainOptions = validateSocksClientChainOptions; +function validateCustomProxyAuth(proxy, options) { + if (proxy.custom_auth_method !== undefined) { + // Invalid auth method range + if (proxy.custom_auth_method < constants_1.SOCKS5_CUSTOM_AUTH_START || + proxy.custom_auth_method > constants_1.SOCKS5_CUSTOM_AUTH_END) { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthRange, options); + } + // Missing custom_auth_request_handler + if (proxy.custom_auth_request_handler === undefined || + typeof proxy.custom_auth_request_handler !== 'function') { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options); + } + // Missing custom_auth_response_size + if (proxy.custom_auth_response_size === undefined) { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options); + } + // Missing/invalid custom_auth_response_handler + if (proxy.custom_auth_response_handler === undefined || + typeof proxy.custom_auth_response_handler !== 'function') { + throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options); + } + } +} 
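For reference, this is the shape of an options object that the validators above accept; the proxy and destination addresses are placeholders, and the snippet is an illustrative sketch rather than part of helpers.js:

```javascript
const { SocksClient } = require('socks');

// Minimal options that satisfy validateSocksClientOptions():
// - command must be one of 'connect', 'bind', 'associate'
// - destination needs a string host and a numeric port in 0-65535
// - proxy needs host (or ipaddress), a numeric port in 0-65535, and type 4 or 5
const options = {
  command: 'connect',
  destination: { host: 'example.com', port: 80 },
  proxy: { host: '127.0.0.1', port: 1080, type: 5 }, // placeholder proxy address
  timeout: 30000, // optional, but must be greater than 0 when provided
};

// createConnection() runs these validators before connecting and rejects with a
// SocksClientError (for example ERRORS.InvalidSocksClientOptionsProxy) if any check fails.
SocksClient.createConnection(options).catch((err) => console.error(err.message));
```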
+/** + * Validates a SocksRemoteHost + * @param remoteHost { SocksRemoteHost } + */ +function isValidSocksRemoteHost(remoteHost) { + return (remoteHost && + typeof remoteHost.host === 'string' && + typeof remoteHost.port === 'number' && + remoteHost.port >= 0 && + remoteHost.port <= 65535); +} +/** + * Validates a SocksProxy + * @param proxy { SocksProxy } + */ +function isValidSocksProxy(proxy) { + return (proxy && + (typeof proxy.host === 'string' || typeof proxy.ipaddress === 'string') && + typeof proxy.port === 'number' && + proxy.port >= 0 && + proxy.port <= 65535 && + (proxy.type === 4 || proxy.type === 5)); +} +/** + * Validates a timeout value. + * @param value { Number } + */ +function isValidTimeoutValue(value) { + return typeof value === 'number' && value > 0; +} +//# sourceMappingURL=helpers.js.map \ No newline at end of file diff --git a/mybulma/node_modules/socks/build/common/helpers.js.map b/mybulma/node_modules/socks/build/common/helpers.js.map new file mode 100644 index 0000000..dae1248 --- /dev/null +++ b/mybulma/node_modules/socks/build/common/helpers.js.map @@ -0,0 +1 @@ +{"version":3,"file":"helpers.js","sourceRoot":"","sources":["../../src/common/helpers.ts"],"names":[],"mappings":";;;AAKA,iCAAwC;AACxC,2CAMqB;AACrB,iCAAiC;AAEjC;;;;GAIG;AACH,SAAS,0BAA0B,CACjC,OAA2B,EAC3B,gBAAgB,GAAG,CAAC,SAAS,EAAE,MAAM,EAAE,WAAW,CAAC;IAEnD,8BAA8B;IAC9B,IAAI,CAAC,wBAAY,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAClC,MAAM,IAAI,uBAAgB,CAAC,kBAAM,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;KACjE;IAED,6CAA6C;IAC7C,IAAI,gBAAgB,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE;QACpD,MAAM,IAAI,uBAAgB,CAAC,kBAAM,CAAC,+BAA+B,EAAE,OAAO,CAAC,CAAC;KAC7E;IAED,oBAAoB;IACpB,IAAI,CAAC,sBAAsB,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QAChD,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,oCAAoC,EAC3C,OAAO,CACR,CAAC;KACH;IAED,2BAA2B;IAC3B,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;QACrC,MAAM,IAAI,uBAAgB,CAAC,kBAAM,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;KAC5E;IAED,gCAAgC;IAChC,uBAAuB,CAAC,OAAO,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;IAEhD,gBAAgB;IAChB,IAAI,OAAO,CAAC,OAAO,IAAI,CAAC,mBAAmB,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAC5D,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,gCAAgC,EACvC,OAAO,CACR,CAAC;KACH;IAED,sCAAsC;IACtC,IACE,OAAO,CAAC,eAAe;QACvB,CAAC,CAAC,OAAO,CAAC,eAAe,YAAY,MAAM,CAAC,MAAM,CAAC,EACnD;QACA,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,uCAAuC,EAC9C,OAAO,CACR,CAAC;KACH;AACH,CAAC;AA6IO,gEAA0B;AA3IlC;;;GAGG;AACH,SAAS,+BAA+B,CAAC,OAAgC;IACvE,2CAA2C;IAC3C,IAAI,OAAO,CAAC,OAAO,KAAK,SAAS,EAAE;QACjC,MAAM,IAAI,uBAAgB,CAAC,kBAAM,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;KACtE;IAED,oBAAoB;IACpB,IAAI,CAAC,sBAAsB,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QAChD,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,oCAAoC,EAC3C,OAAO,CACR,CAAC;KACH;IAED,4BAA4B;IAC5B,IACE,CAAC,CACC,OAAO,CAAC,OAAO;QACf,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;QAC9B,OAAO,CAAC,OAAO,CAAC,MAAM,IAAI,CAAC,CAC5B,EACD;QACA,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,sCAAsC,EAC7C,OAAO,CACR,CAAC;KACH;IAED,mBAAmB;IACnB,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,KAAiB,EAAE,EAAE;QAC5C,IAAI,CAAC,iBAAiB,CAAC,KAAK,CAAC,EAAE;YAC7B,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,8BAA8B,EACrC,OAAO,CACR,CAAC;SACH;QAED,gCAAgC;QAChC,uBAAuB,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;IAC1C,CAAC,CAAC,CAAC;IAEH,gBAAgB;IAChB,IAAI,OAAO,CAAC,OAAO,IAAI,CAAC,mBAAmB,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;QAC5D,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,gCAAgC,EACvC,OAAO,CACR,CAAC;KACH;AACH,CAAC;AAuFmC,0EAA+B;AArFnE,SAAS,uBAAuB,CAC9B,KAAiB,EACjB,OAAqD;IAErD,IAAI,KAAK,CAAC,kBAAkB,KAAK,SAAS,EAAE;QAC1C,4BAA4B;QAC5B,IACE,KAAK,CAAC,kBAAkB,GAAG,oCAAwB;YACnD,KAAK,CAAC,kBAAkB,GAAG,kCAAsB,EACjD;YACA,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,wCAAwC,EAC/C,OAAO,CACR
,CAAC;SACH;QAED,sCAAsC;QACtC,IACE,KAAK,CAAC,2BAA2B,KAAK,SAAS;YAC/C,OAAO,KAAK,CAAC,2BAA2B,KAAK,UAAU,EACvD;YACA,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,0CAA0C,EACjD,OAAO,CACR,CAAC;SACH;QAED,oCAAoC;QACpC,IAAI,KAAK,CAAC,yBAAyB,KAAK,SAAS,EAAE;YACjD,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,0CAA0C,EACjD,OAAO,CACR,CAAC;SACH;QAED,+CAA+C;QAC/C,IACE,KAAK,CAAC,4BAA4B,KAAK,SAAS;YAChD,OAAO,KAAK,CAAC,4BAA4B,KAAK,UAAU,EACxD;YACA,MAAM,IAAI,uBAAgB,CACxB,kBAAM,CAAC,0CAA0C,EACjD,OAAO,CACR,CAAC;SACH;KACF;AACH,CAAC;AAED;;;GAGG;AACH,SAAS,sBAAsB,CAAC,UAA2B;IACzD,OAAO,CACL,UAAU;QACV,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ;QACnC,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ;QACnC,UAAU,CAAC,IAAI,IAAI,CAAC;QACpB,UAAU,CAAC,IAAI,IAAI,KAAK,CACzB,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,SAAS,iBAAiB,CAAC,KAAiB;IAC1C,OAAO,CACL,KAAK;QACL,CAAC,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,IAAI,OAAO,KAAK,CAAC,SAAS,KAAK,QAAQ,CAAC;QACvE,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ;QAC9B,KAAK,CAAC,IAAI,IAAI,CAAC;QACf,KAAK,CAAC,IAAI,IAAI,KAAK;QACnB,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,IAAI,KAAK,CAAC,IAAI,KAAK,CAAC,CAAC,CACvC,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,SAAS,mBAAmB,CAAC,KAAa;IACxC,OAAO,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,GAAG,CAAC,CAAC;AAChD,CAAC"} \ No newline at end of file diff --git a/mybulma/node_modules/socks/build/common/receivebuffer.js b/mybulma/node_modules/socks/build/common/receivebuffer.js new file mode 100644 index 0000000..3dacbf9 --- /dev/null +++ b/mybulma/node_modules/socks/build/common/receivebuffer.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ReceiveBuffer = void 0; +class ReceiveBuffer { + constructor(size = 4096) { + this.buffer = Buffer.allocUnsafe(size); + this.offset = 0; + this.originalSize = size; + } + get length() { + return this.offset; + } + append(data) { + if (!Buffer.isBuffer(data)) { + throw new Error('Attempted to append a non-buffer instance to ReceiveBuffer.'); + } + if (this.offset + data.length >= this.buffer.length) { + const tmp = this.buffer; + this.buffer = Buffer.allocUnsafe(Math.max(this.buffer.length + this.originalSize, this.buffer.length + data.length)); + tmp.copy(this.buffer); + } + data.copy(this.buffer, this.offset); + return (this.offset += data.length); + } + peek(length) { + if (length > this.offset) { + throw new Error('Attempted to read beyond the bounds of the managed internal data.'); + } + return this.buffer.slice(0, length); + } + get(length) { + if (length > this.offset) { + throw new Error('Attempted to read beyond the bounds of the managed internal data.'); + } + const value = Buffer.allocUnsafe(length); + this.buffer.slice(0, length).copy(value); + this.buffer.copyWithin(0, length, length + this.offset - length); + this.offset -= length; + return value; + } +} +exports.ReceiveBuffer = ReceiveBuffer; +//# sourceMappingURL=receivebuffer.js.map \ No newline at end of file diff --git a/mybulma/node_modules/socks/build/common/receivebuffer.js.map b/mybulma/node_modules/socks/build/common/receivebuffer.js.map new file mode 100644 index 0000000..af5e220 --- /dev/null +++ b/mybulma/node_modules/socks/build/common/receivebuffer.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"receivebuffer.js","sourceRoot":"","sources":["../../src/common/receivebuffer.ts"],"names":[],"mappings":";;;AAAA,MAAM,aAAa;IAKjB,YAAY,IAAI,GAAG,IAAI;QACrB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QACvC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC;QAChB,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;IAC3B,CAAC;IAED,IAAI,MAAM;QACR,OAAO,IAAI,CAAC,MAAM,CAAC;IACrB,CAAC;IAED,MAAM,CAAC,IAAY;QACjB,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;YAC1B,MAAM,IAAI,KAAK,CACb,6DAA6D,CAC9D,CAAC;SACH;QAED,IAAI,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;YACnD,MAAM,GAAG,GAAG,IAAI,CAAC,MAAM,CAAC;YACxB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAC9B,IAAI,CAAC,GAAG,CACN,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,YAAY,EACtC,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CACjC,CACF,CAAC;YACF,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACvB;QAED,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QACpC,OAAO,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC;IACtC,CAAC;IAED,IAAI,CAAC,MAAc;QACjB,IAAI,MAAM,GAAG,IAAI,CAAC,MAAM,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,mEAAmE,CACpE,CAAC;SACH;QACD,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC;IACtC,CAAC;IAED,GAAG,CAAC,MAAc;QAChB,IAAI,MAAM,GAAG,IAAI,CAAC,MAAM,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,mEAAmE,CACpE,CAAC;SACH;QAED,MAAM,KAAK,GAAG,MAAM,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QACzC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACzC,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,EAAE,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC;QACjE,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC;QAEtB,OAAO,KAAK,CAAC;IACf,CAAC;CACF;AAEO,sCAAa"} \ No newline at end of file diff --git a/mybulma/node_modules/socks/build/common/util.js b/mybulma/node_modules/socks/build/common/util.js new file mode 100644 index 0000000..f66b72e --- /dev/null +++ b/mybulma/node_modules/socks/build/common/util.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.shuffleArray = exports.SocksClientError = void 0; +/** + * Error wrapper for SocksClient + */ +class SocksClientError extends Error { + constructor(message, options) { + super(message); + this.options = options; + } +} +exports.SocksClientError = SocksClientError; +/** + * Shuffles a given array. + * @param array The array to shuffle. 
+ */ +function shuffleArray(array) { + for (let i = array.length - 1; i > 0; i--) { + const j = Math.floor(Math.random() * (i + 1)); + [array[i], array[j]] = [array[j], array[i]]; + } +} +exports.shuffleArray = shuffleArray; +//# sourceMappingURL=util.js.map \ No newline at end of file diff --git a/mybulma/node_modules/socks/build/common/util.js.map b/mybulma/node_modules/socks/build/common/util.js.map new file mode 100644 index 0000000..f199323 --- /dev/null +++ b/mybulma/node_modules/socks/build/common/util.js.map @@ -0,0 +1 @@ +{"version":3,"file":"util.js","sourceRoot":"","sources":["../../src/common/util.ts"],"names":[],"mappings":";;;AAEA;;GAEG;AACH,MAAM,gBAAiB,SAAQ,KAAK;IAClC,YACE,OAAe,EACR,OAAqD;QAE5D,KAAK,CAAC,OAAO,CAAC,CAAC;QAFR,YAAO,GAAP,OAAO,CAA8C;IAG9D,CAAC;CACF;AAuBuB,4CAAgB;AArBxC;;;GAGG;AACH,SAAS,YAAY,CAAC,KAAgB;IACpC,KAAK,IAAI,CAAC,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;QACzC,MAAM,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAC9C,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;KAC7C;AACH,CAAC;AAYyC,oCAAY"} \ No newline at end of file diff --git a/mybulma/node_modules/socks/build/index.js b/mybulma/node_modules/socks/build/index.js new file mode 100644 index 0000000..05fbb1d --- /dev/null +++ b/mybulma/node_modules/socks/build/index.js @@ -0,0 +1,18 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./client/socksclient"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/mybulma/node_modules/socks/build/index.js.map b/mybulma/node_modules/socks/build/index.js.map new file mode 100644 index 0000000..0e2bcb2 --- /dev/null +++ b/mybulma/node_modules/socks/build/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,uDAAqC"} \ No newline at end of file diff --git a/mybulma/node_modules/socks/docs/examples/index.md b/mybulma/node_modules/socks/docs/examples/index.md new file mode 100644 index 0000000..87bfe25 --- /dev/null +++ b/mybulma/node_modules/socks/docs/examples/index.md @@ -0,0 +1,17 @@ +# socks examples + +## TypeScript Examples + +[Connect command](typescript/connectExample.md) + +[Bind command](typescript/bindExample.md) + +[Associate command](typescript/associateExample.md) + +## JavaScript Examples + +[Connect command](javascript/connectExample.md) + +[Bind command](javascript/bindExample.md) + +[Associate command](javascript/associateExample.md) \ No newline at end of file diff --git a/mybulma/node_modules/socks/docs/examples/javascript/associateExample.md b/mybulma/node_modules/socks/docs/examples/javascript/associateExample.md new file mode 100644 index 0000000..c2c7b17 --- /dev/null 
+++ b/mybulma/node_modules/socks/docs/examples/javascript/associateExample.md @@ -0,0 +1,90 @@ +# socks examples + +## Example for SOCKS 'associate' command + +The associate command tells the SOCKS proxy server to establish a UDP relay. The server binds to a new UDP port and communicates the newly opened port back to the origin client. From here, any SOCKS UDP frame packets sent to this special UDP port on the Proxy server will be forwarded to the desired destination, and any responses will be forwarded back to the origin client (you). + +This can be used for things such as DNS queries, and other UDP communicates. + +**Connection Steps** + +1. Client -(associate)-> Proxy (Tells the proxy to create a UDP relay and bind on a new port) +2. Client <-(port)- Proxy (Tells the origin client which port it opened and is accepting UDP frame packets on) + +At this point the proxy is accepting UDP frames on the specified port. + +3. Client --(udp frame) -> Proxy -> Destination (The origin client sends a UDP frame to the proxy on the UDP port, and the proxy then forwards it to the destination specified in the UDP frame.) +4. Client <--(udp frame) <-- Proxy <-- Destination (The destination client responds to the udp packet sent in #3) + +## Usage + +The 'associate' command can only be used by creating a new SocksClient instance and listening for the 'established' event. + +**Note:** UDP packets relayed through the proxy servers are encompassed in a special Socks UDP frame format. SocksClient.createUDPFrame() and SocksClient.parseUDPFrame() create and parse these special UDP packets. + +```typescript +const dgram = require('dgram'); +const SocksClient = require('socks').SocksClient; + +// Create a local UDP socket for sending/receiving packets to/from the proxy. +const udpSocket = dgram.createSocket('udp4'); +udpSocket.bind(); + +// Listen for incoming UDP packets from the proxy server. +udpSocket.on('message', (message, rinfo) => { + console.log(SocksClient.parseUDPFrame(message)); + /* + { frameNumber: 0, + remoteHost: { host: '8.8.8.8', port: 53 }, // The remote host that replied with a UDP packet + data: // The data + } + */ +}); + +const options = { + proxy: { + host: '104.131.124.203', + port: 1081, + type: 5 + }, + + // This should be the ip and port of the expected client that will be sending UDP frames to the newly opened UDP port on the server. + // Most SOCKS servers accept 0.0.0.0 as a wildcard address to accept UDP frames from any source. + destination: { + host: '0.0.0.0', + port: 0 + }, + + command: 'associate' +}; + +const client = new SocksClient(options); + +// This event is fired when the SOCKS server has started listening on a new UDP port for UDP relaying. +client.on('established', info => { + console.log(info); + /* + { + socket: , + remoteHost: { // This is the remote port on the SOCKS proxy server to send UDP frame packets to. + host: '104.131.124.203', + port: 58232 + } + } + */ + + // Send a udp frame to 8.8.8.8 on port 53 through the proxy. + const packet = SocksClient.createUDPFrame({ + remoteHost: { host: '8.8.8.8', port: 53 }, + data: Buffer.from('hello') // A DNS lookup in the real world. + }); + + // Send packet. + udpSocket.send(packet, info.remoteHost.port, info.remoteHost.host); +}); + +// SOCKS proxy failed to bind. 
+client.on('error', () => { + // Handle errors +}); +``` diff --git a/mybulma/node_modules/socks/docs/examples/javascript/bindExample.md b/mybulma/node_modules/socks/docs/examples/javascript/bindExample.md new file mode 100644 index 0000000..be601d5 --- /dev/null +++ b/mybulma/node_modules/socks/docs/examples/javascript/bindExample.md @@ -0,0 +1,83 @@ +# socks examples + +## Example for SOCKS 'bind' command + +The bind command tells the SOCKS proxy server to bind and listen on a new TCP port for an incoming connection. It communicates the newly opened port back to the origin client. Once a incoming connection is accepted by the SOCKS proxy server it then communicates the remote host that connected to the SOCKS proxy back through the same initial connection via the origin client. + +This can be used for things such as FTP clients which require incoming TCP connections, etc. + +**Connection Steps** + +1. Client -(bind)-> Proxy (Tells the proxy to bind to a new port) +2. Client <-(port)- Proxy (Tells the origin client which port it opened) +3. Client2 --> Proxy (Other client connects to the proxy on this port) +4. Client <--(client2's host info) (Proxy tells the origin client who connected to it) +5. Original connection to the proxy is now a full TCP stream between client (you) and client2. +6. Client <--> Proxy <--> Client2 + + +## Usage + +The 'bind' command can only be used by creating a new SocksClient instance and listening for 'bound' and 'established' events. + + +```typescript +const SocksClient = require('socks').SocksClient; + +const options = { + proxy: { + host: '104.131.124.203', + port: 1081, + type: 5 + }, + + // This should be the ip and port of the expected client that will connect to the SOCKS proxy server on the newly bound port. + // Most SOCKS servers accept 0.0.0.0 as a wildcard address to accept any client. + destination: { + host: '0.0.0.0', + port: 0 + }, + + command: 'bind' +}; + +const client = new SocksClient(options); + +// This event is fired when the SOCKS server has started listening on a new port for incoming connections. +client.on('bound', (info) => { + console.log(info); + /* + { + socket: , + remoteHost: { // This is the remote ip and port of the SOCKS proxy that is now accepting incoming connections. + host: '104.131.124.203', + port: 49928 + } + } + */ +}); + +// This event is fired when the SOCKS server has accepted an incoming connection on the newly bound port. +client.on('established', (info) => { + console.log(info); + /* + { + socket: , + remoteHost: { // This is the remote ip and port that connected to the SOCKS proxy on the newly bound port. + host: '1.2.3.4', + port: 58232 + } + } + */ + + // At this point info.socket is a regular net.Socket TCP connection between client and client2 (1.2.3.4) (the client which connected to the proxy on the newly bound port.) + + console.log(info.socket); + // (this is a raw net.Socket that is established to the destination host through the given proxy servers) +}); + +// SOCKS proxy failed to bind. +client.on('error', () => { + // Handle errors +}); +``` \ No newline at end of file diff --git a/mybulma/node_modules/socks/docs/examples/javascript/connectExample.md b/mybulma/node_modules/socks/docs/examples/javascript/connectExample.md new file mode 100644 index 0000000..66244c5 --- /dev/null +++ b/mybulma/node_modules/socks/docs/examples/javascript/connectExample.md @@ -0,0 +1,258 @@ +# socks examples + +## Example for SOCKS 'connect' command + +The connect command is the most common use-case for a SOCKS proxy. 
This establishes a direct connection to a destination host through a proxy server. The destination host only has knowledge of the proxy server connecting to it and does not know about the origin client (you).
+
+**Origin Client (you) <-> Proxy Server <-> Destination Server**
+
+In this example, we are connecting to a web server on port 80, and sending a very basic HTTP request to receive a response. It's worth noting that there are many socks-http-agents that can be used with the node http module (and libraries such as request.js) to make this easier. This HTTP request is used as a simple example.
+
+The 'connect' command can be used via the SocksClient.createConnection() factory function as well as by creating a SocksClient instance and using event handlers.
+
+### Using createConnection with async/await
+
+Since SocksClient.createConnection returns a Promise, we can easily use async/await for flow control.
+
+```typescript
+const SocksClient = require('socks').SocksClient;
+
+const options = {
+ proxy: {
+ host: '104.131.124.203',
+ port: 1081,
+ type: 5
+ },
+
+ destination: {
+ host: 'ip-api.com', // host names are supported with SOCKS v4a and SOCKS v5.
+ port: 80
+ },
+
+ command: 'connect'
+};
+
+async function start() {
+ try {
+ const info = await SocksClient.createConnection(options);
+
+ console.log(info.socket);
+ // (this is a raw net.Socket that is established to the destination host through the given proxy servers)
+
+ info.socket.write('GET /json HTTP/1.1\nHost: ip-api.com\n\n');
+ info.socket.on('data', (data) => {
+ console.log(data.toString()); // ip-api.com sees that the last proxy (104.131.124.203) is connected to it and not the origin client (you).
+ /*
+ HTTP/1.1 200 OK
+ Access-Control-Allow-Origin: *
+ Content-Type: application/json; charset=utf-8
+ Date: Sun, 24 Dec 2017 03:47:51 GMT
+ Content-Length: 300
+
+ {
+ "as":"AS14061 Digital Ocean, Inc.",
+ "city":"Clifton",
+ "country":"United States",
+ "countryCode":"US",
+ "isp":"Digital Ocean",
+ "lat":40.8326,
+ "lon":-74.1307,
+ "org":"Digital Ocean",
+ "query":"104.131.124.203",
+ "region":"NJ",
+ "regionName":"New Jersey",
+ "status":"success",
+ "timezone":"America/New_York",
+ "zip":"07014"
+ }
+ */
+ });
+ } catch (err) {
+ // Handle errors
+ }
+}
+
+start();
+```
+
+### Using createConnection with Promises
+
+```typescript
+const SocksClient = require('socks').SocksClient;
+
+const options = {
+ proxy: {
+ ipaddress: '104.131.124.203',
+ port: 1081,
+ type: 5
+ },
+
+ destination: {
+ host: 'ip-api.com', // host names are supported with SOCKS v4a and SOCKS v5.
+ port: 80
+ },
+
+ command: 'connect'
+};
+
+SocksClient.createConnection(options)
+.then(info => {
+ console.log(info.socket);
+ // (this is a raw net.Socket that is established to the destination host through the given proxy servers)
+
+ info.socket.write('GET /json HTTP/1.1\nHost: ip-api.com\n\n');
+ info.socket.on('data', (data) => {
+ console.log(data.toString()); // ip-api.com sees that the last proxy (104.131.124.203) is connected to it and not the origin client (you).
+ /*
+ HTTP/1.1 200 OK
+ Access-Control-Allow-Origin: *
+ Content-Type: application/json; charset=utf-8
+ Date: Sun, 24 Dec 2017 03:47:51 GMT
+ Content-Length: 300
+
+ {
+ "as":"AS14061 Digital Ocean, Inc.",
+ "city":"Clifton",
+ "country":"United States",
+ "countryCode":"US",
+ "isp":"Digital Ocean",
+ "lat":40.8326,
+ "lon":-74.1307,
+ "org":"Digital Ocean",
+ "query":"104.131.124.203",
+ "region":"NJ",
+ "regionName":"New Jersey",
+ "status":"success",
+ "timezone":"America/New_York",
+ "zip":"07014"
+ }
+ */
+ });
+})
+.catch(err => {
+ // handle errors
+});
+```
+
+### Using createConnection with callbacks
+
+SocksClient.createConnection() optionally accepts a callback function as a second parameter.
+
+**Note:** If a callback function is provided, a Promise is still returned from the function, but the promise will always resolve regardless of whether there was an error. (tldr: Do not mix callbacks and Promises).
+
+```typescript
+const SocksClient = require('socks').SocksClient;
+
+const options = {
+ proxy: {
+ ipaddress: '104.131.124.203',
+ port: 1081,
+ type: 5
+ },
+
+ destination: {
+ host: 'ip-api.com', // host names are supported with SOCKS v4a and SOCKS v5.
+ port: 80
+ },
+
+ command: 'connect'
+};
+
+SocksClient.createConnection(options, (err, info) => {
+ if (err) {
+ // handle errors
+ } else {
+ console.log(info.socket);
+ // (this is a raw net.Socket that is established to the destination host through the given proxy servers)
+
+ info.socket.write('GET /json HTTP/1.1\nHost: ip-api.com\n\n');
+ info.socket.on('data', (data) => {
+ console.log(data.toString()); // ip-api.com sees that the last proxy (104.131.124.203) is connected to it and not the origin client (you).
+ /*
+ HTTP/1.1 200 OK
+ Access-Control-Allow-Origin: *
+ Content-Type: application/json; charset=utf-8
+ Date: Sun, 24 Dec 2017 03:47:51 GMT
+ Content-Length: 300
+
+ {
+ "as":"AS14061 Digital Ocean, Inc.",
+ "city":"Clifton",
+ "country":"United States",
+ "countryCode":"US",
+ "isp":"Digital Ocean",
+ "lat":40.8326,
+ "lon":-74.1307,
+ "org":"Digital Ocean",
+ "query":"104.131.124.203",
+ "region":"NJ",
+ "regionName":"New Jersey",
+ "status":"success",
+ "timezone":"America/New_York",
+ "zip":"07014"
+ }
+ */
+ });
+ }
+})
+```
+
+### Using event handlers
+
+SocksClient also supports direct instance creation, which allows for event-based flow control.
+
+```typescript
+const SocksClient = require('socks').SocksClient;
+
+const options = {
+ proxy: {
+ ipaddress: '104.131.124.203',
+ port: 1081,
+ type: 5
+ },
+
+ destination: {
+ host: 'ip-api.com', // host names are supported with SOCKS v4a and SOCKS v5.
+ port: 80
+ },
+
+ command: 'connect'
+};
+
+const client = new SocksClient(options);
+
+client.on('established', (info) => {
+ console.log(info.socket);
+ // (this is a raw net.Socket that is established to the destination host through the given proxy servers)
+
+ info.socket.write('GET /json HTTP/1.1\nHost: ip-api.com\n\n');
+ info.socket.on('data', (data) => {
+ console.log(data.toString()); // ip-api.com sees that the last proxy (104.131.124.203) is connected to it and not the origin client (you).
+ /*
+ HTTP/1.1 200 OK
+ Access-Control-Allow-Origin: *
+ Content-Type: application/json; charset=utf-8
+ Date: Sun, 24 Dec 2017 03:47:51 GMT
+ Content-Length: 300
+
+ {
+ "as":"AS14061 Digital Ocean, Inc.",
+ "city":"Clifton",
+ "country":"United States",
+ "countryCode":"US",
+ "isp":"Digital Ocean",
+ "lat":40.8326,
+ "lon":-74.1307,
+ "org":"Digital Ocean",
+ "query":"104.131.124.203",
+ "region":"NJ",
+ "regionName":"New Jersey",
+ "status":"success",
+ "timezone":"America/New_York",
+ "zip":"07014"
+ }
+ */
+ });
+});
+
+// Failed to establish proxy connection to destination.
+client.on('error', () => {
+ // Handle errors
+});
+
+// Start connection
+client.connect();
+```
\ No newline at end of file
diff --git a/mybulma/node_modules/socks/docs/examples/typescript/associateExample.md b/mybulma/node_modules/socks/docs/examples/typescript/associateExample.md
new file mode 100644
index 0000000..e8ca193
--- /dev/null
+++ b/mybulma/node_modules/socks/docs/examples/typescript/associateExample.md
@@ -0,0 +1,93 @@
+# socks examples
+
+## Example for SOCKS 'associate' command
+
+The associate command tells the SOCKS proxy server to establish a UDP relay. The server binds to a new UDP port and communicates the newly opened port back to the origin client. From here, any SOCKS UDP frame packets sent to this special UDP port on the Proxy server will be forwarded to the desired destination, and any responses will be forwarded back to the origin client (you).
+
+This can be used for things such as DNS queries, and other UDP communications.
+
+**Connection Steps**
+
+1. Client -(associate)-> Proxy (Tells the proxy to create a UDP relay and bind on a new port)
+2. Client <-(port)- Proxy (Tells the origin client which port it opened and is accepting UDP frame packets on)
+
+At this point the proxy is accepting UDP frames on the specified port.
+
+3. Client --(udp frame) -> Proxy -> Destination (The origin client sends a UDP frame to the proxy on the UDP port, and the proxy then forwards it to the destination specified in the UDP frame.)
+4. Client <--(udp frame) <-- Proxy <-- Destination (The destination client responds to the udp packet sent in #3)
+
+## Usage
+
+The 'associate' command can only be used by creating a new SocksClient instance and listening for the 'established' event.
+
+**Note:** UDP packets relayed through the proxy servers are packaged in a special Socks UDP frame format. SocksClient.createUDPFrame() and SocksClient.parseUDPFrame() create and parse these special UDP packets.
+
+```typescript
+import * as dgram from 'dgram';
+import { SocksClient, SocksClientOptions } from 'socks';
+
+// Create a local UDP socket for sending/receiving packets to/from the proxy.
+const udpSocket = dgram.createSocket('udp4');
+udpSocket.bind();
+
+// Listen for incoming UDP packets from the proxy server.
+udpSocket.on('message', (message, rinfo) => {
+ console.log(SocksClient.parseUDPFrame(message));
+ /*
+ { frameNumber: 0,
+ remoteHost: { host: '8.8.8.8', port: 53 }, // The remote host that replied with a UDP packet
+ data: // The data
+ }
+ */
+});
+
+const options: SocksClientOptions = {
+ proxy: {
+ host: '104.131.124.203',
+ port: 1081,
+ type: 5
+ },
+
+ // This should be the ip and port of the expected client that will be sending UDP frames to the newly opened UDP port on the server.
+ // Most SOCKS servers accept 0.0.0.0 as a wildcard address to accept UDP frames from any source.
+ destination: { + host: '0.0.0.0', + port: 0 + }, + + command: 'associate' +}; + +const client = new SocksClient(options); + +// This event is fired when the SOCKS server has started listening on a new UDP port for UDP relaying. +client.on('established', info => { + console.log(info); + /* + { + socket: , + remoteHost: { // This is the remote port on the SOCKS proxy server to send UDP frame packets to. + host: '104.131.124.203', + port: 58232 + } + } + */ + + // Send a udp frame to 8.8.8.8 on port 53 through the proxy. + const packet = SocksClient.createUDPFrame({ + remoteHost: { host: '8.8.8.8', port: 53 }, + data: Buffer.from('hello') // A DNS lookup in the real world. + }); + + // Send packet. + udpSocket.send(packet, info.remoteHost.port, info.remoteHost.host); +}); + +// SOCKS proxy failed to bind. +client.on('error', () => { + // Handle errors +}); + +// Start connection +client.connect(); +``` diff --git a/mybulma/node_modules/socks/docs/examples/typescript/bindExample.md b/mybulma/node_modules/socks/docs/examples/typescript/bindExample.md new file mode 100644 index 0000000..6b7607d --- /dev/null +++ b/mybulma/node_modules/socks/docs/examples/typescript/bindExample.md @@ -0,0 +1,86 @@ +# socks examples + +## Example for SOCKS 'bind' command + +The bind command tells the SOCKS proxy server to bind and listen on a new TCP port for an incoming connection. It communicates the newly opened port back to the origin client. Once a incoming connection is accepted by the SOCKS proxy server it then communicates the remote host that connected to the SOCKS proxy back through the same initial connection via the origin client. + +This can be used for things such as FTP clients which require incoming TCP connections, etc. + +**Connection Steps** + +1. Client -(bind)-> Proxy (Tells the proxy to bind to a new port) +2. Client <-(port)- Proxy (Tells the origin client which port it opened) +3. Client2 --> Proxy (Other client connects to the proxy on this port) +4. Client <--(client2's host info) (Proxy tells the origin client who connected to it) +5. Original connection to the proxy is now a full TCP stream between client (you) and client2. +6. Client <--> Proxy <--> Client2 + + +## Usage + +The 'bind' command can only be used by creating a new SocksClient instance and listening for 'bound' and 'established' events. + + +```typescript +import { SocksClient, SocksClientOptions } from 'socks'; + +const options: SocksClientOptions = { + proxy: { + host: '104.131.124.203', + port: 1081, + type: 5 + }, + + // This should be the ip and port of the expected client that will connect to the SOCKS proxy server on the newly bound port. + // Most SOCKS servers accept 0.0.0.0 as a wildcard address to accept any client. + destination: { + host: '0.0.0.0', + port: 0 + }, + + command: 'bind' +}; + +const client = new SocksClient(options); + +// This event is fired when the SOCKS server has started listening on a new port for incoming connections. +client.on('bound', (info) => { + console.log(info); + /* + { + socket: , + remoteHost: { // This is the remote ip and port of the SOCKS proxy that is now accepting incoming connections. + host: '104.131.124.203', + port: 49928 + } + } + */ +}); + +// This event is fired when the SOCKS server has accepted an incoming connection on the newly bound port. +client.on('established', (info) => { + console.log(info); + /* + { + socket: , + remoteHost: { // This is the remote ip and port that connected to the SOCKS proxy on the newly bound port. 
+ host: '1.2.3.4', + port: 58232 + } + } + */ + + // At this point info.socket is a regular net.Socket TCP connection between client and client2 (1.2.3.4) (the client which connected to the proxy on the newly bound port.) + + console.log(info.socket); + // (this is a raw net.Socket that is established to the destination host through the given proxy servers) +}); + +// SOCKS proxy failed to bind. +client.on('error', () => { + // Handle errors +}); + +// Start connection +client.connect(); +``` \ No newline at end of file diff --git a/mybulma/node_modules/socks/docs/examples/typescript/connectExample.md b/mybulma/node_modules/socks/docs/examples/typescript/connectExample.md new file mode 100644 index 0000000..30606d0 --- /dev/null +++ b/mybulma/node_modules/socks/docs/examples/typescript/connectExample.md @@ -0,0 +1,265 @@ +# socks examples + +## Example for SOCKS 'connect' command + +The connect command is the most common use-case for a SOCKS proxy. This establishes a direct connection to a destination host through a proxy server. The destination host only has knowledge of the proxy server connecting to it and does not know about the origin client (you). + +**Origin Client (you) <-> Proxy Server <-> Destination Server** + +In this example, we are connecting to a web server on port 80, and sending a very basic HTTP request to receive a response. It's worth noting that there are many socks-http-agents that can be used with the node http module (and libraries such as request.js) to make this easier. This HTTP request is used as a simple example. + +The 'connect' command can be used via the SocksClient.createConnection() factory function as well as by creating a SocksClient instance and using event handlers. + +### Using createConnection with async/await + +Since SocksClient.createConnection returns a Promise, we can easily use async/await for flow control. + +```typescript +import { SocksClient, SocksClientOptions } from 'socks'; + +const options: SocksClientOptions = { + proxy: { + host: '104.131.124.203', + port: 1081, + type: 5 + }, + + destination: { + host: 'ip-api.com', // host names are supported with SOCKS v4a and SOCKS v5. + port: 80 + }, + + command: 'connect' +}; + +async function start() { + try { + const info = await SocksClient.createConnection(options); + + console.log(info.socket); + // (this is a raw net.Socket that is established to the destination host through the given proxy servers) + + info.socket.write('GET /json HTTP/1.1\nHost: ip-api.com\n\n'); + info.socket.on('data', (data) => { + console.log(data.toString()); // ip-api.com sees that the last proxy (104.131.124.203) is connected to it and not the origin client (you). 
+ /* + HTTP/1.1 200 OK + Access-Control-Allow-Origin: * + Content-Type: application/json; charset=utf-8 + Date: Sun, 24 Dec 2017 03:47:51 GMT + Content-Length: 300 + + { + "as":"AS14061 Digital Ocean, Inc.", + "city":"Clifton", + "country":"United States", + "countryCode":"US", + "isp":"Digital Ocean", + "lat":40.8326, + "lon":-74.1307, + "org":"Digital Ocean", + "query":"104.131.124.203", + "region":"NJ", + "regionName":"New Jersey", + "status":"success", + "timezone":"America/New_York", + "zip":"07014" + } + */ + }); + } catch (err) { + // Handle errors + } +} + +start(); +``` + +### Using createConnection with Promises + +```typescript +import { SocksClient, SocksClientOptions } from 'socks'; + +const options: SocksClientOptions = { + proxy: { + ipaddress: '104.131.124.203', + port: 1081, + type: 5 + }, + + destination: { + host: 'ip-api.com', // host names are supported with SOCKS v4a and SOCKS v5. + port: 80 + }, + + command: 'connect' +}; + +SocksClient.createConnection(options) +.then(info => { + console.log(info.socket); + // (this is a raw net.Socket that is established to the destination host through the given proxy servers) + + info.socket.write('GET /json HTTP/1.1\nHost: ip-api.com\n\n'); + info.socket.on('data', (data) => { + console.log(data.toString()); // ip-api.com sees that the last proxy (104.131.124.203) is connected to it and not the origin client (you). + /* + HTTP/1.1 200 OK + Access-Control-Allow-Origin: * + Content-Type: application/json; charset=utf-8 + Date: Sun, 24 Dec 2017 03:47:51 GMT + Content-Length: 300 + + { + "as":"AS14061 Digital Ocean, Inc.", + "city":"Clifton", + "country":"United States", + "countryCode":"US", + "isp":"Digital Ocean", + "lat":40.8326, + "lon":-74.1307, + "org":"Digital Ocean", + "query":"104.131.124.203", + "region":"NJ", + "regionName":"New Jersey", + "status":"success", + "timezone":"America/New_York", + "zip":"07014" + } + */ + }); +}) +.catch(err => { + // handle errors +}); +``` + +### Using createConnection with callbacks + +SocksClient.createConnection() optionally accepts a callback function as a second parameter. + +**Note:** If a callback function is provided, a Promise is still returned from the function, but the promise will always resolve regardless of if there was en error. (tldr: Do not mix callbacks and Promises). + +```typescript +import { SocksClient, SocksClientOptions } from 'socks'; + +const options: SocksClientOptions = { + proxy: { + ipaddress: '104.131.124.203', + port: 1081, + type: 5 + }, + + destination: { + host: 'ip-api.com', // host names are supported with SOCKS v4a and SOCKS v5. + port: 80 + }, + + command: 'connect' +}; + +SocksClient.createConnection(options, (err, info) => { + if (err) { + // handle errors + } else { + console.log(info.socket); + // (this is a raw net.Socket that is established to the destination host through the given proxy servers) + + info.socket.write('GET /json HTTP/1.1\nHost: ip-api.com\n\n'); + info.socket.on('data', (data) => { + console.log(data.toString()); // ip-api.com sees that the last proxy (104.131.124.203) is connected to it and not the origin client (you). 
+ /* + HTTP/1.1 200 OK + Access-Control-Allow-Origin: * + Content-Type: application/json; charset=utf-8 + Date: Sun, 24 Dec 2017 03:47:51 GMT + Content-Length: 300 + + { + "as":"AS14061 Digital Ocean, Inc.", + "city":"Clifton", + "country":"United States", + "countryCode":"US", + "isp":"Digital Ocean", + "lat":40.8326, + "lon":-74.1307, + "org":"Digital Ocean", + "query":"104.131.124.203", + "region":"NJ", + "regionName":"New Jersey", + "status":"success", + "timezone":"America/New_York", + "zip":"07014" + } + */ + }); + } +}) +``` + +### Using event handlers + +SocksClient also supports instance creation of a SocksClient. This allows for event based flow control. + +```typescript +import { SocksClient, SocksClientOptions } from 'socks'; + +const options: SocksClientOptions = { + proxy: { + ipaddress: '104.131.124.203', + port: 1081, + type: 5 + }, + + destination: { + host: 'ip-api.com', // host names are supported with SOCKS v4a and SOCKS v5. + port: 80 + }, + + command: 'connect' +}; + +const client = new SocksClient(options); + +client.on('established', (info) => { + console.log(info.socket); + // (this is a raw net.Socket that is established to the destination host through the given proxy servers) + + info.socket.write('GET /json HTTP/1.1\nHost: ip-api.com\n\n'); + info.socket.on('data', (data) => { + console.log(data.toString()); // ip-api.com sees that the last proxy (104.131.124.203) is connected to it and not the origin client (you). + /* + HTTP/1.1 200 OK + Access-Control-Allow-Origin: * + Content-Type: application/json; charset=utf-8 + Date: Sun, 24 Dec 2017 03:47:51 GMT + Content-Length: 300 + + { + "as":"AS14061 Digital Ocean, Inc.", + "city":"Clifton", + "country":"United States", + "countryCode":"US", + "isp":"Digital Ocean", + "lat":40.8326, + "lon":-74.1307, + "org":"Digital Ocean", + "query":"104.131.124.203", + "region":"NJ", + "regionName":"New Jersey", + "status":"success", + "timezone":"America/New_York", + "zip":"07014" + } + */ + }); +}); + +// Failed to establish proxy connection to destination. +client.on('error', () => { + // Handle errors +}); + +// Start connection +client.connect(); +``` \ No newline at end of file diff --git a/mybulma/node_modules/socks/docs/index.md b/mybulma/node_modules/socks/docs/index.md new file mode 100644 index 0000000..3eb1d71 --- /dev/null +++ b/mybulma/node_modules/socks/docs/index.md @@ -0,0 +1,5 @@ +# Documentation + +- [API Reference](https://github.com/JoshGlazebrook/socks#api-reference) + +- [Code Examples](./examples/index.md) \ No newline at end of file diff --git a/mybulma/node_modules/socks/docs/migratingFromV1.md b/mybulma/node_modules/socks/docs/migratingFromV1.md new file mode 100644 index 0000000..dd00838 --- /dev/null +++ b/mybulma/node_modules/socks/docs/migratingFromV1.md @@ -0,0 +1,86 @@ +# socks + +## Migrating from v1 + +For the most part, migrating from v1 takes minimal effort as v2 still supports factory creation of proxy connections with callback support. + +### Notable breaking changes + +- In an options object, the proxy 'command' is now required and does not default to 'connect'. +- **In an options object, 'target' is now known as 'destination'.** +- Sockets are no longer paused after a SOCKS connection is made, so socket.resume() is no longer required. (Please be sure to attach data handlers immediately to the Socket to avoid losing data). +- In v2, only the 'connect' command is supported via the factory SocksClient.createConnection function. 
(BIND and ASSOCIATE must be used with a SocksClient instance via event handlers). +- In v2, the factory SocksClient.createConnection function callback is called with a single object rather than separate socket and info object. +- A SOCKS http/https agent is no longer bundled into the library. + +For informational purposes, here is the original getting started example from v1 converted to work with v2. + +### Before (v1) + +```javascript +var Socks = require('socks'); + +var options = { + proxy: { + ipaddress: "202.101.228.108", + port: 1080, + type: 5 + }, + target: { + host: "google.com", + port: 80 + }, + command: 'connect' +}; + +Socks.createConnection(options, function(err, socket, info) { + if (err) + console.log(err); + else { + socket.write("GET / HTTP/1.1\nHost: google.com\n\n"); + socket.on('data', function(data) { + console.log(data.length); + console.log(data); + }); + + // PLEASE NOTE: sockets need to be resumed before any data will come in or out as they are paused right before this callback is fired. + socket.resume(); + + // 569 + // = 10.13.0", + "npm": ">= 3.0.0" + }, + "author": "Josh Glazebrook", + "contributors": [ + "castorw" + ], + "license": "MIT", + "readmeFilename": "README.md", + "devDependencies": { + "@types/ip": "1.1.0", + "@types/mocha": "^9.1.1", + "@types/node": "^18.0.6", + "@typescript-eslint/eslint-plugin": "^5.30.6", + "@typescript-eslint/parser": "^5.30.6", + "eslint": "^8.20.0", + "mocha": "^10.0.0", + "prettier": "^2.7.1", + "ts-node": "^10.9.1", + "typescript": "^4.7.4" + }, + "dependencies": { + "ip": "^2.0.0", + "smart-buffer": "^4.2.0" + }, + "scripts": { + "prepublish": "npm install -g typescript && npm run build", + "test": "NODE_ENV=test mocha --recursive --require ts-node/register test/**/*.ts", + "prettier": "prettier --write ./src/**/*.ts --config .prettierrc.yaml", + "lint": "eslint 'src/**/*.ts'", + "build": "rm -rf build typings && prettier --write ./src/**/*.ts --config .prettierrc.yaml && tsc -p ." 
+ } +} diff --git a/mybulma/node_modules/socks/typings/client/socksclient.d.ts b/mybulma/node_modules/socks/typings/client/socksclient.d.ts new file mode 100644 index 0000000..b886d95 --- /dev/null +++ b/mybulma/node_modules/socks/typings/client/socksclient.d.ts @@ -0,0 +1,162 @@ +/// +/// +/// +import { EventEmitter } from 'events'; +import { SocksClientOptions, SocksClientChainOptions, SocksRemoteHost, SocksProxy, SocksClientBoundEvent, SocksClientEstablishedEvent, SocksUDPFrameDetails } from '../common/constants'; +import { SocksClientError } from '../common/util'; +import { Duplex } from 'stream'; +declare interface SocksClient { + on(event: 'error', listener: (err: SocksClientError) => void): this; + on(event: 'bound', listener: (info: SocksClientBoundEvent) => void): this; + on(event: 'established', listener: (info: SocksClientEstablishedEvent) => void): this; + once(event: string, listener: (...args: unknown[]) => void): this; + once(event: 'error', listener: (err: SocksClientError) => void): this; + once(event: 'bound', listener: (info: SocksClientBoundEvent) => void): this; + once(event: 'established', listener: (info: SocksClientEstablishedEvent) => void): this; + emit(event: string | symbol, ...args: unknown[]): boolean; + emit(event: 'error', err: SocksClientError): boolean; + emit(event: 'bound', info: SocksClientBoundEvent): boolean; + emit(event: 'established', info: SocksClientEstablishedEvent): boolean; +} +declare class SocksClient extends EventEmitter implements SocksClient { + private options; + private socket; + private state; + private receiveBuffer; + private nextRequiredPacketBufferSize; + private socks5ChosenAuthType; + private onDataReceived; + private onClose; + private onError; + private onConnect; + constructor(options: SocksClientOptions); + /** + * Creates a new SOCKS connection. + * + * Note: Supports callbacks and promises. Only supports the connect command. + * @param options { SocksClientOptions } Options. + * @param callback { Function } An optional callback function. + * @returns { Promise } + */ + static createConnection(options: SocksClientOptions, callback?: (error: Error | null, info?: SocksClientEstablishedEvent) => void): Promise; + /** + * Creates a new SOCKS connection chain to a destination host through 2 or more SOCKS proxies. + * + * Note: Supports callbacks and promises. Only supports the connect method. + * Note: Implemented via createConnection() factory function. + * @param options { SocksClientChainOptions } Options + * @param callback { Function } An optional callback function. + * @returns { Promise } + */ + static createConnectionChain(options: SocksClientChainOptions, callback?: (error: Error | null, socket?: SocksClientEstablishedEvent) => void): Promise; + /** + * Creates a SOCKS UDP Frame. + * @param options + */ + static createUDPFrame(options: SocksUDPFrameDetails): Buffer; + /** + * Parses a SOCKS UDP frame. + * @param data + */ + static parseUDPFrame(data: Buffer): SocksUDPFrameDetails; + /** + * Internal state setter. If the SocksClient is in an error state, it cannot be changed to a non error state. + */ + private setState; + /** + * Starts the connection establishment to the proxy and destination. + * @param existingSocket Connected socket to use instead of creating a new one (internal use). + */ + connect(existingSocket?: Duplex): void; + private getSocketOptions; + /** + * Handles internal Socks timeout callback. + * Note: If the Socks client is not BoundWaitingForConnection or Established, the connection will be closed. 
+ */ + private onEstablishedTimeout; + /** + * Handles Socket connect event. + */ + private onConnectHandler; + /** + * Handles Socket data event. + * @param data + */ + private onDataReceivedHandler; + /** + * Handles processing of the data we have received. + */ + private processData; + /** + * Handles Socket close event. + * @param had_error + */ + private onCloseHandler; + /** + * Handles Socket error event. + * @param err + */ + private onErrorHandler; + /** + * Removes internal event listeners on the underlying Socket. + */ + private removeInternalSocketHandlers; + /** + * Closes and destroys the underlying Socket. Emits an error event. + * @param err { String } An error string to include in error event. + */ + private closeSocket; + /** + * Sends initial Socks v4 handshake request. + */ + private sendSocks4InitialHandshake; + /** + * Handles Socks v4 handshake response. + * @param data + */ + private handleSocks4FinalHandshakeResponse; + /** + * Handles Socks v4 incoming connection request (BIND) + * @param data + */ + private handleSocks4IncomingConnectionResponse; + /** + * Sends initial Socks v5 handshake request. + */ + private sendSocks5InitialHandshake; + /** + * Handles initial Socks v5 handshake response. + * @param data + */ + private handleInitialSocks5HandshakeResponse; + /** + * Sends Socks v5 user & password auth handshake. + * + * Note: No auth and user/pass are currently supported. + */ + private sendSocks5UserPassAuthentication; + private sendSocks5CustomAuthentication; + private handleSocks5CustomAuthHandshakeResponse; + private handleSocks5AuthenticationNoAuthHandshakeResponse; + private handleSocks5AuthenticationUserPassHandshakeResponse; + /** + * Handles Socks v5 auth handshake response. + * @param data + */ + private handleInitialSocks5AuthenticationHandshakeResponse; + /** + * Sends Socks v5 final handshake request. + */ + private sendSocks5CommandRequest; + /** + * Handles Socks v5 final handshake response. + * @param data + */ + private handleSocks5FinalHandshakeResponse; + /** + * Handles Socks v5 incoming connection request (BIND). 
+ */ + private handleSocks5IncomingConnectionResponse; + get socksClientOptions(): SocksClientOptions; +} +export { SocksClient, SocksClientOptions, SocksClientChainOptions, SocksClientError, SocksRemoteHost, SocksProxy, SocksUDPFrameDetails, }; diff --git a/mybulma/node_modules/socks/typings/common/constants.d.ts b/mybulma/node_modules/socks/typings/common/constants.d.ts new file mode 100644 index 0000000..32a5705 --- /dev/null +++ b/mybulma/node_modules/socks/typings/common/constants.d.ts @@ -0,0 +1,152 @@ +/// +/// +/// +import { Duplex } from 'stream'; +import { Socket, SocketConnectOpts } from 'net'; +import { RequireOnlyOne } from './util'; +declare const DEFAULT_TIMEOUT = 30000; +declare type SocksProxyType = 4 | 5; +declare const ERRORS: { + InvalidSocksCommand: string; + InvalidSocksCommandForOperation: string; + InvalidSocksCommandChain: string; + InvalidSocksClientOptionsDestination: string; + InvalidSocksClientOptionsExistingSocket: string; + InvalidSocksClientOptionsProxy: string; + InvalidSocksClientOptionsTimeout: string; + InvalidSocksClientOptionsProxiesLength: string; + InvalidSocksClientOptionsCustomAuthRange: string; + InvalidSocksClientOptionsCustomAuthOptions: string; + NegotiationError: string; + SocketClosed: string; + ProxyConnectionTimedOut: string; + InternalError: string; + InvalidSocks4HandshakeResponse: string; + Socks4ProxyRejectedConnection: string; + InvalidSocks4IncomingConnectionResponse: string; + Socks4ProxyRejectedIncomingBoundConnection: string; + InvalidSocks5InitialHandshakeResponse: string; + InvalidSocks5IntiailHandshakeSocksVersion: string; + InvalidSocks5InitialHandshakeNoAcceptedAuthType: string; + InvalidSocks5InitialHandshakeUnknownAuthType: string; + Socks5AuthenticationFailed: string; + InvalidSocks5FinalHandshake: string; + InvalidSocks5FinalHandshakeRejected: string; + InvalidSocks5IncomingConnectionResponse: string; + Socks5ProxyRejectedIncomingBoundConnection: string; +}; +declare const SOCKS_INCOMING_PACKET_SIZES: { + Socks5InitialHandshakeResponse: number; + Socks5UserPassAuthenticationResponse: number; + Socks5ResponseHeader: number; + Socks5ResponseIPv4: number; + Socks5ResponseIPv6: number; + Socks5ResponseHostname: (hostNameLength: number) => number; + Socks4Response: number; +}; +declare type SocksCommandOption = 'connect' | 'bind' | 'associate'; +declare enum SocksCommand { + connect = 1, + bind = 2, + associate = 3 +} +declare enum Socks4Response { + Granted = 90, + Failed = 91, + Rejected = 92, + RejectedIdent = 93 +} +declare enum Socks5Auth { + NoAuth = 0, + GSSApi = 1, + UserPass = 2 +} +declare const SOCKS5_CUSTOM_AUTH_START = 128; +declare const SOCKS5_CUSTOM_AUTH_END = 254; +declare const SOCKS5_NO_ACCEPTABLE_AUTH = 255; +declare enum Socks5Response { + Granted = 0, + Failure = 1, + NotAllowed = 2, + NetworkUnreachable = 3, + HostUnreachable = 4, + ConnectionRefused = 5, + TTLExpired = 6, + CommandNotSupported = 7, + AddressNotSupported = 8 +} +declare enum Socks5HostType { + IPv4 = 1, + Hostname = 3, + IPv6 = 4 +} +declare enum SocksClientState { + Created = 0, + Connecting = 1, + Connected = 2, + SentInitialHandshake = 3, + ReceivedInitialHandshakeResponse = 4, + SentAuthentication = 5, + ReceivedAuthenticationResponse = 6, + SentFinalHandshake = 7, + ReceivedFinalResponse = 8, + BoundWaitingForConnection = 9, + Established = 10, + Disconnected = 11, + Error = 99 +} +/** + * Represents a SocksProxy + */ +declare type SocksProxy = RequireOnlyOne<{ + ipaddress?: string; + host?: string; + port: number; + type: 
SocksProxyType; + userId?: string; + password?: string; + custom_auth_method?: number; + custom_auth_request_handler?: () => Promise; + custom_auth_response_size?: number; + custom_auth_response_handler?: (data: Buffer) => Promise; +}, 'host' | 'ipaddress'>; +/** + * Represents a remote host + */ +interface SocksRemoteHost { + host: string; + port: number; +} +/** + * SocksClient connection options. + */ +interface SocksClientOptions { + command: SocksCommandOption; + destination: SocksRemoteHost; + proxy: SocksProxy; + timeout?: number; + existing_socket?: Duplex; + set_tcp_nodelay?: boolean; + socket_options?: SocketConnectOpts; +} +/** + * SocksClient chain connection options. + */ +interface SocksClientChainOptions { + command: 'connect'; + destination: SocksRemoteHost; + proxies: SocksProxy[]; + timeout?: number; + randomizeChain?: false; +} +interface SocksClientEstablishedEvent { + socket: Socket; + remoteHost?: SocksRemoteHost; +} +declare type SocksClientBoundEvent = SocksClientEstablishedEvent; +interface SocksUDPFrameDetails { + frameNumber?: number; + remoteHost: SocksRemoteHost; + data: Buffer; +} +export { DEFAULT_TIMEOUT, ERRORS, SocksProxyType, SocksCommand, Socks4Response, Socks5Auth, Socks5HostType, Socks5Response, SocksClientState, SocksProxy, SocksRemoteHost, SocksCommandOption, SocksClientOptions, SocksClientChainOptions, SocksClientEstablishedEvent, SocksClientBoundEvent, SocksUDPFrameDetails, SOCKS_INCOMING_PACKET_SIZES, SOCKS5_CUSTOM_AUTH_START, SOCKS5_CUSTOM_AUTH_END, SOCKS5_NO_ACCEPTABLE_AUTH, }; diff --git a/mybulma/node_modules/socks/typings/common/helpers.d.ts b/mybulma/node_modules/socks/typings/common/helpers.d.ts new file mode 100644 index 0000000..8c3a106 --- /dev/null +++ b/mybulma/node_modules/socks/typings/common/helpers.d.ts @@ -0,0 +1,13 @@ +import { SocksClientOptions, SocksClientChainOptions } from '../client/socksclient'; +/** + * Validates the provided SocksClientOptions + * @param options { SocksClientOptions } + * @param acceptedCommands { string[] } A list of accepted SocksProxy commands. 
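Taken together, the declarations above describe one connection attempt: pick a command, a destination, and a proxy, then hand them to the client. A minimal sketch follows, assuming the package's public SocksClient.createConnection helper (declared earlier in socksclient.d.ts, not repeated in this hunk) and using placeholder hosts and ports:

const { SocksClient } = require('socks');

// Shape follows SocksClientOptions above: a command, a destination, and one proxy.
const options = {
  command: 'connect',                                // SocksCommandOption
  destination: { host: 'example.com', port: 80 },    // SocksRemoteHost
  proxy: { host: '127.0.0.1', port: 1080, type: 5 }, // SocksProxy, SOCKS v5
  timeout: 30000,                                    // optional, otherwise DEFAULT_TIMEOUT
};

SocksClient.createConnection(options)
  .then(({ socket }) => {
    // SocksClientEstablishedEvent: `socket` is the net.Socket tunneled through the proxy.
    socket.write('GET / HTTP/1.1\r\nHost: example.com\r\n\r\n');
  })
  .catch(err => console.error(err)); // a SocksClientError carries the offending options

Chained connections follow the same pattern: SocksClientChainOptions carries a proxies array instead of a single proxy and is passed to the package's chain helper (SocksClient.createConnectionChain) rather than createConnection.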
+ */ +declare function validateSocksClientOptions(options: SocksClientOptions, acceptedCommands?: string[]): void; +/** + * Validates the SocksClientChainOptions + * @param options { SocksClientChainOptions } + */ +declare function validateSocksClientChainOptions(options: SocksClientChainOptions): void; +export { validateSocksClientOptions, validateSocksClientChainOptions }; diff --git a/mybulma/node_modules/socks/typings/common/receivebuffer.d.ts b/mybulma/node_modules/socks/typings/common/receivebuffer.d.ts new file mode 100644 index 0000000..756e98b --- /dev/null +++ b/mybulma/node_modules/socks/typings/common/receivebuffer.d.ts @@ -0,0 +1,12 @@ +/// +declare class ReceiveBuffer { + private buffer; + private offset; + private originalSize; + constructor(size?: number); + get length(): number; + append(data: Buffer): number; + peek(length: number): Buffer; + get(length: number): Buffer; +} +export { ReceiveBuffer }; diff --git a/mybulma/node_modules/socks/typings/common/util.d.ts b/mybulma/node_modules/socks/typings/common/util.d.ts new file mode 100644 index 0000000..83f20e7 --- /dev/null +++ b/mybulma/node_modules/socks/typings/common/util.d.ts @@ -0,0 +1,17 @@ +import { SocksClientOptions, SocksClientChainOptions } from './constants'; +/** + * Error wrapper for SocksClient + */ +declare class SocksClientError extends Error { + options: SocksClientOptions | SocksClientChainOptions; + constructor(message: string, options: SocksClientOptions | SocksClientChainOptions); +} +/** + * Shuffles a given array. + * @param array The array to shuffle. + */ +declare function shuffleArray(array: unknown[]): void; +declare type RequireOnlyOne = Pick> & { + [K in Keys]?: Required> & Partial, undefined>>; +}[Keys]; +export { RequireOnlyOne, SocksClientError, shuffleArray }; diff --git a/mybulma/node_modules/socks/typings/index.d.ts b/mybulma/node_modules/socks/typings/index.d.ts new file mode 100644 index 0000000..fbf9006 --- /dev/null +++ b/mybulma/node_modules/socks/typings/index.d.ts @@ -0,0 +1 @@ +export * from './client/socksclient'; diff --git a/mybulma/node_modules/source-map/dist/source-map.js b/mybulma/node_modules/source-map/dist/source-map.js new file mode 100644 index 0000000..b75e98b --- /dev/null +++ b/mybulma/node_modules/source-map/dist/source-map.js @@ -0,0 +1 @@ +!function(e,n){"object"==typeof exports&&"object"==typeof module?module.exports=n(require("fs"),require("path")):"function"==typeof define&&define.amd?define(["fs","path"],n):"object"==typeof exports?exports.sourceMap=n(require("fs"),require("path")):e.sourceMap=n(e.fs,e.path)}(window,(function(e,n){return function(e){var n={};function t(r){if(n[r])return n[r].exports;var o=n[r]={i:r,l:!1,exports:{}};return e[r].call(o.exports,o,o.exports,t),o.l=!0,o.exports}return t.m=e,t.c=n,t.d=function(e,n,r){t.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:r})},t.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},t.t=function(e,n){if(1&n&&(e=t(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(t.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var o in e)t.d(r,o,function(n){return e[n]}.bind(null,o));return r},t.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return t.d(n,"a",n),n},t.o=function(e,n){return 
Object.prototype.hasOwnProperty.call(e,n)},t.p="",t(t.s=5)}([function(e,n){n.getArg=function(e,n,t){if(n in e)return e[n];if(3===arguments.length)return t;throw new Error('"'+n+'" is a required argument.')};const t=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/,r=/^data:.+\,.+$/;function o(e){const n=e.match(t);return n?{scheme:n[1],auth:n[2],host:n[3],port:n[4],path:n[5]}:null}function i(e){let n="";return e.scheme&&(n+=e.scheme+":"),n+="//",e.auth&&(n+=e.auth+"@"),e.host&&(n+=e.host),e.port&&(n+=":"+e.port),e.path&&(n+=e.path),n}n.urlParse=o,n.urlGenerate=i;const s=function(e){const n=[];return function(t){for(let e=0;e32&&n.pop(),r}}((function(e){let t=e;const r=o(e);if(r){if(!r.path)return e;t=r.path}const s=n.isAbsolute(t),l=[];let a=0,u=0;for(;;){if(a=u,u=t.indexOf("/",a),-1===u){l.push(t.slice(a));break}for(l.push(t.slice(a,u));u=0;u--){const e=l[u];"."===e?l.splice(u,1):".."===e?c++:c>0&&(""===e?(l.splice(u+1,c),c=0):(l.splice(u,2),c--))}return t=l.join("/"),""===t&&(t=s?"/":"."),r?(r.path=t,i(r)):t}));function l(e,n){""===e&&(e="."),""===n&&(n=".");const t=o(n),l=o(e);if(l&&(e=l.path||"/"),t&&!t.scheme)return l&&(t.scheme=l.scheme),i(t);if(t||n.match(r))return n;if(l&&!l.host&&!l.path)return l.host=n,i(l);const a="/"===n.charAt(0)?n:s(e.replace(/\/+$/,"")+"/"+n);return l?(l.path=a,i(l)):a}n.normalize=s,n.join=l,n.isAbsolute=function(e){return"/"===e.charAt(0)||t.test(e)},n.relative=function(e,n){""===e&&(e="."),e=e.replace(/\/$/,"");let t=0;for(;0!==n.indexOf(e+"/");){const r=e.lastIndexOf("/");if(r<0)return n;if((e=e.slice(0,r)).match(/^([^\/]+:\/)?\/*$/))return n;++t}return Array(t+1).join("../")+n.substr(e.length+1)};const a=!("__proto__"in Object.create(null));function u(e){return e}function c(e){if(!e)return!1;const n=e.length;if(n<9)return!1;if(95!==e.charCodeAt(n-1)||95!==e.charCodeAt(n-2)||111!==e.charCodeAt(n-3)||116!==e.charCodeAt(n-4)||111!==e.charCodeAt(n-5)||114!==e.charCodeAt(n-6)||112!==e.charCodeAt(n-7)||95!==e.charCodeAt(n-8)||95!==e.charCodeAt(n-9))return!1;for(let t=n-10;t>=0;t--)if(36!==e.charCodeAt(t))return!1;return!0}function g(e,n){return e===n?0:null===e?1:null===n?-1:e>n?1:-1}n.toSetString=a?u:function(e){return c(e)?"$"+e:e},n.fromSetString=a?u:function(e){return c(e)?e.slice(1):e},n.compareByOriginalPositions=function(e,n,t){let r=g(e.source,n.source);return 0!==r?r:(r=e.originalLine-n.originalLine,0!==r?r:(r=e.originalColumn-n.originalColumn,0!==r||t?r:(r=e.generatedColumn-n.generatedColumn,0!==r?r:(r=e.generatedLine-n.generatedLine,0!==r?r:g(e.name,n.name)))))},n.compareByGeneratedPositionsDeflated=function(e,n,t){let r=e.generatedLine-n.generatedLine;return 0!==r?r:(r=e.generatedColumn-n.generatedColumn,0!==r||t?r:(r=g(e.source,n.source),0!==r?r:(r=e.originalLine-n.originalLine,0!==r?r:(r=e.originalColumn-n.originalColumn,0!==r?r:g(e.name,n.name)))))},n.compareByGeneratedPositionsInflated=function(e,n){let t=e.generatedLine-n.generatedLine;return 0!==t?t:(t=e.generatedColumn-n.generatedColumn,0!==t?t:(t=g(e.source,n.source),0!==t?t:(t=e.originalLine-n.originalLine,0!==t?t:(t=e.originalColumn-n.originalColumn,0!==t?t:g(e.name,n.name)))))},n.parseSourceMapInput=function(e){return JSON.parse(e.replace(/^\)]}'[^\n]*\n/,""))},n.computeSourceURL=function(e,n,t){if(n=n||"",e&&("/"!==e[e.length-1]&&"/"!==n[0]&&(e+="/"),n=e+n),t){const e=o(t);if(!e)throw new Error("sourceMapURL could not be parsed");if(e.path){const n=e.path.lastIndexOf("/");n>=0&&(e.path=e.path.substring(0,n+1))}n=l(i(e),n)}return s(n)}},function(e,n,t){const 
r=t(2),o=t(0),i=t(3).ArraySet,s=t(7).MappingList;class l{constructor(e){e||(e={}),this._file=o.getArg(e,"file",null),this._sourceRoot=o.getArg(e,"sourceRoot",null),this._skipValidation=o.getArg(e,"skipValidation",!1),this._sources=new i,this._names=new i,this._mappings=new s,this._sourcesContents=null}static fromSourceMap(e){const n=e.sourceRoot,t=new l({file:e.file,sourceRoot:n});return e.eachMapping((function(e){const r={generated:{line:e.generatedLine,column:e.generatedColumn}};null!=e.source&&(r.source=e.source,null!=n&&(r.source=o.relative(n,r.source)),r.original={line:e.originalLine,column:e.originalColumn},null!=e.name&&(r.name=e.name)),t.addMapping(r)})),e.sources.forEach((function(r){let i=r;null!==n&&(i=o.relative(n,r)),t._sources.has(i)||t._sources.add(i);const s=e.sourceContentFor(r);null!=s&&t.setSourceContent(r,s)})),t}addMapping(e){const n=o.getArg(e,"generated"),t=o.getArg(e,"original",null);let r=o.getArg(e,"source",null),i=o.getArg(e,"name",null);this._skipValidation||this._validateMapping(n,t,r,i),null!=r&&(r=String(r),this._sources.has(r)||this._sources.add(r)),null!=i&&(i=String(i),this._names.has(i)||this._names.add(i)),this._mappings.add({generatedLine:n.line,generatedColumn:n.column,originalLine:null!=t&&t.line,originalColumn:null!=t&&t.column,source:r,name:i})}setSourceContent(e,n){let t=e;null!=this._sourceRoot&&(t=o.relative(this._sourceRoot,t)),null!=n?(this._sourcesContents||(this._sourcesContents=Object.create(null)),this._sourcesContents[o.toSetString(t)]=n):this._sourcesContents&&(delete this._sourcesContents[o.toSetString(t)],0===Object.keys(this._sourcesContents).length&&(this._sourcesContents=null))}applySourceMap(e,n,t){let r=n;if(null==n){if(null==e.file)throw new Error('SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, or the source map\'s "file" property. Both were omitted.');r=e.file}const s=this._sourceRoot;null!=s&&(r=o.relative(s,r));const l=this._mappings.toArray().length>0?new i:this._sources,a=new i;this._mappings.unsortedForEach((function(n){if(n.source===r&&null!=n.originalLine){const r=e.originalPositionFor({line:n.originalLine,column:n.originalColumn});null!=r.source&&(n.source=r.source,null!=t&&(n.source=o.join(t,n.source)),null!=s&&(n.source=o.relative(s,n.source)),n.originalLine=r.line,n.originalColumn=r.column,null!=r.name&&(n.name=r.name))}const i=n.source;null==i||l.has(i)||l.add(i);const u=n.name;null==u||a.has(u)||a.add(u)}),this),this._sources=l,this._names=a,e.sources.forEach((function(n){const r=e.sourceContentFor(n);null!=r&&(null!=t&&(n=o.join(t,n)),null!=s&&(n=o.relative(s,n)),this.setSourceContent(n,r))}),this)}_validateMapping(e,n,t,r){if(n&&"number"!=typeof n.line&&"number"!=typeof n.column)throw new Error("original.line and original.column are not numbers -- you probably meant to omit the original mapping entirely and only map the generated position. 
If so, pass null for the original mapping instead of an object with empty or null values.");if(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0&&!n&&!t&&!r);else if(!(e&&"line"in e&&"column"in e&&n&&"line"in n&&"column"in n&&e.line>0&&e.column>=0&&n.line>0&&n.column>=0&&t))throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:t,original:n,name:r}))}_serializeMappings(){let e,n,t,i,s=0,l=1,a=0,u=0,c=0,g=0,p="";const h=this._mappings.toArray();for(let m=0,d=h.length;m0){if(!o.compareByGeneratedPositionsInflated(n,h[m-1]))continue;e+=","}e+=r.encode(n.generatedColumn-s),s=n.generatedColumn,null!=n.source&&(i=this._sources.indexOf(n.source),e+=r.encode(i-g),g=i,e+=r.encode(n.originalLine-1-u),u=n.originalLine-1,e+=r.encode(n.originalColumn-a),a=n.originalColumn,null!=n.name&&(t=this._names.indexOf(n.name),e+=r.encode(t-c),c=t)),p+=e}return p}_generateSourcesContent(e,n){return e.map((function(e){if(!this._sourcesContents)return null;null!=n&&(e=o.relative(n,e));const t=o.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,t)?this._sourcesContents[t]:null}),this)}toJSON(){const e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};return null!=this._file&&(e.file=this._file),null!=this._sourceRoot&&(e.sourceRoot=this._sourceRoot),this._sourcesContents&&(e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)),e}toString(){return JSON.stringify(this.toJSON())}}l.prototype._version=3,n.SourceMapGenerator=l},function(e,n,t){const r=t(6);n.encode=function(e){let n,t="",o=function(e){return e<0?1+(-e<<1):0+(e<<1)}(e);do{n=31&o,o>>>=5,o>0&&(n|=32),t+=r.encode(n)}while(o>0);return t}},function(e,n){class t{constructor(){this._array=[],this._set=new Map}static fromArray(e,n){const r=new t;for(let t=0,o=e.length;t=0)return n;throw new Error('"'+e+'" is not in the set.')}at(e){if(e>=0&&ee.arrayBuffer());if(n instanceof ArrayBuffer)return Promise.resolve(n);throw new Error("You must provide the string URL or ArrayBuffer contents of lib/mappings.wasm by calling SourceMapConsumer.initialize({ 'lib/mappings.wasm': ... 
}) before using SourceMapConsumer")},e.exports.initialize=e=>n=e}else{const r=t(10),o=t(11);e.exports=function(){return new Promise((e,t)=>{const i=o.join(n,"mappings.wasm");r.readFile(i,null,(n,r)=>{n?t(n):e(r.buffer)})})},e.exports.initialize=e=>{console.debug("SourceMapConsumer.initialize is a no-op when running in node.js")}}}).call(this,"/")},function(e,n,t){n.SourceMapGenerator=t(1).SourceMapGenerator,n.SourceMapConsumer=t(8).SourceMapConsumer,n.SourceNode=t(13).SourceNode},function(e,n){const t="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");n.encode=function(e){if(0<=e&&et||o==t&&s>=i||r.compareByGeneratedPositionsInflated(e,n)<=0}(this._last,e)?(this._sorted=!1,this._array.push(e)):(this._last=e,this._array.push(e))}toArray(){return this._sorted||(this._array.sort(r.compareByGeneratedPositionsInflated),this._sorted=!0),this._array}}},function(e,n,t){const r=t(0),o=t(9),i=t(3).ArraySet,s=(t(2),t(4)),l=t(12),a=Symbol("smcInternal");class u{constructor(e,n){return e==a?Promise.resolve(this):function(e,n){let t=e;"string"==typeof e&&(t=r.parseSourceMapInput(e));const o=null!=t.sections?new g(t,n):new c(t,n);return Promise.resolve(o)}(e,n)}static initialize(e){s.initialize(e["lib/mappings.wasm"])}static fromSourceMap(e,n){return function(e,n){return c.fromSourceMap(e,n)}(e,n)}static async with(e,n,t){const r=await new u(e,n);try{return await t(r)}finally{r.destroy()}}_parseMappings(e,n){throw new Error("Subclasses must implement _parseMappings")}eachMapping(e,n,t){throw new Error("Subclasses must implement eachMapping")}allGeneratedPositionsFor(e){throw new Error("Subclasses must implement allGeneratedPositionsFor")}destroy(){throw new Error("Subclasses must implement destroy")}}u.prototype._version=3,u.GENERATED_ORDER=1,u.ORIGINAL_ORDER=2,u.GREATEST_LOWER_BOUND=1,u.LEAST_UPPER_BOUND=2,n.SourceMapConsumer=u;class c extends u{constructor(e,n){return super(a).then(t=>{let o=e;"string"==typeof e&&(o=r.parseSourceMapInput(e));const s=r.getArg(o,"version");let a=r.getArg(o,"sources");const u=r.getArg(o,"names",[]);let c=r.getArg(o,"sourceRoot",null);const g=r.getArg(o,"sourcesContent",null),p=r.getArg(o,"mappings"),h=r.getArg(o,"file",null);if(s!=t._version)throw new Error("Unsupported version: "+s);return c&&(c=r.normalize(c)),a=a.map(String).map(r.normalize).map((function(e){return c&&r.isAbsolute(c)&&r.isAbsolute(e)?r.relative(c,e):e})),t._names=i.fromArray(u.map(String),!0),t._sources=i.fromArray(a,!0),t._absoluteSources=t._sources.toArray().map((function(e){return r.computeSourceURL(c,e,n)})),t.sourceRoot=c,t.sourcesContent=g,t._mappings=p,t._sourceMapURL=n,t.file=h,t._computedColumnSpans=!1,t._mappingsPtr=0,t._wasm=null,l().then(e=>(t._wasm=e,t))})}_findSourceIndex(e){let n=e;if(null!=this.sourceRoot&&(n=r.relative(this.sourceRoot,n)),this._sources.has(n))return this._sources.indexOf(n);for(let n=0;n{null!==n.source&&(n.source=this._sources.at(n.source),n.source=r.computeSourceURL(s,n.source,this._sourceMapURL),null!==n.name&&(n.name=this._names.at(n.name))),e.call(o,n)},()=>{switch(i){case u.GENERATED_ORDER:this._wasm.exports.by_generated_location(this._getMappingsPtr());break;case u.ORIGINAL_ORDER:this._wasm.exports.by_original_location(this._getMappingsPtr());break;default:throw new Error("Unknown order of iteration.")}})}allGeneratedPositionsFor(e){let n=r.getArg(e,"source");const t=r.getArg(e,"line"),o=e.column||0;if(n=this._findSourceIndex(n),n<0)return[];if(t<1)throw new Error("Line numbers must be >= 1");if(o<0)throw new Error("Column numbers must 
be >= 0");const i=[];return this._wasm.withMappingCallback(e=>{let n=e.lastGeneratedColumn;this._computedColumnSpans&&null===n&&(n=1/0),i.push({line:e.generatedLine,column:e.generatedColumn,lastColumn:n})},()=>{this._wasm.exports.all_generated_locations_for(this._getMappingsPtr(),n,t-1,"column"in e,o)}),i}destroy(){0!==this._mappingsPtr&&(this._wasm.exports.free_mappings(this._mappingsPtr),this._mappingsPtr=0)}computeColumnSpans(){this._computedColumnSpans||(this._wasm.exports.compute_column_spans(this._getMappingsPtr()),this._computedColumnSpans=!0)}originalPositionFor(e){const n={generatedLine:r.getArg(e,"line"),generatedColumn:r.getArg(e,"column")};if(n.generatedLine<1)throw new Error("Line numbers must be >= 1");if(n.generatedColumn<0)throw new Error("Column numbers must be >= 0");let t,o=r.getArg(e,"bias",u.GREATEST_LOWER_BOUND);if(null==o&&(o=u.GREATEST_LOWER_BOUND),this._wasm.withMappingCallback(e=>t=e,()=>{this._wasm.exports.original_location_for(this._getMappingsPtr(),n.generatedLine-1,n.generatedColumn,o)}),t&&t.generatedLine===n.generatedLine){let e=r.getArg(t,"source",null);null!==e&&(e=this._sources.at(e),e=r.computeSourceURL(this.sourceRoot,e,this._sourceMapURL));let n=r.getArg(t,"name",null);return null!==n&&(n=this._names.at(n)),{source:e,line:r.getArg(t,"originalLine",null),column:r.getArg(t,"originalColumn",null),name:n}}return{source:null,line:null,column:null,name:null}}hasContentsOfAllSources(){return!!this.sourcesContent&&(this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some((function(e){return null==e})))}sourceContentFor(e,n){if(!this.sourcesContent)return null;const t=this._findSourceIndex(e);if(t>=0)return this.sourcesContent[t];let o,i=e;if(null!=this.sourceRoot&&(i=r.relative(this.sourceRoot,i)),null!=this.sourceRoot&&(o=r.urlParse(this.sourceRoot))){const e=i.replace(/^file:\/\//,"");if("file"==o.scheme&&this._sources.has(e))return this.sourcesContent[this._sources.indexOf(e)];if((!o.path||"/"==o.path)&&this._sources.has("/"+i))return this.sourcesContent[this._sources.indexOf("/"+i)]}if(n)return null;throw new Error('"'+i+'" is not in the SourceMap.')}generatedPositionFor(e){let n=r.getArg(e,"source");if(n=this._findSourceIndex(n),n<0)return{line:null,column:null,lastColumn:null};const t={source:n,originalLine:r.getArg(e,"line"),originalColumn:r.getArg(e,"column")};if(t.originalLine<1)throw new Error("Line numbers must be >= 1");if(t.originalColumn<0)throw new Error("Column numbers must be >= 0");let o,i=r.getArg(e,"bias",u.GREATEST_LOWER_BOUND);if(null==i&&(i=u.GREATEST_LOWER_BOUND),this._wasm.withMappingCallback(e=>o=e,()=>{this._wasm.exports.generated_location_for(this._getMappingsPtr(),t.source,t.originalLine-1,t.originalColumn,i)}),o&&o.source===t.source){let e=o.lastGeneratedColumn;return this._computedColumnSpans&&null===e&&(e=1/0),{line:r.getArg(o,"generatedLine",null),column:r.getArg(o,"generatedColumn",null),lastColumn:e}}return{line:null,column:null,lastColumn:null}}}c.prototype.consumer=u,n.BasicSourceMapConsumer=c;class g extends u{constructor(e,n){return super(a).then(t=>{let o=e;"string"==typeof e&&(o=r.parseSourceMapInput(e));const s=r.getArg(o,"version"),l=r.getArg(o,"sections");if(s!=t._version)throw new Error("Unsupported version: "+s);t._sources=new i,t._names=new i,t.__generatedMappings=null,t.__originalMappings=null,t.__generatedMappingsUnsorted=null,t.__originalMappingsUnsorted=null;let a={line:-1,column:0};return Promise.all(l.map(e=>{if(e.url)throw new Error("Support for url field in sections not 
implemented.");const t=r.getArg(e,"offset"),o=r.getArg(t,"line"),i=r.getArg(t,"column");if(o({generatedOffset:{generatedLine:o+1,generatedColumn:i+1},consumer:e}))})).then(e=>(t._sections=e,t))})}get _generatedMappings(){return this.__generatedMappings||this._sortGeneratedMappings(),this.__generatedMappings}get _originalMappings(){return this.__originalMappings||this._sortOriginalMappings(),this.__originalMappings}get _generatedMappingsUnsorted(){return this.__generatedMappingsUnsorted||this._parseMappings(this._mappings,this.sourceRoot),this.__generatedMappingsUnsorted}get _originalMappingsUnsorted(){return this.__originalMappingsUnsorted||this._parseMappings(this._mappings,this.sourceRoot),this.__originalMappingsUnsorted}_sortGeneratedMappings(){const e=this._generatedMappingsUnsorted;e.sort(r.compareByGeneratedPositionsDeflated),this.__generatedMappings=e}_sortOriginalMappings(){const e=this._originalMappingsUnsorted;e.sort(r.compareByOriginalPositions),this.__originalMappings=e}get sources(){const e=[];for(let n=0;ni.push(e));for(let e=0;e= 1");if(t.originalColumn<0)throw new Error("Column numbers must be >= 0");const i=[];let s=this._findMapping(t,this._originalMappings,"originalLine","originalColumn",r.compareByOriginalPositions,o.LEAST_UPPER_BOUND);if(s>=0){let t=this._originalMappings[s];if(void 0===e.column){const e=t.originalLine;for(;t&&t.originalLine===e;){let e=t.lastGeneratedColumn;this._computedColumnSpans&&null===e&&(e=1/0),i.push({line:r.getArg(t,"generatedLine",null),column:r.getArg(t,"generatedColumn",null),lastColumn:e}),t=this._originalMappings[++s]}}else{const e=t.originalColumn;for(;t&&t.originalLine===n&&t.originalColumn==e;){let e=t.lastGeneratedColumn;this._computedColumnSpans&&null===e&&(e=1/0),i.push({line:r.getArg(t,"generatedLine",null),column:r.getArg(t,"generatedColumn",null),lastColumn:e}),t=this._originalMappings[++s]}}}return i}destroy(){for(let e=0;e0?r-a>1?e(a,r,o,i,s,l):l==n.LEAST_UPPER_BOUND?r1?e(t,a,o,i,s,l):l==n.LEAST_UPPER_BOUND?a:t<0?-1:t}(-1,t.length,e,t,r,o||n.GREATEST_LOWER_BOUND);if(i<0)return-1;for(;i-1>=0&&0===r(t[i],t[i-1],!0);)--i;return i}},function(n,t){n.exports=e},function(e,t){e.exports=n},function(e,n,t){const r=t(4);function o(){this.generatedLine=0,this.generatedColumn=0,this.lastGeneratedColumn=null,this.source=null,this.originalLine=null,this.originalColumn=null,this.name=null}let i=null;e.exports=function(){if(i)return i;const e=[];return i=r().then(n=>WebAssembly.instantiate(n,{env:{mapping_callback(n,t,r,i,s,l,a,u,c,g){const p=new 
o;p.generatedLine=n+1,p.generatedColumn=t,r&&(p.lastGeneratedColumn=i-1),s&&(p.source=l,p.originalLine=a+1,p.originalColumn=u,c&&(p.name=g)),e[e.length-1](p)},start_all_generated_locations_for(){console.time("all_generated_locations_for")},end_all_generated_locations_for(){console.timeEnd("all_generated_locations_for")},start_compute_column_spans(){console.time("compute_column_spans")},end_compute_column_spans(){console.timeEnd("compute_column_spans")},start_generated_location_for(){console.time("generated_location_for")},end_generated_location_for(){console.timeEnd("generated_location_for")},start_original_location_for(){console.time("original_location_for")},end_original_location_for(){console.timeEnd("original_location_for")},start_parse_mappings(){console.time("parse_mappings")},end_parse_mappings(){console.timeEnd("parse_mappings")},start_sort_by_generated_location(){console.time("sort_by_generated_location")},end_sort_by_generated_location(){console.timeEnd("sort_by_generated_location")},start_sort_by_original_location(){console.time("sort_by_original_location")},end_sort_by_original_location(){console.timeEnd("sort_by_original_location")}}})).then(n=>({exports:n.instance.exports,withMappingCallback:(n,t)=>{e.push(n);try{t()}finally{e.pop()}}})).then(null,e=>{throw i=null,e}),i}},function(e,n,t){const r=t(1).SourceMapGenerator,o=t(0),i=/(\r?\n)/,s="$$$isSourceNode$$$";class l{constructor(e,n,t,r,o){this.children=[],this.sourceContents={},this.line=null==e?null:e,this.column=null==n?null:n,this.source=null==t?null:t,this.name=null==o?null:o,this[s]=!0,null!=r&&this.add(r)}static fromStringWithSourceMap(e,n,t){const r=new l,s=e.split(i);let a=0;const u=function(){return e()+(e()||"");function e(){return a=0;n--)this.prepend(e[n]);else{if(!e[s]&&"string"!=typeof e)throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e);this.children.unshift(e)}return this}walk(e){let n;for(let t=0,r=this.children.length;t0){for(n=[],t=0;t= 0) { + return idx; + } + throw new Error('"' + aStr + '" is not in the set.'); + } + + /** + * What is the element at the given index? + * + * @param Number aIdx + */ + at(aIdx) { + if (aIdx >= 0 && aIdx < this._array.length) { + return this._array[aIdx]; + } + throw new Error("No element indexed by " + aIdx); + } + + /** + * Returns the array representation of this set (which has the proper indices + * indicated by indexOf). Note that this is a copy of the internal array used + * for storing the members so that no one can mess with internal state. + */ + toArray() { + return this._array.slice(); + } +} +exports.ArraySet = ArraySet; diff --git a/mybulma/node_modules/source-map/lib/base64-vlq.js b/mybulma/node_modules/source-map/lib/base64-vlq.js new file mode 100644 index 0000000..fc1049c --- /dev/null +++ b/mybulma/node_modules/source-map/lib/base64-vlq.js @@ -0,0 +1,111 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + * + * Based on the Base 64 VLQ implementation in Closure Compiler: + * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java + * + * Copyright 2011 The Closure Compiler Authors. All rights reserved. 
+ * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Google Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +const base64 = require("./base64"); + +// A single base 64 digit can contain 6 bits of data. For the base 64 variable +// length quantities we use in the source map spec, the first bit is the sign, +// the next four bits are the actual value, and the 6th bit is the +// continuation bit. The continuation bit tells us whether there are more +// digits in this value following this digit. +// +// Continuation +// | Sign +// | | +// V V +// 101011 + +const VLQ_BASE_SHIFT = 5; + +// binary: 100000 +const VLQ_BASE = 1 << VLQ_BASE_SHIFT; + +// binary: 011111 +const VLQ_BASE_MASK = VLQ_BASE - 1; + +// binary: 100000 +const VLQ_CONTINUATION_BIT = VLQ_BASE; + +/** + * Converts from a two-complement value to a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) + * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) + */ +function toVLQSigned(aValue) { + return aValue < 0 + ? ((-aValue) << 1) + 1 + : (aValue << 1) + 0; +} + +/** + * Converts to a two-complement value from a value where the sign bit is + * placed in the least significant bit. For example, as decimals: + * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 + * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 + */ +// eslint-disable-next-line no-unused-vars +function fromVLQSigned(aValue) { + const isNegative = (aValue & 1) === 1; + const shifted = aValue >> 1; + return isNegative + ? -shifted + : shifted; +} + +/** + * Returns the base 64 VLQ encoded value. + */ +exports.encode = function base64VLQ_encode(aValue) { + let encoded = ""; + let digit; + + let vlq = toVLQSigned(aValue); + + do { + digit = vlq & VLQ_BASE_MASK; + vlq >>>= VLQ_BASE_SHIFT; + if (vlq > 0) { + // There are still more digits in this value, so we must make sure the + // continuation bit is marked. 
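// (Editorial worked example, not part of the vendored file: encode(16) maps
//  16 -> toVLQSigned(16) = 32 = 0b100000; the low five bits give digit 0, bits
//  remain, so the continuation bit is set, yielding 0b100000 = 32, which
//  base64-encodes to "g"; the next pass emits 1, which encodes to "B", so
//  encode(16) === "gB". Small values fit in one digit: encode(1) === "C" and
//  encode(-1) === "D", the sign living in the least significant bit.)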
+ digit |= VLQ_CONTINUATION_BIT; + } + encoded += base64.encode(digit); + } while (vlq > 0); + + return encoded; +}; diff --git a/mybulma/node_modules/source-map/lib/base64.js b/mybulma/node_modules/source-map/lib/base64.js new file mode 100644 index 0000000..b9ca319 --- /dev/null +++ b/mybulma/node_modules/source-map/lib/base64.js @@ -0,0 +1,18 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +const intToCharMap = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split(""); + +/** + * Encode an integer in the range of 0 to 63 to a single base 64 digit. + */ +exports.encode = function(number) { + if (0 <= number && number < intToCharMap.length) { + return intToCharMap[number]; + } + throw new TypeError("Must be between 0 and 63: " + number); +}; diff --git a/mybulma/node_modules/source-map/lib/binary-search.js b/mybulma/node_modules/source-map/lib/binary-search.js new file mode 100644 index 0000000..d6f898e --- /dev/null +++ b/mybulma/node_modules/source-map/lib/binary-search.js @@ -0,0 +1,107 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +exports.GREATEST_LOWER_BOUND = 1; +exports.LEAST_UPPER_BOUND = 2; + +/** + * Recursive implementation of binary search. + * + * @param aLow Indices here and lower do not contain the needle. + * @param aHigh Indices here and higher do not contain the needle. + * @param aNeedle The element being searched for. + * @param aHaystack The non-empty array being searched. + * @param aCompare Function which takes two elements and returns -1, 0, or 1. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + */ +function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { + // This function terminates when one of the following is true: + // + // 1. We find the exact element we are looking for. + // + // 2. We did not find the exact element, but we can return the index of + // the next-closest element. + // + // 3. We did not find the exact element, and there is no next-closest + // element than the one we are searching for, so we return -1. + const mid = Math.floor((aHigh - aLow) / 2) + aLow; + const cmp = aCompare(aNeedle, aHaystack[mid], true); + if (cmp === 0) { + // Found the element we are looking for. + return mid; + } else if (cmp > 0) { + // Our needle is greater than aHaystack[mid]. + if (aHigh - mid > 1) { + // The element is in the upper half. + return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); + } + + // The exact needle element was not found in this haystack. Determine if + // we are in termination case (3) or (2) and return the appropriate thing. + if (aBias == exports.LEAST_UPPER_BOUND) { + return aHigh < aHaystack.length ? aHigh : -1; + } + return mid; + } + + // Our needle is less than aHaystack[mid]. + if (mid - aLow > 1) { + // The element is in the lower half. + return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); + } + + // we are in termination case (3) or (2) and return the appropriate thing. 
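// (Editorial example, not part of the vendored file: search(5, [10, 20, 30], cmp)
//  bottoms out here with mid === 0 (element 10) and aLow === -1. With
//  LEAST_UPPER_BOUND it returns 0, the smallest element greater than the needle;
//  with GREATEST_LOWER_BOUND it falls through to aLow and yields -1, since
//  nothing in the haystack is smaller than the needle.)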
+ if (aBias == exports.LEAST_UPPER_BOUND) { + return mid; + } + return aLow < 0 ? -1 : aLow; +} + +/** + * This is an implementation of binary search which will always try and return + * the index of the closest element if there is no exact hit. This is because + * mappings between original and generated line/col pairs are single points, + * and there is an implicit region between each of them, so a miss just means + * that you aren't on the very start of a region. + * + * @param aNeedle The element you are looking for. + * @param aHaystack The array that is being searched. + * @param aCompare A function which takes the needle and an element in the + * array and returns -1, 0, or 1 depending on whether the needle is less + * than, equal to, or greater than the element, respectively. + * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or + * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. + */ +exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { + if (aHaystack.length === 0) { + return -1; + } + + let index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, + aCompare, aBias || exports.GREATEST_LOWER_BOUND); + if (index < 0) { + return -1; + } + + // We have found either the exact element, or the next-closest element than + // the one we are searching for. However, there may be more than one such + // element. Make sure we always return the smallest of these. + while (index - 1 >= 0) { + if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { + break; + } + --index; + } + + return index; +}; diff --git a/mybulma/node_modules/source-map/lib/mapping-list.js b/mybulma/node_modules/source-map/lib/mapping-list.js new file mode 100644 index 0000000..7056861 --- /dev/null +++ b/mybulma/node_modules/source-map/lib/mapping-list.js @@ -0,0 +1,80 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2014 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +const util = require("./util"); + +/** + * Determine whether mappingB is after mappingA with respect to generated + * position. + */ +function generatedPositionAfter(mappingA, mappingB) { + // Optimized for most common case + const lineA = mappingA.generatedLine; + const lineB = mappingB.generatedLine; + const columnA = mappingA.generatedColumn; + const columnB = mappingB.generatedColumn; + return lineB > lineA || lineB == lineA && columnB >= columnA || + util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; +} + +/** + * A data structure to provide a sorted view of accumulated mappings in a + * performance conscious manner. It trades a negligible overhead in general + * case for a large speedup in case of mappings being added in order. + */ +class MappingList { + constructor() { + this._array = []; + this._sorted = true; + // Serves as infimum + this._last = {generatedLine: -1, generatedColumn: 0}; + } + + /** + * Iterate through internal items. This method takes the same arguments that + * `Array.prototype.forEach` takes. + * + * NOTE: The order of the mappings is NOT guaranteed. + */ + unsortedForEach(aCallback, aThisArg) { + this._array.forEach(aCallback, aThisArg); + } + + /** + * Add the given source mapping. 
+ * + * @param Object aMapping + */ + add(aMapping) { + if (generatedPositionAfter(this._last, aMapping)) { + this._last = aMapping; + this._array.push(aMapping); + } else { + this._sorted = false; + this._array.push(aMapping); + } + } + + /** + * Returns the flat, sorted array of mappings. The mappings are sorted by + * generated position. + * + * WARNING: This method returns internal data without copying, for + * performance. The return value must NOT be mutated, and should be treated as + * an immutable borrow. If you want to take ownership, you must make your own + * copy. + */ + toArray() { + if (!this._sorted) { + this._array.sort(util.compareByGeneratedPositionsInflated); + this._sorted = true; + } + return this._array; + } +} + +exports.MappingList = MappingList; diff --git a/mybulma/node_modules/source-map/lib/mappings.wasm b/mybulma/node_modules/source-map/lib/mappings.wasm new file mode 100644 index 0000000..3515370 Binary files /dev/null and b/mybulma/node_modules/source-map/lib/mappings.wasm differ diff --git a/mybulma/node_modules/source-map/lib/read-wasm.js b/mybulma/node_modules/source-map/lib/read-wasm.js new file mode 100644 index 0000000..e81c177 --- /dev/null +++ b/mybulma/node_modules/source-map/lib/read-wasm.js @@ -0,0 +1,49 @@ +/* Determine browser vs node environment by testing the default top level context. Solution courtesy of: https://stackoverflow.com/questions/17575790/environment-detection-node-js-or-browser */ +const isBrowserEnvironment = (function() { + // eslint-disable-next-line no-undef + return (typeof window !== "undefined") && (this === window); +}).call(); + +if (isBrowserEnvironment) { + // Web version of reading a wasm file into an array buffer. + + let mappingsWasm = null; + + module.exports = function readWasm() { + if (typeof mappingsWasm === "string") { + return fetch(mappingsWasm) + .then(response => response.arrayBuffer()); + } + if (mappingsWasm instanceof ArrayBuffer) { + return Promise.resolve(mappingsWasm); + } + throw new Error("You must provide the string URL or ArrayBuffer contents " + + "of lib/mappings.wasm by calling " + + "SourceMapConsumer.initialize({ 'lib/mappings.wasm': ... }) " + + "before using SourceMapConsumer"); + }; + + module.exports.initialize = input => mappingsWasm = input; +} else { + // Node version of reading a wasm file into an array buffer. + const fs = require("fs"); + const path = require("path"); + + module.exports = function readWasm() { + return new Promise((resolve, reject) => { + const wasmPath = path.join(__dirname, "mappings.wasm"); + fs.readFile(wasmPath, null, (error, data) => { + if (error) { + reject(error); + return; + } + + resolve(data.buffer); + }); + }); + }; + + module.exports.initialize = _ => { + console.debug("SourceMapConsumer.initialize is a no-op when running in node.js"); + }; +} diff --git a/mybulma/node_modules/source-map/lib/source-map-consumer.js b/mybulma/node_modules/source-map/lib/source-map-consumer.js new file mode 100644 index 0000000..9aaf92f --- /dev/null +++ b/mybulma/node_modules/source-map/lib/source-map-consumer.js @@ -0,0 +1,1237 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. 
See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +const util = require("./util"); +const binarySearch = require("./binary-search"); +const ArraySet = require("./array-set").ArraySet; +const base64VLQ = require("./base64-vlq"); // eslint-disable-line no-unused-vars +const readWasm = require("../lib/read-wasm"); +const wasm = require("./wasm"); + +const INTERNAL = Symbol("smcInternal"); + +class SourceMapConsumer { + constructor(aSourceMap, aSourceMapURL) { + // If the constructor was called by super(), just return Promise. + // Yes, this is a hack to retain the pre-existing API of the base-class + // constructor also being an async factory function. + if (aSourceMap == INTERNAL) { + return Promise.resolve(this); + } + + return _factory(aSourceMap, aSourceMapURL); + } + + static initialize(opts) { + readWasm.initialize(opts["lib/mappings.wasm"]); + } + + static fromSourceMap(aSourceMap, aSourceMapURL) { + return _factoryBSM(aSourceMap, aSourceMapURL); + } + + /** + * Construct a new `SourceMapConsumer` from `rawSourceMap` and `sourceMapUrl` + * (see the `SourceMapConsumer` constructor for details. Then, invoke the `async + * function f(SourceMapConsumer) -> T` with the newly constructed consumer, wait + * for `f` to complete, call `destroy` on the consumer, and return `f`'s return + * value. + * + * You must not use the consumer after `f` completes! + * + * By using `with`, you do not have to remember to manually call `destroy` on + * the consumer, since it will be called automatically once `f` completes. + * + * ```js + * const xSquared = await SourceMapConsumer.with( + * myRawSourceMap, + * null, + * async function (consumer) { + * // Use `consumer` inside here and don't worry about remembering + * // to call `destroy`. + * + * const x = await whatever(consumer); + * return x * x; + * } + * ); + * + * // You may not use that `consumer` anymore out here; it has + * // been destroyed. But you can use `xSquared`. + * console.log(xSquared); + * ``` + */ + static async with(rawSourceMap, sourceMapUrl, f) { + const consumer = await new SourceMapConsumer(rawSourceMap, sourceMapUrl); + try { + return await f(consumer); + } finally { + consumer.destroy(); + } + } + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + _parseMappings(aStr, aSourceRoot) { + throw new Error("Subclasses must implement _parseMappings"); + } + + /** + * Iterate over each mapping between an original source/line/column and a + * generated line/column in this source map. + * + * @param Function aCallback + * The function that is called with each mapping. + * @param Object aContext + * Optional. If specified, this object will be the value of `this` every + * time that `aCallback` is called. + * @param aOrder + * Either `SourceMapConsumer.GENERATED_ORDER` or + * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to + * iterate over the mappings sorted by the generated file's line/column + * order or the original's source/line/column order, respectively. Defaults to + * `SourceMapConsumer.GENERATED_ORDER`. + */ + eachMapping(aCallback, aContext, aOrder) { + throw new Error("Subclasses must implement eachMapping"); + } + + /** + * Returns all generated line and column information for the original source, + * line, and column provided. 
If no column is provided, returns all mappings + * corresponding to a either the line we are searching for or the next + * closest line that has any mappings. Otherwise, returns all mappings + * corresponding to the given line and either the column we are searching for + * or the next closest column that has any offsets. + * + * The only argument is an object with the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number is 1-based. + * - column: Optional. the column number in the original source. + * The column number is 0-based. + * + * and an array of objects is returned, each with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ + allGeneratedPositionsFor(aArgs) { + throw new Error("Subclasses must implement allGeneratedPositionsFor"); + } + + destroy() { + throw new Error("Subclasses must implement destroy"); + } +} + +/** + * The version of the source mapping spec that we are consuming. + */ +SourceMapConsumer.prototype._version = 3; +SourceMapConsumer.GENERATED_ORDER = 1; +SourceMapConsumer.ORIGINAL_ORDER = 2; + +SourceMapConsumer.GREATEST_LOWER_BOUND = 1; +SourceMapConsumer.LEAST_UPPER_BOUND = 2; + +exports.SourceMapConsumer = SourceMapConsumer; + +/** + * A BasicSourceMapConsumer instance represents a parsed source map which we can + * query for information about the original file positions by giving it a file + * position in the generated source. + * + * The first parameter is the raw source map (either as a JSON string, or + * already parsed to an object). According to the spec, source maps have the + * following attributes: + * + * - version: Which version of the source map spec this map is following. + * - sources: An array of URLs to the original source files. + * - names: An array of identifiers which can be referenced by individual mappings. + * - sourceRoot: Optional. The URL root from which all sources are relative. + * - sourcesContent: Optional. An array of contents of the original source files. + * - mappings: A string of base64 VLQs which contain the actual mappings. + * - file: Optional. The generated file this source map is associated with. + * + * Here is an example source map, taken from the source map spec[0]: + * + * { + * version : 3, + * file: "out.js", + * sourceRoot : "", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AA,AB;;ABCDE;" + * } + * + * The second parameter, if given, is a string whose value is the URL + * at which the source map was found. This URL is used to compute the + * sources array. + * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# + */ +class BasicSourceMapConsumer extends SourceMapConsumer { + constructor(aSourceMap, aSourceMapURL) { + return super(INTERNAL).then(that => { + let sourceMap = aSourceMap; + if (typeof aSourceMap === "string") { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + const version = util.getArg(sourceMap, "version"); + let sources = util.getArg(sourceMap, "sources"); + // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which + // requires the array) to play nice here. 
+ const names = util.getArg(sourceMap, "names", []); + let sourceRoot = util.getArg(sourceMap, "sourceRoot", null); + const sourcesContent = util.getArg(sourceMap, "sourcesContent", null); + const mappings = util.getArg(sourceMap, "mappings"); + const file = util.getArg(sourceMap, "file", null); + + // Once again, Sass deviates from the spec and supplies the version as a + // string rather than a number, so we use loose equality checking here. + if (version != that._version) { + throw new Error("Unsupported version: " + version); + } + + if (sourceRoot) { + sourceRoot = util.normalize(sourceRoot); + } + + sources = sources + .map(String) + // Some source maps produce relative source paths like "./foo.js" instead of + // "foo.js". Normalize these first so that future comparisons will succeed. + // See bugzil.la/1090768. + .map(util.normalize) + // Always ensure that absolute sources are internally stored relative to + // the source root, if the source root is absolute. Not doing this would + // be particularly problematic when the source root is a prefix of the + // source (valid, but why??). See github issue #199 and bugzil.la/1188982. + .map(function(source) { + return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) + ? util.relative(sourceRoot, source) + : source; + }); + + // Pass `true` below to allow duplicate names and sources. While source maps + // are intended to be compressed and deduplicated, the TypeScript compiler + // sometimes generates source maps with duplicates in them. See Github issue + // #72 and bugzil.la/889492. + that._names = ArraySet.fromArray(names.map(String), true); + that._sources = ArraySet.fromArray(sources, true); + + that._absoluteSources = that._sources.toArray().map(function(s) { + return util.computeSourceURL(sourceRoot, s, aSourceMapURL); + }); + + that.sourceRoot = sourceRoot; + that.sourcesContent = sourcesContent; + that._mappings = mappings; + that._sourceMapURL = aSourceMapURL; + that.file = file; + + that._computedColumnSpans = false; + that._mappingsPtr = 0; + that._wasm = null; + + return wasm().then(w => { + that._wasm = w; + return that; + }); + }); + } + + /** + * Utility function to find the index of a source. Returns -1 if not + * found. + */ + _findSourceIndex(aSource) { + let relativeSource = aSource; + if (this.sourceRoot != null) { + relativeSource = util.relative(this.sourceRoot, relativeSource); + } + + if (this._sources.has(relativeSource)) { + return this._sources.indexOf(relativeSource); + } + + // Maybe aSource is an absolute URL as returned by |sources|. In + // this case we can't simply undo the transform. + for (let i = 0; i < this._absoluteSources.length; ++i) { + if (this._absoluteSources[i] == aSource) { + return i; + } + } + + return -1; + } + + /** + * Create a BasicSourceMapConsumer from a SourceMapGenerator. + * + * @param SourceMapGenerator aSourceMap + * The source map that will be consumed. 
+ * @param String aSourceMapURL + * The URL at which the source map can be found (optional) + * @returns BasicSourceMapConsumer + */ + static fromSourceMap(aSourceMap, aSourceMapURL) { + return new BasicSourceMapConsumer(aSourceMap.toString()); + } + + get sources() { + return this._absoluteSources.slice(); + } + + _getMappingsPtr() { + if (this._mappingsPtr === 0) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this._mappingsPtr; + } + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + _parseMappings(aStr, aSourceRoot) { + const size = aStr.length; + + const mappingsBufPtr = this._wasm.exports.allocate_mappings(size); + const mappingsBuf = new Uint8Array(this._wasm.exports.memory.buffer, mappingsBufPtr, size); + for (let i = 0; i < size; i++) { + mappingsBuf[i] = aStr.charCodeAt(i); + } + + const mappingsPtr = this._wasm.exports.parse_mappings(mappingsBufPtr); + + if (!mappingsPtr) { + const error = this._wasm.exports.get_last_error(); + let msg = `Error parsing mappings (code ${error}): `; + + // XXX: keep these error codes in sync with `fitzgen/source-map-mappings`. + switch (error) { + case 1: + msg += "the mappings contained a negative line, column, source index, or name index"; + break; + case 2: + msg += "the mappings contained a number larger than 2**32"; + break; + case 3: + msg += "reached EOF while in the middle of parsing a VLQ"; + break; + case 4: + msg += "invalid base 64 character while parsing a VLQ"; + break; + default: + msg += "unknown error code"; + break; + } + + throw new Error(msg); + } + + this._mappingsPtr = mappingsPtr; + } + + eachMapping(aCallback, aContext, aOrder) { + const context = aContext || null; + const order = aOrder || SourceMapConsumer.GENERATED_ORDER; + const sourceRoot = this.sourceRoot; + + this._wasm.withMappingCallback( + mapping => { + if (mapping.source !== null) { + mapping.source = this._sources.at(mapping.source); + mapping.source = util.computeSourceURL(sourceRoot, mapping.source, this._sourceMapURL); + + if (mapping.name !== null) { + mapping.name = this._names.at(mapping.name); + } + } + + aCallback.call(context, mapping); + }, + () => { + switch (order) { + case SourceMapConsumer.GENERATED_ORDER: + this._wasm.exports.by_generated_location(this._getMappingsPtr()); + break; + case SourceMapConsumer.ORIGINAL_ORDER: + this._wasm.exports.by_original_location(this._getMappingsPtr()); + break; + default: + throw new Error("Unknown order of iteration."); + } + } + ); + } + + allGeneratedPositionsFor(aArgs) { + let source = util.getArg(aArgs, "source"); + const originalLine = util.getArg(aArgs, "line"); + const originalColumn = aArgs.column || 0; + + source = this._findSourceIndex(source); + if (source < 0) { + return []; + } + + if (originalLine < 1) { + throw new Error("Line numbers must be >= 1"); + } + + if (originalColumn < 0) { + throw new Error("Column numbers must be >= 0"); + } + + const mappings = []; + + this._wasm.withMappingCallback( + m => { + let lastColumn = m.lastGeneratedColumn; + if (this._computedColumnSpans && lastColumn === null) { + lastColumn = Infinity; + } + mappings.push({ + line: m.generatedLine, + column: m.generatedColumn, + lastColumn, + }); + }, () => { + this._wasm.exports.all_generated_locations_for( + this._getMappingsPtr(), + source, + originalLine - 1, + "column" in aArgs, + originalColumn + ); + } + ); + + return mappings; + } + + 
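To see how these consumer methods fit together, here is a small usage sketch; the raw map and file names are placeholders, and SourceMapConsumer.with takes care of the destroy() call shown below:

const { SourceMapConsumer } = require('source-map');

// `rawSourceMap` is a placeholder: a source map object or JSON string, e.g. the
// contents of an out.js.map file produced alongside a bundle.
async function lookup(rawSourceMap) {
  return SourceMapConsumer.with(rawSourceMap, null, async consumer => {
    // All generated positions that map back to line 2 of foo.js (lines are 1-based).
    const generated = consumer.allGeneratedPositionsFor({ source: 'foo.js', line: 2 });
    // The reverse lookup: which original position produced generated 28:10?
    const original = consumer.originalPositionFor({ line: 28, column: 10 });
    return { generated, original };
  });
}

In a browser build, SourceMapConsumer.initialize({ 'lib/mappings.wasm': url }) has to run first so read-wasm.js can fetch the wasm module; under Node that call is a no-op, as the read-wasm.js branch further down notes.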
destroy() { + if (this._mappingsPtr !== 0) { + this._wasm.exports.free_mappings(this._mappingsPtr); + this._mappingsPtr = 0; + } + } + + /** + * Compute the last column for each generated mapping. The last column is + * inclusive. + */ + computeColumnSpans() { + if (this._computedColumnSpans) { + return; + } + + this._wasm.exports.compute_column_spans(this._getMappingsPtr()); + this._computedColumnSpans = true; + } + + /** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. The line number + * is 1-based. + * - column: The column number in the generated source. The column + * number is 0-based. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. The + * line number is 1-based. + * - column: The column number in the original source, or null. The + * column number is 0-based. + * - name: The original identifier, or null. + */ + originalPositionFor(aArgs) { + const needle = { + generatedLine: util.getArg(aArgs, "line"), + generatedColumn: util.getArg(aArgs, "column") + }; + + if (needle.generatedLine < 1) { + throw new Error("Line numbers must be >= 1"); + } + + if (needle.generatedColumn < 0) { + throw new Error("Column numbers must be >= 0"); + } + + let bias = util.getArg(aArgs, "bias", SourceMapConsumer.GREATEST_LOWER_BOUND); + if (bias == null) { + bias = SourceMapConsumer.GREATEST_LOWER_BOUND; + } + + let mapping; + this._wasm.withMappingCallback(m => mapping = m, () => { + this._wasm.exports.original_location_for( + this._getMappingsPtr(), + needle.generatedLine - 1, + needle.generatedColumn, + bias + ); + }); + + if (mapping) { + if (mapping.generatedLine === needle.generatedLine) { + let source = util.getArg(mapping, "source", null); + if (source !== null) { + source = this._sources.at(source); + source = util.computeSourceURL(this.sourceRoot, source, this._sourceMapURL); + } + + let name = util.getArg(mapping, "name", null); + if (name !== null) { + name = this._names.at(name); + } + + return { + source, + line: util.getArg(mapping, "originalLine", null), + column: util.getArg(mapping, "originalColumn", null), + name + }; + } + } + + return { + source: null, + line: null, + column: null, + name: null + }; + } + + /** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ + hasContentsOfAllSources() { + if (!this.sourcesContent) { + return false; + } + return this.sourcesContent.length >= this._sources.size() && + !this.sourcesContent.some(function(sc) { return sc == null; }); + } + + /** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. 
+ */ + sourceContentFor(aSource, nullOnMissing) { + if (!this.sourcesContent) { + return null; + } + + const index = this._findSourceIndex(aSource); + if (index >= 0) { + return this.sourcesContent[index]; + } + + let relativeSource = aSource; + if (this.sourceRoot != null) { + relativeSource = util.relative(this.sourceRoot, relativeSource); + } + + let url; + if (this.sourceRoot != null + && (url = util.urlParse(this.sourceRoot))) { + // XXX: file:// URIs and absolute paths lead to unexpected behavior for + // many users. We can help them out when they expect file:// URIs to + // behave like it would if they were running a local HTTP server. See + // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. + const fileUriAbsPath = relativeSource.replace(/^file:\/\//, ""); + if (url.scheme == "file" + && this._sources.has(fileUriAbsPath)) { + return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)]; + } + + if ((!url.path || url.path == "/") + && this._sources.has("/" + relativeSource)) { + return this.sourcesContent[this._sources.indexOf("/" + relativeSource)]; + } + } + + // This function is used recursively from + // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we + // don't want to throw if we can't find the source - we just want to + // return null, so we provide a flag to exit gracefully. + if (nullOnMissing) { + return null; + } + + throw new Error('"' + relativeSource + '" is not in the SourceMap.'); + } + + /** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number + * is 1-based. + * - column: The column number in the original source. The column + * number is 0-based. + * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or + * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the + * closest element that is smaller than or greater than the one we are + * searching for, respectively, if the exact element cannot be found. + * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. 
+ */ + generatedPositionFor(aArgs) { + let source = util.getArg(aArgs, "source"); + source = this._findSourceIndex(source); + if (source < 0) { + return { + line: null, + column: null, + lastColumn: null + }; + } + + const needle = { + source, + originalLine: util.getArg(aArgs, "line"), + originalColumn: util.getArg(aArgs, "column") + }; + + if (needle.originalLine < 1) { + throw new Error("Line numbers must be >= 1"); + } + + if (needle.originalColumn < 0) { + throw new Error("Column numbers must be >= 0"); + } + + let bias = util.getArg(aArgs, "bias", SourceMapConsumer.GREATEST_LOWER_BOUND); + if (bias == null) { + bias = SourceMapConsumer.GREATEST_LOWER_BOUND; + } + + let mapping; + this._wasm.withMappingCallback(m => mapping = m, () => { + this._wasm.exports.generated_location_for( + this._getMappingsPtr(), + needle.source, + needle.originalLine - 1, + needle.originalColumn, + bias + ); + }); + + if (mapping) { + if (mapping.source === needle.source) { + let lastColumn = mapping.lastGeneratedColumn; + if (this._computedColumnSpans && lastColumn === null) { + lastColumn = Infinity; + } + return { + line: util.getArg(mapping, "generatedLine", null), + column: util.getArg(mapping, "generatedColumn", null), + lastColumn, + }; + } + } + + return { + line: null, + column: null, + lastColumn: null + }; + } +} + +BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; +exports.BasicSourceMapConsumer = BasicSourceMapConsumer; + +/** + * An IndexedSourceMapConsumer instance represents a parsed source map which + * we can query for information. It differs from BasicSourceMapConsumer in + * that it takes "indexed" source maps (i.e. ones with a "sections" field) as + * input. + * + * The first parameter is a raw source map (either as a JSON string, or already + * parsed to an object). According to the spec for indexed source maps, they + * have the following attributes: + * + * - version: Which version of the source map spec this map is following. + * - file: Optional. The generated file this source map is associated with. + * - sections: A list of section definitions. + * + * Each value under the "sections" field has two fields: + * - offset: The offset into the original specified at which this section + * begins to apply, defined as an object with a "line" and "column" + * field. + * - map: A source map definition. This source map could also be indexed, + * but doesn't have to be. + * + * Instead of the "map" field, it's also possible to have a "url" field + * specifying a URL to retrieve a source map from, but that's currently + * unsupported. + * + * Here's an example source map, taken from the source map spec[0], but + * modified to omit a section which uses the "url" field. + * + * { + * version : 3, + * file: "app.js", + * sections: [{ + * offset: {line:100, column:10}, + * map: { + * version : 3, + * file: "section.js", + * sources: ["foo.js", "bar.js"], + * names: ["src", "maps", "are", "fun"], + * mappings: "AAAA,E;;ABCDE;" + * } + * }], + * } + * + * The second parameter, if given, is a string whose value is the URL + * at which the source map was found. This URL is used to compute the + * sources array. 
+ * + * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt + */ +class IndexedSourceMapConsumer extends SourceMapConsumer { + constructor(aSourceMap, aSourceMapURL) { + return super(INTERNAL).then(that => { + let sourceMap = aSourceMap; + if (typeof aSourceMap === "string") { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + const version = util.getArg(sourceMap, "version"); + const sections = util.getArg(sourceMap, "sections"); + + if (version != that._version) { + throw new Error("Unsupported version: " + version); + } + + that._sources = new ArraySet(); + that._names = new ArraySet(); + that.__generatedMappings = null; + that.__originalMappings = null; + that.__generatedMappingsUnsorted = null; + that.__originalMappingsUnsorted = null; + + let lastOffset = { + line: -1, + column: 0 + }; + return Promise.all(sections.map(s => { + if (s.url) { + // The url field will require support for asynchronicity. + // See https://github.com/mozilla/source-map/issues/16 + throw new Error("Support for url field in sections not implemented."); + } + const offset = util.getArg(s, "offset"); + const offsetLine = util.getArg(offset, "line"); + const offsetColumn = util.getArg(offset, "column"); + + if (offsetLine < lastOffset.line || + (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { + throw new Error("Section offsets must be ordered and non-overlapping."); + } + lastOffset = offset; + + const cons = new SourceMapConsumer(util.getArg(s, "map"), aSourceMapURL); + return cons.then(consumer => { + return { + generatedOffset: { + // The offset fields are 0-based, but we use 1-based indices when + // encoding/decoding from VLQ. + generatedLine: offsetLine + 1, + generatedColumn: offsetColumn + 1 + }, + consumer + }; + }); + })).then(s => { + that._sections = s; + return that; + }); + }); + } + + // `__generatedMappings` and `__originalMappings` are arrays that hold the + // parsed mapping coordinates from the source map's "mappings" attribute. They + // are lazily instantiated, accessed via the `_generatedMappings` and + // `_originalMappings` getters respectively, and we only parse the mappings + // and create these arrays once queried for a source location. We jump through + // these hoops because there can be many thousands of mappings, and parsing + // them is expensive, so we only want to do it if we must. + // + // Each object in the arrays is of the form: + // + // { + // generatedLine: The line number in the generated code, + // generatedColumn: The column number in the generated code, + // source: The path to the original source file that generated this + // chunk of code, + // originalLine: The line number in the original source that + // corresponds to this chunk of generated code, + // originalColumn: The column number in the original source that + // corresponds to this chunk of generated code, + // name: The name of the original symbol which generated this chunk of + // code. + // } + // + // All properties except for `generatedLine` and `generatedColumn` can be + // `null`. + // + // `_generatedMappings` is ordered by the generated positions. + // + // `_originalMappings` is ordered by the original positions. 
+ get _generatedMappings() { + if (!this.__generatedMappings) { + this._sortGeneratedMappings(); + } + + return this.__generatedMappings; + } + + get _originalMappings() { + if (!this.__originalMappings) { + this._sortOriginalMappings(); + } + + return this.__originalMappings; + } + + get _generatedMappingsUnsorted() { + if (!this.__generatedMappingsUnsorted) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__generatedMappingsUnsorted; + } + + get _originalMappingsUnsorted() { + if (!this.__originalMappingsUnsorted) { + this._parseMappings(this._mappings, this.sourceRoot); + } + + return this.__originalMappingsUnsorted; + } + + _sortGeneratedMappings() { + const mappings = this._generatedMappingsUnsorted; + mappings.sort(util.compareByGeneratedPositionsDeflated); + this.__generatedMappings = mappings; + } + + _sortOriginalMappings() { + const mappings = this._originalMappingsUnsorted; + mappings.sort(util.compareByOriginalPositions); + this.__originalMappings = mappings; + } + + /** + * The list of original sources. + */ + get sources() { + const sources = []; + for (let i = 0; i < this._sections.length; i++) { + for (let j = 0; j < this._sections[i].consumer.sources.length; j++) { + sources.push(this._sections[i].consumer.sources[j]); + } + } + return sources; + } + + /** + * Returns the original source, line, and column information for the generated + * source's line and column positions provided. The only argument is an object + * with the following properties: + * + * - line: The line number in the generated source. The line number + * is 1-based. + * - column: The column number in the generated source. The column + * number is 0-based. + * + * and an object is returned with the following properties: + * + * - source: The original source file, or null. + * - line: The line number in the original source, or null. The + * line number is 1-based. + * - column: The column number in the original source, or null. The + * column number is 0-based. + * - name: The original identifier, or null. + */ + originalPositionFor(aArgs) { + const needle = { + generatedLine: util.getArg(aArgs, "line"), + generatedColumn: util.getArg(aArgs, "column") + }; + + // Find the section containing the generated position we're trying to map + // to an original position. + const sectionIndex = binarySearch.search(needle, this._sections, + function(aNeedle, section) { + const cmp = aNeedle.generatedLine - section.generatedOffset.generatedLine; + if (cmp) { + return cmp; + } + + return (aNeedle.generatedColumn - + section.generatedOffset.generatedColumn); + }); + const section = this._sections[sectionIndex]; + + if (!section) { + return { + source: null, + line: null, + column: null, + name: null + }; + } + + return section.consumer.originalPositionFor({ + line: needle.generatedLine - + (section.generatedOffset.generatedLine - 1), + column: needle.generatedColumn - + (section.generatedOffset.generatedLine === needle.generatedLine + ? section.generatedOffset.generatedColumn - 1 + : 0), + bias: aArgs.bias + }); + } + + /** + * Return true if we have the source content for every source in the source + * map, false otherwise. + */ + hasContentsOfAllSources() { + return this._sections.every(function(s) { + return s.consumer.hasContentsOfAllSources(); + }); + } + + /** + * Returns the original source content. The only argument is the url of the + * original source file. Returns null if no original source content is + * available. 
+ */ + sourceContentFor(aSource, nullOnMissing) { + for (let i = 0; i < this._sections.length; i++) { + const section = this._sections[i]; + + const content = section.consumer.sourceContentFor(aSource, true); + if (content) { + return content; + } + } + if (nullOnMissing) { + return null; + } + throw new Error('"' + aSource + '" is not in the SourceMap.'); + } + + /** + * Returns the generated line and column information for the original source, + * line, and column positions provided. The only argument is an object with + * the following properties: + * + * - source: The filename of the original source. + * - line: The line number in the original source. The line number + * is 1-based. + * - column: The column number in the original source. The column + * number is 0-based. + * + * and an object is returned with the following properties: + * + * - line: The line number in the generated source, or null. The + * line number is 1-based. + * - column: The column number in the generated source, or null. + * The column number is 0-based. + */ + generatedPositionFor(aArgs) { + for (let i = 0; i < this._sections.length; i++) { + const section = this._sections[i]; + + // Only consider this section if the requested source is in the list of + // sources of the consumer. + if (section.consumer._findSourceIndex(util.getArg(aArgs, "source")) === -1) { + continue; + } + const generatedPosition = section.consumer.generatedPositionFor(aArgs); + if (generatedPosition) { + const ret = { + line: generatedPosition.line + + (section.generatedOffset.generatedLine - 1), + column: generatedPosition.column + + (section.generatedOffset.generatedLine === generatedPosition.line + ? section.generatedOffset.generatedColumn - 1 + : 0) + }; + return ret; + } + } + + return { + line: null, + column: null + }; + } + + /** + * Parse the mappings in a string in to a data structure which we can easily + * query (the ordered arrays in the `this.__generatedMappings` and + * `this.__originalMappings` properties). + */ + _parseMappings(aStr, aSourceRoot) { + const generatedMappings = this.__generatedMappingsUnsorted = []; + const originalMappings = this.__originalMappingsUnsorted = []; + for (let i = 0; i < this._sections.length; i++) { + const section = this._sections[i]; + + const sectionMappings = []; + section.consumer.eachMapping(m => sectionMappings.push(m)); + + for (let j = 0; j < sectionMappings.length; j++) { + const mapping = sectionMappings[j]; + + // TODO: test if null is correct here. The original code used + // `source`, which would actually have gotten used as null because + // var's get hoisted. + // See: https://github.com/mozilla/source-map/issues/333 + let source = util.computeSourceURL(section.consumer.sourceRoot, null, this._sourceMapURL); + this._sources.add(source); + source = this._sources.indexOf(source); + + let name = null; + if (mapping.name) { + this._names.add(mapping.name); + name = this._names.indexOf(mapping.name); + } + + // The mappings coming from the consumer for the section have + // generated positions relative to the start of the section, so we + // need to offset them to be relative to the start of the concatenated + // generated file. + const adjustedMapping = { + source, + generatedLine: mapping.generatedLine + + (section.generatedOffset.generatedLine - 1), + generatedColumn: mapping.generatedColumn + + (section.generatedOffset.generatedLine === mapping.generatedLine + ? 
section.generatedOffset.generatedColumn - 1 + : 0), + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name + }; + + generatedMappings.push(adjustedMapping); + if (typeof adjustedMapping.originalLine === "number") { + originalMappings.push(adjustedMapping); + } + } + } + } + + eachMapping(aCallback, aContext, aOrder) { + const context = aContext || null; + const order = aOrder || SourceMapConsumer.GENERATED_ORDER; + + let mappings; + switch (order) { + case SourceMapConsumer.GENERATED_ORDER: + mappings = this._generatedMappings; + break; + case SourceMapConsumer.ORIGINAL_ORDER: + mappings = this._originalMappings; + break; + default: + throw new Error("Unknown order of iteration."); + } + + const sourceRoot = this.sourceRoot; + mappings.map(function(mapping) { + let source = null; + if (mapping.source !== null) { + source = this._sources.at(mapping.source); + source = util.computeSourceURL(sourceRoot, source, this._sourceMapURL); + } + return { + source, + generatedLine: mapping.generatedLine, + generatedColumn: mapping.generatedColumn, + originalLine: mapping.originalLine, + originalColumn: mapping.originalColumn, + name: mapping.name === null ? null : this._names.at(mapping.name) + }; + }, this).forEach(aCallback, context); + } + + /** + * Find the mapping that best matches the hypothetical "needle" mapping that + * we are searching for in the given "haystack" of mappings. + */ + _findMapping(aNeedle, aMappings, aLineName, + aColumnName, aComparator, aBias) { + // To return the position we are searching for, we must first find the + // mapping for the given position and then return the opposite position it + // points to. Because the mappings are sorted, we can use binary search to + // find the best mapping. + + if (aNeedle[aLineName] <= 0) { + throw new TypeError("Line must be greater than or equal to 1, got " + + aNeedle[aLineName]); + } + if (aNeedle[aColumnName] < 0) { + throw new TypeError("Column must be greater than or equal to 0, got " + + aNeedle[aColumnName]); + } + + return binarySearch.search(aNeedle, aMappings, aComparator, aBias); + } + + allGeneratedPositionsFor(aArgs) { + const line = util.getArg(aArgs, "line"); + + // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping + // returns the index of the closest mapping less than the needle. By + // setting needle.originalColumn to 0, we thus find the last mapping for + // the given line, provided such a mapping exists. + const needle = { + source: util.getArg(aArgs, "source"), + originalLine: line, + originalColumn: util.getArg(aArgs, "column", 0) + }; + + needle.source = this._findSourceIndex(needle.source); + if (needle.source < 0) { + return []; + } + + if (needle.originalLine < 1) { + throw new Error("Line numbers must be >= 1"); + } + + if (needle.originalColumn < 0) { + throw new Error("Column numbers must be >= 0"); + } + + const mappings = []; + + let index = this._findMapping(needle, + this._originalMappings, + "originalLine", + "originalColumn", + util.compareByOriginalPositions, + binarySearch.LEAST_UPPER_BOUND); + if (index >= 0) { + let mapping = this._originalMappings[index]; + + if (aArgs.column === undefined) { + const originalLine = mapping.originalLine; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we found. Since + // mappings are sorted, this is guaranteed to find all mappings for + // the line we found. 
+ while (mapping && mapping.originalLine === originalLine) { + let lastColumn = mapping.lastGeneratedColumn; + if (this._computedColumnSpans && lastColumn === null) { + lastColumn = Infinity; + } + mappings.push({ + line: util.getArg(mapping, "generatedLine", null), + column: util.getArg(mapping, "generatedColumn", null), + lastColumn, + }); + + mapping = this._originalMappings[++index]; + } + } else { + const originalColumn = mapping.originalColumn; + + // Iterate until either we run out of mappings, or we run into + // a mapping for a different line than the one we were searching for. + // Since mappings are sorted, this is guaranteed to find all mappings for + // the line we are searching for. + while (mapping && + mapping.originalLine === line && + mapping.originalColumn == originalColumn) { + let lastColumn = mapping.lastGeneratedColumn; + if (this._computedColumnSpans && lastColumn === null) { + lastColumn = Infinity; + } + mappings.push({ + line: util.getArg(mapping, "generatedLine", null), + column: util.getArg(mapping, "generatedColumn", null), + lastColumn, + }); + + mapping = this._originalMappings[++index]; + } + } + } + + return mappings; + } + + destroy() { + for (let i = 0; i < this._sections.length; i++) { + this._sections[i].consumer.destroy(); + } + } +} +exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; + +/* + * Cheat to get around inter-twingled classes. `factory()` can be at the end + * where it has access to non-hoisted classes, but it gets hoisted itself. + */ +function _factory(aSourceMap, aSourceMapURL) { + let sourceMap = aSourceMap; + if (typeof aSourceMap === "string") { + sourceMap = util.parseSourceMapInput(aSourceMap); + } + + const consumer = sourceMap.sections != null + ? new IndexedSourceMapConsumer(sourceMap, aSourceMapURL) + : new BasicSourceMapConsumer(sourceMap, aSourceMapURL); + return Promise.resolve(consumer); +} + +function _factoryBSM(aSourceMap, aSourceMapURL) { + return BasicSourceMapConsumer.fromSourceMap(aSourceMap, aSourceMapURL); +} diff --git a/mybulma/node_modules/source-map/lib/source-map-generator.js b/mybulma/node_modules/source-map/lib/source-map-generator.js new file mode 100644 index 0000000..8111e06 --- /dev/null +++ b/mybulma/node_modules/source-map/lib/source-map-generator.js @@ -0,0 +1,413 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +const base64VLQ = require("./base64-vlq"); +const util = require("./util"); +const ArraySet = require("./array-set").ArraySet; +const MappingList = require("./mapping-list").MappingList; + +/** + * An instance of the SourceMapGenerator represents a source map which is + * being built incrementally. You may pass an object with the following + * properties: + * + * - file: The filename of the generated source. + * - sourceRoot: A root for all relative URLs in this source map. + */ +class SourceMapGenerator { + constructor(aArgs) { + if (!aArgs) { + aArgs = {}; + } + this._file = util.getArg(aArgs, "file", null); + this._sourceRoot = util.getArg(aArgs, "sourceRoot", null); + this._skipValidation = util.getArg(aArgs, "skipValidation", false); + this._sources = new ArraySet(); + this._names = new ArraySet(); + this._mappings = new MappingList(); + this._sourcesContents = null; + } + + /** + * Creates a new SourceMapGenerator based on a SourceMapConsumer + * + * @param aSourceMapConsumer The SourceMap. 
+ */ + static fromSourceMap(aSourceMapConsumer) { + const sourceRoot = aSourceMapConsumer.sourceRoot; + const generator = new SourceMapGenerator({ + file: aSourceMapConsumer.file, + sourceRoot + }); + aSourceMapConsumer.eachMapping(function(mapping) { + const newMapping = { + generated: { + line: mapping.generatedLine, + column: mapping.generatedColumn + } + }; + + if (mapping.source != null) { + newMapping.source = mapping.source; + if (sourceRoot != null) { + newMapping.source = util.relative(sourceRoot, newMapping.source); + } + + newMapping.original = { + line: mapping.originalLine, + column: mapping.originalColumn + }; + + if (mapping.name != null) { + newMapping.name = mapping.name; + } + } + + generator.addMapping(newMapping); + }); + aSourceMapConsumer.sources.forEach(function(sourceFile) { + let sourceRelative = sourceFile; + if (sourceRoot !== null) { + sourceRelative = util.relative(sourceRoot, sourceFile); + } + + if (!generator._sources.has(sourceRelative)) { + generator._sources.add(sourceRelative); + } + + const content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + generator.setSourceContent(sourceFile, content); + } + }); + return generator; + } + + /** + * Add a single mapping from original source line and column to the generated + * source's line and column for this source map being created. The mapping + * object should have the following properties: + * + * - generated: An object with the generated line and column positions. + * - original: An object with the original line and column positions. + * - source: The original source file (relative to the sourceRoot). + * - name: An optional original token name for this mapping. + */ + addMapping(aArgs) { + const generated = util.getArg(aArgs, "generated"); + const original = util.getArg(aArgs, "original", null); + let source = util.getArg(aArgs, "source", null); + let name = util.getArg(aArgs, "name", null); + + if (!this._skipValidation) { + this._validateMapping(generated, original, source, name); + } + + if (source != null) { + source = String(source); + if (!this._sources.has(source)) { + this._sources.add(source); + } + } + + if (name != null) { + name = String(name); + if (!this._names.has(name)) { + this._names.add(name); + } + } + + this._mappings.add({ + generatedLine: generated.line, + generatedColumn: generated.column, + originalLine: original != null && original.line, + originalColumn: original != null && original.column, + source, + name + }); + } + + /** + * Set the source content for a source file. + */ + setSourceContent(aSourceFile, aSourceContent) { + let source = aSourceFile; + if (this._sourceRoot != null) { + source = util.relative(this._sourceRoot, source); + } + + if (aSourceContent != null) { + // Add the source content to the _sourcesContents map. + // Create a new _sourcesContents map if the property is null. + if (!this._sourcesContents) { + this._sourcesContents = Object.create(null); + } + this._sourcesContents[util.toSetString(source)] = aSourceContent; + } else if (this._sourcesContents) { + // Remove the source file from the _sourcesContents map. + // If the _sourcesContents map is empty, set the property to null. + delete this._sourcesContents[util.toSetString(source)]; + if (Object.keys(this._sourcesContents).length === 0) { + this._sourcesContents = null; + } + } + } + + /** + * Applies the mappings of a sub-source-map for a specific source file to the + * source map being generated. 
Each mapping to the supplied source file is + * rewritten using the supplied source map. Note: The resolution for the + * resulting mappings is the minimium of this map and the supplied map. + * + * @param aSourceMapConsumer The source map to be applied. + * @param aSourceFile Optional. The filename of the source file. + * If omitted, SourceMapConsumer's file property will be used. + * @param aSourceMapPath Optional. The dirname of the path to the source map + * to be applied. If relative, it is relative to the SourceMapConsumer. + * This parameter is needed when the two source maps aren't in the same + * directory, and the source map to be applied contains relative source + * paths. If so, those relative source paths need to be rewritten + * relative to the SourceMapGenerator. + */ + applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { + let sourceFile = aSourceFile; + // If aSourceFile is omitted, we will use the file property of the SourceMap + if (aSourceFile == null) { + if (aSourceMapConsumer.file == null) { + throw new Error( + "SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, " + + 'or the source map\'s "file" property. Both were omitted.' + ); + } + sourceFile = aSourceMapConsumer.file; + } + const sourceRoot = this._sourceRoot; + // Make "sourceFile" relative if an absolute Url is passed. + if (sourceRoot != null) { + sourceFile = util.relative(sourceRoot, sourceFile); + } + // Applying the SourceMap can add and remove items from the sources and + // the names array. + const newSources = this._mappings.toArray().length > 0 + ? new ArraySet() + : this._sources; + const newNames = new ArraySet(); + + // Find mappings for the "sourceFile" + this._mappings.unsortedForEach(function(mapping) { + if (mapping.source === sourceFile && mapping.originalLine != null) { + // Check if it can be mapped by the source map, then update the mapping. + const original = aSourceMapConsumer.originalPositionFor({ + line: mapping.originalLine, + column: mapping.originalColumn + }); + if (original.source != null) { + // Copy mapping + mapping.source = original.source; + if (aSourceMapPath != null) { + mapping.source = util.join(aSourceMapPath, mapping.source); + } + if (sourceRoot != null) { + mapping.source = util.relative(sourceRoot, mapping.source); + } + mapping.originalLine = original.line; + mapping.originalColumn = original.column; + if (original.name != null) { + mapping.name = original.name; + } + } + } + + const source = mapping.source; + if (source != null && !newSources.has(source)) { + newSources.add(source); + } + + const name = mapping.name; + if (name != null && !newNames.has(name)) { + newNames.add(name); + } + + }, this); + this._sources = newSources; + this._names = newNames; + + // Copy sourcesContents of applied map. + aSourceMapConsumer.sources.forEach(function(srcFile) { + const content = aSourceMapConsumer.sourceContentFor(srcFile); + if (content != null) { + if (aSourceMapPath != null) { + srcFile = util.join(aSourceMapPath, srcFile); + } + if (sourceRoot != null) { + srcFile = util.relative(sourceRoot, srcFile); + } + this.setSourceContent(srcFile, content); + } + }, this); + } + + /** + * A mapping can have one of the three levels of data: + * + * 1. Just the generated position. + * 2. The Generated position, original position, and original source. + * 3. Generated and original position, original source, as well as a name + * token. 
+ * + * To maintain consistency, we validate that any new mapping being added falls + * in to one of these categories. + */ + _validateMapping(aGenerated, aOriginal, aSource, aName) { + // When aOriginal is truthy but has empty values for .line and .column, + // it is most likely a programmer error. In this case we throw a very + // specific error message to try to guide them the right way. + // For example: https://github.com/Polymer/polymer-bundler/pull/519 + if (aOriginal && typeof aOriginal.line !== "number" && typeof aOriginal.column !== "number") { + throw new Error( + "original.line and original.column are not numbers -- you probably meant to omit " + + "the original mapping entirely and only map the generated position. If so, pass " + + "null for the original mapping instead of an object with empty or null values." + ); + } + + if (aGenerated && "line" in aGenerated && "column" in aGenerated + && aGenerated.line > 0 && aGenerated.column >= 0 + && !aOriginal && !aSource && !aName) { + // Case 1. + + } else if (aGenerated && "line" in aGenerated && "column" in aGenerated + && aOriginal && "line" in aOriginal && "column" in aOriginal + && aGenerated.line > 0 && aGenerated.column >= 0 + && aOriginal.line > 0 && aOriginal.column >= 0 + && aSource) { + // Cases 2 and 3. + + } else { + throw new Error("Invalid mapping: " + JSON.stringify({ + generated: aGenerated, + source: aSource, + original: aOriginal, + name: aName + })); + } + } + + /** + * Serialize the accumulated mappings in to the stream of base 64 VLQs + * specified by the source map format. + */ + _serializeMappings() { + let previousGeneratedColumn = 0; + let previousGeneratedLine = 1; + let previousOriginalColumn = 0; + let previousOriginalLine = 0; + let previousName = 0; + let previousSource = 0; + let result = ""; + let next; + let mapping; + let nameIdx; + let sourceIdx; + + const mappings = this._mappings.toArray(); + for (let i = 0, len = mappings.length; i < len; i++) { + mapping = mappings[i]; + next = ""; + + if (mapping.generatedLine !== previousGeneratedLine) { + previousGeneratedColumn = 0; + while (mapping.generatedLine !== previousGeneratedLine) { + next += ";"; + previousGeneratedLine++; + } + } else if (i > 0) { + if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { + continue; + } + next += ","; + } + + next += base64VLQ.encode(mapping.generatedColumn + - previousGeneratedColumn); + previousGeneratedColumn = mapping.generatedColumn; + + if (mapping.source != null) { + sourceIdx = this._sources.indexOf(mapping.source); + next += base64VLQ.encode(sourceIdx - previousSource); + previousSource = sourceIdx; + + // lines are stored 0-based in SourceMap spec version 3 + next += base64VLQ.encode(mapping.originalLine - 1 + - previousOriginalLine); + previousOriginalLine = mapping.originalLine - 1; + + next += base64VLQ.encode(mapping.originalColumn + - previousOriginalColumn); + previousOriginalColumn = mapping.originalColumn; + + if (mapping.name != null) { + nameIdx = this._names.indexOf(mapping.name); + next += base64VLQ.encode(nameIdx - previousName); + previousName = nameIdx; + } + } + + result += next; + } + + return result; + } + + _generateSourcesContent(aSources, aSourceRoot) { + return aSources.map(function(source) { + if (!this._sourcesContents) { + return null; + } + if (aSourceRoot != null) { + source = util.relative(aSourceRoot, source); + } + const key = util.toSetString(source); + return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) + ? 
this._sourcesContents[key] + : null; + }, this); + } + + /** + * Externalize the source map. + */ + toJSON() { + const map = { + version: this._version, + sources: this._sources.toArray(), + names: this._names.toArray(), + mappings: this._serializeMappings() + }; + if (this._file != null) { + map.file = this._file; + } + if (this._sourceRoot != null) { + map.sourceRoot = this._sourceRoot; + } + if (this._sourcesContents) { + map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); + } + + return map; + } + + /** + * Render the source map being generated to a string. + */ + toString() { + return JSON.stringify(this.toJSON()); + } +} + +SourceMapGenerator.prototype._version = 3; +exports.SourceMapGenerator = SourceMapGenerator; diff --git a/mybulma/node_modules/source-map/lib/source-node.js b/mybulma/node_modules/source-map/lib/source-node.js new file mode 100644 index 0000000..8a7a157 --- /dev/null +++ b/mybulma/node_modules/source-map/lib/source-node.js @@ -0,0 +1,404 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +const SourceMapGenerator = require("./source-map-generator").SourceMapGenerator; +const util = require("./util"); + +// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other +// operating systems these days (capturing the result). +const REGEX_NEWLINE = /(\r?\n)/; + +// Newline character code for charCodeAt() comparisons +const NEWLINE_CODE = 10; + +// Private symbol for identifying `SourceNode`s when multiple versions of +// the source-map library are loaded. This MUST NOT CHANGE across +// versions! +const isSourceNode = "$$$isSourceNode$$$"; + +/** + * SourceNodes provide a way to abstract over interpolating/concatenating + * snippets of generated JavaScript source code while maintaining the line and + * column information associated with the original source code. + * + * @param aLine The original line number. + * @param aColumn The original column number. + * @param aSource The original source's filename. + * @param aChunks Optional. An array of strings which are snippets of + * generated JS, or other SourceNodes. + * @param aName The original identifier. + */ +class SourceNode { + constructor(aLine, aColumn, aSource, aChunks, aName) { + this.children = []; + this.sourceContents = {}; + this.line = aLine == null ? null : aLine; + this.column = aColumn == null ? null : aColumn; + this.source = aSource == null ? null : aSource; + this.name = aName == null ? null : aName; + this[isSourceNode] = true; + if (aChunks != null) this.add(aChunks); + } + + /** + * Creates a SourceNode from generated code and a SourceMapConsumer. + * + * @param aGeneratedCode The generated code + * @param aSourceMapConsumer The SourceMap for the generated code + * @param aRelativePath Optional. The path that relative sources in the + * SourceMapConsumer should be relative to. + */ + static fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { + // The SourceNode we want to fill with the generated code + // and the SourceMap + const node = new SourceNode(); + + // All even indices of this array are one line of the generated code, + // while all odd indices are the newlines between two adjacent lines + // (since `REGEX_NEWLINE` captures its match). + // Processed fragments are accessed by calling `shiftNextLine`. 
+ const remainingLines = aGeneratedCode.split(REGEX_NEWLINE); + let remainingLinesIndex = 0; + const shiftNextLine = function() { + const lineContents = getNextLine(); + // The last line of a file might not have a newline. + const newLine = getNextLine() || ""; + return lineContents + newLine; + + function getNextLine() { + return remainingLinesIndex < remainingLines.length ? + remainingLines[remainingLinesIndex++] : undefined; + } + }; + + // We need to remember the position of "remainingLines" + let lastGeneratedLine = 1, lastGeneratedColumn = 0; + + // The generate SourceNodes we need a code range. + // To extract it current and last mapping is used. + // Here we store the last mapping. + let lastMapping = null; + let nextLine; + + aSourceMapConsumer.eachMapping(function(mapping) { + if (lastMapping !== null) { + // We add the code from "lastMapping" to "mapping": + // First check if there is a new line in between. + if (lastGeneratedLine < mapping.generatedLine) { + // Associate first line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + lastGeneratedLine++; + lastGeneratedColumn = 0; + // The remaining code is added without mapping + } else { + // There is no new line in between. + // Associate the code between "lastGeneratedColumn" and + // "mapping.generatedColumn" with "lastMapping" + nextLine = remainingLines[remainingLinesIndex] || ""; + const code = nextLine.substr(0, mapping.generatedColumn - + lastGeneratedColumn); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn - + lastGeneratedColumn); + lastGeneratedColumn = mapping.generatedColumn; + addMappingWithCode(lastMapping, code); + // No more remaining code, continue + lastMapping = mapping; + return; + } + } + // We add the generated code until the first mapping + // to the SourceNode without any mapping. + // Each line is added as separate string. + while (lastGeneratedLine < mapping.generatedLine) { + node.add(shiftNextLine()); + lastGeneratedLine++; + } + if (lastGeneratedColumn < mapping.generatedColumn) { + nextLine = remainingLines[remainingLinesIndex] || ""; + node.add(nextLine.substr(0, mapping.generatedColumn)); + remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn); + lastGeneratedColumn = mapping.generatedColumn; + } + lastMapping = mapping; + }, this); + // We have processed all mappings. + if (remainingLinesIndex < remainingLines.length) { + if (lastMapping) { + // Associate the remaining code in the current line with "lastMapping" + addMappingWithCode(lastMapping, shiftNextLine()); + } + // and add the remaining lines without any mapping + node.add(remainingLines.splice(remainingLinesIndex).join("")); + } + + // Copy sourcesContent into SourceNode + aSourceMapConsumer.sources.forEach(function(sourceFile) { + const content = aSourceMapConsumer.sourceContentFor(sourceFile); + if (content != null) { + if (aRelativePath != null) { + sourceFile = util.join(aRelativePath, sourceFile); + } + node.setSourceContent(sourceFile, content); + } + }); + + return node; + + function addMappingWithCode(mapping, code) { + if (mapping === null || mapping.source === undefined) { + node.add(code); + } else { + const source = aRelativePath + ? util.join(aRelativePath, mapping.source) + : mapping.source; + node.add(new SourceNode(mapping.originalLine, + mapping.originalColumn, + source, + code, + mapping.name)); + } + } + } + + /** + * Add a chunk of generated JS to this source node. 
+ * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ + add(aChunk) { + if (Array.isArray(aChunk)) { + aChunk.forEach(function(chunk) { + this.add(chunk); + }, this); + } else if (aChunk[isSourceNode] || typeof aChunk === "string") { + if (aChunk) { + this.children.push(aChunk); + } + } else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; + } + + /** + * Add a chunk of generated JS to the beginning of this source node. + * + * @param aChunk A string snippet of generated JS code, another instance of + * SourceNode, or an array where each member is one of those things. + */ + prepend(aChunk) { + if (Array.isArray(aChunk)) { + for (let i = aChunk.length - 1; i >= 0; i--) { + this.prepend(aChunk[i]); + } + } else if (aChunk[isSourceNode] || typeof aChunk === "string") { + this.children.unshift(aChunk); + } else { + throw new TypeError( + "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk + ); + } + return this; + } + + /** + * Walk over the tree of JS snippets in this node and its children. The + * walking function is called once for each snippet of JS and is passed that + * snippet and the its original associated source's line/column location. + * + * @param aFn The traversal function. + */ + walk(aFn) { + let chunk; + for (let i = 0, len = this.children.length; i < len; i++) { + chunk = this.children[i]; + if (chunk[isSourceNode]) { + chunk.walk(aFn); + } else if (chunk !== "") { + aFn(chunk, { source: this.source, + line: this.line, + column: this.column, + name: this.name }); + } + } + } + + /** + * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between + * each of `this.children`. + * + * @param aSep The separator. + */ + join(aSep) { + let newChildren; + let i; + const len = this.children.length; + if (len > 0) { + newChildren = []; + for (i = 0; i < len - 1; i++) { + newChildren.push(this.children[i]); + newChildren.push(aSep); + } + newChildren.push(this.children[i]); + this.children = newChildren; + } + return this; + } + + /** + * Call String.prototype.replace on the very right-most source snippet. Useful + * for trimming whitespace from the end of a source node, etc. + * + * @param aPattern The pattern to replace. + * @param aReplacement The thing to replace the pattern with. + */ + replaceRight(aPattern, aReplacement) { + const lastChild = this.children[this.children.length - 1]; + if (lastChild[isSourceNode]) { + lastChild.replaceRight(aPattern, aReplacement); + } else if (typeof lastChild === "string") { + this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); + } else { + this.children.push("".replace(aPattern, aReplacement)); + } + return this; + } + + /** + * Set the source content for a source file. This will be added to the SourceMapGenerator + * in the sourcesContent field. + * + * @param aSourceFile The filename of the source file + * @param aSourceContent The content of the source file + */ + setSourceContent(aSourceFile, aSourceContent) { + this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; + } + + /** + * Walk over the tree of SourceNodes. The walking function is called for each + * source file content and is passed the filename and source content. + * + * @param aFn The traversal function. 
+ */ + walkSourceContents(aFn) { + for (let i = 0, len = this.children.length; i < len; i++) { + if (this.children[i][isSourceNode]) { + this.children[i].walkSourceContents(aFn); + } + } + + const sources = Object.keys(this.sourceContents); + for (let i = 0, len = sources.length; i < len; i++) { + aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); + } + } + + /** + * Return the string representation of this source node. Walks over the tree + * and concatenates all the various snippets together to one string. + */ + toString() { + let str = ""; + this.walk(function(chunk) { + str += chunk; + }); + return str; + } + + /** + * Returns the string representation of this source node along with a source + * map. + */ + toStringWithSourceMap(aArgs) { + const generated = { + code: "", + line: 1, + column: 0 + }; + const map = new SourceMapGenerator(aArgs); + let sourceMappingActive = false; + let lastOriginalSource = null; + let lastOriginalLine = null; + let lastOriginalColumn = null; + let lastOriginalName = null; + this.walk(function(chunk, original) { + generated.code += chunk; + if (original.source !== null + && original.line !== null + && original.column !== null) { + if (lastOriginalSource !== original.source + || lastOriginalLine !== original.line + || lastOriginalColumn !== original.column + || lastOriginalName !== original.name) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + lastOriginalSource = original.source; + lastOriginalLine = original.line; + lastOriginalColumn = original.column; + lastOriginalName = original.name; + sourceMappingActive = true; + } else if (sourceMappingActive) { + map.addMapping({ + generated: { + line: generated.line, + column: generated.column + } + }); + lastOriginalSource = null; + sourceMappingActive = false; + } + for (let idx = 0, length = chunk.length; idx < length; idx++) { + if (chunk.charCodeAt(idx) === NEWLINE_CODE) { + generated.line++; + generated.column = 0; + // Mappings end at eol + if (idx + 1 === length) { + lastOriginalSource = null; + sourceMappingActive = false; + } else if (sourceMappingActive) { + map.addMapping({ + source: original.source, + original: { + line: original.line, + column: original.column + }, + generated: { + line: generated.line, + column: generated.column + }, + name: original.name + }); + } + } else { + generated.column++; + } + } + }); + this.walkSourceContents(function(sourceFile, sourceContent) { + map.setSourceContent(sourceFile, sourceContent); + }); + + return { code: generated.code, map }; + } +} + +exports.SourceNode = SourceNode; diff --git a/mybulma/node_modules/source-map/lib/util.js b/mybulma/node_modules/source-map/lib/util.js new file mode 100644 index 0000000..35bd93d --- /dev/null +++ b/mybulma/node_modules/source-map/lib/util.js @@ -0,0 +1,546 @@ +/* -*- Mode: js; js-indent-level: 2; -*- */ +/* + * Copyright 2011 Mozilla Foundation and contributors + * Licensed under the New BSD license. See LICENSE or: + * http://opensource.org/licenses/BSD-3-Clause + */ + +/** + * This is a helper function for getting values from parameter/options + * objects. + * + * @param args The object we are extracting values from + * @param name The name of the property we are getting. + * @param defaultValue An optional value to return if the property is missing + * from the object. 
If this is not specified and the property is missing, an + * error will be thrown. + */ +function getArg(aArgs, aName, aDefaultValue) { + if (aName in aArgs) { + return aArgs[aName]; + } else if (arguments.length === 3) { + return aDefaultValue; + } + throw new Error('"' + aName + '" is a required argument.'); + +} +exports.getArg = getArg; + +const urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/; +const dataUrlRegexp = /^data:.+\,.+$/; + +function urlParse(aUrl) { + const match = aUrl.match(urlRegexp); + if (!match) { + return null; + } + return { + scheme: match[1], + auth: match[2], + host: match[3], + port: match[4], + path: match[5] + }; +} +exports.urlParse = urlParse; + +function urlGenerate(aParsedUrl) { + let url = ""; + if (aParsedUrl.scheme) { + url += aParsedUrl.scheme + ":"; + } + url += "//"; + if (aParsedUrl.auth) { + url += aParsedUrl.auth + "@"; + } + if (aParsedUrl.host) { + url += aParsedUrl.host; + } + if (aParsedUrl.port) { + url += ":" + aParsedUrl.port; + } + if (aParsedUrl.path) { + url += aParsedUrl.path; + } + return url; +} +exports.urlGenerate = urlGenerate; + +const MAX_CACHED_INPUTS = 32; + +/** + * Takes some function `f(input) -> result` and returns a memoized version of + * `f`. + * + * We keep at most `MAX_CACHED_INPUTS` memoized results of `f` alive. The + * memoization is a dumb-simple, linear least-recently-used cache. + */ +function lruMemoize(f) { + const cache = []; + + return function(input) { + for (let i = 0; i < cache.length; i++) { + if (cache[i].input === input) { + const temp = cache[0]; + cache[0] = cache[i]; + cache[i] = temp; + return cache[0].result; + } + } + + const result = f(input); + + cache.unshift({ + input, + result, + }); + + if (cache.length > MAX_CACHED_INPUTS) { + cache.pop(); + } + + return result; + }; +} + +/** + * Normalizes a path, or the path portion of a URL: + * + * - Replaces consecutive slashes with one slash. + * - Removes unnecessary '.' parts. + * - Removes unnecessary '/..' parts. + * + * Based on code in the Node.js 'path' core module. + * + * @param aPath The path or url to normalize. + */ +const normalize = lruMemoize(function normalize(aPath) { + let path = aPath; + const url = urlParse(aPath); + if (url) { + if (!url.path) { + return aPath; + } + path = url.path; + } + const isAbsolute = exports.isAbsolute(path); + + // Split the path into parts between `/` characters. This is much faster than + // using `.split(/\/+/g)`. + const parts = []; + let start = 0; + let i = 0; + while (true) { + start = i; + i = path.indexOf("/", start); + if (i === -1) { + parts.push(path.slice(start)); + break; + } else { + parts.push(path.slice(start, i)); + while (i < path.length && path[i] === "/") { + i++; + } + } + } + + let up = 0; + for (i = parts.length - 1; i >= 0; i--) { + const part = parts[i]; + if (part === ".") { + parts.splice(i, 1); + } else if (part === "..") { + up++; + } else if (up > 0) { + if (part === "") { + // The first part is blank if the path is absolute. Trying to go + // above the root is a no-op. Therefore we can remove all '..' parts + // directly after the root. + parts.splice(i + 1, up); + up = 0; + } else { + parts.splice(i, 2); + up--; + } + } + } + path = parts.join("/"); + + if (path === "") { + path = isAbsolute ? "/" : "."; + } + + if (url) { + url.path = path; + return urlGenerate(url); + } + return path; +}); +exports.normalize = normalize; + +/** + * Joins two paths/URLs. + * + * @param aRoot The root path or URL. 
+ * @param aPath The path or URL to be joined with the root. + * + * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a + * scheme-relative URL: Then the scheme of aRoot, if any, is prepended + * first. + * - Otherwise aPath is a path. If aRoot is a URL, then its path portion + * is updated with the result and aRoot is returned. Otherwise the result + * is returned. + * - If aPath is absolute, the result is aPath. + * - Otherwise the two paths are joined with a slash. + * - Joining for example 'http://' and 'www.example.com' is also supported. + */ +function join(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + if (aPath === "") { + aPath = "."; + } + const aPathUrl = urlParse(aPath); + const aRootUrl = urlParse(aRoot); + if (aRootUrl) { + aRoot = aRootUrl.path || "/"; + } + + // `join(foo, '//www.example.org')` + if (aPathUrl && !aPathUrl.scheme) { + if (aRootUrl) { + aPathUrl.scheme = aRootUrl.scheme; + } + return urlGenerate(aPathUrl); + } + + if (aPathUrl || aPath.match(dataUrlRegexp)) { + return aPath; + } + + // `join('http://', 'www.example.com')` + if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { + aRootUrl.host = aPath; + return urlGenerate(aRootUrl); + } + + const joined = aPath.charAt(0) === "/" + ? aPath + : normalize(aRoot.replace(/\/+$/, "") + "/" + aPath); + + if (aRootUrl) { + aRootUrl.path = joined; + return urlGenerate(aRootUrl); + } + return joined; +} +exports.join = join; + +exports.isAbsolute = function(aPath) { + return aPath.charAt(0) === "/" || urlRegexp.test(aPath); +}; + +/** + * Make a path relative to a URL or another path. + * + * @param aRoot The root path or URL. + * @param aPath The path or URL to be made relative to aRoot. + */ +function relative(aRoot, aPath) { + if (aRoot === "") { + aRoot = "."; + } + + aRoot = aRoot.replace(/\/$/, ""); + + // It is possible for the path to be above the root. In this case, simply + // checking whether the root is a prefix of the path won't work. Instead, we + // need to remove components from the root one by one, until either we find + // a prefix that fits, or we run out of components to remove. + let level = 0; + while (aPath.indexOf(aRoot + "/") !== 0) { + const index = aRoot.lastIndexOf("/"); + if (index < 0) { + return aPath; + } + + // If the only part of the root that is left is the scheme (i.e. http://, + // file:///, etc.), one or more slashes (/), or simply nothing at all, we + // have exhausted all components, so the path is not relative to the root. + aRoot = aRoot.slice(0, index); + if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { + return aPath; + } + + ++level; + } + + // Make sure we add a "../" for each component we removed from the root. + return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); +} +exports.relative = relative; + +const supportsNullProto = (function() { + const obj = Object.create(null); + return !("__proto__" in obj); +}()); + +function identity(s) { + return s; +} + +/** + * Because behavior goes wacky when you set `__proto__` on objects, we + * have to prefix all the strings in our set with an arbitrary character. + * + * See https://github.com/mozilla/source-map/pull/31 and + * https://github.com/mozilla/source-map/issues/30 + * + * @param String aStr + */ +function toSetString(aStr) { + if (isProtoString(aStr)) { + return "$" + aStr; + } + + return aStr; +} +exports.toSetString = supportsNullProto ? 
identity : toSetString; + +function fromSetString(aStr) { + if (isProtoString(aStr)) { + return aStr.slice(1); + } + + return aStr; +} +exports.fromSetString = supportsNullProto ? identity : fromSetString; + +function isProtoString(s) { + if (!s) { + return false; + } + + const length = s.length; + + if (length < 9 /* "__proto__".length */) { + return false; + } + + /* eslint-disable no-multi-spaces */ + if (s.charCodeAt(length - 1) !== 95 /* '_' */ || + s.charCodeAt(length - 2) !== 95 /* '_' */ || + s.charCodeAt(length - 3) !== 111 /* 'o' */ || + s.charCodeAt(length - 4) !== 116 /* 't' */ || + s.charCodeAt(length - 5) !== 111 /* 'o' */ || + s.charCodeAt(length - 6) !== 114 /* 'r' */ || + s.charCodeAt(length - 7) !== 112 /* 'p' */ || + s.charCodeAt(length - 8) !== 95 /* '_' */ || + s.charCodeAt(length - 9) !== 95 /* '_' */) { + return false; + } + /* eslint-enable no-multi-spaces */ + + for (let i = length - 10; i >= 0; i--) { + if (s.charCodeAt(i) !== 36 /* '$' */) { + return false; + } + } + + return true; +} + +/** + * Comparator between two mappings where the original positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same original source/line/column, but different generated + * line and column the same. Useful when searching for a mapping with a + * stubbed out mapping. + */ +function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { + let cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0 || onlyCompareOriginal) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByOriginalPositions = compareByOriginalPositions; + +/** + * Comparator between two mappings with deflated source and name indices where + * the generated positions are compared. + * + * Optionally pass in `true` as `onlyCompareGenerated` to consider two + * mappings with the same generated line and column, but different + * source/name/original line and column the same. Useful when searching for a + * mapping with a stubbed out mapping. 
+ */ +function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { + let cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0 || onlyCompareGenerated) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; + +function strcmp(aStr1, aStr2) { + if (aStr1 === aStr2) { + return 0; + } + + if (aStr1 === null) { + return 1; // aStr2 !== null + } + + if (aStr2 === null) { + return -1; // aStr1 !== null + } + + if (aStr1 > aStr2) { + return 1; + } + + return -1; +} + +/** + * Comparator between two mappings with inflated source and name strings where + * the generated positions are compared. + */ +function compareByGeneratedPositionsInflated(mappingA, mappingB) { + let cmp = mappingA.generatedLine - mappingB.generatedLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.generatedColumn - mappingB.generatedColumn; + if (cmp !== 0) { + return cmp; + } + + cmp = strcmp(mappingA.source, mappingB.source); + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalLine - mappingB.originalLine; + if (cmp !== 0) { + return cmp; + } + + cmp = mappingA.originalColumn - mappingB.originalColumn; + if (cmp !== 0) { + return cmp; + } + + return strcmp(mappingA.name, mappingB.name); +} +exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; + +/** + * Strip any JSON XSSI avoidance prefix from the string (as documented + * in the source maps specification), and then parse the string as + * JSON. + */ +function parseSourceMapInput(str) { + return JSON.parse(str.replace(/^\)]}'[^\n]*\n/, "")); +} +exports.parseSourceMapInput = parseSourceMapInput; + +/** + * Compute the URL of a source given the the source root, the source's + * URL, and the source map's URL. + */ +function computeSourceURL(sourceRoot, sourceURL, sourceMapURL) { + sourceURL = sourceURL || ""; + + if (sourceRoot) { + // This follows what Chrome does. + if (sourceRoot[sourceRoot.length - 1] !== "/" && sourceURL[0] !== "/") { + sourceRoot += "/"; + } + // The spec says: + // Line 4: An optional source root, useful for relocating source + // files on a server or removing repeated values in the + // “sources” entry. This value is prepended to the individual + // entries in the “source” field. + sourceURL = sourceRoot + sourceURL; + } + + // Historically, SourceMapConsumer did not take the sourceMapURL as + // a parameter. This mode is still somewhat supported, which is why + // this code block is conditional. However, it's preferable to pass + // the source map URL to SourceMapConsumer, so that this function + // can implement the source URL resolution algorithm as outlined in + // the spec. This block is basically the equivalent of: + // new URL(sourceURL, sourceMapURL).toString() + // ... except it avoids using URL, which wasn't available in the + // older releases of node still supported by this library. 
+ // + // The spec says: + // If the sources are not absolute URLs after prepending of the + // “sourceRoot”, the sources are resolved relative to the + // SourceMap (like resolving script src in a html document). + if (sourceMapURL) { + const parsed = urlParse(sourceMapURL); + if (!parsed) { + throw new Error("sourceMapURL could not be parsed"); + } + if (parsed.path) { + // Strip the last path component, but keep the "/". + const index = parsed.path.lastIndexOf("/"); + if (index >= 0) { + parsed.path = parsed.path.substring(0, index + 1); + } + } + sourceURL = join(urlGenerate(parsed), sourceURL); + } + + return normalize(sourceURL); +} +exports.computeSourceURL = computeSourceURL; diff --git a/mybulma/node_modules/source-map/lib/wasm.js b/mybulma/node_modules/source-map/lib/wasm.js new file mode 100644 index 0000000..88b18be --- /dev/null +++ b/mybulma/node_modules/source-map/lib/wasm.js @@ -0,0 +1,107 @@ +const readWasm = require("../lib/read-wasm"); + +/** + * Provide the JIT with a nice shape / hidden class. + */ +function Mapping() { + this.generatedLine = 0; + this.generatedColumn = 0; + this.lastGeneratedColumn = null; + this.source = null; + this.originalLine = null; + this.originalColumn = null; + this.name = null; +} + +let cachedWasm = null; + +module.exports = function wasm() { + if (cachedWasm) { + return cachedWasm; + } + + const callbackStack = []; + + cachedWasm = readWasm().then(buffer => { + return WebAssembly.instantiate(buffer, { + env: { + mapping_callback( + generatedLine, + generatedColumn, + + hasLastGeneratedColumn, + lastGeneratedColumn, + + hasOriginal, + source, + originalLine, + originalColumn, + + hasName, + name + ) { + const mapping = new Mapping(); + // JS uses 1-based line numbers, wasm uses 0-based. + mapping.generatedLine = generatedLine + 1; + mapping.generatedColumn = generatedColumn; + + if (hasLastGeneratedColumn) { + // JS uses inclusive last generated column, wasm uses exclusive. + mapping.lastGeneratedColumn = lastGeneratedColumn - 1; + } + + if (hasOriginal) { + mapping.source = source; + // JS uses 1-based line numbers, wasm uses 0-based. 
+ mapping.originalLine = originalLine + 1; + mapping.originalColumn = originalColumn; + + if (hasName) { + mapping.name = name; + } + } + + callbackStack[callbackStack.length - 1](mapping); + }, + + start_all_generated_locations_for() { console.time("all_generated_locations_for"); }, + end_all_generated_locations_for() { console.timeEnd("all_generated_locations_for"); }, + + start_compute_column_spans() { console.time("compute_column_spans"); }, + end_compute_column_spans() { console.timeEnd("compute_column_spans"); }, + + start_generated_location_for() { console.time("generated_location_for"); }, + end_generated_location_for() { console.timeEnd("generated_location_for"); }, + + start_original_location_for() { console.time("original_location_for"); }, + end_original_location_for() { console.timeEnd("original_location_for"); }, + + start_parse_mappings() { console.time("parse_mappings"); }, + end_parse_mappings() { console.timeEnd("parse_mappings"); }, + + start_sort_by_generated_location() { console.time("sort_by_generated_location"); }, + end_sort_by_generated_location() { console.timeEnd("sort_by_generated_location"); }, + + start_sort_by_original_location() { console.time("sort_by_original_location"); }, + end_sort_by_original_location() { console.timeEnd("sort_by_original_location"); }, + } + }); + }).then(Wasm => { + return { + exports: Wasm.instance.exports, + withMappingCallback: (mappingCallback, f) => { + callbackStack.push(mappingCallback); + try { + f(); + } finally { + callbackStack.pop(); + } + } + }; + }).then(null, e => { + cachedWasm = null; + throw e; + }); + + return cachedWasm; +}; diff --git a/mybulma/node_modules/spdx-exceptions/README.md b/mybulma/node_modules/spdx-exceptions/README.md new file mode 100644 index 0000000..6c927ec --- /dev/null +++ b/mybulma/node_modules/spdx-exceptions/README.md @@ -0,0 +1,36 @@ +The package exports an array of strings. Each string is an identifier +for a license exception under the [Software Package Data Exchange +(SPDX)][SPDX] software license metadata standard. + +[SPDX]: https://spdx.org + +## Copyright and Licensing + +### SPDX + +"SPDX" is a federally registered United States trademark of The Linux +Foundation Corporation. + +From version 2.0 of the [SPDX] specification: + +> Copyright © 2010-2015 Linux Foundation and its Contributors. Licensed +> under the Creative Commons Attribution License 3.0 Unported. All other +> rights are expressly reserved. + +The Linux Foundation and the SPDX working groups are good people. Only +they decide what "SPDX" means, as a standard and otherwise. I respect +their work and their rights. You should, too. + +### This Package + +> I created this package by copying exception identifiers out of the +> SPDX specification. That work was mechanical, routine, and required no +> creativity whatsoever. - Kyle Mitchell, package author + +United States users concerned about intellectual property may wish to +discuss the following Supreme Court decisions with their attorneys: + +- _Baker v. Selden_, 101 U.S. 99 (1879) + +- _Feist Publications, Inc., v. Rural Telephone Service Co._, + 499 U.S. 
340 (1991) diff --git a/mybulma/node_modules/spdx-exceptions/index.json b/mybulma/node_modules/spdx-exceptions/index.json new file mode 100644 index 0000000..f88f088 --- /dev/null +++ b/mybulma/node_modules/spdx-exceptions/index.json @@ -0,0 +1,40 @@ +[ + "389-exception", + "Autoconf-exception-2.0", + "Autoconf-exception-3.0", + "Bison-exception-2.2", + "Bootloader-exception", + "Classpath-exception-2.0", + "CLISP-exception-2.0", + "DigiRule-FOSS-exception", + "eCos-exception-2.0", + "Fawkes-Runtime-exception", + "FLTK-exception", + "Font-exception-2.0", + "freertos-exception-2.0", + "GCC-exception-2.0", + "GCC-exception-3.1", + "gnu-javamail-exception", + "GPL-3.0-linking-exception", + "GPL-3.0-linking-source-exception", + "GPL-CC-1.0", + "i2p-gpl-java-exception", + "Libtool-exception", + "Linux-syscall-note", + "LLVM-exception", + "LZMA-exception", + "mif-exception", + "Nokia-Qt-exception-1.1", + "OCaml-LGPL-linking-exception", + "OCCT-exception-1.0", + "OpenJDK-assembly-exception-1.0", + "openvpn-openssl-exception", + "PS-or-PDF-font-exception-20170817", + "Qt-GPL-exception-1.0", + "Qt-LGPL-exception-1.1", + "Qwt-exception-1.0", + "Swift-exception", + "u-boot-exception-2.0", + "Universal-FOSS-exception-1.0", + "WxWindows-exception-3.1" +] diff --git a/mybulma/node_modules/spdx-exceptions/package.json b/mybulma/node_modules/spdx-exceptions/package.json new file mode 100644 index 0000000..2bafc6a --- /dev/null +++ b/mybulma/node_modules/spdx-exceptions/package.json @@ -0,0 +1,17 @@ +{ + "name": "spdx-exceptions", + "description": "list of SPDX standard license exceptions", + "version": "2.3.0", + "author": "The Linux Foundation", + "contributors": [ + "Kyle E. Mitchell (https://kemitchell.com/)" + ], + "license": "CC-BY-3.0", + "repository": "kemitchell/spdx-exceptions.json", + "files": [ + "index.json" + ], + "scripts": { + "build": "node build.js" + } +} diff --git a/mybulma/node_modules/spdx-license-ids/README.md b/mybulma/node_modules/spdx-license-ids/README.md new file mode 100644 index 0000000..500e32d --- /dev/null +++ b/mybulma/node_modules/spdx-license-ids/README.md @@ -0,0 +1,52 @@ +# spdx-license-ids + +[![npm version](https://img.shields.io/npm/v/spdx-license-ids.svg)](https://www.npmjs.com/package/spdx-license-ids) +[![Github Actions](https://action-badges.now.sh/jslicense/spdx-license-ids)](https://wdp9fww0r9.execute-api.us-west-2.amazonaws.com/production/results/jslicense/spdx-license-ids) + +A list of [SPDX license](https://spdx.org/licenses/) identifiers + +## Installation + +[Download JSON directly](https://raw.githubusercontent.com/jslicense/spdx-license-ids/main/index.json), or [use](https://docs.npmjs.com/cli/install) [npm](https://docs.npmjs.com/about-npm/): + +``` +npm install spdx-license-ids +``` + +## [Node.js](https://nodejs.org/) API + +### require('spdx-license-ids') + +Type: `string[]` + +All license IDs except for the currently deprecated ones. + +```javascript +const ids = require('spdx-license-ids'); +//=> ['0BSD', 'AAL', 'ADSL', 'AFL-1.1', 'AFL-1.2', 'AFL-2.0', 'AFL-2.1', 'AFL-3.0', 'AGPL-1.0-only', ...] + +ids.includes('BSD-3-Clause'); //=> true +ids.includes('CC-BY-1.0'); //=> true + +ids.includes('GPL-3.0'); //=> false +``` + +### require('spdx-license-ids/deprecated') + +Type: `string[]` + +Deprecated license IDs. + +```javascript +const deprecatedIds = require('spdx-license-ids/deprecated'); +//=> ['AGPL-1.0', 'AGPL-3.0', 'GFDL-1.1', 'GFDL-1.2', 'GFDL-1.3', 'GPL-1.0', 'GPL-2.0', ...] 
+ +deprecatedIds.includes('BSD-3-Clause'); //=> false +deprecatedIds.includes('CC-BY-1.0'); //=> false + +deprecatedIds.includes('GPL-3.0'); //=> true +``` + +## License + +[Creative Commons Zero v1.0 Universal](https://creativecommons.org/publicdomain/zero/1.0/deed) diff --git a/mybulma/node_modules/spdx-license-ids/deprecated.json b/mybulma/node_modules/spdx-license-ids/deprecated.json new file mode 100644 index 0000000..278531e --- /dev/null +++ b/mybulma/node_modules/spdx-license-ids/deprecated.json @@ -0,0 +1,27 @@ +[ + "AGPL-1.0", + "AGPL-3.0", + "BSD-2-Clause-FreeBSD", + "BSD-2-Clause-NetBSD", + "GFDL-1.1", + "GFDL-1.2", + "GFDL-1.3", + "GPL-1.0", + "GPL-2.0", + "GPL-2.0-with-GCC-exception", + "GPL-2.0-with-autoconf-exception", + "GPL-2.0-with-bison-exception", + "GPL-2.0-with-classpath-exception", + "GPL-2.0-with-font-exception", + "GPL-3.0", + "GPL-3.0-with-GCC-exception", + "GPL-3.0-with-autoconf-exception", + "LGPL-2.0", + "LGPL-2.1", + "LGPL-3.0", + "Nunit", + "StandardML-NJ", + "bzip2-1.0.5", + "eCos-2.0", + "wxWindows" +] diff --git a/mybulma/node_modules/spdx-license-ids/index.json b/mybulma/node_modules/spdx-license-ids/index.json new file mode 100644 index 0000000..fdd78fa --- /dev/null +++ b/mybulma/node_modules/spdx-license-ids/index.json @@ -0,0 +1,467 @@ +[ + "0BSD", + "AAL", + "ADSL", + "AFL-1.1", + "AFL-1.2", + "AFL-2.0", + "AFL-2.1", + "AFL-3.0", + "AGPL-1.0-only", + "AGPL-1.0-or-later", + "AGPL-3.0-only", + "AGPL-3.0-or-later", + "AMDPLPA", + "AML", + "AMPAS", + "ANTLR-PD", + "ANTLR-PD-fallback", + "APAFML", + "APL-1.0", + "APSL-1.0", + "APSL-1.1", + "APSL-1.2", + "APSL-2.0", + "Abstyles", + "Adobe-2006", + "Adobe-Glyph", + "Afmparse", + "Aladdin", + "Apache-1.0", + "Apache-1.1", + "Apache-2.0", + "App-s2p", + "Arphic-1999", + "Artistic-1.0", + "Artistic-1.0-Perl", + "Artistic-1.0-cl8", + "Artistic-2.0", + "BSD-1-Clause", + "BSD-2-Clause", + "BSD-2-Clause-Patent", + "BSD-2-Clause-Views", + "BSD-3-Clause", + "BSD-3-Clause-Attribution", + "BSD-3-Clause-Clear", + "BSD-3-Clause-LBNL", + "BSD-3-Clause-Modification", + "BSD-3-Clause-No-Military-License", + "BSD-3-Clause-No-Nuclear-License", + "BSD-3-Clause-No-Nuclear-License-2014", + "BSD-3-Clause-No-Nuclear-Warranty", + "BSD-3-Clause-Open-MPI", + "BSD-4-Clause", + "BSD-4-Clause-Shortened", + "BSD-4-Clause-UC", + "BSD-Protection", + "BSD-Source-Code", + "BSL-1.0", + "BUSL-1.1", + "Baekmuk", + "Bahyph", + "Barr", + "Beerware", + "BitTorrent-1.0", + "BitTorrent-1.1", + "Bitstream-Vera", + "BlueOak-1.0.0", + "Borceux", + "C-UDA-1.0", + "CAL-1.0", + "CAL-1.0-Combined-Work-Exception", + "CATOSL-1.1", + "CC-BY-1.0", + "CC-BY-2.0", + "CC-BY-2.5", + "CC-BY-2.5-AU", + "CC-BY-3.0", + "CC-BY-3.0-AT", + "CC-BY-3.0-DE", + "CC-BY-3.0-IGO", + "CC-BY-3.0-NL", + "CC-BY-3.0-US", + "CC-BY-4.0", + "CC-BY-NC-1.0", + "CC-BY-NC-2.0", + "CC-BY-NC-2.5", + "CC-BY-NC-3.0", + "CC-BY-NC-3.0-DE", + "CC-BY-NC-4.0", + "CC-BY-NC-ND-1.0", + "CC-BY-NC-ND-2.0", + "CC-BY-NC-ND-2.5", + "CC-BY-NC-ND-3.0", + "CC-BY-NC-ND-3.0-DE", + "CC-BY-NC-ND-3.0-IGO", + "CC-BY-NC-ND-4.0", + "CC-BY-NC-SA-1.0", + "CC-BY-NC-SA-2.0", + "CC-BY-NC-SA-2.0-FR", + "CC-BY-NC-SA-2.0-UK", + "CC-BY-NC-SA-2.5", + "CC-BY-NC-SA-3.0", + "CC-BY-NC-SA-3.0-DE", + "CC-BY-NC-SA-3.0-IGO", + "CC-BY-NC-SA-4.0", + "CC-BY-ND-1.0", + "CC-BY-ND-2.0", + "CC-BY-ND-2.5", + "CC-BY-ND-3.0", + "CC-BY-ND-3.0-DE", + "CC-BY-ND-4.0", + "CC-BY-SA-1.0", + "CC-BY-SA-2.0", + "CC-BY-SA-2.0-UK", + "CC-BY-SA-2.1-JP", + "CC-BY-SA-2.5", + "CC-BY-SA-3.0", + "CC-BY-SA-3.0-AT", + "CC-BY-SA-3.0-DE", + "CC-BY-SA-4.0", + 
"CC-PDDC", + "CC0-1.0", + "CDDL-1.0", + "CDDL-1.1", + "CDL-1.0", + "CDLA-Permissive-1.0", + "CDLA-Permissive-2.0", + "CDLA-Sharing-1.0", + "CECILL-1.0", + "CECILL-1.1", + "CECILL-2.0", + "CECILL-2.1", + "CECILL-B", + "CECILL-C", + "CERN-OHL-1.1", + "CERN-OHL-1.2", + "CERN-OHL-P-2.0", + "CERN-OHL-S-2.0", + "CERN-OHL-W-2.0", + "CNRI-Jython", + "CNRI-Python", + "CNRI-Python-GPL-Compatible", + "COIL-1.0", + "CPAL-1.0", + "CPL-1.0", + "CPOL-1.02", + "CUA-OPL-1.0", + "Caldera", + "ClArtistic", + "Community-Spec-1.0", + "Condor-1.1", + "Crossword", + "CrystalStacker", + "Cube", + "D-FSL-1.0", + "DL-DE-BY-2.0", + "DOC", + "DRL-1.0", + "DSDP", + "Dotseqn", + "ECL-1.0", + "ECL-2.0", + "EFL-1.0", + "EFL-2.0", + "EPICS", + "EPL-1.0", + "EPL-2.0", + "EUDatagrid", + "EUPL-1.0", + "EUPL-1.1", + "EUPL-1.2", + "Elastic-2.0", + "Entessa", + "ErlPL-1.1", + "Eurosym", + "FDK-AAC", + "FSFAP", + "FSFUL", + "FSFULLR", + "FTL", + "Fair", + "Frameworx-1.0", + "FreeBSD-DOC", + "FreeImage", + "GD", + "GFDL-1.1-invariants-only", + "GFDL-1.1-invariants-or-later", + "GFDL-1.1-no-invariants-only", + "GFDL-1.1-no-invariants-or-later", + "GFDL-1.1-only", + "GFDL-1.1-or-later", + "GFDL-1.2-invariants-only", + "GFDL-1.2-invariants-or-later", + "GFDL-1.2-no-invariants-only", + "GFDL-1.2-no-invariants-or-later", + "GFDL-1.2-only", + "GFDL-1.2-or-later", + "GFDL-1.3-invariants-only", + "GFDL-1.3-invariants-or-later", + "GFDL-1.3-no-invariants-only", + "GFDL-1.3-no-invariants-or-later", + "GFDL-1.3-only", + "GFDL-1.3-or-later", + "GL2PS", + "GLWTPL", + "GPL-1.0-only", + "GPL-1.0-or-later", + "GPL-2.0-only", + "GPL-2.0-or-later", + "GPL-3.0-only", + "GPL-3.0-or-later", + "Giftware", + "Glide", + "Glulxe", + "HPND", + "HPND-sell-variant", + "HTMLTIDY", + "HaskellReport", + "Hippocratic-2.1", + "IBM-pibs", + "ICU", + "IJG", + "IPA", + "IPL-1.0", + "ISC", + "ImageMagick", + "Imlib2", + "Info-ZIP", + "Intel", + "Intel-ACPI", + "Interbase-1.0", + "JPNIC", + "JSON", + "Jam", + "JasPer-2.0", + "LAL-1.2", + "LAL-1.3", + "LGPL-2.0-only", + "LGPL-2.0-or-later", + "LGPL-2.1-only", + "LGPL-2.1-or-later", + "LGPL-3.0-only", + "LGPL-3.0-or-later", + "LGPLLR", + "LPL-1.0", + "LPL-1.02", + "LPPL-1.0", + "LPPL-1.1", + "LPPL-1.2", + "LPPL-1.3a", + "LPPL-1.3c", + "LZMA-SDK-9.11-to-9.20", + "LZMA-SDK-9.22", + "Latex2e", + "Leptonica", + "LiLiQ-P-1.1", + "LiLiQ-R-1.1", + "LiLiQ-Rplus-1.1", + "Libpng", + "Linux-OpenIB", + "Linux-man-pages-copyleft", + "MIT", + "MIT-0", + "MIT-CMU", + "MIT-Modern-Variant", + "MIT-advertising", + "MIT-enna", + "MIT-feh", + "MIT-open-group", + "MITNFA", + "MPL-1.0", + "MPL-1.1", + "MPL-2.0", + "MPL-2.0-no-copyleft-exception", + "MS-LPL", + "MS-PL", + "MS-RL", + "MTLL", + "MakeIndex", + "Minpack", + "MirOS", + "Motosoto", + "MulanPSL-1.0", + "MulanPSL-2.0", + "Multics", + "Mup", + "NAIST-2003", + "NASA-1.3", + "NBPL-1.0", + "NCGL-UK-2.0", + "NCSA", + "NGPL", + "NICTA-1.0", + "NIST-PD", + "NIST-PD-fallback", + "NLOD-1.0", + "NLOD-2.0", + "NLPL", + "NOSL", + "NPL-1.0", + "NPL-1.1", + "NPOSL-3.0", + "NRL", + "NTP", + "NTP-0", + "Naumen", + "Net-SNMP", + "NetCDF", + "Newsletr", + "Nokia", + "Noweb", + "O-UDA-1.0", + "OCCT-PL", + "OCLC-2.0", + "ODC-By-1.0", + "ODbL-1.0", + "OFL-1.0", + "OFL-1.0-RFN", + "OFL-1.0-no-RFN", + "OFL-1.1", + "OFL-1.1-RFN", + "OFL-1.1-no-RFN", + "OGC-1.0", + "OGDL-Taiwan-1.0", + "OGL-Canada-2.0", + "OGL-UK-1.0", + "OGL-UK-2.0", + "OGL-UK-3.0", + "OGTSL", + "OLDAP-1.1", + "OLDAP-1.2", + "OLDAP-1.3", + "OLDAP-1.4", + "OLDAP-2.0", + "OLDAP-2.0.1", + "OLDAP-2.1", + "OLDAP-2.2", + "OLDAP-2.2.1", + 
"OLDAP-2.2.2", + "OLDAP-2.3", + "OLDAP-2.4", + "OLDAP-2.5", + "OLDAP-2.6", + "OLDAP-2.7", + "OLDAP-2.8", + "OML", + "OPL-1.0", + "OPUBL-1.0", + "OSET-PL-2.1", + "OSL-1.0", + "OSL-1.1", + "OSL-2.0", + "OSL-2.1", + "OSL-3.0", + "OpenSSL", + "PDDL-1.0", + "PHP-3.0", + "PHP-3.01", + "PSF-2.0", + "Parity-6.0.0", + "Parity-7.0.0", + "Plexus", + "PolyForm-Noncommercial-1.0.0", + "PolyForm-Small-Business-1.0.0", + "PostgreSQL", + "Python-2.0", + "Python-2.0.1", + "QPL-1.0", + "Qhull", + "RHeCos-1.1", + "RPL-1.1", + "RPL-1.5", + "RPSL-1.0", + "RSA-MD", + "RSCPL", + "Rdisc", + "Ruby", + "SAX-PD", + "SCEA", + "SGI-B-1.0", + "SGI-B-1.1", + "SGI-B-2.0", + "SHL-0.5", + "SHL-0.51", + "SISSL", + "SISSL-1.2", + "SMLNJ", + "SMPPL", + "SNIA", + "SPL-1.0", + "SSH-OpenSSH", + "SSH-short", + "SSPL-1.0", + "SWL", + "Saxpath", + "SchemeReport", + "Sendmail", + "Sendmail-8.23", + "SimPL-2.0", + "Sleepycat", + "Spencer-86", + "Spencer-94", + "Spencer-99", + "SugarCRM-1.1.3", + "TAPR-OHL-1.0", + "TCL", + "TCP-wrappers", + "TMate", + "TORQUE-1.1", + "TOSL", + "TU-Berlin-1.0", + "TU-Berlin-2.0", + "UCL-1.0", + "UPL-1.0", + "Unicode-DFS-2015", + "Unicode-DFS-2016", + "Unicode-TOU", + "Unlicense", + "VOSTROM", + "VSL-1.0", + "Vim", + "W3C", + "W3C-19980720", + "W3C-20150513", + "WTFPL", + "Watcom-1.0", + "Wsuipa", + "X11", + "X11-distribute-modifications-variant", + "XFree86-1.1", + "XSkat", + "Xerox", + "Xnet", + "YPL-1.0", + "YPL-1.1", + "ZPL-1.1", + "ZPL-2.0", + "ZPL-2.1", + "Zed", + "Zend-2.0", + "Zimbra-1.3", + "Zimbra-1.4", + "Zlib", + "blessing", + "bzip2-1.0.6", + "copyleft-next-0.3.0", + "copyleft-next-0.3.1", + "curl", + "diffmark", + "dvipdfm", + "eGenix", + "etalab-2.0", + "gSOAP-1.3b", + "gnuplot", + "iMatix", + "libpng-2.0", + "libselinux-1.0", + "libtiff", + "mpi-permissive", + "mpich2", + "mplus", + "psfrag", + "psutils", + "xinetd", + "xpp", + "zlib-acknowledgement" +] diff --git a/mybulma/node_modules/spdx-license-ids/package.json b/mybulma/node_modules/spdx-license-ids/package.json new file mode 100644 index 0000000..e3622fc --- /dev/null +++ b/mybulma/node_modules/spdx-license-ids/package.json @@ -0,0 +1,39 @@ +{ + "name": "spdx-license-ids", + "version": "3.0.12", + "description": "A list of SPDX license identifiers", + "repository": "jslicense/spdx-license-ids", + "author": "Shinnosuke Watanabe (https://github.com/shinnn)", + "license": "CC0-1.0", + "scripts": { + "build": "node build.js", + "pretest": "eslint .", + "latest": "node latest.js", + "test": "node test.js" + }, + "files": [ + "deprecated.json", + "index.json" + ], + "keywords": [ + "spdx", + "license", + "licenses", + "id", + "identifier", + "identifiers", + "json", + "array", + "oss" + ], + "devDependencies": { + "@shinnn/eslint-config": "^7.0.0", + "eslint": "^8.2.0", + "eslint-formatter-codeframe": "^7.32.1", + "rmfr": "^2.0.0", + "tape": "^5.3.1" + }, + "eslintConfig": { + "extends": "@shinnn" + } +} diff --git a/mybulma/node_modules/ssri/LICENSE.md b/mybulma/node_modules/ssri/LICENSE.md new file mode 100644 index 0000000..e335388 --- /dev/null +++ b/mybulma/node_modules/ssri/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright 2021 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/mybulma/node_modules/ssri/README.md b/mybulma/node_modules/ssri/README.md new file mode 100644 index 0000000..6f46aa5 --- /dev/null +++ b/mybulma/node_modules/ssri/README.md @@ -0,0 +1,528 @@ +# ssri [![npm version](https://img.shields.io/npm/v/ssri.svg)](https://npm.im/ssri) [![license](https://img.shields.io/npm/l/ssri.svg)](https://npm.im/ssri) [![Travis](https://img.shields.io/travis/npm/ssri.svg)](https://travis-ci.org/npm/ssri) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/ssri?svg=true)](https://ci.appveyor.com/project/npm/ssri) [![Coverage Status](https://coveralls.io/repos/github/npm/ssri/badge.svg?branch=latest)](https://coveralls.io/github/npm/ssri?branch=latest) + +[`ssri`](https://github.com/npm/ssri), short for Standard Subresource +Integrity, is a Node.js utility for parsing, manipulating, serializing, +generating, and verifying [Subresource +Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) hashes. + +## Install + +`$ npm install --save ssri` + +## Table of Contents + +* [Example](#example) +* [Features](#features) +* [Contributing](#contributing) +* [API](#api) + * Parsing & Serializing + * [`parse`](#parse) + * [`stringify`](#stringify) + * [`Integrity#concat`](#integrity-concat) + * [`Integrity#merge`](#integrity-merge) + * [`Integrity#toString`](#integrity-to-string) + * [`Integrity#toJSON`](#integrity-to-json) + * [`Integrity#match`](#integrity-match) + * [`Integrity#pickAlgorithm`](#integrity-pick-algorithm) + * [`Integrity#hexDigest`](#integrity-hex-digest) + * Integrity Generation + * [`fromHex`](#from-hex) + * [`fromData`](#from-data) + * [`fromStream`](#from-stream) + * [`create`](#create) + * Integrity Verification + * [`checkData`](#check-data) + * [`checkStream`](#check-stream) + * [`integrityStream`](#integrity-stream) + +### Example + +```javascript +const ssri = require('ssri') + +const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo' + +// Parsing and serializing +const parsed = ssri.parse(integrity) +ssri.stringify(parsed) // === integrity (works on non-Integrity objects) +parsed.toString() // === integrity + +// Async stream functions +ssri.checkStream(fs.createReadStream('./my-file'), integrity).then(...) +ssri.fromStream(fs.createReadStream('./my-file')).then(sri => { + sri.toString() === integrity +}) +fs.createReadStream('./my-file').pipe(ssri.createCheckerStream(sri)) + +// Sync data functions +ssri.fromData(fs.readFileSync('./my-file')) // === parsed +ssri.checkData(fs.readFileSync('./my-file'), integrity) // => 'sha512' +``` + +### Features + +* Parses and stringifies SRI strings. +* Generates SRI strings from raw data or Streams. +* Strict standard compliance. +* `?foo` metadata option support. +* Multiple entries for the same algorithm. +* Object-based integrity hash manipulation. +* Small footprint: no dependencies, concise implementation. +* Full test coverage. +* Customizable algorithm picker. 
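+
+Two of the items above -- multiple entries for the same algorithm and the
+customizable algorithm picker -- are easiest to see in a quick sketch. The
+snippet below is illustrative rather than lifted from elsewhere in this
+README: the digests are placeholders, and only `parse`, `pickAlgorithm`, and
+`toString` from the API section that follows are used.
+
+```javascript
+const ssri = require('ssri')
+
+// Two algorithms, one of them listed twice, plus a `?foo` option entry.
+const sri = ssri.parse('sha384-deadbeef sha512-c0ffee sha512-bad1dea?foo')
+
+sri.pickAlgorithm() // -> 'sha512' (the strongest algorithm present)
+sri.toString()      // -> serializes every entry back out, options included
+```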
+ +### Contributing + +The ssri team enthusiastically welcomes contributions and project participation! +There's a bunch of things you can do if you want to contribute! The [Contributor +Guide](CONTRIBUTING.md) has all the information you need for everything from +reporting bugs to contributing entire new features. Please don't hesitate to +jump in if you'd like to, or even ask us questions if something isn't clear. + +### API + +#### `> ssri.parse(sri, [opts]) -> Integrity` + +Parses `sri` into an `Integrity` data structure. `sri` can be an integrity +string, an `Hash`-like with `digest` and `algorithm` fields and an optional +`options` field, or an `Integrity`-like object. The resulting object will be an +`Integrity` instance that has this shape: + +```javascript +{ + 'sha1': [{algorithm: 'sha1', digest: 'deadbeef', options: []}], + 'sha512': [ + {algorithm: 'sha512', digest: 'c0ffee', options: []}, + {algorithm: 'sha512', digest: 'bad1dea', options: ['foo']} + ], +} +``` + +If `opts.single` is truthy, a single `Hash` object will be returned. That is, a +single object that looks like `{algorithm, digest, options}`, as opposed to a +larger object with multiple of these. + +If `opts.strict` is truthy, the resulting object will be filtered such that +it strictly follows the Subresource Integrity spec, throwing away any entries +with any invalid components. This also means a restricted set of algorithms +will be used -- the spec limits them to `sha256`, `sha384`, and `sha512`. + +Strict mode is recommended if the integrity strings are intended for use in +browsers, or in other situations where strict adherence to the spec is needed. + +##### Example + +```javascript +ssri.parse('sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo') // -> Integrity object +``` + +#### `> ssri.stringify(sri, [opts]) -> String` + +This function is identical to [`Integrity#toString()`](#integrity-to-string), +except it can be used on _any_ object that [`parse`](#parse) can handle -- that +is, a string, an `Hash`-like, or an `Integrity`-like. + +The `opts.sep` option defines the string to use when joining multiple entries +together. To be spec-compliant, this _must_ be whitespace. The default is a +single space (`' '`). + +If `opts.strict` is true, the integrity string will be created using strict +parsing rules. See [`ssri.parse`](#parse). + +##### Example + +```javascript +// Useful for cleaning up input SRI strings: +ssri.stringify('\n\rsha512-foo\n\t\tsha384-bar') +// -> 'sha512-foo sha384-bar' + +// Hash-like: only a single entry. +ssri.stringify({ + algorithm: 'sha512', + digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==', + options: ['foo'] +}) +// -> +// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo' + +// Integrity-like: full multi-entry syntax. Similar to output of `ssri.parse` +ssri.stringify({ + 'sha512': [ + { + algorithm: 'sha512', + digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==', + options: ['foo'] + } + ] +}) +// -> +// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo' +``` + +#### `> Integrity#concat(otherIntegrity, [opts]) -> Integrity` + +Concatenates an `Integrity` object with another IntegrityLike, or an integrity +string. 
+ +This is functionally equivalent to concatenating the string format of both +integrity arguments, and calling [`ssri.parse`](#ssri-parse) on the new string. + +If `opts.strict` is true, the new `Integrity` will be created using strict +parsing rules. See [`ssri.parse`](#parse). + +##### Example + +```javascript +// This will combine the integrity checks for two different versions of +// your index.js file so you can use a single integrity string and serve +// either of these to clients, from a single ` + +``` + +## API + +### parser(args, opts={}) + +Parses command line arguments returning a simple mapping of keys and values. + +**expects:** + +* `args`: a string or array of strings representing the options to parse. +* `opts`: provide a set of hints indicating how `args` should be parsed: + * `opts.alias`: an object representing the set of aliases for a key: `{alias: {foo: ['f']}}`. + * `opts.array`: indicate that keys should be parsed as an array: `{array: ['foo', 'bar']}`.
+ Indicate that keys should be parsed as an array and coerced to booleans / numbers:
+ `{array: [{ key: 'foo', boolean: true }, {key: 'bar', number: true}]}`. + * `opts.boolean`: arguments should be parsed as booleans: `{boolean: ['x', 'y']}`. + * `opts.coerce`: provide a custom synchronous function that returns a coerced value from the argument provided + (or throws an error). For arrays the function is called only once for the entire array:
+ `{coerce: {foo: function (arg) {return modifiedArg}}}`. + * `opts.config`: indicate a key that represents a path to a configuration file (this file will be loaded and parsed). + * `opts.configObjects`: configuration objects to parse, their properties will be set as arguments:
+ `{configObjects: [{'x': 5, 'y': 33}, {'z': 44}]}`. + * `opts.configuration`: provide configuration options to the yargs-parser (see: [configuration](#configuration)). + * `opts.count`: indicate a key that should be used as a counter, e.g., `-vvv` = `{v: 3}`. + * `opts.default`: provide default values for keys: `{default: {x: 33, y: 'hello world!'}}`. + * `opts.envPrefix`: environment variables (`process.env`) with the prefix provided should be parsed. + * `opts.narg`: specify that a key requires `n` arguments: `{narg: {x: 2}}`. + * `opts.normalize`: `path.normalize()` will be applied to values set to this key. + * `opts.number`: keys should be treated as numbers. + * `opts.string`: keys should be treated as strings (even if they resemble a number `-x 33`). + +**returns:** + +* `obj`: an object representing the parsed value of `args` + * `key/value`: key value pairs for each argument and their aliases. + * `_`: an array representing the positional arguments. + * [optional] `--`: an array with arguments after the end-of-options flag `--`. + +### require('yargs-parser').detailed(args, opts={}) + +Parses a command line string, returning detailed information required by the +yargs engine. + +**expects:** + +* `args`: a string or array of strings representing options to parse. +* `opts`: provide a set of hints indicating how `args`, inputs are identical to `require('yargs-parser')(args, opts={})`. + +**returns:** + +* `argv`: an object representing the parsed value of `args` + * `key/value`: key value pairs for each argument and their aliases. + * `_`: an array representing the positional arguments. + * [optional] `--`: an array with arguments after the end-of-options flag `--`. +* `error`: populated with an error object if an exception occurred during parsing. +* `aliases`: the inferred list of aliases built by combining lists in `opts.alias`. +* `newAliases`: any new aliases added via camel-case expansion: + * `boolean`: `{ fooBar: true }` +* `defaulted`: any new argument created by `opts.default`, no aliases included. + * `boolean`: `{ foo: true }` +* `configuration`: given by default settings and `opts.configuration`. + + + +### Configuration + +The yargs-parser applies several automated transformations on the keys provided +in `args`. These features can be turned on and off using the `configuration` field +of `opts`. + +```js +var parsed = parser(['--no-dice'], { + configuration: { + 'boolean-negation': false + } +}) +``` + +### short option groups + +* default: `true`. +* key: `short-option-groups`. + +Should a group of short-options be treated as boolean flags? + +```console +$ node example.js -abc +{ _: [], a: true, b: true, c: true } +``` + +_if disabled:_ + +```console +$ node example.js -abc +{ _: [], abc: true } +``` + +### camel-case expansion + +* default: `true`. +* key: `camel-case-expansion`. + +Should hyphenated arguments be expanded into camel-case aliases? + +```console +$ node example.js --foo-bar +{ _: [], 'foo-bar': true, fooBar: true } +``` + +_if disabled:_ + +```console +$ node example.js --foo-bar +{ _: [], 'foo-bar': true } +``` + +### dot-notation + +* default: `true` +* key: `dot-notation` + +Should keys that contain `.` be treated as objects? + +```console +$ node example.js --foo.bar +{ _: [], foo: { bar: true } } +``` + +_if disabled:_ + +```console +$ node example.js --foo.bar +{ _: [], "foo.bar": true } +``` + +### parse numbers + +* default: `true` +* key: `parse-numbers` + +Should keys that look like numbers be treated as such? 
+ +```console +$ node example.js --foo=99.3 +{ _: [], foo: 99.3 } +``` + +_if disabled:_ + +```console +$ node example.js --foo=99.3 +{ _: [], foo: "99.3" } +``` + +### parse positional numbers + +* default: `true` +* key: `parse-positional-numbers` + +Should positional keys that look like numbers be treated as such. + +```console +$ node example.js 99.3 +{ _: [99.3] } +``` + +_if disabled:_ + +```console +$ node example.js 99.3 +{ _: ['99.3'] } +``` + +### boolean negation + +* default: `true` +* key: `boolean-negation` + +Should variables prefixed with `--no` be treated as negations? + +```console +$ node example.js --no-foo +{ _: [], foo: false } +``` + +_if disabled:_ + +```console +$ node example.js --no-foo +{ _: [], "no-foo": true } +``` + +### combine arrays + +* default: `false` +* key: `combine-arrays` + +Should arrays be combined when provided by both command line arguments and +a configuration file. + +### duplicate arguments array + +* default: `true` +* key: `duplicate-arguments-array` + +Should arguments be coerced into an array when duplicated: + +```console +$ node example.js -x 1 -x 2 +{ _: [], x: [1, 2] } +``` + +_if disabled:_ + +```console +$ node example.js -x 1 -x 2 +{ _: [], x: 2 } +``` + +### flatten duplicate arrays + +* default: `true` +* key: `flatten-duplicate-arrays` + +Should array arguments be coerced into a single array when duplicated: + +```console +$ node example.js -x 1 2 -x 3 4 +{ _: [], x: [1, 2, 3, 4] } +``` + +_if disabled:_ + +```console +$ node example.js -x 1 2 -x 3 4 +{ _: [], x: [[1, 2], [3, 4]] } +``` + +### greedy arrays + +* default: `true` +* key: `greedy-arrays` + +Should arrays consume more than one positional argument following their flag. + +```console +$ node example --arr 1 2 +{ _: [], arr: [1, 2] } +``` + +_if disabled:_ + +```console +$ node example --arr 1 2 +{ _: [2], arr: [1] } +``` + +**Note: in `v18.0.0` we are considering defaulting greedy arrays to `false`.** + +### nargs eats options + +* default: `false` +* key: `nargs-eats-options` + +Should nargs consume dash options as well as positional arguments. + +### negation prefix + +* default: `no-` +* key: `negation-prefix` + +The prefix to use for negated boolean variables. + +```console +$ node example.js --no-foo +{ _: [], foo: false } +``` + +_if set to `quux`:_ + +```console +$ node example.js --quuxfoo +{ _: [], foo: false } +``` + +### populate -- + +* default: `false`. +* key: `populate--` + +Should unparsed flags be stored in `--` or `_`. + +_If disabled:_ + +```console +$ node example.js a -b -- x y +{ _: [ 'a', 'x', 'y' ], b: true } +``` + +_If enabled:_ + +```console +$ node example.js a -b -- x y +{ _: [ 'a' ], '--': [ 'x', 'y' ], b: true } +``` + +### set placeholder key + +* default: `false`. +* key: `set-placeholder-key`. + +Should a placeholder be added for keys not set via the corresponding CLI argument? + +_If disabled:_ + +```console +$ node example.js -a 1 -c 2 +{ _: [], a: 1, c: 2 } +``` + +_If enabled:_ + +```console +$ node example.js -a 1 -c 2 +{ _: [], a: 1, b: undefined, c: 2 } +``` + +### halt at non-option + +* default: `false`. +* key: `halt-at-non-option`. + +Should parsing stop at the first positional argument? This is similar to how e.g. `ssh` parses its command line. 
+ +_If disabled:_ + +```console +$ node example.js -a run b -x y +{ _: [ 'b' ], a: 'run', x: 'y' } +``` + +_If enabled:_ + +```console +$ node example.js -a run b -x y +{ _: [ 'b', '-x', 'y' ], a: 'run' } +``` + +### strip aliased + +* default: `false` +* key: `strip-aliased` + +Should aliases be removed before returning results? + +_If disabled:_ + +```console +$ node example.js --test-field 1 +{ _: [], 'test-field': 1, testField: 1, 'test-alias': 1, testAlias: 1 } +``` + +_If enabled:_ + +```console +$ node example.js --test-field 1 +{ _: [], 'test-field': 1, testField: 1 } +``` + +### strip dashed + +* default: `false` +* key: `strip-dashed` + +Should dashed keys be removed before returning results? This option has no effect if +`camel-case-expansion` is disabled. + +_If disabled:_ + +```console +$ node example.js --test-field 1 +{ _: [], 'test-field': 1, testField: 1 } +``` + +_If enabled:_ + +```console +$ node example.js --test-field 1 +{ _: [], testField: 1 } +``` + +### unknown options as args + +* default: `false` +* key: `unknown-options-as-args` + +Should unknown options be treated like regular arguments? An unknown option is one that is not +configured in `opts`. + +_If disabled_ + +```console +$ node example.js --unknown-option --known-option 2 --string-option --unknown-option2 +{ _: [], unknownOption: true, knownOption: 2, stringOption: '', unknownOption2: true } +``` + +_If enabled_ + +```console +$ node example.js --unknown-option --known-option 2 --string-option --unknown-option2 +{ _: ['--unknown-option'], knownOption: 2, stringOption: '--unknown-option2' } +``` + +## Supported Node.js Versions + +Libraries in this ecosystem make a best effort to track +[Node.js' release schedule](https://nodejs.org/en/about/releases/). Here's [a +post on why we think this is important](https://medium.com/the-node-js-collection/maintainers-should-consider-following-node-js-release-schedule-ab08ed4de71a). + +## Special Thanks + +The yargs project evolves from optimist and minimist. It owes its +existence to a lot of James Halliday's hard work. Thanks [substack](https://github.com/substack) **beep** **boop** \o/ + +## License + +ISC diff --git a/mybulma/node_modules/yargs-parser/browser.js b/mybulma/node_modules/yargs-parser/browser.js new file mode 100644 index 0000000..241202c --- /dev/null +++ b/mybulma/node_modules/yargs-parser/browser.js @@ -0,0 +1,29 @@ +// Main entrypoint for ESM web browser environments. Avoids using Node.js +// specific libraries, such as "path". +// +// TODO: figure out reasonable web equivalents for "resolve", "normalize", etc. 
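+//
+// The YargsParser constructed below receives minimal browser-safe stand-ins
+// for the Node.js helpers the parser normally uses: path resolution and
+// normalization pass strings through unchanged, loading a config file throws,
+// and the environment lookup returns nothing.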
+import { camelCase, decamelize, looksLikeNumber } from './build/lib/string-utils.js' +import { YargsParser } from './build/lib/yargs-parser.js' +const parser = new YargsParser({ + cwd: () => { return '' }, + format: (str, arg) => { return str.replace('%s', arg) }, + normalize: (str) => { return str }, + resolve: (str) => { return str }, + require: () => { + throw Error('loading config from files not currently supported in browser') + }, + env: () => {} +}) + +const yargsParser = function Parser (args, opts) { + const result = parser.parse(args.slice(), opts) + return result.argv +} +yargsParser.detailed = function (args, opts) { + return parser.parse(args.slice(), opts) +} +yargsParser.camelCase = camelCase +yargsParser.decamelize = decamelize +yargsParser.looksLikeNumber = looksLikeNumber + +export default yargsParser diff --git a/mybulma/node_modules/yargs-parser/build/index.cjs b/mybulma/node_modules/yargs-parser/build/index.cjs new file mode 100644 index 0000000..33b5ebd --- /dev/null +++ b/mybulma/node_modules/yargs-parser/build/index.cjs @@ -0,0 +1,1042 @@ +'use strict'; + +var util = require('util'); +var fs = require('fs'); +var path = require('path'); + +function camelCase(str) { + const isCamelCase = str !== str.toLowerCase() && str !== str.toUpperCase(); + if (!isCamelCase) { + str = str.toLowerCase(); + } + if (str.indexOf('-') === -1 && str.indexOf('_') === -1) { + return str; + } + else { + let camelcase = ''; + let nextChrUpper = false; + const leadingHyphens = str.match(/^-+/); + for (let i = leadingHyphens ? leadingHyphens[0].length : 0; i < str.length; i++) { + let chr = str.charAt(i); + if (nextChrUpper) { + nextChrUpper = false; + chr = chr.toUpperCase(); + } + if (i !== 0 && (chr === '-' || chr === '_')) { + nextChrUpper = true; + } + else if (chr !== '-' && chr !== '_') { + camelcase += chr; + } + } + return camelcase; + } +} +function decamelize(str, joinString) { + const lowercase = str.toLowerCase(); + joinString = joinString || '-'; + let notCamelcase = ''; + for (let i = 0; i < str.length; i++) { + const chrLower = lowercase.charAt(i); + const chrString = str.charAt(i); + if (chrLower !== chrString && i > 0) { + notCamelcase += `${joinString}${lowercase.charAt(i)}`; + } + else { + notCamelcase += chrString; + } + } + return notCamelcase; +} +function looksLikeNumber(x) { + if (x === null || x === undefined) + return false; + if (typeof x === 'number') + return true; + if (/^0x[0-9a-f]+$/i.test(x)) + return true; + if (/^0[^.]/.test(x)) + return false; + return /^[-]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x); +} + +function tokenizeArgString(argString) { + if (Array.isArray(argString)) { + return argString.map(e => typeof e !== 'string' ? 
e + '' : e); + } + argString = argString.trim(); + let i = 0; + let prevC = null; + let c = null; + let opening = null; + const args = []; + for (let ii = 0; ii < argString.length; ii++) { + prevC = c; + c = argString.charAt(ii); + if (c === ' ' && !opening) { + if (!(prevC === ' ')) { + i++; + } + continue; + } + if (c === opening) { + opening = null; + } + else if ((c === "'" || c === '"') && !opening) { + opening = c; + } + if (!args[i]) + args[i] = ''; + args[i] += c; + } + return args; +} + +var DefaultValuesForTypeKey; +(function (DefaultValuesForTypeKey) { + DefaultValuesForTypeKey["BOOLEAN"] = "boolean"; + DefaultValuesForTypeKey["STRING"] = "string"; + DefaultValuesForTypeKey["NUMBER"] = "number"; + DefaultValuesForTypeKey["ARRAY"] = "array"; +})(DefaultValuesForTypeKey || (DefaultValuesForTypeKey = {})); + +let mixin; +class YargsParser { + constructor(_mixin) { + mixin = _mixin; + } + parse(argsInput, options) { + const opts = Object.assign({ + alias: undefined, + array: undefined, + boolean: undefined, + config: undefined, + configObjects: undefined, + configuration: undefined, + coerce: undefined, + count: undefined, + default: undefined, + envPrefix: undefined, + narg: undefined, + normalize: undefined, + string: undefined, + number: undefined, + __: undefined, + key: undefined + }, options); + const args = tokenizeArgString(argsInput); + const aliases = combineAliases(Object.assign(Object.create(null), opts.alias)); + const configuration = Object.assign({ + 'boolean-negation': true, + 'camel-case-expansion': true, + 'combine-arrays': false, + 'dot-notation': true, + 'duplicate-arguments-array': true, + 'flatten-duplicate-arrays': true, + 'greedy-arrays': true, + 'halt-at-non-option': false, + 'nargs-eats-options': false, + 'negation-prefix': 'no-', + 'parse-numbers': true, + 'parse-positional-numbers': true, + 'populate--': false, + 'set-placeholder-key': false, + 'short-option-groups': true, + 'strip-aliased': false, + 'strip-dashed': false, + 'unknown-options-as-args': false + }, opts.configuration); + const defaults = Object.assign(Object.create(null), opts.default); + const configObjects = opts.configObjects || []; + const envPrefix = opts.envPrefix; + const notFlagsOption = configuration['populate--']; + const notFlagsArgv = notFlagsOption ? '--' : '_'; + const newAliases = Object.create(null); + const defaulted = Object.create(null); + const __ = opts.__ || mixin.format; + const flags = { + aliases: Object.create(null), + arrays: Object.create(null), + bools: Object.create(null), + strings: Object.create(null), + numbers: Object.create(null), + counts: Object.create(null), + normalize: Object.create(null), + configs: Object.create(null), + nargs: Object.create(null), + coercions: Object.create(null), + keys: [] + }; + const negative = /^-([0-9]+(\.[0-9]+)?|\.[0-9]+)$/; + const negatedBoolean = new RegExp('^--' + configuration['negation-prefix'] + '(.+)'); + [].concat(opts.array || []).filter(Boolean).forEach(function (opt) { + const key = typeof opt === 'object' ? 
opt.key : opt; + const assignment = Object.keys(opt).map(function (key) { + const arrayFlagKeys = { + boolean: 'bools', + string: 'strings', + number: 'numbers' + }; + return arrayFlagKeys[key]; + }).filter(Boolean).pop(); + if (assignment) { + flags[assignment][key] = true; + } + flags.arrays[key] = true; + flags.keys.push(key); + }); + [].concat(opts.boolean || []).filter(Boolean).forEach(function (key) { + flags.bools[key] = true; + flags.keys.push(key); + }); + [].concat(opts.string || []).filter(Boolean).forEach(function (key) { + flags.strings[key] = true; + flags.keys.push(key); + }); + [].concat(opts.number || []).filter(Boolean).forEach(function (key) { + flags.numbers[key] = true; + flags.keys.push(key); + }); + [].concat(opts.count || []).filter(Boolean).forEach(function (key) { + flags.counts[key] = true; + flags.keys.push(key); + }); + [].concat(opts.normalize || []).filter(Boolean).forEach(function (key) { + flags.normalize[key] = true; + flags.keys.push(key); + }); + if (typeof opts.narg === 'object') { + Object.entries(opts.narg).forEach(([key, value]) => { + if (typeof value === 'number') { + flags.nargs[key] = value; + flags.keys.push(key); + } + }); + } + if (typeof opts.coerce === 'object') { + Object.entries(opts.coerce).forEach(([key, value]) => { + if (typeof value === 'function') { + flags.coercions[key] = value; + flags.keys.push(key); + } + }); + } + if (typeof opts.config !== 'undefined') { + if (Array.isArray(opts.config) || typeof opts.config === 'string') { + [].concat(opts.config).filter(Boolean).forEach(function (key) { + flags.configs[key] = true; + }); + } + else if (typeof opts.config === 'object') { + Object.entries(opts.config).forEach(([key, value]) => { + if (typeof value === 'boolean' || typeof value === 'function') { + flags.configs[key] = value; + } + }); + } + } + extendAliases(opts.key, aliases, opts.default, flags.arrays); + Object.keys(defaults).forEach(function (key) { + (flags.aliases[key] || []).forEach(function (alias) { + defaults[alias] = defaults[key]; + }); + }); + let error = null; + checkConfiguration(); + let notFlags = []; + const argv = Object.assign(Object.create(null), { _: [] }); + const argvReturn = {}; + for (let i = 0; i < args.length; i++) { + const arg = args[i]; + const truncatedArg = arg.replace(/^-{3,}/, '---'); + let broken; + let key; + let letters; + let m; + let next; + let value; + if (arg !== '--' && isUnknownOptionAsArg(arg)) { + pushPositional(arg); + } + else if (truncatedArg.match(/---+(=|$)/)) { + pushPositional(arg); + continue; + } + else if (arg.match(/^--.+=/) || (!configuration['short-option-groups'] && arg.match(/^-.+=/))) { + m = arg.match(/^--?([^=]+)=([\s\S]*)$/); + if (m !== null && Array.isArray(m) && m.length >= 3) { + if (checkAllAliases(m[1], flags.arrays)) { + i = eatArray(i, m[1], args, m[2]); + } + else if (checkAllAliases(m[1], flags.nargs) !== false) { + i = eatNargs(i, m[1], args, m[2]); + } + else { + setArg(m[1], m[2]); + } + } + } + else if (arg.match(negatedBoolean) && configuration['boolean-negation']) { + m = arg.match(negatedBoolean); + if (m !== null && Array.isArray(m) && m.length >= 2) { + key = m[1]; + setArg(key, checkAllAliases(key, flags.arrays) ? 
[false] : false); + } + } + else if (arg.match(/^--.+/) || (!configuration['short-option-groups'] && arg.match(/^-[^-]+/))) { + m = arg.match(/^--?(.+)/); + if (m !== null && Array.isArray(m) && m.length >= 2) { + key = m[1]; + if (checkAllAliases(key, flags.arrays)) { + i = eatArray(i, key, args); + } + else if (checkAllAliases(key, flags.nargs) !== false) { + i = eatNargs(i, key, args); + } + else { + next = args[i + 1]; + if (next !== undefined && (!next.match(/^-/) || + next.match(negative)) && + !checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts)) { + setArg(key, next); + i++; + } + else if (/^(true|false)$/.test(next)) { + setArg(key, next); + i++; + } + else { + setArg(key, defaultValue(key)); + } + } + } + } + else if (arg.match(/^-.\..+=/)) { + m = arg.match(/^-([^=]+)=([\s\S]*)$/); + if (m !== null && Array.isArray(m) && m.length >= 3) { + setArg(m[1], m[2]); + } + } + else if (arg.match(/^-.\..+/) && !arg.match(negative)) { + next = args[i + 1]; + m = arg.match(/^-(.\..+)/); + if (m !== null && Array.isArray(m) && m.length >= 2) { + key = m[1]; + if (next !== undefined && !next.match(/^-/) && + !checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts)) { + setArg(key, next); + i++; + } + else { + setArg(key, defaultValue(key)); + } + } + } + else if (arg.match(/^-[^-]+/) && !arg.match(negative)) { + letters = arg.slice(1, -1).split(''); + broken = false; + for (let j = 0; j < letters.length; j++) { + next = arg.slice(j + 2); + if (letters[j + 1] && letters[j + 1] === '=') { + value = arg.slice(j + 3); + key = letters[j]; + if (checkAllAliases(key, flags.arrays)) { + i = eatArray(i, key, args, value); + } + else if (checkAllAliases(key, flags.nargs) !== false) { + i = eatNargs(i, key, args, value); + } + else { + setArg(key, value); + } + broken = true; + break; + } + if (next === '-') { + setArg(letters[j], next); + continue; + } + if (/[A-Za-z]/.test(letters[j]) && + /^-?\d+(\.\d*)?(e-?\d+)?$/.test(next) && + checkAllAliases(next, flags.bools) === false) { + setArg(letters[j], next); + broken = true; + break; + } + if (letters[j + 1] && letters[j + 1].match(/\W/)) { + setArg(letters[j], next); + broken = true; + break; + } + else { + setArg(letters[j], defaultValue(letters[j])); + } + } + key = arg.slice(-1)[0]; + if (!broken && key !== '-') { + if (checkAllAliases(key, flags.arrays)) { + i = eatArray(i, key, args); + } + else if (checkAllAliases(key, flags.nargs) !== false) { + i = eatNargs(i, key, args); + } + else { + next = args[i + 1]; + if (next !== undefined && (!/^(-|--)[^-]/.test(next) || + next.match(negative)) && + !checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts)) { + setArg(key, next); + i++; + } + else if (/^(true|false)$/.test(next)) { + setArg(key, next); + i++; + } + else { + setArg(key, defaultValue(key)); + } + } + } + } + else if (arg.match(/^-[0-9]$/) && + arg.match(negative) && + checkAllAliases(arg.slice(1), flags.bools)) { + key = arg.slice(1); + setArg(key, defaultValue(key)); + } + else if (arg === '--') { + notFlags = args.slice(i + 1); + break; + } + else if (configuration['halt-at-non-option']) { + notFlags = args.slice(i); + break; + } + else { + pushPositional(arg); + } + } + applyEnvVars(argv, true); + applyEnvVars(argv, false); + setConfig(argv); + setConfigObjects(); + applyDefaultsAndAliases(argv, flags.aliases, defaults, true); + applyCoercions(argv); + if (configuration['set-placeholder-key']) + setPlaceholderKeys(argv); + Object.keys(flags.counts).forEach(function (key) { + 
if (!hasKey(argv, key.split('.'))) + setArg(key, 0); + }); + if (notFlagsOption && notFlags.length) + argv[notFlagsArgv] = []; + notFlags.forEach(function (key) { + argv[notFlagsArgv].push(key); + }); + if (configuration['camel-case-expansion'] && configuration['strip-dashed']) { + Object.keys(argv).filter(key => key !== '--' && key.includes('-')).forEach(key => { + delete argv[key]; + }); + } + if (configuration['strip-aliased']) { + [].concat(...Object.keys(aliases).map(k => aliases[k])).forEach(alias => { + if (configuration['camel-case-expansion'] && alias.includes('-')) { + delete argv[alias.split('.').map(prop => camelCase(prop)).join('.')]; + } + delete argv[alias]; + }); + } + function pushPositional(arg) { + const maybeCoercedNumber = maybeCoerceNumber('_', arg); + if (typeof maybeCoercedNumber === 'string' || typeof maybeCoercedNumber === 'number') { + argv._.push(maybeCoercedNumber); + } + } + function eatNargs(i, key, args, argAfterEqualSign) { + let ii; + let toEat = checkAllAliases(key, flags.nargs); + toEat = typeof toEat !== 'number' || isNaN(toEat) ? 1 : toEat; + if (toEat === 0) { + if (!isUndefined(argAfterEqualSign)) { + error = Error(__('Argument unexpected for: %s', key)); + } + setArg(key, defaultValue(key)); + return i; + } + let available = isUndefined(argAfterEqualSign) ? 0 : 1; + if (configuration['nargs-eats-options']) { + if (args.length - (i + 1) + available < toEat) { + error = Error(__('Not enough arguments following: %s', key)); + } + available = toEat; + } + else { + for (ii = i + 1; ii < args.length; ii++) { + if (!args[ii].match(/^-[^0-9]/) || args[ii].match(negative) || isUnknownOptionAsArg(args[ii])) + available++; + else + break; + } + if (available < toEat) + error = Error(__('Not enough arguments following: %s', key)); + } + let consumed = Math.min(available, toEat); + if (!isUndefined(argAfterEqualSign) && consumed > 0) { + setArg(key, argAfterEqualSign); + consumed--; + } + for (ii = i + 1; ii < (consumed + i + 1); ii++) { + setArg(key, args[ii]); + } + return (i + consumed); + } + function eatArray(i, key, args, argAfterEqualSign) { + let argsToSet = []; + let next = argAfterEqualSign || args[i + 1]; + const nargsCount = checkAllAliases(key, flags.nargs); + if (checkAllAliases(key, flags.bools) && !(/^(true|false)$/.test(next))) { + argsToSet.push(true); + } + else if (isUndefined(next) || + (isUndefined(argAfterEqualSign) && /^-/.test(next) && !negative.test(next) && !isUnknownOptionAsArg(next))) { + if (defaults[key] !== undefined) { + const defVal = defaults[key]; + argsToSet = Array.isArray(defVal) ? 
defVal : [defVal]; + } + } + else { + if (!isUndefined(argAfterEqualSign)) { + argsToSet.push(processValue(key, argAfterEqualSign)); + } + for (let ii = i + 1; ii < args.length; ii++) { + if ((!configuration['greedy-arrays'] && argsToSet.length > 0) || + (nargsCount && typeof nargsCount === 'number' && argsToSet.length >= nargsCount)) + break; + next = args[ii]; + if (/^-/.test(next) && !negative.test(next) && !isUnknownOptionAsArg(next)) + break; + i = ii; + argsToSet.push(processValue(key, next)); + } + } + if (typeof nargsCount === 'number' && ((nargsCount && argsToSet.length < nargsCount) || + (isNaN(nargsCount) && argsToSet.length === 0))) { + error = Error(__('Not enough arguments following: %s', key)); + } + setArg(key, argsToSet); + return i; + } + function setArg(key, val) { + if (/-/.test(key) && configuration['camel-case-expansion']) { + const alias = key.split('.').map(function (prop) { + return camelCase(prop); + }).join('.'); + addNewAlias(key, alias); + } + const value = processValue(key, val); + const splitKey = key.split('.'); + setKey(argv, splitKey, value); + if (flags.aliases[key]) { + flags.aliases[key].forEach(function (x) { + const keyProperties = x.split('.'); + setKey(argv, keyProperties, value); + }); + } + if (splitKey.length > 1 && configuration['dot-notation']) { + (flags.aliases[splitKey[0]] || []).forEach(function (x) { + let keyProperties = x.split('.'); + const a = [].concat(splitKey); + a.shift(); + keyProperties = keyProperties.concat(a); + if (!(flags.aliases[key] || []).includes(keyProperties.join('.'))) { + setKey(argv, keyProperties, value); + } + }); + } + if (checkAllAliases(key, flags.normalize) && !checkAllAliases(key, flags.arrays)) { + const keys = [key].concat(flags.aliases[key] || []); + keys.forEach(function (key) { + Object.defineProperty(argvReturn, key, { + enumerable: true, + get() { + return val; + }, + set(value) { + val = typeof value === 'string' ? mixin.normalize(value) : value; + } + }); + }); + } + } + function addNewAlias(key, alias) { + if (!(flags.aliases[key] && flags.aliases[key].length)) { + flags.aliases[key] = [alias]; + newAliases[alias] = true; + } + if (!(flags.aliases[alias] && flags.aliases[alias].length)) { + addNewAlias(alias, key); + } + } + function processValue(key, val) { + if (typeof val === 'string' && + (val[0] === "'" || val[0] === '"') && + val[val.length - 1] === val[0]) { + val = val.substring(1, val.length - 1); + } + if (checkAllAliases(key, flags.bools) || checkAllAliases(key, flags.counts)) { + if (typeof val === 'string') + val = val === 'true'; + } + let value = Array.isArray(val) + ? 
val.map(function (v) { return maybeCoerceNumber(key, v); }) + : maybeCoerceNumber(key, val); + if (checkAllAliases(key, flags.counts) && (isUndefined(value) || typeof value === 'boolean')) { + value = increment(); + } + if (checkAllAliases(key, flags.normalize) && checkAllAliases(key, flags.arrays)) { + if (Array.isArray(val)) + value = val.map((val) => { return mixin.normalize(val); }); + else + value = mixin.normalize(val); + } + return value; + } + function maybeCoerceNumber(key, value) { + if (!configuration['parse-positional-numbers'] && key === '_') + return value; + if (!checkAllAliases(key, flags.strings) && !checkAllAliases(key, flags.bools) && !Array.isArray(value)) { + const shouldCoerceNumber = looksLikeNumber(value) && configuration['parse-numbers'] && (Number.isSafeInteger(Math.floor(parseFloat(`${value}`)))); + if (shouldCoerceNumber || (!isUndefined(value) && checkAllAliases(key, flags.numbers))) { + value = Number(value); + } + } + return value; + } + function setConfig(argv) { + const configLookup = Object.create(null); + applyDefaultsAndAliases(configLookup, flags.aliases, defaults); + Object.keys(flags.configs).forEach(function (configKey) { + const configPath = argv[configKey] || configLookup[configKey]; + if (configPath) { + try { + let config = null; + const resolvedConfigPath = mixin.resolve(mixin.cwd(), configPath); + const resolveConfig = flags.configs[configKey]; + if (typeof resolveConfig === 'function') { + try { + config = resolveConfig(resolvedConfigPath); + } + catch (e) { + config = e; + } + if (config instanceof Error) { + error = config; + return; + } + } + else { + config = mixin.require(resolvedConfigPath); + } + setConfigObject(config); + } + catch (ex) { + if (ex.name === 'PermissionDenied') + error = ex; + else if (argv[configKey]) + error = Error(__('Invalid JSON config file: %s', configPath)); + } + } + }); + } + function setConfigObject(config, prev) { + Object.keys(config).forEach(function (key) { + const value = config[key]; + const fullKey = prev ? prev + '.' + key : key; + if (typeof value === 'object' && value !== null && !Array.isArray(value) && configuration['dot-notation']) { + setConfigObject(value, fullKey); + } + else { + if (!hasKey(argv, fullKey.split('.')) || (checkAllAliases(fullKey, flags.arrays) && configuration['combine-arrays'])) { + setArg(fullKey, value); + } + } + }); + } + function setConfigObjects() { + if (typeof configObjects !== 'undefined') { + configObjects.forEach(function (configObject) { + setConfigObject(configObject); + }); + } + } + function applyEnvVars(argv, configOnly) { + if (typeof envPrefix === 'undefined') + return; + const prefix = typeof envPrefix === 'string' ? 
envPrefix : ''; + const env = mixin.env(); + Object.keys(env).forEach(function (envVar) { + if (prefix === '' || envVar.lastIndexOf(prefix, 0) === 0) { + const keys = envVar.split('__').map(function (key, i) { + if (i === 0) { + key = key.substring(prefix.length); + } + return camelCase(key); + }); + if (((configOnly && flags.configs[keys.join('.')]) || !configOnly) && !hasKey(argv, keys)) { + setArg(keys.join('.'), env[envVar]); + } + } + }); + } + function applyCoercions(argv) { + let coerce; + const applied = new Set(); + Object.keys(argv).forEach(function (key) { + if (!applied.has(key)) { + coerce = checkAllAliases(key, flags.coercions); + if (typeof coerce === 'function') { + try { + const value = maybeCoerceNumber(key, coerce(argv[key])); + ([].concat(flags.aliases[key] || [], key)).forEach(ali => { + applied.add(ali); + argv[ali] = value; + }); + } + catch (err) { + error = err; + } + } + } + }); + } + function setPlaceholderKeys(argv) { + flags.keys.forEach((key) => { + if (~key.indexOf('.')) + return; + if (typeof argv[key] === 'undefined') + argv[key] = undefined; + }); + return argv; + } + function applyDefaultsAndAliases(obj, aliases, defaults, canLog = false) { + Object.keys(defaults).forEach(function (key) { + if (!hasKey(obj, key.split('.'))) { + setKey(obj, key.split('.'), defaults[key]); + if (canLog) + defaulted[key] = true; + (aliases[key] || []).forEach(function (x) { + if (hasKey(obj, x.split('.'))) + return; + setKey(obj, x.split('.'), defaults[key]); + }); + } + }); + } + function hasKey(obj, keys) { + let o = obj; + if (!configuration['dot-notation']) + keys = [keys.join('.')]; + keys.slice(0, -1).forEach(function (key) { + o = (o[key] || {}); + }); + const key = keys[keys.length - 1]; + if (typeof o !== 'object') + return false; + else + return key in o; + } + function setKey(obj, keys, value) { + let o = obj; + if (!configuration['dot-notation']) + keys = [keys.join('.')]; + keys.slice(0, -1).forEach(function (key) { + key = sanitizeKey(key); + if (typeof o === 'object' && o[key] === undefined) { + o[key] = {}; + } + if (typeof o[key] !== 'object' || Array.isArray(o[key])) { + if (Array.isArray(o[key])) { + o[key].push({}); + } + else { + o[key] = [o[key], {}]; + } + o = o[key][o[key].length - 1]; + } + else { + o = o[key]; + } + }); + const key = sanitizeKey(keys[keys.length - 1]); + const isTypeArray = checkAllAliases(keys.join('.'), flags.arrays); + const isValueArray = Array.isArray(value); + let duplicate = configuration['duplicate-arguments-array']; + if (!duplicate && checkAllAliases(key, flags.nargs)) { + duplicate = true; + if ((!isUndefined(o[key]) && flags.nargs[key] === 1) || (Array.isArray(o[key]) && o[key].length === flags.nargs[key])) { + o[key] = undefined; + } + } + if (value === increment()) { + o[key] = increment(o[key]); + } + else if (Array.isArray(o[key])) { + if (duplicate && isTypeArray && isValueArray) { + o[key] = configuration['flatten-duplicate-arrays'] ? o[key].concat(value) : (Array.isArray(o[key][0]) ? o[key] : [o[key]]).concat([value]); + } + else if (!duplicate && Boolean(isTypeArray) === Boolean(isValueArray)) { + o[key] = value; + } + else { + o[key] = o[key].concat([value]); + } + } + else if (o[key] === undefined && isTypeArray) { + o[key] = isValueArray ? 
value : [value]; + } + else if (duplicate && !(o[key] === undefined || + checkAllAliases(key, flags.counts) || + checkAllAliases(key, flags.bools))) { + o[key] = [o[key], value]; + } + else { + o[key] = value; + } + } + function extendAliases(...args) { + args.forEach(function (obj) { + Object.keys(obj || {}).forEach(function (key) { + if (flags.aliases[key]) + return; + flags.aliases[key] = [].concat(aliases[key] || []); + flags.aliases[key].concat(key).forEach(function (x) { + if (/-/.test(x) && configuration['camel-case-expansion']) { + const c = camelCase(x); + if (c !== key && flags.aliases[key].indexOf(c) === -1) { + flags.aliases[key].push(c); + newAliases[c] = true; + } + } + }); + flags.aliases[key].concat(key).forEach(function (x) { + if (x.length > 1 && /[A-Z]/.test(x) && configuration['camel-case-expansion']) { + const c = decamelize(x, '-'); + if (c !== key && flags.aliases[key].indexOf(c) === -1) { + flags.aliases[key].push(c); + newAliases[c] = true; + } + } + }); + flags.aliases[key].forEach(function (x) { + flags.aliases[x] = [key].concat(flags.aliases[key].filter(function (y) { + return x !== y; + })); + }); + }); + }); + } + function checkAllAliases(key, flag) { + const toCheck = [].concat(flags.aliases[key] || [], key); + const keys = Object.keys(flag); + const setAlias = toCheck.find(key => keys.includes(key)); + return setAlias ? flag[setAlias] : false; + } + function hasAnyFlag(key) { + const flagsKeys = Object.keys(flags); + const toCheck = [].concat(flagsKeys.map(k => flags[k])); + return toCheck.some(function (flag) { + return Array.isArray(flag) ? flag.includes(key) : flag[key]; + }); + } + function hasFlagsMatching(arg, ...patterns) { + const toCheck = [].concat(...patterns); + return toCheck.some(function (pattern) { + const match = arg.match(pattern); + return match && hasAnyFlag(match[1]); + }); + } + function hasAllShortFlags(arg) { + if (arg.match(negative) || !arg.match(/^-[^-]+/)) { + return false; + } + let hasAllFlags = true; + let next; + const letters = arg.slice(1).split(''); + for (let j = 0; j < letters.length; j++) { + next = arg.slice(j + 2); + if (!hasAnyFlag(letters[j])) { + hasAllFlags = false; + break; + } + if ((letters[j + 1] && letters[j + 1] === '=') || + next === '-' || + (/[A-Za-z]/.test(letters[j]) && /^-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) || + (letters[j + 1] && letters[j + 1].match(/\W/))) { + break; + } + } + return hasAllFlags; + } + function isUnknownOptionAsArg(arg) { + return configuration['unknown-options-as-args'] && isUnknownOption(arg); + } + function isUnknownOption(arg) { + arg = arg.replace(/^-{3,}/, '--'); + if (arg.match(negative)) { + return false; + } + if (hasAllShortFlags(arg)) { + return false; + } + const flagWithEquals = /^-+([^=]+?)=[\s\S]*$/; + const normalFlag = /^-+([^=]+?)$/; + const flagEndingInHyphen = /^-+([^=]+?)-$/; + const flagEndingInDigits = /^-+([^=]+?\d+)$/; + const flagEndingInNonWordCharacters = /^-+([^=]+?)\W+.*$/; + return !hasFlagsMatching(arg, flagWithEquals, negatedBoolean, normalFlag, flagEndingInHyphen, flagEndingInDigits, flagEndingInNonWordCharacters); + } + function defaultValue(key) { + if (!checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts) && + `${key}` in defaults) { + return defaults[key]; + } + else { + return defaultForType(guessType(key)); + } + } + function defaultForType(type) { + const def = { + [DefaultValuesForTypeKey.BOOLEAN]: true, + [DefaultValuesForTypeKey.STRING]: '', + [DefaultValuesForTypeKey.NUMBER]: undefined, + 
[DefaultValuesForTypeKey.ARRAY]: [] + }; + return def[type]; + } + function guessType(key) { + let type = DefaultValuesForTypeKey.BOOLEAN; + if (checkAllAliases(key, flags.strings)) + type = DefaultValuesForTypeKey.STRING; + else if (checkAllAliases(key, flags.numbers)) + type = DefaultValuesForTypeKey.NUMBER; + else if (checkAllAliases(key, flags.bools)) + type = DefaultValuesForTypeKey.BOOLEAN; + else if (checkAllAliases(key, flags.arrays)) + type = DefaultValuesForTypeKey.ARRAY; + return type; + } + function isUndefined(num) { + return num === undefined; + } + function checkConfiguration() { + Object.keys(flags.counts).find(key => { + if (checkAllAliases(key, flags.arrays)) { + error = Error(__('Invalid configuration: %s, opts.count excludes opts.array.', key)); + return true; + } + else if (checkAllAliases(key, flags.nargs)) { + error = Error(__('Invalid configuration: %s, opts.count excludes opts.narg.', key)); + return true; + } + return false; + }); + } + return { + aliases: Object.assign({}, flags.aliases), + argv: Object.assign(argvReturn, argv), + configuration: configuration, + defaulted: Object.assign({}, defaulted), + error: error, + newAliases: Object.assign({}, newAliases) + }; + } +} +function combineAliases(aliases) { + const aliasArrays = []; + const combined = Object.create(null); + let change = true; + Object.keys(aliases).forEach(function (key) { + aliasArrays.push([].concat(aliases[key], key)); + }); + while (change) { + change = false; + for (let i = 0; i < aliasArrays.length; i++) { + for (let ii = i + 1; ii < aliasArrays.length; ii++) { + const intersect = aliasArrays[i].filter(function (v) { + return aliasArrays[ii].indexOf(v) !== -1; + }); + if (intersect.length) { + aliasArrays[i] = aliasArrays[i].concat(aliasArrays[ii]); + aliasArrays.splice(ii, 1); + change = true; + break; + } + } + } + } + aliasArrays.forEach(function (aliasArray) { + aliasArray = aliasArray.filter(function (v, i, self) { + return self.indexOf(v) === i; + }); + const lastAlias = aliasArray.pop(); + if (lastAlias !== undefined && typeof lastAlias === 'string') { + combined[lastAlias] = aliasArray; + } + }); + return combined; +} +function increment(orig) { + return orig !== undefined ? orig + 1 : 1; +} +function sanitizeKey(key) { + if (key === '__proto__') + return '___proto___'; + return key; +} + +const minNodeVersion = (process && process.env && process.env.YARGS_MIN_NODE_VERSION) + ? Number(process.env.YARGS_MIN_NODE_VERSION) + : 10; +if (process && process.version) { + const major = Number(process.version.match(/v([^.]+)/)[1]); + if (major < minNodeVersion) { + throw Error(`yargs parser supports a minimum Node.js version of ${minNodeVersion}. Read our version support policy: https://github.com/yargs/yargs-parser#supported-nodejs-versions`); + } +} +const env = process ? 
process.env : {}; +const parser = new YargsParser({ + cwd: process.cwd, + env: () => { + return env; + }, + format: util.format, + normalize: path.normalize, + resolve: path.resolve, + require: (path) => { + if (typeof require !== 'undefined') { + return require(path); + } + else if (path.match(/\.json$/)) { + return fs.readFileSync(path, 'utf8'); + } + else { + throw Error('only .json config files are supported in ESM'); + } + } +}); +const yargsParser = function Parser(args, opts) { + const result = parser.parse(args.slice(), opts); + return result.argv; +}; +yargsParser.detailed = function (args, opts) { + return parser.parse(args.slice(), opts); +}; +yargsParser.camelCase = camelCase; +yargsParser.decamelize = decamelize; +yargsParser.looksLikeNumber = looksLikeNumber; + +module.exports = yargsParser; diff --git a/mybulma/node_modules/yargs-parser/build/lib/index.js b/mybulma/node_modules/yargs-parser/build/lib/index.js new file mode 100644 index 0000000..cc50788 --- /dev/null +++ b/mybulma/node_modules/yargs-parser/build/lib/index.js @@ -0,0 +1,59 @@ +/** + * @fileoverview Main entrypoint for libraries using yargs-parser in Node.js + * CJS and ESM environments. + * + * @license + * Copyright (c) 2016, Contributors + * SPDX-License-Identifier: ISC + */ +import { format } from 'util'; +import { readFileSync } from 'fs'; +import { normalize, resolve } from 'path'; +import { camelCase, decamelize, looksLikeNumber } from './string-utils.js'; +import { YargsParser } from './yargs-parser.js'; +// See https://github.com/yargs/yargs-parser#supported-nodejs-versions for our +// version support policy. The YARGS_MIN_NODE_VERSION is used for testing only. +const minNodeVersion = (process && process.env && process.env.YARGS_MIN_NODE_VERSION) + ? Number(process.env.YARGS_MIN_NODE_VERSION) + : 10; +if (process && process.version) { + const major = Number(process.version.match(/v([^.]+)/)[1]); + if (major < minNodeVersion) { + throw Error(`yargs parser supports a minimum Node.js version of ${minNodeVersion}. Read our version support policy: https://github.com/yargs/yargs-parser#supported-nodejs-versions`); + } +} +// Creates a yargs-parser instance using Node.js standard libraries: +const env = process ? 
process.env : {}; +const parser = new YargsParser({ + cwd: process.cwd, + env: () => { + return env; + }, + format, + normalize, + resolve, + // TODO: figure out a way to combine ESM and CJS coverage, such that + // we can exercise all the lines below: + require: (path) => { + if (typeof require !== 'undefined') { + return require(path); + } + else if (path.match(/\.json$/)) { + return readFileSync(path, 'utf8'); + } + else { + throw Error('only .json config files are supported in ESM'); + } + } +}); +const yargsParser = function Parser(args, opts) { + const result = parser.parse(args.slice(), opts); + return result.argv; +}; +yargsParser.detailed = function (args, opts) { + return parser.parse(args.slice(), opts); +}; +yargsParser.camelCase = camelCase; +yargsParser.decamelize = decamelize; +yargsParser.looksLikeNumber = looksLikeNumber; +export default yargsParser; diff --git a/mybulma/node_modules/yargs-parser/build/lib/string-utils.js b/mybulma/node_modules/yargs-parser/build/lib/string-utils.js new file mode 100644 index 0000000..4e8bd99 --- /dev/null +++ b/mybulma/node_modules/yargs-parser/build/lib/string-utils.js @@ -0,0 +1,65 @@ +/** + * @license + * Copyright (c) 2016, Contributors + * SPDX-License-Identifier: ISC + */ +export function camelCase(str) { + // Handle the case where an argument is provided as camel case, e.g., fooBar. + // by ensuring that the string isn't already mixed case: + const isCamelCase = str !== str.toLowerCase() && str !== str.toUpperCase(); + if (!isCamelCase) { + str = str.toLowerCase(); + } + if (str.indexOf('-') === -1 && str.indexOf('_') === -1) { + return str; + } + else { + let camelcase = ''; + let nextChrUpper = false; + const leadingHyphens = str.match(/^-+/); + for (let i = leadingHyphens ? leadingHyphens[0].length : 0; i < str.length; i++) { + let chr = str.charAt(i); + if (nextChrUpper) { + nextChrUpper = false; + chr = chr.toUpperCase(); + } + if (i !== 0 && (chr === '-' || chr === '_')) { + nextChrUpper = true; + } + else if (chr !== '-' && chr !== '_') { + camelcase += chr; + } + } + return camelcase; + } +} +export function decamelize(str, joinString) { + const lowercase = str.toLowerCase(); + joinString = joinString || '-'; + let notCamelcase = ''; + for (let i = 0; i < str.length; i++) { + const chrLower = lowercase.charAt(i); + const chrString = str.charAt(i); + if (chrLower !== chrString && i > 0) { + notCamelcase += `${joinString}${lowercase.charAt(i)}`; + } + else { + notCamelcase += chrString; + } + } + return notCamelcase; +} +export function looksLikeNumber(x) { + if (x === null || x === undefined) + return false; + // if loaded from config, may already be a number. + if (typeof x === 'number') + return true; + // hexadecimal. + if (/^0x[0-9a-f]+$/i.test(x)) + return true; + // don't treat 0123 as a number; as it drops the leading '0'. + if (/^0[^.]/.test(x)) + return false; + return /^[-]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x); +} diff --git a/mybulma/node_modules/yargs-parser/build/lib/tokenize-arg-string.js b/mybulma/node_modules/yargs-parser/build/lib/tokenize-arg-string.js new file mode 100644 index 0000000..5e732ef --- /dev/null +++ b/mybulma/node_modules/yargs-parser/build/lib/tokenize-arg-string.js @@ -0,0 +1,40 @@ +/** + * @license + * Copyright (c) 2016, Contributors + * SPDX-License-Identifier: ISC + */ +// take an un-split argv string and tokenize it. +export function tokenizeArgString(argString) { + if (Array.isArray(argString)) { + return argString.map(e => typeof e !== 'string' ? 
e + '' : e); + } + argString = argString.trim(); + let i = 0; + let prevC = null; + let c = null; + let opening = null; + const args = []; + for (let ii = 0; ii < argString.length; ii++) { + prevC = c; + c = argString.charAt(ii); + // split on spaces unless we're in quotes. + if (c === ' ' && !opening) { + if (!(prevC === ' ')) { + i++; + } + continue; + } + // don't split the string if we're in matching + // opening or closing single and double quotes. + if (c === opening) { + opening = null; + } + else if ((c === "'" || c === '"') && !opening) { + opening = c; + } + if (!args[i]) + args[i] = ''; + args[i] += c; + } + return args; +} diff --git a/mybulma/node_modules/yargs-parser/build/lib/yargs-parser-types.js b/mybulma/node_modules/yargs-parser/build/lib/yargs-parser-types.js new file mode 100644 index 0000000..63b7c31 --- /dev/null +++ b/mybulma/node_modules/yargs-parser/build/lib/yargs-parser-types.js @@ -0,0 +1,12 @@ +/** + * @license + * Copyright (c) 2016, Contributors + * SPDX-License-Identifier: ISC + */ +export var DefaultValuesForTypeKey; +(function (DefaultValuesForTypeKey) { + DefaultValuesForTypeKey["BOOLEAN"] = "boolean"; + DefaultValuesForTypeKey["STRING"] = "string"; + DefaultValuesForTypeKey["NUMBER"] = "number"; + DefaultValuesForTypeKey["ARRAY"] = "array"; +})(DefaultValuesForTypeKey || (DefaultValuesForTypeKey = {})); diff --git a/mybulma/node_modules/yargs-parser/build/lib/yargs-parser.js b/mybulma/node_modules/yargs-parser/build/lib/yargs-parser.js new file mode 100644 index 0000000..828a440 --- /dev/null +++ b/mybulma/node_modules/yargs-parser/build/lib/yargs-parser.js @@ -0,0 +1,1037 @@ +/** + * @license + * Copyright (c) 2016, Contributors + * SPDX-License-Identifier: ISC + */ +import { tokenizeArgString } from './tokenize-arg-string.js'; +import { DefaultValuesForTypeKey } from './yargs-parser-types.js'; +import { camelCase, decamelize, looksLikeNumber } from './string-utils.js'; +let mixin; +export class YargsParser { + constructor(_mixin) { + mixin = _mixin; + } + parse(argsInput, options) { + const opts = Object.assign({ + alias: undefined, + array: undefined, + boolean: undefined, + config: undefined, + configObjects: undefined, + configuration: undefined, + coerce: undefined, + count: undefined, + default: undefined, + envPrefix: undefined, + narg: undefined, + normalize: undefined, + string: undefined, + number: undefined, + __: undefined, + key: undefined + }, options); + // allow a string argument to be passed in rather + // than an argv array. + const args = tokenizeArgString(argsInput); + // aliases might have transitive relationships, normalize this. 
+ const aliases = combineAliases(Object.assign(Object.create(null), opts.alias)); + const configuration = Object.assign({ + 'boolean-negation': true, + 'camel-case-expansion': true, + 'combine-arrays': false, + 'dot-notation': true, + 'duplicate-arguments-array': true, + 'flatten-duplicate-arrays': true, + 'greedy-arrays': true, + 'halt-at-non-option': false, + 'nargs-eats-options': false, + 'negation-prefix': 'no-', + 'parse-numbers': true, + 'parse-positional-numbers': true, + 'populate--': false, + 'set-placeholder-key': false, + 'short-option-groups': true, + 'strip-aliased': false, + 'strip-dashed': false, + 'unknown-options-as-args': false + }, opts.configuration); + const defaults = Object.assign(Object.create(null), opts.default); + const configObjects = opts.configObjects || []; + const envPrefix = opts.envPrefix; + const notFlagsOption = configuration['populate--']; + const notFlagsArgv = notFlagsOption ? '--' : '_'; + const newAliases = Object.create(null); + const defaulted = Object.create(null); + // allow a i18n handler to be passed in, default to a fake one (util.format). + const __ = opts.__ || mixin.format; + const flags = { + aliases: Object.create(null), + arrays: Object.create(null), + bools: Object.create(null), + strings: Object.create(null), + numbers: Object.create(null), + counts: Object.create(null), + normalize: Object.create(null), + configs: Object.create(null), + nargs: Object.create(null), + coercions: Object.create(null), + keys: [] + }; + const negative = /^-([0-9]+(\.[0-9]+)?|\.[0-9]+)$/; + const negatedBoolean = new RegExp('^--' + configuration['negation-prefix'] + '(.+)'); + [].concat(opts.array || []).filter(Boolean).forEach(function (opt) { + const key = typeof opt === 'object' ? opt.key : opt; + // assign to flags[bools|strings|numbers] + const assignment = Object.keys(opt).map(function (key) { + const arrayFlagKeys = { + boolean: 'bools', + string: 'strings', + number: 'numbers' + }; + return arrayFlagKeys[key]; + }).filter(Boolean).pop(); + // assign key to be coerced + if (assignment) { + flags[assignment][key] = true; + } + flags.arrays[key] = true; + flags.keys.push(key); + }); + [].concat(opts.boolean || []).filter(Boolean).forEach(function (key) { + flags.bools[key] = true; + flags.keys.push(key); + }); + [].concat(opts.string || []).filter(Boolean).forEach(function (key) { + flags.strings[key] = true; + flags.keys.push(key); + }); + [].concat(opts.number || []).filter(Boolean).forEach(function (key) { + flags.numbers[key] = true; + flags.keys.push(key); + }); + [].concat(opts.count || []).filter(Boolean).forEach(function (key) { + flags.counts[key] = true; + flags.keys.push(key); + }); + [].concat(opts.normalize || []).filter(Boolean).forEach(function (key) { + flags.normalize[key] = true; + flags.keys.push(key); + }); + if (typeof opts.narg === 'object') { + Object.entries(opts.narg).forEach(([key, value]) => { + if (typeof value === 'number') { + flags.nargs[key] = value; + flags.keys.push(key); + } + }); + } + if (typeof opts.coerce === 'object') { + Object.entries(opts.coerce).forEach(([key, value]) => { + if (typeof value === 'function') { + flags.coercions[key] = value; + flags.keys.push(key); + } + }); + } + if (typeof opts.config !== 'undefined') { + if (Array.isArray(opts.config) || typeof opts.config === 'string') { + ; + [].concat(opts.config).filter(Boolean).forEach(function (key) { + flags.configs[key] = true; + }); + } + else if (typeof opts.config === 'object') { + Object.entries(opts.config).forEach(([key, value]) => { + if 
(typeof value === 'boolean' || typeof value === 'function') { + flags.configs[key] = value; + } + }); + } + } + // create a lookup table that takes into account all + // combinations of aliases: {f: ['foo'], foo: ['f']} + extendAliases(opts.key, aliases, opts.default, flags.arrays); + // apply default values to all aliases. + Object.keys(defaults).forEach(function (key) { + (flags.aliases[key] || []).forEach(function (alias) { + defaults[alias] = defaults[key]; + }); + }); + let error = null; + checkConfiguration(); + let notFlags = []; + const argv = Object.assign(Object.create(null), { _: [] }); + // TODO(bcoe): for the first pass at removing object prototype we didn't + // remove all prototypes from objects returned by this API, we might want + // to gradually move towards doing so. + const argvReturn = {}; + for (let i = 0; i < args.length; i++) { + const arg = args[i]; + const truncatedArg = arg.replace(/^-{3,}/, '---'); + let broken; + let key; + let letters; + let m; + let next; + let value; + // any unknown option (except for end-of-options, "--") + if (arg !== '--' && isUnknownOptionAsArg(arg)) { + pushPositional(arg); + // ---, ---=, ----, etc, + } + else if (truncatedArg.match(/---+(=|$)/)) { + // options without key name are invalid. + pushPositional(arg); + continue; + // -- separated by = + } + else if (arg.match(/^--.+=/) || (!configuration['short-option-groups'] && arg.match(/^-.+=/))) { + // Using [\s\S] instead of . because js doesn't support the + // 'dotall' regex modifier. See: + // http://stackoverflow.com/a/1068308/13216 + m = arg.match(/^--?([^=]+)=([\s\S]*)$/); + // arrays format = '--f=a b c' + if (m !== null && Array.isArray(m) && m.length >= 3) { + if (checkAllAliases(m[1], flags.arrays)) { + i = eatArray(i, m[1], args, m[2]); + } + else if (checkAllAliases(m[1], flags.nargs) !== false) { + // nargs format = '--f=monkey washing cat' + i = eatNargs(i, m[1], args, m[2]); + } + else { + setArg(m[1], m[2]); + } + } + } + else if (arg.match(negatedBoolean) && configuration['boolean-negation']) { + m = arg.match(negatedBoolean); + if (m !== null && Array.isArray(m) && m.length >= 2) { + key = m[1]; + setArg(key, checkAllAliases(key, flags.arrays) ? [false] : false); + } + // -- separated by space. + } + else if (arg.match(/^--.+/) || (!configuration['short-option-groups'] && arg.match(/^-[^-]+/))) { + m = arg.match(/^--?(.+)/); + if (m !== null && Array.isArray(m) && m.length >= 2) { + key = m[1]; + if (checkAllAliases(key, flags.arrays)) { + // array format = '--foo a b c' + i = eatArray(i, key, args); + } + else if (checkAllAliases(key, flags.nargs) !== false) { + // nargs format = '--foo a b c' + // should be truthy even if: flags.nargs[key] === 0 + i = eatNargs(i, key, args); + } + else { + next = args[i + 1]; + if (next !== undefined && (!next.match(/^-/) || + next.match(negative)) && + !checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts)) { + setArg(key, next); + i++; + } + else if (/^(true|false)$/.test(next)) { + setArg(key, next); + i++; + } + else { + setArg(key, defaultValue(key)); + } + } + } + // dot-notation flag separated by '='. + } + else if (arg.match(/^-.\..+=/)) { + m = arg.match(/^-([^=]+)=([\s\S]*)$/); + if (m !== null && Array.isArray(m) && m.length >= 3) { + setArg(m[1], m[2]); + } + // dot-notation flag separated by space. 
+ } + else if (arg.match(/^-.\..+/) && !arg.match(negative)) { + next = args[i + 1]; + m = arg.match(/^-(.\..+)/); + if (m !== null && Array.isArray(m) && m.length >= 2) { + key = m[1]; + if (next !== undefined && !next.match(/^-/) && + !checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts)) { + setArg(key, next); + i++; + } + else { + setArg(key, defaultValue(key)); + } + } + } + else if (arg.match(/^-[^-]+/) && !arg.match(negative)) { + letters = arg.slice(1, -1).split(''); + broken = false; + for (let j = 0; j < letters.length; j++) { + next = arg.slice(j + 2); + if (letters[j + 1] && letters[j + 1] === '=') { + value = arg.slice(j + 3); + key = letters[j]; + if (checkAllAliases(key, flags.arrays)) { + // array format = '-f=a b c' + i = eatArray(i, key, args, value); + } + else if (checkAllAliases(key, flags.nargs) !== false) { + // nargs format = '-f=monkey washing cat' + i = eatNargs(i, key, args, value); + } + else { + setArg(key, value); + } + broken = true; + break; + } + if (next === '-') { + setArg(letters[j], next); + continue; + } + // current letter is an alphabetic character and next value is a number + if (/[A-Za-z]/.test(letters[j]) && + /^-?\d+(\.\d*)?(e-?\d+)?$/.test(next) && + checkAllAliases(next, flags.bools) === false) { + setArg(letters[j], next); + broken = true; + break; + } + if (letters[j + 1] && letters[j + 1].match(/\W/)) { + setArg(letters[j], next); + broken = true; + break; + } + else { + setArg(letters[j], defaultValue(letters[j])); + } + } + key = arg.slice(-1)[0]; + if (!broken && key !== '-') { + if (checkAllAliases(key, flags.arrays)) { + // array format = '-f a b c' + i = eatArray(i, key, args); + } + else if (checkAllAliases(key, flags.nargs) !== false) { + // nargs format = '-f a b c' + // should be truthy even if: flags.nargs[key] === 0 + i = eatNargs(i, key, args); + } + else { + next = args[i + 1]; + if (next !== undefined && (!/^(-|--)[^-]/.test(next) || + next.match(negative)) && + !checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts)) { + setArg(key, next); + i++; + } + else if (/^(true|false)$/.test(next)) { + setArg(key, next); + i++; + } + else { + setArg(key, defaultValue(key)); + } + } + } + } + else if (arg.match(/^-[0-9]$/) && + arg.match(negative) && + checkAllAliases(arg.slice(1), flags.bools)) { + // single-digit boolean alias, e.g: xargs -0 + key = arg.slice(1); + setArg(key, defaultValue(key)); + } + else if (arg === '--') { + notFlags = args.slice(i + 1); + break; + } + else if (configuration['halt-at-non-option']) { + notFlags = args.slice(i); + break; + } + else { + pushPositional(arg); + } + } + // order of precedence: + // 1. command line arg + // 2. value from env var + // 3. value from config file + // 4. value from config objects + // 5. configured default value + applyEnvVars(argv, true); // special case: check env vars that point to config file + applyEnvVars(argv, false); + setConfig(argv); + setConfigObjects(); + applyDefaultsAndAliases(argv, flags.aliases, defaults, true); + applyCoercions(argv); + if (configuration['set-placeholder-key']) + setPlaceholderKeys(argv); + // for any counts either not in args or without an explicit default, set to 0 + Object.keys(flags.counts).forEach(function (key) { + if (!hasKey(argv, key.split('.'))) + setArg(key, 0); + }); + // '--' defaults to undefined. 
+ if (notFlagsOption && notFlags.length) + argv[notFlagsArgv] = []; + notFlags.forEach(function (key) { + argv[notFlagsArgv].push(key); + }); + if (configuration['camel-case-expansion'] && configuration['strip-dashed']) { + Object.keys(argv).filter(key => key !== '--' && key.includes('-')).forEach(key => { + delete argv[key]; + }); + } + if (configuration['strip-aliased']) { + ; + [].concat(...Object.keys(aliases).map(k => aliases[k])).forEach(alias => { + if (configuration['camel-case-expansion'] && alias.includes('-')) { + delete argv[alias.split('.').map(prop => camelCase(prop)).join('.')]; + } + delete argv[alias]; + }); + } + // Push argument into positional array, applying numeric coercion: + function pushPositional(arg) { + const maybeCoercedNumber = maybeCoerceNumber('_', arg); + if (typeof maybeCoercedNumber === 'string' || typeof maybeCoercedNumber === 'number') { + argv._.push(maybeCoercedNumber); + } + } + // how many arguments should we consume, based + // on the nargs option? + function eatNargs(i, key, args, argAfterEqualSign) { + let ii; + let toEat = checkAllAliases(key, flags.nargs); + // NaN has a special meaning for the array type, indicating that one or + // more values are expected. + toEat = typeof toEat !== 'number' || isNaN(toEat) ? 1 : toEat; + if (toEat === 0) { + if (!isUndefined(argAfterEqualSign)) { + error = Error(__('Argument unexpected for: %s', key)); + } + setArg(key, defaultValue(key)); + return i; + } + let available = isUndefined(argAfterEqualSign) ? 0 : 1; + if (configuration['nargs-eats-options']) { + // classic behavior, yargs eats positional and dash arguments. + if (args.length - (i + 1) + available < toEat) { + error = Error(__('Not enough arguments following: %s', key)); + } + available = toEat; + } + else { + // nargs will not consume flag arguments, e.g., -abc, --foo, + // and terminates when one is observed. + for (ii = i + 1; ii < args.length; ii++) { + if (!args[ii].match(/^-[^0-9]/) || args[ii].match(negative) || isUnknownOptionAsArg(args[ii])) + available++; + else + break; + } + if (available < toEat) + error = Error(__('Not enough arguments following: %s', key)); + } + let consumed = Math.min(available, toEat); + if (!isUndefined(argAfterEqualSign) && consumed > 0) { + setArg(key, argAfterEqualSign); + consumed--; + } + for (ii = i + 1; ii < (consumed + i + 1); ii++) { + setArg(key, args[ii]); + } + return (i + consumed); + } + // if an option is an array, eat all non-hyphenated arguments + // following it... YUM! + // e.g., --foo apple banana cat becomes ["apple", "banana", "cat"] + function eatArray(i, key, args, argAfterEqualSign) { + let argsToSet = []; + let next = argAfterEqualSign || args[i + 1]; + // If both array and nargs are configured, enforce the nargs count: + const nargsCount = checkAllAliases(key, flags.nargs); + if (checkAllAliases(key, flags.bools) && !(/^(true|false)$/.test(next))) { + argsToSet.push(true); + } + else if (isUndefined(next) || + (isUndefined(argAfterEqualSign) && /^-/.test(next) && !negative.test(next) && !isUnknownOptionAsArg(next))) { + // for keys without value ==> argsToSet remains an empty [] + // set user default value, if available + if (defaults[key] !== undefined) { + const defVal = defaults[key]; + argsToSet = Array.isArray(defVal) ? 
defVal : [defVal]; + } + } + else { + // value in --option=value is eaten as is + if (!isUndefined(argAfterEqualSign)) { + argsToSet.push(processValue(key, argAfterEqualSign)); + } + for (let ii = i + 1; ii < args.length; ii++) { + if ((!configuration['greedy-arrays'] && argsToSet.length > 0) || + (nargsCount && typeof nargsCount === 'number' && argsToSet.length >= nargsCount)) + break; + next = args[ii]; + if (/^-/.test(next) && !negative.test(next) && !isUnknownOptionAsArg(next)) + break; + i = ii; + argsToSet.push(processValue(key, next)); + } + } + // If both array and nargs are configured, create an error if less than + // nargs positionals were found. NaN has special meaning, indicating + // that at least one value is required (more are okay). + if (typeof nargsCount === 'number' && ((nargsCount && argsToSet.length < nargsCount) || + (isNaN(nargsCount) && argsToSet.length === 0))) { + error = Error(__('Not enough arguments following: %s', key)); + } + setArg(key, argsToSet); + return i; + } + function setArg(key, val) { + if (/-/.test(key) && configuration['camel-case-expansion']) { + const alias = key.split('.').map(function (prop) { + return camelCase(prop); + }).join('.'); + addNewAlias(key, alias); + } + const value = processValue(key, val); + const splitKey = key.split('.'); + setKey(argv, splitKey, value); + // handle populating aliases of the full key + if (flags.aliases[key]) { + flags.aliases[key].forEach(function (x) { + const keyProperties = x.split('.'); + setKey(argv, keyProperties, value); + }); + } + // handle populating aliases of the first element of the dot-notation key + if (splitKey.length > 1 && configuration['dot-notation']) { + ; + (flags.aliases[splitKey[0]] || []).forEach(function (x) { + let keyProperties = x.split('.'); + // expand alias with nested objects in key + const a = [].concat(splitKey); + a.shift(); // nuke the old key. + keyProperties = keyProperties.concat(a); + // populate alias only if is not already an alias of the full key + // (already populated above) + if (!(flags.aliases[key] || []).includes(keyProperties.join('.'))) { + setKey(argv, keyProperties, value); + } + }); + } + // Set normalize getter and setter when key is in 'normalize' but isn't an array + if (checkAllAliases(key, flags.normalize) && !checkAllAliases(key, flags.arrays)) { + const keys = [key].concat(flags.aliases[key] || []); + keys.forEach(function (key) { + Object.defineProperty(argvReturn, key, { + enumerable: true, + get() { + return val; + }, + set(value) { + val = typeof value === 'string' ? mixin.normalize(value) : value; + } + }); + }); + } + } + function addNewAlias(key, alias) { + if (!(flags.aliases[key] && flags.aliases[key].length)) { + flags.aliases[key] = [alias]; + newAliases[alias] = true; + } + if (!(flags.aliases[alias] && flags.aliases[alias].length)) { + addNewAlias(alias, key); + } + } + function processValue(key, val) { + // strings may be quoted, clean this up as we assign values. + if (typeof val === 'string' && + (val[0] === "'" || val[0] === '"') && + val[val.length - 1] === val[0]) { + val = val.substring(1, val.length - 1); + } + // handle parsing boolean arguments --foo=true --bar false. + if (checkAllAliases(key, flags.bools) || checkAllAliases(key, flags.counts)) { + if (typeof val === 'string') + val = val === 'true'; + } + let value = Array.isArray(val) + ? 
val.map(function (v) { return maybeCoerceNumber(key, v); }) + : maybeCoerceNumber(key, val); + // increment a count given as arg (either no value or value parsed as boolean) + if (checkAllAliases(key, flags.counts) && (isUndefined(value) || typeof value === 'boolean')) { + value = increment(); + } + // Set normalized value when key is in 'normalize' and in 'arrays' + if (checkAllAliases(key, flags.normalize) && checkAllAliases(key, flags.arrays)) { + if (Array.isArray(val)) + value = val.map((val) => { return mixin.normalize(val); }); + else + value = mixin.normalize(val); + } + return value; + } + function maybeCoerceNumber(key, value) { + if (!configuration['parse-positional-numbers'] && key === '_') + return value; + if (!checkAllAliases(key, flags.strings) && !checkAllAliases(key, flags.bools) && !Array.isArray(value)) { + const shouldCoerceNumber = looksLikeNumber(value) && configuration['parse-numbers'] && (Number.isSafeInteger(Math.floor(parseFloat(`${value}`)))); + if (shouldCoerceNumber || (!isUndefined(value) && checkAllAliases(key, flags.numbers))) { + value = Number(value); + } + } + return value; + } + // set args from config.json file, this should be + // applied last so that defaults can be applied. + function setConfig(argv) { + const configLookup = Object.create(null); + // expand defaults/aliases, in-case any happen to reference + // the config.json file. + applyDefaultsAndAliases(configLookup, flags.aliases, defaults); + Object.keys(flags.configs).forEach(function (configKey) { + const configPath = argv[configKey] || configLookup[configKey]; + if (configPath) { + try { + let config = null; + const resolvedConfigPath = mixin.resolve(mixin.cwd(), configPath); + const resolveConfig = flags.configs[configKey]; + if (typeof resolveConfig === 'function') { + try { + config = resolveConfig(resolvedConfigPath); + } + catch (e) { + config = e; + } + if (config instanceof Error) { + error = config; + return; + } + } + else { + config = mixin.require(resolvedConfigPath); + } + setConfigObject(config); + } + catch (ex) { + // Deno will receive a PermissionDenied error if an attempt is + // made to load config without the --allow-read flag: + if (ex.name === 'PermissionDenied') + error = ex; + else if (argv[configKey]) + error = Error(__('Invalid JSON config file: %s', configPath)); + } + } + }); + } + // set args from config object. + // it recursively checks nested objects. + function setConfigObject(config, prev) { + Object.keys(config).forEach(function (key) { + const value = config[key]; + const fullKey = prev ? prev + '.' + key : key; + // if the value is an inner object and we have dot-notation + // enabled, treat inner objects in config the same as + // heavily nested dot notations (foo.bar.apple). + if (typeof value === 'object' && value !== null && !Array.isArray(value) && configuration['dot-notation']) { + // if the value is an object but not an array, check nested object + setConfigObject(value, fullKey); + } + else { + // setting arguments via CLI takes precedence over + // values within the config file. 
+ if (!hasKey(argv, fullKey.split('.')) || (checkAllAliases(fullKey, flags.arrays) && configuration['combine-arrays'])) { + setArg(fullKey, value); + } + } + }); + } + // set all config objects passed in opts + function setConfigObjects() { + if (typeof configObjects !== 'undefined') { + configObjects.forEach(function (configObject) { + setConfigObject(configObject); + }); + } + } + function applyEnvVars(argv, configOnly) { + if (typeof envPrefix === 'undefined') + return; + const prefix = typeof envPrefix === 'string' ? envPrefix : ''; + const env = mixin.env(); + Object.keys(env).forEach(function (envVar) { + if (prefix === '' || envVar.lastIndexOf(prefix, 0) === 0) { + // get array of nested keys and convert them to camel case + const keys = envVar.split('__').map(function (key, i) { + if (i === 0) { + key = key.substring(prefix.length); + } + return camelCase(key); + }); + if (((configOnly && flags.configs[keys.join('.')]) || !configOnly) && !hasKey(argv, keys)) { + setArg(keys.join('.'), env[envVar]); + } + } + }); + } + function applyCoercions(argv) { + let coerce; + const applied = new Set(); + Object.keys(argv).forEach(function (key) { + if (!applied.has(key)) { // If we haven't already coerced this option via one of its aliases + coerce = checkAllAliases(key, flags.coercions); + if (typeof coerce === 'function') { + try { + const value = maybeCoerceNumber(key, coerce(argv[key])); + ([].concat(flags.aliases[key] || [], key)).forEach(ali => { + applied.add(ali); + argv[ali] = value; + }); + } + catch (err) { + error = err; + } + } + } + }); + } + function setPlaceholderKeys(argv) { + flags.keys.forEach((key) => { + // don't set placeholder keys for dot notation options 'foo.bar'. + if (~key.indexOf('.')) + return; + if (typeof argv[key] === 'undefined') + argv[key] = undefined; + }); + return argv; + } + function applyDefaultsAndAliases(obj, aliases, defaults, canLog = false) { + Object.keys(defaults).forEach(function (key) { + if (!hasKey(obj, key.split('.'))) { + setKey(obj, key.split('.'), defaults[key]); + if (canLog) + defaulted[key] = true; + (aliases[key] || []).forEach(function (x) { + if (hasKey(obj, x.split('.'))) + return; + setKey(obj, x.split('.'), defaults[key]); + }); + } + }); + } + function hasKey(obj, keys) { + let o = obj; + if (!configuration['dot-notation']) + keys = [keys.join('.')]; + keys.slice(0, -1).forEach(function (key) { + o = (o[key] || {}); + }); + const key = keys[keys.length - 1]; + if (typeof o !== 'object') + return false; + else + return key in o; + } + function setKey(obj, keys, value) { + let o = obj; + if (!configuration['dot-notation']) + keys = [keys.join('.')]; + keys.slice(0, -1).forEach(function (key) { + // TODO(bcoe): in the next major version of yargs, switch to + // Object.create(null) for dot notation: + key = sanitizeKey(key); + if (typeof o === 'object' && o[key] === undefined) { + o[key] = {}; + } + if (typeof o[key] !== 'object' || Array.isArray(o[key])) { + // ensure that o[key] is an array, and that the last item is an empty object. 
+ if (Array.isArray(o[key])) { + o[key].push({}); + } + else { + o[key] = [o[key], {}]; + } + // we want to update the empty object at the end of the o[key] array, so set o to that object + o = o[key][o[key].length - 1]; + } + else { + o = o[key]; + } + }); + // TODO(bcoe): in the next major version of yargs, switch to + // Object.create(null) for dot notation: + const key = sanitizeKey(keys[keys.length - 1]); + const isTypeArray = checkAllAliases(keys.join('.'), flags.arrays); + const isValueArray = Array.isArray(value); + let duplicate = configuration['duplicate-arguments-array']; + // nargs has higher priority than duplicate + if (!duplicate && checkAllAliases(key, flags.nargs)) { + duplicate = true; + if ((!isUndefined(o[key]) && flags.nargs[key] === 1) || (Array.isArray(o[key]) && o[key].length === flags.nargs[key])) { + o[key] = undefined; + } + } + if (value === increment()) { + o[key] = increment(o[key]); + } + else if (Array.isArray(o[key])) { + if (duplicate && isTypeArray && isValueArray) { + o[key] = configuration['flatten-duplicate-arrays'] ? o[key].concat(value) : (Array.isArray(o[key][0]) ? o[key] : [o[key]]).concat([value]); + } + else if (!duplicate && Boolean(isTypeArray) === Boolean(isValueArray)) { + o[key] = value; + } + else { + o[key] = o[key].concat([value]); + } + } + else if (o[key] === undefined && isTypeArray) { + o[key] = isValueArray ? value : [value]; + } + else if (duplicate && !(o[key] === undefined || + checkAllAliases(key, flags.counts) || + checkAllAliases(key, flags.bools))) { + o[key] = [o[key], value]; + } + else { + o[key] = value; + } + } + // extend the aliases list with inferred aliases. + function extendAliases(...args) { + args.forEach(function (obj) { + Object.keys(obj || {}).forEach(function (key) { + // short-circuit if we've already added a key + // to the aliases array, for example it might + // exist in both 'opts.default' and 'opts.key'. + if (flags.aliases[key]) + return; + flags.aliases[key] = [].concat(aliases[key] || []); + // For "--option-name", also set argv.optionName + flags.aliases[key].concat(key).forEach(function (x) { + if (/-/.test(x) && configuration['camel-case-expansion']) { + const c = camelCase(x); + if (c !== key && flags.aliases[key].indexOf(c) === -1) { + flags.aliases[key].push(c); + newAliases[c] = true; + } + } + }); + // For "--optionName", also set argv['option-name'] + flags.aliases[key].concat(key).forEach(function (x) { + if (x.length > 1 && /[A-Z]/.test(x) && configuration['camel-case-expansion']) { + const c = decamelize(x, '-'); + if (c !== key && flags.aliases[key].indexOf(c) === -1) { + flags.aliases[key].push(c); + newAliases[c] = true; + } + } + }); + flags.aliases[key].forEach(function (x) { + flags.aliases[x] = [key].concat(flags.aliases[key].filter(function (y) { + return x !== y; + })); + }); + }); + }); + } + function checkAllAliases(key, flag) { + const toCheck = [].concat(flags.aliases[key] || [], key); + const keys = Object.keys(flag); + const setAlias = toCheck.find(key => keys.includes(key)); + return setAlias ? flag[setAlias] : false; + } + function hasAnyFlag(key) { + const flagsKeys = Object.keys(flags); + const toCheck = [].concat(flagsKeys.map(k => flags[k])); + return toCheck.some(function (flag) { + return Array.isArray(flag) ? 
flag.includes(key) : flag[key]; + }); + } + function hasFlagsMatching(arg, ...patterns) { + const toCheck = [].concat(...patterns); + return toCheck.some(function (pattern) { + const match = arg.match(pattern); + return match && hasAnyFlag(match[1]); + }); + } + // based on a simplified version of the short flag group parsing logic + function hasAllShortFlags(arg) { + // if this is a negative number, or doesn't start with a single hyphen, it's not a short flag group + if (arg.match(negative) || !arg.match(/^-[^-]+/)) { + return false; + } + let hasAllFlags = true; + let next; + const letters = arg.slice(1).split(''); + for (let j = 0; j < letters.length; j++) { + next = arg.slice(j + 2); + if (!hasAnyFlag(letters[j])) { + hasAllFlags = false; + break; + } + if ((letters[j + 1] && letters[j + 1] === '=') || + next === '-' || + (/[A-Za-z]/.test(letters[j]) && /^-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) || + (letters[j + 1] && letters[j + 1].match(/\W/))) { + break; + } + } + return hasAllFlags; + } + function isUnknownOptionAsArg(arg) { + return configuration['unknown-options-as-args'] && isUnknownOption(arg); + } + function isUnknownOption(arg) { + arg = arg.replace(/^-{3,}/, '--'); + // ignore negative numbers + if (arg.match(negative)) { + return false; + } + // if this is a short option group and all of them are configured, it isn't unknown + if (hasAllShortFlags(arg)) { + return false; + } + // e.g. '--count=2' + const flagWithEquals = /^-+([^=]+?)=[\s\S]*$/; + // e.g. '-a' or '--arg' + const normalFlag = /^-+([^=]+?)$/; + // e.g. '-a-' + const flagEndingInHyphen = /^-+([^=]+?)-$/; + // e.g. '-abc123' + const flagEndingInDigits = /^-+([^=]+?\d+)$/; + // e.g. '-a/usr/local' + const flagEndingInNonWordCharacters = /^-+([^=]+?)\W+.*$/; + // check the different types of flag styles, including negatedBoolean, a pattern defined near the start of the parse method + return !hasFlagsMatching(arg, flagWithEquals, negatedBoolean, normalFlag, flagEndingInHyphen, flagEndingInDigits, flagEndingInNonWordCharacters); + } + // make a best effort to pick a default value + // for an option based on name and type. + function defaultValue(key) { + if (!checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts) && + `${key}` in defaults) { + return defaults[key]; + } + else { + return defaultForType(guessType(key)); + } + } + // return a default value, given the type of a flag., + function defaultForType(type) { + const def = { + [DefaultValuesForTypeKey.BOOLEAN]: true, + [DefaultValuesForTypeKey.STRING]: '', + [DefaultValuesForTypeKey.NUMBER]: undefined, + [DefaultValuesForTypeKey.ARRAY]: [] + }; + return def[type]; + } + // given a flag, enforce a default type. 
+ function guessType(key) { + let type = DefaultValuesForTypeKey.BOOLEAN; + if (checkAllAliases(key, flags.strings)) + type = DefaultValuesForTypeKey.STRING; + else if (checkAllAliases(key, flags.numbers)) + type = DefaultValuesForTypeKey.NUMBER; + else if (checkAllAliases(key, flags.bools)) + type = DefaultValuesForTypeKey.BOOLEAN; + else if (checkAllAliases(key, flags.arrays)) + type = DefaultValuesForTypeKey.ARRAY; + return type; + } + function isUndefined(num) { + return num === undefined; + } + // check user configuration settings for inconsistencies + function checkConfiguration() { + // count keys should not be set as array/narg + Object.keys(flags.counts).find(key => { + if (checkAllAliases(key, flags.arrays)) { + error = Error(__('Invalid configuration: %s, opts.count excludes opts.array.', key)); + return true; + } + else if (checkAllAliases(key, flags.nargs)) { + error = Error(__('Invalid configuration: %s, opts.count excludes opts.narg.', key)); + return true; + } + return false; + }); + } + return { + aliases: Object.assign({}, flags.aliases), + argv: Object.assign(argvReturn, argv), + configuration: configuration, + defaulted: Object.assign({}, defaulted), + error: error, + newAliases: Object.assign({}, newAliases) + }; + } +} +// if any aliases reference each other, we should +// merge them together. +function combineAliases(aliases) { + const aliasArrays = []; + const combined = Object.create(null); + let change = true; + // turn alias lookup hash {key: ['alias1', 'alias2']} into + // a simple array ['key', 'alias1', 'alias2'] + Object.keys(aliases).forEach(function (key) { + aliasArrays.push([].concat(aliases[key], key)); + }); + // combine arrays until zero changes are + // made in an iteration. + while (change) { + change = false; + for (let i = 0; i < aliasArrays.length; i++) { + for (let ii = i + 1; ii < aliasArrays.length; ii++) { + const intersect = aliasArrays[i].filter(function (v) { + return aliasArrays[ii].indexOf(v) !== -1; + }); + if (intersect.length) { + aliasArrays[i] = aliasArrays[i].concat(aliasArrays[ii]); + aliasArrays.splice(ii, 1); + change = true; + break; + } + } + } + } + // map arrays back to the hash-lookup (de-dupe while + // we're at it). + aliasArrays.forEach(function (aliasArray) { + aliasArray = aliasArray.filter(function (v, i, self) { + return self.indexOf(v) === i; + }); + const lastAlias = aliasArray.pop(); + if (lastAlias !== undefined && typeof lastAlias === 'string') { + combined[lastAlias] = aliasArray; + } + }); + return combined; +} +// this function should only be called when a count is given as an arg +// it is NOT called to set a default value +// thus we can start the count at 1 instead of 0 +function increment(orig) { + return orig !== undefined ? 
orig + 1 : 1; +} +// TODO(bcoe): in the next major version of yargs, switch to +// Object.create(null) for dot notation: +function sanitizeKey(key) { + if (key === '__proto__') + return '___proto___'; + return key; +} diff --git a/mybulma/node_modules/yargs-parser/package.json b/mybulma/node_modules/yargs-parser/package.json new file mode 100644 index 0000000..f97aa9e --- /dev/null +++ b/mybulma/node_modules/yargs-parser/package.json @@ -0,0 +1,87 @@ +{ + "name": "yargs-parser", + "version": "20.2.9", + "description": "the mighty option parser used by yargs", + "main": "build/index.cjs", + "exports": { + ".": [ + { + "import": "./build/lib/index.js", + "require": "./build/index.cjs" + }, + "./build/index.cjs" + ] + }, + "type": "module", + "module": "./build/lib/index.js", + "scripts": { + "check": "standardx '**/*.ts' && standardx '**/*.js' && standardx '**/*.cjs'", + "fix": "standardx --fix '**/*.ts' && standardx --fix '**/*.js' && standardx --fix '**/*.cjs'", + "pretest": "rimraf build && tsc -p tsconfig.test.json && cross-env NODE_ENV=test npm run build:cjs", + "test": "c8 --reporter=text --reporter=html mocha test/*.cjs", + "test:browser": "start-server-and-test 'serve ./ -p 8080' http://127.0.0.1:8080/package.json 'node ./test/browser/yargs-test.cjs'", + "pretest:typescript": "npm run pretest", + "test:typescript": "c8 mocha ./build/test/typescript/*.js", + "coverage": "c8 report --check-coverage", + "precompile": "rimraf build", + "compile": "tsc", + "postcompile": "npm run build:cjs", + "build:cjs": "rollup -c", + "prepare": "npm run compile" + }, + "repository": { + "type": "git", + "url": "https://github.com/yargs/yargs-parser.git" + }, + "keywords": [ + "argument", + "parser", + "yargs", + "command", + "cli", + "parsing", + "option", + "args", + "argument" + ], + "author": "Ben Coe ", + "license": "ISC", + "devDependencies": { + "@types/chai": "^4.2.11", + "@types/mocha": "^8.0.0", + "@types/node": "^14.0.0", + "@typescript-eslint/eslint-plugin": "^3.10.1", + "@typescript-eslint/parser": "^3.10.1", + "@wessberg/rollup-plugin-ts": "^1.2.28", + "c8": "^7.3.0", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "eslint": "^7.0.0", + "eslint-plugin-import": "^2.20.1", + "eslint-plugin-node": "^11.0.0", + "gts": "^3.0.0", + "mocha": "^9.0.0", + "puppeteer": "^10.0.0", + "rimraf": "^3.0.2", + "rollup": "^2.22.1", + "rollup-plugin-cleanup": "^3.1.1", + "serve": "^12.0.0", + "standardx": "^7.0.0", + "start-server-and-test": "^1.11.2", + "ts-transform-default-export": "^1.0.2", + "typescript": "^4.0.0" + }, + "files": [ + "browser.js", + "build", + "!*.d.ts" + ], + "engines": { + "node": ">=10" + }, + "standardx": { + "ignore": [ + "build" + ] + } +} diff --git a/mybulma/node_modules/yargs/build/index.cjs b/mybulma/node_modules/yargs/build/index.cjs new file mode 100644 index 0000000..9641ad8 --- /dev/null +++ b/mybulma/node_modules/yargs/build/index.cjs @@ -0,0 +1 @@ +"use strict";var t=require("assert");class e extends Error{constructor(t){super(t||"yargs error"),this.name="YError",Error.captureStackTrace&&Error.captureStackTrace(this,e)}}let s,i=[];function n(t,o,a,h){s=h;let l={};if(Object.prototype.hasOwnProperty.call(t,"extends")){if("string"!=typeof t.extends)return l;const r=/\.json|\..*rc$/.test(t.extends);let h=null;if(r)h=function(t,e){return s.path.resolve(t,e)}(o,t.extends);else try{h=require.resolve(t.extends)}catch(e){return t}!function(t){if(i.indexOf(t)>-1)throw new e(`Circular extended configurations: 
'${t}'.`)}(h),i.push(h),l=r?JSON.parse(s.readFileSync(h,"utf8")):require(t.extends),delete t.extends,l=n(l,s.path.dirname(h),a,s)}return i=[],a?r(l,t):Object.assign({},l,t)}function r(t,e){const s={};function i(t){return t&&"object"==typeof t&&!Array.isArray(t)}Object.assign(s,t);for(const n of Object.keys(e))i(e[n])&&i(s[n])?s[n]=r(t[n],e[n]):s[n]=e[n];return s}function o(t){const e=t.replace(/\s{2,}/g," ").split(/\s+(?![^[]*]|[^<]*>)/),s=/\.*[\][<>]/g,i=e.shift();if(!i)throw new Error(`No command found in: ${t}`);const n={cmd:i.replace(s,""),demanded:[],optional:[]};return e.forEach(((t,i)=>{let r=!1;t=t.replace(/\s/g,""),/\.+[\]>]/.test(t)&&i===e.length-1&&(r=!0),/^\[/.test(t)?n.optional.push({cmd:t.replace(s,"").split("|"),variadic:r}):n.demanded.push({cmd:t.replace(s,"").split("|"),variadic:r})})),n}const a=["first","second","third","fourth","fifth","sixth"];function h(t,s,i){try{let n=0;const[r,a,h]="object"==typeof t?[{demanded:[],optional:[]},t,s]:[o(`cmd ${t}`),s,i],f=[].slice.call(a);for(;f.length&&void 0===f[f.length-1];)f.pop();const d=h||f.length;if(du)throw new e(`Too many arguments provided. Expected max ${u} but received ${d}.`);r.demanded.forEach((t=>{const e=l(f.shift());0===t.cmd.filter((t=>t===e||"*"===t)).length&&c(e,t.cmd,n),n+=1})),r.optional.forEach((t=>{if(0===f.length)return;const e=l(f.shift());0===t.cmd.filter((t=>t===e||"*"===t)).length&&c(e,t.cmd,n),n+=1}))}catch(t){console.warn(t.stack)}}function l(t){return Array.isArray(t)?"array":null===t?"null":typeof t}function c(t,s,i){throw new e(`Invalid ${a[i]||"manyith"} argument. Expected ${s.join(" or ")} but received ${t}.`)}function f(t){return!!t&&!!t.then&&"function"==typeof t.then}function d(t,e,s,i){s.assert.notStrictEqual(t,e,i)}function u(t,e){e.assert.strictEqual(typeof t,"string")}function p(t){return Object.keys(t)}function g(t={},e=(()=>!0)){const s={};return p(t).forEach((i=>{e(i,t[i])&&(s[i]=t[i])})),s}function m(){return process.versions.electron&&!process.defaultApp?0:1}function y(){return process.argv[m()]}var b=Object.freeze({__proto__:null,hideBin:function(t){return t.slice(m()+1)},getProcessArgvBin:y});function v(t,e,s,i){if("a"===s&&!i)throw new TypeError("Private accessor was defined without a getter");if("function"==typeof e?t!==e||!i:!e.has(t))throw new TypeError("Cannot read private member from an object whose class did not declare it");return"m"===s?i:"a"===s?i.call(t):i?i.value:e.get(t)}function O(t,e,s,i,n){if("m"===i)throw new TypeError("Private method is not writable");if("a"===i&&!n)throw new TypeError("Private accessor was defined without a setter");if("function"==typeof e?t!==e||!n:!e.has(t))throw new TypeError("Cannot write private member to an object whose class did not declare it");return"a"===i?n.call(t,s):n?n.value=s:e.set(t,s),s}class w{constructor(t){this.globalMiddleware=[],this.frozens=[],this.yargs=t}addMiddleware(t,e,s=!0,i=!1){if(h(" [boolean] [boolean] [boolean]",[t,e,s],arguments.length),Array.isArray(t)){for(let i=0;i{const i=[...s[e]||[],e];return!t.option||!i.includes(t.option)})),t.option=e,this.addMiddleware(t,!0,!0,!0)}getMiddleware(){return this.globalMiddleware}freeze(){this.frozens.push([...this.globalMiddleware])}unfreeze(){const t=this.frozens.pop();void 0!==t&&(this.globalMiddleware=t)}reset(){this.globalMiddleware=this.globalMiddleware.filter((t=>t.global))}}function C(t,e,s,i){return s.reduce(((t,s)=>{if(s.applyBeforeValidation!==i)return t;if(s.mutates){if(s.applied)return t;s.applied=!0}if(f(t))return 
t.then((t=>Promise.all([t,s(t,e)]))).then((([t,e])=>Object.assign(t,e)));{const i=s(t,e);return f(i)?i.then((e=>Object.assign(t,e))):Object.assign(t,i)}}),t)}function j(t,e,s=(t=>{throw t})){try{const s="function"==typeof t?t():t;return f(s)?s.then((t=>e(t))):e(s)}catch(t){return s(t)}}const _=/(^\*)|(^\$0)/;class M{constructor(t,e,s,i){this.requireCache=new Set,this.handlers={},this.aliasMap={},this.frozens=[],this.shim=i,this.usage=t,this.globalMiddleware=s,this.validation=e}addDirectory(t,e,s,i){"boolean"!=typeof(i=i||{}).recurse&&(i.recurse=!1),Array.isArray(i.extensions)||(i.extensions=["js"]);const n="function"==typeof i.visit?i.visit:t=>t;i.visit=(t,e,s)=>{const i=n(t,e,s);if(i){if(this.requireCache.has(e))return i;this.requireCache.add(e),this.addHandler(i)}return i},this.shim.requireDirectory({require:e,filename:s},t,i)}addHandler(t,e,s,i,n,r){let a=[];const h=function(t){return t?t.map((t=>(t.applyBeforeValidation=!1,t))):[]}(n);if(i=i||(()=>{}),Array.isArray(t))if(function(t){return t.every((t=>"string"==typeof t))}(t))[t,...a]=t;else for(const e of t)this.addHandler(e);else{if(function(t){return"object"==typeof t&&!Array.isArray(t)}(t)){let e=Array.isArray(t.command)||"string"==typeof t.command?t.command:this.moduleName(t);return t.aliases&&(e=[].concat(e).concat(t.aliases)),void this.addHandler(e,this.extractDesc(t),t.builder,t.handler,t.middlewares,t.deprecated)}if(k(s))return void this.addHandler([t].concat(a),e,s.builder,s.handler,s.middlewares,s.deprecated)}if("string"==typeof t){const n=o(t);a=a.map((t=>o(t).cmd));let l=!1;const c=[n.cmd].concat(a).filter((t=>!_.test(t)||(l=!0,!1)));0===c.length&&l&&c.push("$0"),l&&(n.cmd=c[0],a=c.slice(1),t=t.replace(_,n.cmd)),a.forEach((t=>{this.aliasMap[t]=n.cmd})),!1!==e&&this.usage.command(t,e,l,a,r),this.handlers[n.cmd]={original:t,description:e,handler:i,builder:s||{},middlewares:h,deprecated:r,demanded:n.demanded,optional:n.optional},l&&(this.defaultCommand=this.handlers[n.cmd])}}getCommandHandlers(){return this.handlers}getCommands(){return Object.keys(this.handlers).concat(Object.keys(this.aliasMap))}hasDefaultCommand(){return!!this.defaultCommand}runCommand(t,e,s,i,n,r){const o=this.handlers[t]||this.handlers[this.aliasMap[t]]||this.defaultCommand,a=e.getInternalMethods().getContext(),h=a.commands.slice(),l=!t;t&&(a.commands.push(t),a.fullCommands.push(o.original));const c=this.applyBuilderUpdateUsageAndParse(l,o,e,s.aliases,h,i,n,r);return f(c)?c.then((t=>this.applyMiddlewareAndGetResult(l,o,t.innerArgv,a,n,t.aliases,e))):this.applyMiddlewareAndGetResult(l,o,c.innerArgv,a,n,c.aliases,e)}applyBuilderUpdateUsageAndParse(t,e,s,i,n,r,o,a){const h=e.builder;let l=s;if(x(h)){const c=h(s.getInternalMethods().reset(i),a);if(f(c))return c.then((i=>{var a;return l=(a=i)&&"function"==typeof a.getInternalMethods?i:s,this.parseAndUpdateUsage(t,e,l,n,r,o)}))}else(function(t){return"object"==typeof t})(h)&&(l=s.getInternalMethods().reset(i),Object.keys(e.builder).forEach((t=>{l.option(t,h[t])})));return this.parseAndUpdateUsage(t,e,l,n,r,o)}parseAndUpdateUsage(t,e,s,i,n,r){t&&s.getInternalMethods().getUsageInstance().unfreeze(!0),this.shouldUpdateUsage(s)&&s.getInternalMethods().getUsageInstance().usage(this.usageFromParentCommandsCommandHandler(i,e),e.description);const o=s.getInternalMethods().runYargsParserAndExecuteCommands(null,void 0,!0,n,r);return 
f(o)?o.then((t=>({aliases:s.parsed.aliases,innerArgv:t}))):{aliases:s.parsed.aliases,innerArgv:o}}shouldUpdateUsage(t){return!t.getInternalMethods().getUsageInstance().getUsageDisabled()&&0===t.getInternalMethods().getUsageInstance().getUsage().length}usageFromParentCommandsCommandHandler(t,e){const s=_.test(e.original)?e.original.replace(_,"").trim():e.original,i=t.filter((t=>!_.test(t)));return i.push(s),`$0 ${i.join(" ")}`}handleValidationAndGetResult(t,e,s,i,n,r,o,a){if(!r.getInternalMethods().getHasOutput()){const e=r.getInternalMethods().runValidation(n,a,r.parsed.error,t);s=j(s,(t=>(e(t),t)))}if(e.handler&&!r.getInternalMethods().getHasOutput()){r.getInternalMethods().setHasOutput();const i=!!r.getOptions().configuration["populate--"];r.getInternalMethods().postProcess(s,i,!1,!1),s=j(s=C(s,r,o,!1),(t=>{const s=e.handler(t);return f(s)?s.then((()=>t)):t})),t||r.getInternalMethods().getUsageInstance().cacheHelpMessage(),f(s)&&!r.getInternalMethods().hasParseCallback()&&s.catch((t=>{try{r.getInternalMethods().getUsageInstance().fail(null,t)}catch(t){}}))}return t||(i.commands.pop(),i.fullCommands.pop()),s}applyMiddlewareAndGetResult(t,e,s,i,n,r,o){let a={};if(n)return s;o.getInternalMethods().getHasOutput()||(a=this.populatePositionals(e,s,i,o));const h=this.globalMiddleware.getMiddleware().slice(0).concat(e.middlewares),l=C(s,o,h,!0);return f(l)?l.then((s=>this.handleValidationAndGetResult(t,e,s,i,r,o,h,a))):this.handleValidationAndGetResult(t,e,l,i,r,o,h,a)}populatePositionals(t,e,s,i){e._=e._.slice(s.commands.length);const n=t.demanded.slice(0),r=t.optional.slice(0),o={};for(this.validation.positionalCount(n.length,e._.length);n.length;){const t=n.shift();this.populatePositional(t,e,o)}for(;r.length;){const t=r.shift();this.populatePositional(t,e,o)}return e._=s.commands.concat(e._.map((t=>""+t))),this.postProcessPositionals(e,o,this.cmdToParseOptions(t.original),i),o}populatePositional(t,e,s){const i=t.cmd[0];t.variadic?s[i]=e._.splice(0).map(String):e._.length&&(s[i]=[String(e._.shift())])}cmdToParseOptions(t){const e={array:[],default:{},alias:{},demand:{}},s=o(t);return s.demanded.forEach((t=>{const[s,...i]=t.cmd;t.variadic&&(e.array.push(s),e.default[s]=[]),e.alias[s]=i,e.demand[s]=!0})),s.optional.forEach((t=>{const[s,...i]=t.cmd;t.variadic&&(e.array.push(s),e.default[s]=[]),e.alias[s]=i})),e}postProcessPositionals(t,e,s,i){const n=Object.assign({},i.getOptions());n.default=Object.assign(s.default,n.default);for(const t of Object.keys(s.alias))n.alias[t]=(n.alias[t]||[]).concat(s.alias[t]);n.array=n.array.concat(s.array),n.config={};const r=[];if(Object.keys(e).forEach((t=>{e[t].map((e=>{n.configuration["unknown-options-as-args"]&&(n.key[t]=!0),r.push(`--${t}`),r.push(e)}))})),!r.length)return;const o=Object.assign({},n.configuration,{"populate--":!1}),a=this.shim.Parser.detailed(r,Object.assign({},n,{configuration:o}));if(a.error)i.getInternalMethods().getUsageInstance().fail(a.error.message,a.error);else{const s=Object.keys(e);Object.keys(e).forEach((t=>{s.push(...a.aliases[t])})),Object.keys(a.argv).forEach((n=>{s.includes(n)&&(e[n]||(e[n]=a.argv[n]),!this.isInConfigs(i,n)&&!this.isDefaulted(i,n)&&Object.prototype.hasOwnProperty.call(t,n)&&Object.prototype.hasOwnProperty.call(a.argv,n)&&(Array.isArray(t[n])||Array.isArray(a.argv[n]))?t[n]=[].concat(t[n],a.argv[n]):t[n]=a.argv[n])}))}}isDefaulted(t,e){const{default:s}=t.getOptions();return 
Object.prototype.hasOwnProperty.call(s,e)||Object.prototype.hasOwnProperty.call(s,this.shim.Parser.camelCase(e))}isInConfigs(t,e){const{configObjects:s}=t.getOptions();return s.some((t=>Object.prototype.hasOwnProperty.call(t,e)))||s.some((t=>Object.prototype.hasOwnProperty.call(t,this.shim.Parser.camelCase(e))))}runDefaultBuilderOn(t){if(!this.defaultCommand)return;if(this.shouldUpdateUsage(t)){const e=_.test(this.defaultCommand.original)?this.defaultCommand.original:this.defaultCommand.original.replace(/^[^[\]<>]*/,"$0 ");t.getInternalMethods().getUsageInstance().usage(e,this.defaultCommand.description)}const e=this.defaultCommand.builder;if(x(e))return e(t,!0);k(e)||Object.keys(e).forEach((s=>{t.option(s,e[s])}))}moduleName(t){const e=function(t){if("undefined"==typeof require)return null;for(let e,s=0,i=Object.keys(require.cache);s{const s=e;s._handle&&s.isTTY&&"function"==typeof s._handle.setBlocking&&s._handle.setBlocking(t)}))}function A(t){return"boolean"==typeof t}function P(t,s){const i=s.y18n.__,n={},r=[];n.failFn=function(t){r.push(t)};let o=null,a=null,h=!0;n.showHelpOnFail=function(e=!0,s){const[i,r]="string"==typeof e?[!0,e]:[e,s];return t.getInternalMethods().isGlobalContext()&&(a=r),o=r,h=i,n};let l=!1;n.fail=function(s,i){const c=t.getInternalMethods().getLoggerInstance();if(!r.length){if(t.getExitProcess()&&E(!0),!l){l=!0,h&&(t.showHelp("error"),c.error()),(s||i)&&c.error(s||i);const e=o||a;e&&((s||i)&&c.error(""),c.error(e))}if(i=i||new e(s),t.getExitProcess())return t.exit(1);if(t.getInternalMethods().hasParseCallback())return t.exit(1,i);throw i}for(let t=r.length-1;t>=0;--t){const e=r[t];if(A(e)){if(i)throw i;if(s)throw Error(s)}else e(s,i,n)}};let c=[],f=!1;n.usage=(t,e)=>null===t?(f=!0,c=[],n):(f=!1,c.push([t,e||""]),n),n.getUsage=()=>c,n.getUsageDisabled=()=>f,n.getPositionalGroupName=()=>i("Positionals:");let d=[];n.example=(t,e)=>{d.push([t,e||""])};let u=[];n.command=function(t,e,s,i,n=!1){s&&(u=u.map((t=>(t[2]=!1,t)))),u.push([t,e||"",s,i,n])},n.getCommands=()=>u;let p={};n.describe=function(t,e){Array.isArray(t)?t.forEach((t=>{n.describe(t,e)})):"object"==typeof t?Object.keys(t).forEach((e=>{n.describe(e,t[e])})):p[t]=e},n.getDescriptions=()=>p;let m=[];n.epilog=t=>{m.push(t)};let y,b=!1;n.wrap=t=>{b=!0,y=t},n.getWrap=()=>s.getEnv("YARGS_DISABLE_WRAP")?null:(b||(y=function(){const t=80;return s.process.stdColumns?Math.min(t,s.process.stdColumns):t}(),b=!0),y);const v="__yargsString__:";function O(t,e,i){let n=0;return Array.isArray(t)||(t=Object.values(t).map((t=>[t]))),t.forEach((t=>{n=Math.max(s.stringWidth(i?`${i} ${I(t[0])}`:I(t[0]))+$(t[0]),n)})),e&&(n=Math.min(n,parseInt((.5*e).toString(),10))),n}let w;function C(e){return t.getOptions().hiddenOptions.indexOf(e)<0||t.parsed.argv[t.getOptions().showHiddenOpt]}function j(t,e){let s=`[${i("default:")} `;if(void 0===t&&!e)return null;if(e)s+=e;else switch(typeof t){case"string":s+=`"${t}"`;break;case"object":s+=JSON.stringify(t);break;default:s+=t}return`${s}]`}n.deferY18nLookup=t=>v+t,n.help=function(){if(w)return w;!function(){const e=t.getDemandedOptions(),s=t.getOptions();(Object.keys(s.alias)||[]).forEach((i=>{s.alias[i].forEach((r=>{p[r]&&n.describe(i,p[r]),r in e&&t.demandOption(i,e[r]),s.boolean.includes(r)&&t.boolean(i),s.count.includes(r)&&t.count(i),s.string.includes(r)&&t.string(i),s.normalize.includes(r)&&t.normalize(i),s.array.includes(r)&&t.array(i),s.number.includes(r)&&t.number(i)}))}))}();const 
e=t.customScriptName?t.$0:s.path.basename(t.$0),r=t.getDemandedOptions(),o=t.getDemandedCommands(),a=t.getDeprecatedOptions(),h=t.getGroups(),l=t.getOptions();let g=[];g=g.concat(Object.keys(p)),g=g.concat(Object.keys(r)),g=g.concat(Object.keys(o)),g=g.concat(Object.keys(l.default)),g=g.filter(C),g=Object.keys(g.reduce(((t,e)=>("_"!==e&&(t[e]=!0),t)),{}));const y=n.getWrap(),b=s.cliui({width:y,wrap:!!y});if(!f)if(c.length)c.forEach((t=>{b.div({text:`${t[0].replace(/\$0/g,e)}`}),t[1]&&b.div({text:`${t[1]}`,padding:[1,0,0,0]})})),b.div();else if(u.length){let t=null;t=o._?`${e} <${i("command")}>\n`:`${e} [${i("command")}]\n`,b.div(`${t}`)}if(u.length>1||1===u.length&&!u[0][2]){b.div(i("Commands:"));const s=t.getInternalMethods().getContext(),n=s.commands.length?`${s.commands.join(" ")} `:"";!0===t.getInternalMethods().getParserConfiguration()["sort-commands"]&&(u=u.sort(((t,e)=>t[0].localeCompare(e[0]))));const r=e?`${e} `:"";u.forEach((t=>{const s=`${r}${n}${t[0].replace(/^\$0 ?/,"")}`;b.span({text:s,padding:[0,2,0,2],width:O(u,y,`${e}${n}`)+4},{text:t[1]});const o=[];t[2]&&o.push(`[${i("default")}]`),t[3]&&t[3].length&&o.push(`[${i("aliases:")} ${t[3].join(", ")}]`),t[4]&&("string"==typeof t[4]?o.push(`[${i("deprecated: %s",t[4])}]`):o.push(`[${i("deprecated")}]`)),o.length?b.div({text:o.join(" "),padding:[0,0,0,2],align:"right"}):b.div()})),b.div()}const _=(Object.keys(l.alias)||[]).concat(Object.keys(t.parsed.newAliases)||[]);g=g.filter((e=>!t.parsed.newAliases[e]&&_.every((t=>-1===(l.alias[t]||[]).indexOf(e)))));const M=i("Options:");h[M]||(h[M]=[]),function(t,e,s,i){let n=[],r=null;Object.keys(s).forEach((t=>{n=n.concat(s[t])})),t.forEach((t=>{r=[t].concat(e[t]),r.some((t=>-1!==n.indexOf(t)))||s[i].push(t)}))}(g,l.alias,h,M);const k=t=>/^--/.test(I(t)),x=Object.keys(h).filter((t=>h[t].length>0)).map((t=>({groupName:t,normalizedKeys:h[t].filter(C).map((t=>{if(_.includes(t))return t;for(let e,s=0;void 0!==(e=_[s]);s++)if((l.alias[e]||[]).includes(t))return e;return t}))}))).filter((({normalizedKeys:t})=>t.length>0)).map((({groupName:t,normalizedKeys:e})=>{const s=e.reduce(((e,s)=>(e[s]=[s].concat(l.alias[s]||[]).map((e=>t===n.getPositionalGroupName()?e:(/^[0-9]$/.test(e)?l.boolean.includes(s)?"-":"--":e.length>1?"--":"-")+e)).sort(((t,e)=>k(t)===k(e)?0:k(t)?1:-1)).join(", "),e)),{});return{groupName:t,normalizedKeys:e,switches:s}}));if(x.filter((({groupName:t})=>t!==n.getPositionalGroupName())).some((({normalizedKeys:t,switches:e})=>!t.every((t=>k(e[t])))))&&x.filter((({groupName:t})=>t!==n.getPositionalGroupName())).forEach((({normalizedKeys:t,switches:e})=>{t.forEach((t=>{var s,i;k(e[t])&&(e[t]=(s=e[t],i="-x, ".length,S(s)?{text:s.text,indentation:s.indentation+i}:{text:s,indentation:i}))}))})),x.forEach((({groupName:t,normalizedKeys:e,switches:s})=>{b.div(t),e.forEach((t=>{const e=s[t];let o=p[t]||"",h=null;o.includes(v)&&(o=i(o.substring(v.length))),l.boolean.includes(t)&&(h=`[${i("boolean")}]`),l.count.includes(t)&&(h=`[${i("count")}]`),l.string.includes(t)&&(h=`[${i("string")}]`),l.normalize.includes(t)&&(h=`[${i("string")}]`),l.array.includes(t)&&(h=`[${i("array")}]`),l.number.includes(t)&&(h=`[${i("number")}]`);const c=[t in a?(f=a[t],"string"==typeof f?`[${i("deprecated: %s",f)}]`:`[${i("deprecated")}]`):null,h,t in r?`[${i("required")}]`:null,l.choices&&l.choices[t]?`[${i("choices:")} ${n.stringifiedValues(l.choices[t])}]`:null,j(l.default[t],l.defaultDescription[t])].filter(Boolean).join(" ");var 
f;b.span({text:I(e),padding:[0,2,0,2+$(e)],width:O(s,y)+4},o),c?b.div({text:c,padding:[0,0,0,2],align:"right"}):b.div()})),b.div()})),d.length&&(b.div(i("Examples:")),d.forEach((t=>{t[0]=t[0].replace(/\$0/g,e)})),d.forEach((t=>{""===t[1]?b.div({text:t[0],padding:[0,2,0,2]}):b.div({text:t[0],padding:[0,2,0,2],width:O(d,y)+4},{text:t[1]})})),b.div()),m.length>0){const t=m.map((t=>t.replace(/\$0/g,e))).join("\n");b.div(`${t}\n`)}return b.toString().replace(/\s*$/,"")},n.cacheHelpMessage=function(){w=this.help()},n.clearCachedHelpMessage=function(){w=void 0},n.hasCachedHelpMessage=function(){return!!w},n.showHelp=e=>{const s=t.getInternalMethods().getLoggerInstance();e||(e="error");("function"==typeof e?e:s[e])(n.help())},n.functionDescription=t=>["(",t.name?s.Parser.decamelize(t.name,"-"):i("generated-value"),")"].join(""),n.stringifiedValues=function(t,e){let s="";const i=e||", ",n=[].concat(t);return t&&n.length?(n.forEach((t=>{s.length&&(s+=i),s+=JSON.stringify(t)})),s):s};let _=null;n.version=t=>{_=t},n.showVersion=e=>{const s=t.getInternalMethods().getLoggerInstance();e||(e="error");("function"==typeof e?e:s[e])(_)},n.reset=function(t){return o=null,l=!1,c=[],f=!1,m=[],d=[],u=[],p=g(p,(e=>!t[e])),n};const M=[];return n.freeze=function(){M.push({failMessage:o,failureOutput:l,usages:c,usageDisabled:f,epilogs:m,examples:d,commands:u,descriptions:p})},n.unfreeze=function(t=!1){const e=M.pop();e&&(t?(p={...e.descriptions,...p},u=[...e.commands,...u],c=[...e.usages,...c],d=[...e.examples,...d],m=[...e.epilogs,...m]):({failMessage:o,failureOutput:l,usages:c,usageDisabled:f,epilogs:m,examples:d,commands:u,descriptions:p}=e))},n}function S(t){return"object"==typeof t}function $(t){return S(t)?t.indentation:0}function I(t){return S(t)?t.text:t}class D{constructor(t,e,s,i){var n,r,o;this.yargs=t,this.usage=e,this.command=s,this.shim=i,this.completionKey="get-yargs-completions",this.aliases=null,this.customCompletionFunction=null,this.indexAfterLastReset=0,this.zshShell=null!==(o=(null===(n=this.shim.getEnv("SHELL"))||void 0===n?void 0:n.includes("zsh"))||(null===(r=this.shim.getEnv("ZSH_NAME"))||void 0===r?void 0:r.includes("zsh")))&&void 0!==o&&o}defaultCompletion(t,e,s,i){const n=this.command.getCommandHandlers();for(let e=0,s=t.length;e{const i=o(s[0]).cmd;if(-1===e.indexOf(i))if(this.zshShell){const e=s[1]||"";t.push(i.replace(/:/g,"\\:")+":"+e)}else t.push(i)}))}optionCompletions(t,e,s,i){if((i.match(/^-/)||""===i&&0===t.length)&&!this.previousArgHasChoices(e)){const s=this.yargs.getOptions(),n=this.yargs.getGroups()[this.usage.getPositionalGroupName()]||[];Object.keys(s.key).forEach((r=>{const o=!!s.configuration["boolean-negation"]&&s.boolean.includes(r);n.includes(r)||s.hiddenOptions.includes(r)||this.argsContainKey(e,r,o)||(this.completeOptionKey(r,t,i),o&&s.default[r]&&this.completeOptionKey(`no-${r}`,t,i))}))}}choicesFromOptionsCompletions(t,e,s,i){if(this.previousArgHasChoices(e)){const s=this.getPreviousArgChoices(e);s&&s.length>0&&t.push(...s.map((t=>t.replace(/:/g,"\\:"))))}}choicesFromPositionalsCompletions(t,e,s,i){if(""===i&&t.length>0&&this.previousArgHasChoices(e))return;const n=this.yargs.getGroups()[this.usage.getPositionalGroupName()]||[],r=Math.max(this.indexAfterLastReset,this.yargs.getInternalMethods().getContext().commands.length+1),o=n[s._.length-r-1];if(!o)return;const a=this.yargs.getOptions().choices[o]||[];for(const e of a)e.startsWith(i)&&t.push(e.replace(/:/g,"\\:"))}getPreviousArgChoices(t){if(t.length<1)return;let 
e=t[t.length-1],s="";if(!e.startsWith("-")&&t.length>1&&(s=e,e=t[t.length-2]),!e.startsWith("-"))return;const i=e.replace(/^-+/,""),n=this.yargs.getOptions(),r=[i,...this.yargs.getAliases()[i]||[]];let o;for(const t of r)if(Object.prototype.hasOwnProperty.call(n.key,t)&&Array.isArray(n.choices[t])){o=n.choices[t];break}return o?o.filter((t=>!s||t.startsWith(s))):void 0}previousArgHasChoices(t){const e=this.getPreviousArgChoices(t);return void 0!==e&&e.length>0}argsContainKey(t,e,s){const i=e=>-1!==t.indexOf((/^[^0-9]$/.test(e)?"-":"--")+e);if(i(e))return!0;if(s&&i(`no-${e}`))return!0;if(this.aliases)for(const t of this.aliases[e])if(i(t))return!0;return!1}completeOptionKey(t,e,s){const i=this.usage.getDescriptions(),n=!/^--/.test(s)&&(t=>/^[^0-9]$/.test(t))(t)?"-":"--";if(this.zshShell){const s=i[t]||"";e.push(n+`${t.replace(/:/g,"\\:")}:${s.replace("__yargsString__:","")}`)}else e.push(n+t)}customCompletion(t,e,s,i){if(d(this.customCompletionFunction,null,this.shim),this.customCompletionFunction.length<3){const t=this.customCompletionFunction(s,e);return f(t)?t.then((t=>{this.shim.process.nextTick((()=>{i(null,t)}))})).catch((t=>{this.shim.process.nextTick((()=>{i(t,void 0)}))})):i(null,t)}return function(t){return t.length>3}(this.customCompletionFunction)?this.customCompletionFunction(s,e,((n=i)=>this.defaultCompletion(t,e,s,n)),(t=>{i(null,t)})):this.customCompletionFunction(s,e,(t=>{i(null,t)}))}getCompletion(t,e){const s=t.length?t[t.length-1]:"",i=this.yargs.parse(t,!0),n=this.customCompletionFunction?i=>this.customCompletion(t,i,s,e):i=>this.defaultCompletion(t,i,s,e);return f(i)?i.then(n):n(i)}generateCompletionScript(t,e){let s=this.zshShell?'#compdef {{app_name}}\n###-begin-{{app_name}}-completions-###\n#\n# yargs command completion script\n#\n# Installation: {{app_path}} {{completion_command}} >> ~/.zshrc\n# or {{app_path}} {{completion_command}} >> ~/.zprofile on OSX.\n#\n_{{app_name}}_yargs_completions()\n{\n local reply\n local si=$IFS\n IFS=$\'\n\' reply=($(COMP_CWORD="$((CURRENT-1))" COMP_LINE="$BUFFER" COMP_POINT="$CURSOR" {{app_path}} --get-yargs-completions "${words[@]}"))\n IFS=$si\n _describe \'values\' reply\n}\ncompdef _{{app_name}}_yargs_completions {{app_name}}\n###-end-{{app_name}}-completions-###\n':'###-begin-{{app_name}}-completions-###\n#\n# yargs command completion script\n#\n# Installation: {{app_path}} {{completion_command}} >> ~/.bashrc\n# or {{app_path}} {{completion_command}} >> ~/.bash_profile on OSX.\n#\n_{{app_name}}_yargs_completions()\n{\n local cur_word args type_list\n\n cur_word="${COMP_WORDS[COMP_CWORD]}"\n args=("${COMP_WORDS[@]}")\n\n # ask yargs to generate completions.\n type_list=$({{app_path}} --get-yargs-completions "${args[@]}")\n\n COMPREPLY=( $(compgen -W "${type_list}" -- ${cur_word}) )\n\n # if no match was found, fall back to filename completion\n if [ ${#COMPREPLY[@]} -eq 0 ]; then\n COMPREPLY=()\n fi\n\n return 0\n}\ncomplete -o bashdefault -o default -F _{{app_name}}_yargs_completions {{app_name}}\n###-end-{{app_name}}-completions-###\n';const i=this.shim.path.basename(t);return t.match(/\.js$/)&&(t=`./${t}`),s=s.replace(/{{app_name}}/g,i),s=s.replace(/{{completion_command}}/g,e),s.replace(/{{app_path}}/g,t)}registerFunction(t){this.customCompletionFunction=t}setParsed(t){this.aliases=t.aliases}}function N(t,e){if(0===t.length)return e.length;if(0===e.length)return t.length;const s=[];let 
i,n;for(i=0;i<=e.length;i++)s[i]=[i];for(n=0;n<=t.length;n++)s[0][n]=n;for(i=1;i<=e.length;i++)for(n=1;n<=t.length;n++)e.charAt(i-1)===t.charAt(n-1)?s[i][n]=s[i-1][n-1]:i>1&&n>1&&e.charAt(i-2)===t.charAt(n-1)&&e.charAt(i-1)===t.charAt(n-2)?s[i][n]=s[i-2][n-2]+1:s[i][n]=Math.min(s[i-1][n-1]+1,Math.min(s[i][n-1]+1,s[i-1][n]+1));return s[e.length][t.length]}const H=["$0","--","_"];var W,z,q,F,U,L,V,G,R,T,B,K,Y,J,Z,X,Q,tt,et,st,it,nt,rt,ot,at,ht,lt,ct,ft,dt,ut,pt,gt,mt;const yt=Symbol("copyDoubleDash"),bt=Symbol("copyDoubleDash"),vt=Symbol("deleteFromParserHintObject"),Ot=Symbol("emitWarning"),wt=Symbol("freeze"),Ct=Symbol("getDollarZero"),jt=Symbol("getParserConfiguration"),_t=Symbol("guessLocale"),Mt=Symbol("guessVersion"),kt=Symbol("parsePositionalNumbers"),xt=Symbol("pkgUp"),Et=Symbol("populateParserHintArray"),At=Symbol("populateParserHintSingleValueDictionary"),Pt=Symbol("populateParserHintArrayDictionary"),St=Symbol("populateParserHintDictionary"),$t=Symbol("sanitizeKey"),It=Symbol("setKey"),Dt=Symbol("unfreeze"),Nt=Symbol("validateAsync"),Ht=Symbol("getCommandInstance"),Wt=Symbol("getContext"),zt=Symbol("getHasOutput"),qt=Symbol("getLoggerInstance"),Ft=Symbol("getParseContext"),Ut=Symbol("getUsageInstance"),Lt=Symbol("getValidationInstance"),Vt=Symbol("hasParseCallback"),Gt=Symbol("isGlobalContext"),Rt=Symbol("postProcess"),Tt=Symbol("rebase"),Bt=Symbol("reset"),Kt=Symbol("runYargsParserAndExecuteCommands"),Yt=Symbol("runValidation"),Jt=Symbol("setHasOutput"),Zt=Symbol("kTrackManuallySetKeys");class Xt{constructor(t=[],e,s,i){this.customScriptName=!1,this.parsed=!1,W.set(this,void 0),z.set(this,void 0),q.set(this,{commands:[],fullCommands:[]}),F.set(this,null),U.set(this,null),L.set(this,"show-hidden"),V.set(this,null),G.set(this,!0),R.set(this,{}),T.set(this,!0),B.set(this,[]),K.set(this,void 0),Y.set(this,{}),J.set(this,!1),Z.set(this,null),X.set(this,!0),Q.set(this,void 0),tt.set(this,""),et.set(this,void 0),st.set(this,void 0),it.set(this,{}),nt.set(this,null),rt.set(this,null),ot.set(this,{}),at.set(this,{}),ht.set(this,void 0),lt.set(this,!1),ct.set(this,void 0),ft.set(this,!1),dt.set(this,!1),ut.set(this,!1),pt.set(this,void 0),gt.set(this,null),mt.set(this,void 0),O(this,ct,i,"f"),O(this,ht,t,"f"),O(this,z,e,"f"),O(this,st,s,"f"),O(this,K,new w(this),"f"),this.$0=this[Ct](),this[Bt](),O(this,W,v(this,W,"f"),"f"),O(this,pt,v(this,pt,"f"),"f"),O(this,mt,v(this,mt,"f"),"f"),O(this,et,v(this,et,"f"),"f"),v(this,et,"f").showHiddenOpt=v(this,L,"f"),O(this,Q,this[bt](),"f")}addHelpOpt(t,e){return h("[string|boolean] [string]",[t,e],arguments.length),v(this,Z,"f")&&(this[vt](v(this,Z,"f")),O(this,Z,null,"f")),!1===t&&void 0===e||(O(this,Z,"string"==typeof t?t:"help","f"),this.boolean(v(this,Z,"f")),this.describe(v(this,Z,"f"),e||v(this,pt,"f").deferY18nLookup("Show help"))),this}help(t,e){return this.addHelpOpt(t,e)}addShowHiddenOpt(t,e){if(h("[string|boolean] [string]",[t,e],arguments.length),!1===t&&void 0===e)return this;const s="string"==typeof t?t:v(this,L,"f");return this.boolean(s),this.describe(s,e||v(this,pt,"f").deferY18nLookup("Show hidden options")),v(this,et,"f").showHiddenOpt=s,this}showHidden(t,e){return this.addShowHiddenOpt(t,e)}alias(t,e){return h(" [string|array]",[t,e],arguments.length),this[Pt](this.alias.bind(this),"alias",t,e),this}array(t){return h("",[t],arguments.length),this[Et]("array",t),this[Zt](t),this}boolean(t){return h("",[t],arguments.length),this[Et]("boolean",t),this[Zt](t),this}check(t,e){return h(" 
[boolean]",[t,e],arguments.length),this.middleware(((e,s)=>j((()=>t(e,s.getOptions())),(s=>(s?("string"==typeof s||s instanceof Error)&&v(this,pt,"f").fail(s.toString(),s):v(this,pt,"f").fail(v(this,ct,"f").y18n.__("Argument check failed: %s",t.toString())),e)),(t=>(v(this,pt,"f").fail(t.message?t.message:t.toString(),t),e)))),!1,e),this}choices(t,e){return h(" [string|array]",[t,e],arguments.length),this[Pt](this.choices.bind(this),"choices",t,e),this}coerce(t,s){if(h(" [function]",[t,s],arguments.length),Array.isArray(t)){if(!s)throw new e("coerce callback must be provided");for(const e of t)this.coerce(e,s);return this}if("object"==typeof t){for(const e of Object.keys(t))this.coerce(e,t[e]);return this}if(!s)throw new e("coerce callback must be provided");return v(this,et,"f").key[t]=!0,v(this,K,"f").addCoerceMiddleware(((i,n)=>{let r;return Object.prototype.hasOwnProperty.call(i,t)?j((()=>(r=n.getAliases(),s(i[t]))),(e=>{i[t]=e;const s=n.getInternalMethods().getParserConfiguration()["strip-aliased"];if(r[t]&&!0!==s)for(const s of r[t])i[s]=e;return i}),(t=>{throw new e(t.message)})):i}),t),this}conflicts(t,e){return h(" [string|array]",[t,e],arguments.length),v(this,mt,"f").conflicts(t,e),this}config(t="config",e,s){return h("[object|string] [string|function] [function]",[t,e,s],arguments.length),"object"!=typeof t||Array.isArray(t)?("function"==typeof e&&(s=e,e=void 0),this.describe(t,e||v(this,pt,"f").deferY18nLookup("Path to JSON config file")),(Array.isArray(t)?t:[t]).forEach((t=>{v(this,et,"f").config[t]=s||!0})),this):(t=n(t,v(this,z,"f"),this[jt]()["deep-merge-config"]||!1,v(this,ct,"f")),v(this,et,"f").configObjects=(v(this,et,"f").configObjects||[]).concat(t),this)}completion(t,e,s){return h("[string] [string|boolean|function] [function]",[t,e,s],arguments.length),"function"==typeof e&&(s=e,e=void 0),O(this,U,t||v(this,U,"f")||"completion","f"),e||!1===e||(e="generate completion script"),this.command(v(this,U,"f"),e),s&&v(this,F,"f").registerFunction(s),this}command(t,e,s,i,n,r){return h(" [string|boolean] [function|object] [function] [array] [boolean|string]",[t,e,s,i,n,r],arguments.length),v(this,W,"f").addHandler(t,e,s,i,n,r),this}commands(t,e,s,i,n,r){return this.command(t,e,s,i,n,r)}commandDir(t,e){h(" [object]",[t,e],arguments.length);const s=v(this,st,"f")||v(this,ct,"f").require;return v(this,W,"f").addDirectory(t,s,v(this,ct,"f").getCallerFile(),e),this}count(t){return h("",[t],arguments.length),this[Et]("count",t),this[Zt](t),this}default(t,e,s){return h(" [*] [string]",[t,e,s],arguments.length),s&&(u(t,v(this,ct,"f")),v(this,et,"f").defaultDescription[t]=s),"function"==typeof e&&(u(t,v(this,ct,"f")),v(this,et,"f").defaultDescription[t]||(v(this,et,"f").defaultDescription[t]=v(this,pt,"f").functionDescription(e)),e=e.call()),this[At](this.default.bind(this),"default",t,e),this}defaults(t,e,s){return this.default(t,e,s)}demandCommand(t=1,e,s,i){return h("[number] [number|string] [string|null|undefined] [string|null|undefined]",[t,e,s,i],arguments.length),"number"!=typeof e&&(s=e,e=1/0),this.global("_",!1),v(this,et,"f").demandedCommands._={min:t,max:e,minMsg:s,maxMsg:i},this}demand(t,e,s){return Array.isArray(e)?(e.forEach((t=>{d(s,!0,v(this,ct,"f")),this.demandOption(t,s)})),e=1/0):"number"!=typeof e&&(s=e,e=1/0),"number"==typeof t?(d(s,!0,v(this,ct,"f")),this.demandCommand(t,e,s,s)):Array.isArray(t)?t.forEach((t=>{d(s,!0,v(this,ct,"f")),this.demandOption(t,s)})):"string"==typeof s?this.demandOption(t,s):!0!==s&&void 
0!==s||this.demandOption(t),this}demandOption(t,e){return h(" [string]",[t,e],arguments.length),this[At](this.demandOption.bind(this),"demandedOptions",t,e),this}deprecateOption(t,e){return h(" [string|boolean]",[t,e],arguments.length),v(this,et,"f").deprecatedOptions[t]=e,this}describe(t,e){return h(" [string]",[t,e],arguments.length),this[It](t,!0),v(this,pt,"f").describe(t,e),this}detectLocale(t){return h("",[t],arguments.length),O(this,G,t,"f"),this}env(t){return h("[string|boolean]",[t],arguments.length),!1===t?delete v(this,et,"f").envPrefix:v(this,et,"f").envPrefix=t||"",this}epilogue(t){return h("",[t],arguments.length),v(this,pt,"f").epilog(t),this}epilog(t){return this.epilogue(t)}example(t,e){return h(" [string]",[t,e],arguments.length),Array.isArray(t)?t.forEach((t=>this.example(...t))):v(this,pt,"f").example(t,e),this}exit(t,e){O(this,J,!0,"f"),O(this,V,e,"f"),v(this,T,"f")&&v(this,ct,"f").process.exit(t)}exitProcess(t=!0){return h("[boolean]",[t],arguments.length),O(this,T,t,"f"),this}fail(t){if(h("",[t],arguments.length),"boolean"==typeof t&&!1!==t)throw new e("Invalid first argument. Expected function or boolean 'false'");return v(this,pt,"f").failFn(t),this}getAliases(){return this.parsed?this.parsed.aliases:{}}async getCompletion(t,e){return h(" [function]",[t,e],arguments.length),e?v(this,F,"f").getCompletion(t,e):new Promise(((e,s)=>{v(this,F,"f").getCompletion(t,((t,i)=>{t?s(t):e(i)}))}))}getDemandedOptions(){return h([],0),v(this,et,"f").demandedOptions}getDemandedCommands(){return h([],0),v(this,et,"f").demandedCommands}getDeprecatedOptions(){return h([],0),v(this,et,"f").deprecatedOptions}getDetectLocale(){return v(this,G,"f")}getExitProcess(){return v(this,T,"f")}getGroups(){return Object.assign({},v(this,Y,"f"),v(this,at,"f"))}getHelp(){if(O(this,J,!0,"f"),!v(this,pt,"f").hasCachedHelpMessage()){if(!this.parsed){const t=this[Kt](v(this,ht,"f"),void 0,void 0,0,!0);if(f(t))return t.then((()=>v(this,pt,"f").help()))}const t=v(this,W,"f").runDefaultBuilderOn(this);if(f(t))return t.then((()=>v(this,pt,"f").help()))}return Promise.resolve(v(this,pt,"f").help())}getOptions(){return v(this,et,"f")}getStrict(){return v(this,ft,"f")}getStrictCommands(){return v(this,dt,"f")}getStrictOptions(){return v(this,ut,"f")}global(t,e){return h(" [boolean]",[t,e],arguments.length),t=[].concat(t),!1!==e?v(this,et,"f").local=v(this,et,"f").local.filter((e=>-1===t.indexOf(e))):t.forEach((t=>{v(this,et,"f").local.includes(t)||v(this,et,"f").local.push(t)})),this}group(t,e){h(" ",[t,e],arguments.length);const s=v(this,at,"f")[e]||v(this,Y,"f")[e];v(this,at,"f")[e]&&delete v(this,at,"f")[e];const i={};return v(this,Y,"f")[e]=(s||[]).concat(t).filter((t=>!i[t]&&(i[t]=!0))),this}hide(t){return h("",[t],arguments.length),v(this,et,"f").hiddenOptions.push(t),this}implies(t,e){return h(" [number|string|array]",[t,e],arguments.length),v(this,mt,"f").implies(t,e),this}locale(t){return h("[string]",[t],arguments.length),void 0===t?(this[_t](),v(this,ct,"f").y18n.getLocale()):(O(this,G,!1,"f"),v(this,ct,"f").y18n.setLocale(t),this)}middleware(t,e,s){return v(this,K,"f").addMiddleware(t,!!e,s)}nargs(t,e){return h(" [number]",[t,e],arguments.length),this[At](this.nargs.bind(this),"narg",t,e),this}normalize(t){return h("",[t],arguments.length),this[Et]("normalize",t),this}number(t){return h("",[t],arguments.length),this[Et]("number",t),this[Zt](t),this}option(t,e){if(h(" [object]",[t,e],arguments.length),"object"==typeof t)Object.keys(t).forEach((e=>{this.options(e,t[e])}));else{"object"!=typeof 
e&&(e={}),this[Zt](t),!v(this,gt,"f")||"version"!==t&&"version"!==(null==e?void 0:e.alias)||this[Ot](['"version" is a reserved word.',"Please do one of the following:",'- Disable version with `yargs.version(false)` if using "version" as an option',"- Use the built-in `yargs.version` method instead (if applicable)","- Use a different option key","https://yargs.js.org/docs/#api-reference-version"].join("\n"),void 0,"versionWarning"),v(this,et,"f").key[t]=!0,e.alias&&this.alias(t,e.alias);const s=e.deprecate||e.deprecated;s&&this.deprecateOption(t,s);const i=e.demand||e.required||e.require;i&&this.demand(t,i),e.demandOption&&this.demandOption(t,"string"==typeof e.demandOption?e.demandOption:void 0),e.conflicts&&this.conflicts(t,e.conflicts),"default"in e&&this.default(t,e.default),void 0!==e.implies&&this.implies(t,e.implies),void 0!==e.nargs&&this.nargs(t,e.nargs),e.config&&this.config(t,e.configParser),e.normalize&&this.normalize(t),e.choices&&this.choices(t,e.choices),e.coerce&&this.coerce(t,e.coerce),e.group&&this.group(t,e.group),(e.boolean||"boolean"===e.type)&&(this.boolean(t),e.alias&&this.boolean(e.alias)),(e.array||"array"===e.type)&&(this.array(t),e.alias&&this.array(e.alias)),(e.number||"number"===e.type)&&(this.number(t),e.alias&&this.number(e.alias)),(e.string||"string"===e.type)&&(this.string(t),e.alias&&this.string(e.alias)),(e.count||"count"===e.type)&&this.count(t),"boolean"==typeof e.global&&this.global(t,e.global),e.defaultDescription&&(v(this,et,"f").defaultDescription[t]=e.defaultDescription),e.skipValidation&&this.skipValidation(t);const n=e.describe||e.description||e.desc,r=v(this,pt,"f").getDescriptions();Object.prototype.hasOwnProperty.call(r,t)&&"string"!=typeof n||this.describe(t,n),e.hidden&&this.hide(t),e.requiresArg&&this.requiresArg(t)}return this}options(t,e){return this.option(t,e)}parse(t,e,s){h("[string|array] [function|boolean|object] [function]",[t,e,s],arguments.length),this[wt](),void 0===t&&(t=v(this,ht,"f")),"object"==typeof e&&(O(this,rt,e,"f"),e=s),"function"==typeof e&&(O(this,nt,e,"f"),e=!1),e||O(this,ht,t,"f"),v(this,nt,"f")&&O(this,T,!1,"f");const i=this[Kt](t,!!e),n=this.parsed;return v(this,F,"f").setParsed(this.parsed),f(i)?i.then((t=>(v(this,nt,"f")&&v(this,nt,"f").call(this,v(this,V,"f"),t,v(this,tt,"f")),t))).catch((t=>{throw v(this,nt,"f")&&v(this,nt,"f")(t,this.parsed.argv,v(this,tt,"f")),t})).finally((()=>{this[Dt](),this.parsed=n})):(v(this,nt,"f")&&v(this,nt,"f").call(this,v(this,V,"f"),i,v(this,tt,"f")),this[Dt](),this.parsed=n,i)}parseAsync(t,e,s){const i=this.parse(t,e,s);return f(i)?i:Promise.resolve(i)}parseSync(t,s,i){const n=this.parse(t,s,i);if(f(n))throw new e(".parseSync() must not be used with asynchronous builders, handlers, or middleware");return n}parserConfiguration(t){return h("",[t],arguments.length),O(this,it,t,"f"),this}pkgConf(t,e){h(" [string]",[t,e],arguments.length);let s=null;const i=this[xt](e||v(this,z,"f"));return i[t]&&"object"==typeof i[t]&&(s=n(i[t],e||v(this,z,"f"),this[jt]()["deep-merge-config"]||!1,v(this,ct,"f")),v(this,et,"f").configObjects=(v(this,et,"f").configObjects||[]).concat(s)),this}positional(t,e){h(" ",[t,e],arguments.length);const s=["default","defaultDescription","implies","normalize","choices","conflicts","coerce","type","describe","desc","description","alias"];e=g(e,((t,e)=>!("type"===t&&!["string","number","boolean"].includes(e))&&s.includes(t)));const 
i=v(this,q,"f").fullCommands[v(this,q,"f").fullCommands.length-1],n=i?v(this,W,"f").cmdToParseOptions(i):{array:[],alias:{},default:{},demand:{}};return p(n).forEach((s=>{const i=n[s];Array.isArray(i)?-1!==i.indexOf(t)&&(e[s]=!0):i[t]&&!(s in e)&&(e[s]=i[t])})),this.group(t,v(this,pt,"f").getPositionalGroupName()),this.option(t,e)}recommendCommands(t=!0){return h("[boolean]",[t],arguments.length),O(this,lt,t,"f"),this}required(t,e,s){return this.demand(t,e,s)}require(t,e,s){return this.demand(t,e,s)}requiresArg(t){return h(" [number]",[t],arguments.length),"string"==typeof t&&v(this,et,"f").narg[t]||this[At](this.requiresArg.bind(this),"narg",t,NaN),this}showCompletionScript(t,e){return h("[string] [string]",[t,e],arguments.length),t=t||this.$0,v(this,Q,"f").log(v(this,F,"f").generateCompletionScript(t,e||v(this,U,"f")||"completion")),this}showHelp(t){if(h("[string|function]",[t],arguments.length),O(this,J,!0,"f"),!v(this,pt,"f").hasCachedHelpMessage()){if(!this.parsed){const e=this[Kt](v(this,ht,"f"),void 0,void 0,0,!0);if(f(e))return e.then((()=>{v(this,pt,"f").showHelp(t)})),this}const e=v(this,W,"f").runDefaultBuilderOn(this);if(f(e))return e.then((()=>{v(this,pt,"f").showHelp(t)})),this}return v(this,pt,"f").showHelp(t),this}scriptName(t){return this.customScriptName=!0,this.$0=t,this}showHelpOnFail(t,e){return h("[boolean|string] [string]",[t,e],arguments.length),v(this,pt,"f").showHelpOnFail(t,e),this}showVersion(t){return h("[string|function]",[t],arguments.length),v(this,pt,"f").showVersion(t),this}skipValidation(t){return h("",[t],arguments.length),this[Et]("skipValidation",t),this}strict(t){return h("[boolean]",[t],arguments.length),O(this,ft,!1!==t,"f"),this}strictCommands(t){return h("[boolean]",[t],arguments.length),O(this,dt,!1!==t,"f"),this}strictOptions(t){return h("[boolean]",[t],arguments.length),O(this,ut,!1!==t,"f"),this}string(t){return h("",[t],arguments.length),this[Et]("string",t),this[Zt](t),this}terminalWidth(){return h([],0),v(this,ct,"f").process.stdColumns}updateLocale(t){return this.updateStrings(t)}updateStrings(t){return h("",[t],arguments.length),O(this,G,!1,"f"),v(this,ct,"f").y18n.updateLocale(t),this}usage(t,s,i,n){if(h(" [string|boolean] [function|object] [function]",[t,s,i,n],arguments.length),void 0!==s){if(d(t,null,v(this,ct,"f")),(t||"").match(/^\$0( |$)/))return this.command(t,s,i,n);throw new e(".usage() description must start with $0 if being used as alias for .command()")}return v(this,pt,"f").usage(t),this}version(t,e,s){const i="version";if(h("[boolean|string] [string] [string]",[t,e,s],arguments.length),v(this,gt,"f")&&(this[vt](v(this,gt,"f")),v(this,pt,"f").version(void 0),O(this,gt,null,"f")),0===arguments.length)s=this[Mt](),t=i;else if(1===arguments.length){if(!1===t)return this;s=t,t=i}else 2===arguments.length&&(s=e,e=void 0);return O(this,gt,"string"==typeof t?t:i,"f"),e=e||v(this,pt,"f").deferY18nLookup("Show version number"),v(this,pt,"f").version(s||void 0),this.boolean(v(this,gt,"f")),this.describe(v(this,gt,"f"),e),this}wrap(t){return h("",[t],arguments.length),v(this,pt,"f").wrap(t),this}[(W=new WeakMap,z=new WeakMap,q=new WeakMap,F=new WeakMap,U=new WeakMap,L=new WeakMap,V=new WeakMap,G=new WeakMap,R=new WeakMap,T=new WeakMap,B=new WeakMap,K=new WeakMap,Y=new WeakMap,J=new WeakMap,Z=new WeakMap,X=new WeakMap,Q=new WeakMap,tt=new WeakMap,et=new WeakMap,st=new WeakMap,it=new WeakMap,nt=new WeakMap,rt=new WeakMap,ot=new WeakMap,at=new WeakMap,ht=new WeakMap,lt=new WeakMap,ct=new WeakMap,ft=new WeakMap,dt=new WeakMap,ut=new 
WeakMap,pt=new WeakMap,gt=new WeakMap,mt=new WeakMap,yt)](t){if(!t._||!t["--"])return t;t._.push.apply(t._,t["--"]);try{delete t["--"]}catch(t){}return t}[bt](){return{log:(...t)=>{this[Vt]()||console.log(...t),O(this,J,!0,"f"),v(this,tt,"f").length&&O(this,tt,v(this,tt,"f")+"\n","f"),O(this,tt,v(this,tt,"f")+t.join(" "),"f")},error:(...t)=>{this[Vt]()||console.error(...t),O(this,J,!0,"f"),v(this,tt,"f").length&&O(this,tt,v(this,tt,"f")+"\n","f"),O(this,tt,v(this,tt,"f")+t.join(" "),"f")}}}[vt](t){p(v(this,et,"f")).forEach((e=>{if("configObjects"===e)return;const s=v(this,et,"f")[e];Array.isArray(s)?s.includes(t)&&s.splice(s.indexOf(t),1):"object"==typeof s&&delete s[t]})),delete v(this,pt,"f").getDescriptions()[t]}[Ot](t,e,s){v(this,R,"f")[s]||(v(this,ct,"f").process.emitWarning(t,e),v(this,R,"f")[s]=!0)}[wt](){v(this,B,"f").push({options:v(this,et,"f"),configObjects:v(this,et,"f").configObjects.slice(0),exitProcess:v(this,T,"f"),groups:v(this,Y,"f"),strict:v(this,ft,"f"),strictCommands:v(this,dt,"f"),strictOptions:v(this,ut,"f"),completionCommand:v(this,U,"f"),output:v(this,tt,"f"),exitError:v(this,V,"f"),hasOutput:v(this,J,"f"),parsed:this.parsed,parseFn:v(this,nt,"f"),parseContext:v(this,rt,"f")}),v(this,pt,"f").freeze(),v(this,mt,"f").freeze(),v(this,W,"f").freeze(),v(this,K,"f").freeze()}[Ct](){let t,e="";return t=/\b(node|iojs|electron)(\.exe)?$/.test(v(this,ct,"f").process.argv()[0])?v(this,ct,"f").process.argv().slice(1,2):v(this,ct,"f").process.argv().slice(0,1),e=t.map((t=>{const e=this[Tt](v(this,z,"f"),t);return t.match(/^(\/|([a-zA-Z]:)?\\)/)&&e.lengthe.includes("package.json")?"package.json":void 0));d(i,void 0,v(this,ct,"f")),s=JSON.parse(v(this,ct,"f").readFileSync(i,"utf8"))}catch(t){}return v(this,ot,"f")[e]=s||{},v(this,ot,"f")[e]}[Et](t,e){(e=[].concat(e)).forEach((e=>{e=this[$t](e),v(this,et,"f")[t].push(e)}))}[At](t,e,s,i){this[St](t,e,s,i,((t,e,s)=>{v(this,et,"f")[t][e]=s}))}[Pt](t,e,s,i){this[St](t,e,s,i,((t,e,s)=>{v(this,et,"f")[t][e]=(v(this,et,"f")[t][e]||[]).concat(s)}))}[St](t,e,s,i,n){if(Array.isArray(s))s.forEach((e=>{t(e,i)}));else if((t=>"object"==typeof t)(s))for(const e of p(s))t(e,s[e]);else n(e,this[$t](s),i)}[$t](t){return"__proto__"===t?"___proto___":t}[It](t,e){return this[At](this[It].bind(this),"key",t,e),this}[Dt](){var t,e,s,i,n,r,o,a,h,l,c,f;const u=v(this,B,"f").pop();let p;d(u,void 0,v(this,ct,"f")),t=this,e=this,s=this,i=this,n=this,r=this,o=this,a=this,h=this,l=this,c=this,f=this,({options:{set value(e){O(t,et,e,"f")}}.value,configObjects:p,exitProcess:{set value(t){O(e,T,t,"f")}}.value,groups:{set value(t){O(s,Y,t,"f")}}.value,output:{set value(t){O(i,tt,t,"f")}}.value,exitError:{set value(t){O(n,V,t,"f")}}.value,hasOutput:{set value(t){O(r,J,t,"f")}}.value,parsed:this.parsed,strict:{set value(t){O(o,ft,t,"f")}}.value,strictCommands:{set value(t){O(a,dt,t,"f")}}.value,strictOptions:{set value(t){O(h,ut,t,"f")}}.value,completionCommand:{set value(t){O(l,U,t,"f")}}.value,parseFn:{set value(t){O(c,nt,t,"f")}}.value,parseContext:{set value(t){O(f,rt,t,"f")}}.value}=u),v(this,et,"f").configObjects=p,v(this,pt,"f").unfreeze(),v(this,mt,"f").unfreeze(),v(this,W,"f").unfreeze(),v(this,K,"f").unfreeze()}[Nt](t,e){return 
j(e,(e=>(t(e),e)))}getInternalMethods(){return{getCommandInstance:this[Ht].bind(this),getContext:this[Wt].bind(this),getHasOutput:this[zt].bind(this),getLoggerInstance:this[qt].bind(this),getParseContext:this[Ft].bind(this),getParserConfiguration:this[jt].bind(this),getUsageInstance:this[Ut].bind(this),getValidationInstance:this[Lt].bind(this),hasParseCallback:this[Vt].bind(this),isGlobalContext:this[Gt].bind(this),postProcess:this[Rt].bind(this),reset:this[Bt].bind(this),runValidation:this[Yt].bind(this),runYargsParserAndExecuteCommands:this[Kt].bind(this),setHasOutput:this[Jt].bind(this)}}[Ht](){return v(this,W,"f")}[Wt](){return v(this,q,"f")}[zt](){return v(this,J,"f")}[qt](){return v(this,Q,"f")}[Ft](){return v(this,rt,"f")||{}}[Ut](){return v(this,pt,"f")}[Lt](){return v(this,mt,"f")}[Vt](){return!!v(this,nt,"f")}[Gt](){return v(this,X,"f")}[Rt](t,e,s,i){if(s)return t;if(f(t))return t;e||(t=this[yt](t));return(this[jt]()["parse-positional-numbers"]||void 0===this[jt]()["parse-positional-numbers"])&&(t=this[kt](t)),i&&(t=C(t,this,v(this,K,"f").getMiddleware(),!1)),t}[Bt](t={}){O(this,et,v(this,et,"f")||{},"f");const e={};e.local=v(this,et,"f").local||[],e.configObjects=v(this,et,"f").configObjects||[];const s={};e.local.forEach((e=>{s[e]=!0,(t[e]||[]).forEach((t=>{s[t]=!0}))})),Object.assign(v(this,at,"f"),Object.keys(v(this,Y,"f")).reduce(((t,e)=>{const i=v(this,Y,"f")[e].filter((t=>!(t in s)));return i.length>0&&(t[e]=i),t}),{})),O(this,Y,{},"f");return["array","boolean","string","skipValidation","count","normalize","number","hiddenOptions"].forEach((t=>{e[t]=(v(this,et,"f")[t]||[]).filter((t=>!s[t]))})),["narg","key","alias","default","defaultDescription","config","choices","demandedOptions","demandedCommands","deprecatedOptions"].forEach((t=>{e[t]=g(v(this,et,"f")[t],(t=>!s[t]))})),e.envPrefix=v(this,et,"f").envPrefix,O(this,et,e,"f"),O(this,pt,v(this,pt,"f")?v(this,pt,"f").reset(s):P(this,v(this,ct,"f")),"f"),O(this,mt,v(this,mt,"f")?v(this,mt,"f").reset(s):function(t,e,s){const i=s.y18n.__,n=s.y18n.__n,r={nonOptionCount:function(s){const i=t.getDemandedCommands(),r=s._.length+(s["--"]?s["--"].length:0)-t.getInternalMethods().getContext().commands.length;i._&&(ri._.max)&&(ri._.max&&(void 0!==i._.maxMsg?e.fail(i._.maxMsg?i._.maxMsg.replace(/\$0/g,r.toString()).replace(/\$1/,i._.max.toString()):null):e.fail(n("Too many non-option arguments: got %s, maximum of %s","Too many non-option arguments: got %s, maximum of %s",r,r.toString(),i._.max.toString()))))},positionalCount:function(t,s){s{H.includes(e)||Object.prototype.hasOwnProperty.call(o,e)||Object.prototype.hasOwnProperty.call(t.getInternalMethods().getParseContext(),e)||r.isValidAndSomeAliasIsNotNew(e,i)||f.push(e)})),h&&(d.commands.length>0||c.length>0||a)&&s._.slice(d.commands.length).forEach((t=>{c.includes(""+t)||f.push(""+t)})),h){const e=(null===(l=t.getDemandedCommands()._)||void 0===l?void 0:l.max)||0,i=d.commands.length+e;i{t=String(t),d.commands.includes(t)||f.includes(t)||f.push(t)}))}f.length&&e.fail(n("Unknown argument: %s","Unknown arguments: %s",f.length,f.map((t=>t.trim()?t:`"${t}"`)).join(", ")))},unknownCommands:function(s){const i=t.getInternalMethods().getCommandInstance().getCommands(),r=[],o=t.getInternalMethods().getContext();return(o.commands.length>0||i.length>0)&&s._.slice(o.commands.length).forEach((t=>{i.includes(""+t)||r.push(""+t)})),r.length>0&&(e.fail(n("Unknown command: %s","Unknown commands: %s",r.length,r.join(", 
"))),!0)},isValidAndSomeAliasIsNotNew:function(e,s){if(!Object.prototype.hasOwnProperty.call(s,e))return!1;const i=t.parsed.newAliases;return[e,...s[e]].some((t=>!Object.prototype.hasOwnProperty.call(i,t)||!i[e]))},limitedChoices:function(s){const n=t.getOptions(),r={};if(!Object.keys(n.choices).length)return;Object.keys(s).forEach((t=>{-1===H.indexOf(t)&&Object.prototype.hasOwnProperty.call(n.choices,t)&&[].concat(s[t]).forEach((e=>{-1===n.choices[t].indexOf(e)&&void 0!==e&&(r[t]=(r[t]||[]).concat(e))}))}));const o=Object.keys(r);if(!o.length)return;let a=i("Invalid values:");o.forEach((t=>{a+=`\n ${i("Argument: %s, Given: %s, Choices: %s",t,e.stringifiedValues(r[t]),e.stringifiedValues(n.choices[t]))}`})),e.fail(a)}};let o={};function a(t,e){const s=Number(e);return"number"==typeof(e=isNaN(s)?e:s)?e=t._.length>=e:e.match(/^--no-.+/)?(e=e.match(/^--no-(.+)/)[1],e=!Object.prototype.hasOwnProperty.call(t,e)):e=Object.prototype.hasOwnProperty.call(t,e),e}r.implies=function(e,i){h(" [array|number|string]",[e,i],arguments.length),"object"==typeof e?Object.keys(e).forEach((t=>{r.implies(t,e[t])})):(t.global(e),o[e]||(o[e]=[]),Array.isArray(i)?i.forEach((t=>r.implies(e,t))):(d(i,void 0,s),o[e].push(i)))},r.getImplied=function(){return o},r.implications=function(t){const s=[];if(Object.keys(o).forEach((e=>{const i=e;(o[e]||[]).forEach((e=>{let n=i;const r=e;n=a(t,n),e=a(t,e),n&&!e&&s.push(` ${i} -> ${r}`)}))})),s.length){let t=`${i("Implications failed:")}\n`;s.forEach((e=>{t+=e})),e.fail(t)}};let l={};r.conflicts=function(e,s){h(" [array|string]",[e,s],arguments.length),"object"==typeof e?Object.keys(e).forEach((t=>{r.conflicts(t,e[t])})):(t.global(e),l[e]||(l[e]=[]),Array.isArray(s)?s.forEach((t=>r.conflicts(e,t))):l[e].push(s))},r.getConflicting=()=>l,r.conflicting=function(n){Object.keys(n).forEach((t=>{l[t]&&l[t].forEach((s=>{s&&void 0!==n[t]&&void 0!==n[s]&&e.fail(i("Arguments %s and %s are mutually exclusive",t,s))}))})),t.getInternalMethods().getParserConfiguration()["strip-dashed"]&&Object.keys(l).forEach((t=>{l[t].forEach((r=>{r&&void 0!==n[s.Parser.camelCase(t)]&&void 0!==n[s.Parser.camelCase(r)]&&e.fail(i("Arguments %s and %s are mutually exclusive",t,r))}))}))},r.recommendCommands=function(t,s){s=s.sort(((t,e)=>e.length-t.length));let n=null,r=1/0;for(let e,i=0;void 0!==(e=s[i]);i++){const s=N(t,e);s<=3&&s!t[e])),l=g(l,(e=>!t[e])),r};const c=[];return r.freeze=function(){c.push({implied:o,conflicting:l})},r.unfreeze=function(){const t=c.pop();d(t,void 0,s),({implied:o,conflicting:l}=t)},r}(this,v(this,pt,"f"),v(this,ct,"f")),"f"),O(this,W,v(this,W,"f")?v(this,W,"f").reset():function(t,e,s,i){return new M(t,e,s,i)}(v(this,pt,"f"),v(this,mt,"f"),v(this,K,"f"),v(this,ct,"f")),"f"),v(this,F,"f")||O(this,F,function(t,e,s,i){return new D(t,e,s,i)}(this,v(this,pt,"f"),v(this,W,"f"),v(this,ct,"f")),"f"),v(this,K,"f").reset(),O(this,U,null,"f"),O(this,tt,"","f"),O(this,V,null,"f"),O(this,J,!1,"f"),this.parsed=!1,this}[Tt](t,e){return v(this,ct,"f").path.relative(t,e)}[Kt](t,s,i,n=0,r=!1){let o=!!i||r;t=t||v(this,ht,"f"),v(this,et,"f").__=v(this,ct,"f").y18n.__,v(this,et,"f").configuration=this[jt]();const a=!!v(this,et,"f").configuration["populate--"],h=Object.assign({},v(this,et,"f").configuration,{"populate--":!0}),l=v(this,ct,"f").Parser.detailed(t,Object.assign({},v(this,et,"f"),{configuration:{"parse-positional-numbers":!1,...h}})),c=Object.assign(l.argv,v(this,rt,"f"));let d;const u=l.aliases;let 
p=!1,g=!1;Object.keys(c).forEach((t=>{t===v(this,Z,"f")&&c[t]?p=!0:t===v(this,gt,"f")&&c[t]&&(g=!0)})),c.$0=this.$0,this.parsed=l,0===n&&v(this,pt,"f").clearCachedHelpMessage();try{if(this[_t](),s)return this[Rt](c,a,!!i,!1);if(v(this,Z,"f")){[v(this,Z,"f")].concat(u[v(this,Z,"f")]||[]).filter((t=>t.length>1)).includes(""+c._[c._.length-1])&&(c._.pop(),p=!0)}O(this,X,!1,"f");const h=v(this,W,"f").getCommands(),m=v(this,F,"f").completionKey in c,y=p||m||r;if(c._.length){if(h.length){let t;for(let e,s=n||0;void 0!==c._[s];s++){if(e=String(c._[s]),h.includes(e)&&e!==v(this,U,"f")){const t=v(this,W,"f").runCommand(e,this,l,s+1,r,p||g||r);return this[Rt](t,a,!!i,!1)}if(!t&&e!==v(this,U,"f")){t=e;break}}!v(this,W,"f").hasDefaultCommand()&&v(this,lt,"f")&&t&&!y&&v(this,mt,"f").recommendCommands(t,h)}v(this,U,"f")&&c._.includes(v(this,U,"f"))&&!m&&(v(this,T,"f")&&E(!0),this.showCompletionScript(),this.exit(0))}if(v(this,W,"f").hasDefaultCommand()&&!y){const t=v(this,W,"f").runCommand(null,this,l,0,r,p||g||r);return this[Rt](t,a,!!i,!1)}if(m){v(this,T,"f")&&E(!0);const s=(t=[].concat(t)).slice(t.indexOf(`--${v(this,F,"f").completionKey}`)+1);return v(this,F,"f").getCompletion(s,((t,s)=>{if(t)throw new e(t.message);(s||[]).forEach((t=>{v(this,Q,"f").log(t)})),this.exit(0)})),this[Rt](c,!a,!!i,!1)}if(v(this,J,"f")||(p?(v(this,T,"f")&&E(!0),o=!0,this.showHelp("log"),this.exit(0)):g&&(v(this,T,"f")&&E(!0),o=!0,v(this,pt,"f").showVersion("log"),this.exit(0))),!o&&v(this,et,"f").skipValidation.length>0&&(o=Object.keys(c).some((t=>v(this,et,"f").skipValidation.indexOf(t)>=0&&!0===c[t]))),!o){if(l.error)throw new e(l.error.message);if(!m){const t=this[Yt](u,{},l.error);i||(d=C(c,this,v(this,K,"f").getMiddleware(),!0)),d=this[Nt](t,null!=d?d:c),f(d)&&!i&&(d=d.then((()=>C(c,this,v(this,K,"f").getMiddleware(),!1))))}}}catch(t){if(!(t instanceof e))throw t;v(this,pt,"f").fail(t.message,t)}return this[Rt](null!=d?d:c,a,!!i,!0)}[Yt](t,s,i,n){const r={...this.getDemandedOptions()};return o=>{if(i)throw new e(i.message);v(this,mt,"f").nonOptionCount(o),v(this,mt,"f").requiredArguments(o,r);let a=!1;v(this,dt,"f")&&(a=v(this,mt,"f").unknownCommands(o)),v(this,ft,"f")&&!a?v(this,mt,"f").unknownArguments(o,t,s,!!n):v(this,ut,"f")&&v(this,mt,"f").unknownArguments(o,t,{},!1,!1),v(this,mt,"f").limitedChoices(o),v(this,mt,"f").implications(o),v(this,mt,"f").conflicting(o)}}[Jt](){O(this,J,!0,"f")}[Zt](t){if("string"==typeof t)v(this,et,"f").key[t]=!0;else for(const e of t)v(this,et,"f").key[e]=!0}}var Qt,te;const{readFileSync:ee}=require("fs"),{inspect:se}=require("util"),{resolve:ie}=require("path"),ne=require("y18n"),re=require("yargs-parser");var oe,ae={assert:{notStrictEqual:t.notStrictEqual,strictEqual:t.strictEqual},cliui:require("cliui"),findUp:require("escalade/sync"),getEnv:t=>process.env[t],getCallerFile:require("get-caller-file"),getProcessArgvBin:y,inspect:se,mainFilename:null!==(te=null===(Qt=null===require||void 0===require?void 0:require.main)||void 0===Qt?void 0:Qt.filename)&&void 0!==te?te:process.cwd(),Parser:re,path:require("path"),process:{argv:()=>process.argv,cwd:process.cwd,emitWarning:(t,e)=>process.emitWarning(t,e),execPath:()=>process.execPath,exit:t=>{process.exit(t)},nextTick:process.nextTick,stdColumns:void 0!==process.stdout.columns?process.stdout.columns:null},readFileSync:ee,require:require,requireDirectory:require("require-directory"),stringWidth:require("string-width"),y18n:ne({directory:ie(__dirname,"../locales"),updateFiles:!1})};const he=(null===(oe=null===process||void 
0===process?void 0:process.env)||void 0===oe?void 0:oe.YARGS_MIN_NODE_VERSION)?Number(process.env.YARGS_MIN_NODE_VERSION):12;if(process&&process.version){if(Number(process.version.match(/v([^.]+)/)[1]){const i=new Xt(t,e,s,ce);return Object.defineProperty(i,"argv",{get:()=>i.parse(),enumerable:!0}),i.help(),i.version(),i}),argsert:h,isPromise:f,objFilter:g,parseCommand:o,Parser:le,processArgv:b,YError:e};module.exports=fe; diff --git a/mybulma/node_modules/yargs/build/lib/argsert.js b/mybulma/node_modules/yargs/build/lib/argsert.js new file mode 100644 index 0000000..be5b3aa --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/argsert.js @@ -0,0 +1,62 @@ +import { YError } from './yerror.js'; +import { parseCommand } from './parse-command.js'; +const positionName = ['first', 'second', 'third', 'fourth', 'fifth', 'sixth']; +export function argsert(arg1, arg2, arg3) { + function parseArgs() { + return typeof arg1 === 'object' + ? [{ demanded: [], optional: [] }, arg1, arg2] + : [ + parseCommand(`cmd ${arg1}`), + arg2, + arg3, + ]; + } + try { + let position = 0; + const [parsed, callerArguments, _length] = parseArgs(); + const args = [].slice.call(callerArguments); + while (args.length && args[args.length - 1] === undefined) + args.pop(); + const length = _length || args.length; + if (length < parsed.demanded.length) { + throw new YError(`Not enough arguments provided. Expected ${parsed.demanded.length} but received ${args.length}.`); + } + const totalCommands = parsed.demanded.length + parsed.optional.length; + if (length > totalCommands) { + throw new YError(`Too many arguments provided. Expected max ${totalCommands} but received ${length}.`); + } + parsed.demanded.forEach(demanded => { + const arg = args.shift(); + const observedType = guessType(arg); + const matchingTypes = demanded.cmd.filter(type => type === observedType || type === '*'); + if (matchingTypes.length === 0) + argumentTypeError(observedType, demanded.cmd, position); + position += 1; + }); + parsed.optional.forEach(optional => { + if (args.length === 0) + return; + const arg = args.shift(); + const observedType = guessType(arg); + const matchingTypes = optional.cmd.filter(type => type === observedType || type === '*'); + if (matchingTypes.length === 0) + argumentTypeError(observedType, optional.cmd, position); + position += 1; + }); + } + catch (err) { + console.warn(err.stack); + } +} +function guessType(arg) { + if (Array.isArray(arg)) { + return 'array'; + } + else if (arg === null) { + return 'null'; + } + return typeof arg; +} +function argumentTypeError(observedType, allowedTypes, position) { + throw new YError(`Invalid ${positionName[position] || 'manyith'} argument. 
Expected ${allowedTypes.join(' or ')} but received ${observedType}.`); +} diff --git a/mybulma/node_modules/yargs/build/lib/command.js b/mybulma/node_modules/yargs/build/lib/command.js new file mode 100644 index 0000000..89b150c --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/command.js @@ -0,0 +1,447 @@ +import { assertNotStrictEqual, } from './typings/common-types.js'; +import { isPromise } from './utils/is-promise.js'; +import { applyMiddleware, commandMiddlewareFactory, } from './middleware.js'; +import { parseCommand } from './parse-command.js'; +import { isYargsInstance, } from './yargs-factory.js'; +import { maybeAsyncResult } from './utils/maybe-async-result.js'; +import whichModule from './utils/which-module.js'; +const DEFAULT_MARKER = /(^\*)|(^\$0)/; +export class CommandInstance { + constructor(usage, validation, globalMiddleware, shim) { + this.requireCache = new Set(); + this.handlers = {}; + this.aliasMap = {}; + this.frozens = []; + this.shim = shim; + this.usage = usage; + this.globalMiddleware = globalMiddleware; + this.validation = validation; + } + addDirectory(dir, req, callerFile, opts) { + opts = opts || {}; + if (typeof opts.recurse !== 'boolean') + opts.recurse = false; + if (!Array.isArray(opts.extensions)) + opts.extensions = ['js']; + const parentVisit = typeof opts.visit === 'function' ? opts.visit : (o) => o; + opts.visit = (obj, joined, filename) => { + const visited = parentVisit(obj, joined, filename); + if (visited) { + if (this.requireCache.has(joined)) + return visited; + else + this.requireCache.add(joined); + this.addHandler(visited); + } + return visited; + }; + this.shim.requireDirectory({ require: req, filename: callerFile }, dir, opts); + } + addHandler(cmd, description, builder, handler, commandMiddleware, deprecated) { + let aliases = []; + const middlewares = commandMiddlewareFactory(commandMiddleware); + handler = handler || (() => { }); + if (Array.isArray(cmd)) { + if (isCommandAndAliases(cmd)) { + [cmd, ...aliases] = cmd; + } + else { + for (const command of cmd) { + this.addHandler(command); + } + } + } + else if (isCommandHandlerDefinition(cmd)) { + let command = Array.isArray(cmd.command) || typeof cmd.command === 'string' + ? 
cmd.command + : this.moduleName(cmd); + if (cmd.aliases) + command = [].concat(command).concat(cmd.aliases); + this.addHandler(command, this.extractDesc(cmd), cmd.builder, cmd.handler, cmd.middlewares, cmd.deprecated); + return; + } + else if (isCommandBuilderDefinition(builder)) { + this.addHandler([cmd].concat(aliases), description, builder.builder, builder.handler, builder.middlewares, builder.deprecated); + return; + } + if (typeof cmd === 'string') { + const parsedCommand = parseCommand(cmd); + aliases = aliases.map(alias => parseCommand(alias).cmd); + let isDefault = false; + const parsedAliases = [parsedCommand.cmd].concat(aliases).filter(c => { + if (DEFAULT_MARKER.test(c)) { + isDefault = true; + return false; + } + return true; + }); + if (parsedAliases.length === 0 && isDefault) + parsedAliases.push('$0'); + if (isDefault) { + parsedCommand.cmd = parsedAliases[0]; + aliases = parsedAliases.slice(1); + cmd = cmd.replace(DEFAULT_MARKER, parsedCommand.cmd); + } + aliases.forEach(alias => { + this.aliasMap[alias] = parsedCommand.cmd; + }); + if (description !== false) { + this.usage.command(cmd, description, isDefault, aliases, deprecated); + } + this.handlers[parsedCommand.cmd] = { + original: cmd, + description, + handler, + builder: builder || {}, + middlewares, + deprecated, + demanded: parsedCommand.demanded, + optional: parsedCommand.optional, + }; + if (isDefault) + this.defaultCommand = this.handlers[parsedCommand.cmd]; + } + } + getCommandHandlers() { + return this.handlers; + } + getCommands() { + return Object.keys(this.handlers).concat(Object.keys(this.aliasMap)); + } + hasDefaultCommand() { + return !!this.defaultCommand; + } + runCommand(command, yargs, parsed, commandIndex, helpOnly, helpOrVersionSet) { + const commandHandler = this.handlers[command] || + this.handlers[this.aliasMap[command]] || + this.defaultCommand; + const currentContext = yargs.getInternalMethods().getContext(); + const parentCommands = currentContext.commands.slice(); + const isDefaultCommand = !command; + if (command) { + currentContext.commands.push(command); + currentContext.fullCommands.push(commandHandler.original); + } + const builderResult = this.applyBuilderUpdateUsageAndParse(isDefaultCommand, commandHandler, yargs, parsed.aliases, parentCommands, commandIndex, helpOnly, helpOrVersionSet); + return isPromise(builderResult) + ? builderResult.then(result => this.applyMiddlewareAndGetResult(isDefaultCommand, commandHandler, result.innerArgv, currentContext, helpOnly, result.aliases, yargs)) + : this.applyMiddlewareAndGetResult(isDefaultCommand, commandHandler, builderResult.innerArgv, currentContext, helpOnly, builderResult.aliases, yargs); + } + applyBuilderUpdateUsageAndParse(isDefaultCommand, commandHandler, yargs, aliases, parentCommands, commandIndex, helpOnly, helpOrVersionSet) { + const builder = commandHandler.builder; + let innerYargs = yargs; + if (isCommandBuilderCallback(builder)) { + const builderOutput = builder(yargs.getInternalMethods().reset(aliases), helpOrVersionSet); + if (isPromise(builderOutput)) { + return builderOutput.then(output => { + innerYargs = isYargsInstance(output) ? 
output : yargs; + return this.parseAndUpdateUsage(isDefaultCommand, commandHandler, innerYargs, parentCommands, commandIndex, helpOnly); + }); + } + } + else if (isCommandBuilderOptionDefinitions(builder)) { + innerYargs = yargs.getInternalMethods().reset(aliases); + Object.keys(commandHandler.builder).forEach(key => { + innerYargs.option(key, builder[key]); + }); + } + return this.parseAndUpdateUsage(isDefaultCommand, commandHandler, innerYargs, parentCommands, commandIndex, helpOnly); + } + parseAndUpdateUsage(isDefaultCommand, commandHandler, innerYargs, parentCommands, commandIndex, helpOnly) { + if (isDefaultCommand) + innerYargs.getInternalMethods().getUsageInstance().unfreeze(true); + if (this.shouldUpdateUsage(innerYargs)) { + innerYargs + .getInternalMethods() + .getUsageInstance() + .usage(this.usageFromParentCommandsCommandHandler(parentCommands, commandHandler), commandHandler.description); + } + const innerArgv = innerYargs + .getInternalMethods() + .runYargsParserAndExecuteCommands(null, undefined, true, commandIndex, helpOnly); + return isPromise(innerArgv) + ? innerArgv.then(argv => ({ + aliases: innerYargs.parsed.aliases, + innerArgv: argv, + })) + : { + aliases: innerYargs.parsed.aliases, + innerArgv: innerArgv, + }; + } + shouldUpdateUsage(yargs) { + return (!yargs.getInternalMethods().getUsageInstance().getUsageDisabled() && + yargs.getInternalMethods().getUsageInstance().getUsage().length === 0); + } + usageFromParentCommandsCommandHandler(parentCommands, commandHandler) { + const c = DEFAULT_MARKER.test(commandHandler.original) + ? commandHandler.original.replace(DEFAULT_MARKER, '').trim() + : commandHandler.original; + const pc = parentCommands.filter(c => { + return !DEFAULT_MARKER.test(c); + }); + pc.push(c); + return `$0 ${pc.join(' ')}`; + } + handleValidationAndGetResult(isDefaultCommand, commandHandler, innerArgv, currentContext, aliases, yargs, middlewares, positionalMap) { + if (!yargs.getInternalMethods().getHasOutput()) { + const validation = yargs + .getInternalMethods() + .runValidation(aliases, positionalMap, yargs.parsed.error, isDefaultCommand); + innerArgv = maybeAsyncResult(innerArgv, result => { + validation(result); + return result; + }); + } + if (commandHandler.handler && !yargs.getInternalMethods().getHasOutput()) { + yargs.getInternalMethods().setHasOutput(); + const populateDoubleDash = !!yargs.getOptions().configuration['populate--']; + yargs + .getInternalMethods() + .postProcess(innerArgv, populateDoubleDash, false, false); + innerArgv = applyMiddleware(innerArgv, yargs, middlewares, false); + innerArgv = maybeAsyncResult(innerArgv, result => { + const handlerResult = commandHandler.handler(result); + return isPromise(handlerResult) + ? 
handlerResult.then(() => result) + : result; + }); + if (!isDefaultCommand) { + yargs.getInternalMethods().getUsageInstance().cacheHelpMessage(); + } + if (isPromise(innerArgv) && + !yargs.getInternalMethods().hasParseCallback()) { + innerArgv.catch(error => { + try { + yargs.getInternalMethods().getUsageInstance().fail(null, error); + } + catch (_err) { + } + }); + } + } + if (!isDefaultCommand) { + currentContext.commands.pop(); + currentContext.fullCommands.pop(); + } + return innerArgv; + } + applyMiddlewareAndGetResult(isDefaultCommand, commandHandler, innerArgv, currentContext, helpOnly, aliases, yargs) { + let positionalMap = {}; + if (helpOnly) + return innerArgv; + if (!yargs.getInternalMethods().getHasOutput()) { + positionalMap = this.populatePositionals(commandHandler, innerArgv, currentContext, yargs); + } + const middlewares = this.globalMiddleware + .getMiddleware() + .slice(0) + .concat(commandHandler.middlewares); + const maybePromiseArgv = applyMiddleware(innerArgv, yargs, middlewares, true); + return isPromise(maybePromiseArgv) + ? maybePromiseArgv.then(resolvedInnerArgv => this.handleValidationAndGetResult(isDefaultCommand, commandHandler, resolvedInnerArgv, currentContext, aliases, yargs, middlewares, positionalMap)) + : this.handleValidationAndGetResult(isDefaultCommand, commandHandler, maybePromiseArgv, currentContext, aliases, yargs, middlewares, positionalMap); + } + populatePositionals(commandHandler, argv, context, yargs) { + argv._ = argv._.slice(context.commands.length); + const demanded = commandHandler.demanded.slice(0); + const optional = commandHandler.optional.slice(0); + const positionalMap = {}; + this.validation.positionalCount(demanded.length, argv._.length); + while (demanded.length) { + const demand = demanded.shift(); + this.populatePositional(demand, argv, positionalMap); + } + while (optional.length) { + const maybe = optional.shift(); + this.populatePositional(maybe, argv, positionalMap); + } + argv._ = context.commands.concat(argv._.map(a => '' + a)); + this.postProcessPositionals(argv, positionalMap, this.cmdToParseOptions(commandHandler.original), yargs); + return positionalMap; + } + populatePositional(positional, argv, positionalMap) { + const cmd = positional.cmd[0]; + if (positional.variadic) { + positionalMap[cmd] = argv._.splice(0).map(String); + } + else { + if (argv._.length) + positionalMap[cmd] = [String(argv._.shift())]; + } + } + cmdToParseOptions(cmdString) { + const parseOptions = { + array: [], + default: {}, + alias: {}, + demand: {}, + }; + const parsed = parseCommand(cmdString); + parsed.demanded.forEach(d => { + const [cmd, ...aliases] = d.cmd; + if (d.variadic) { + parseOptions.array.push(cmd); + parseOptions.default[cmd] = []; + } + parseOptions.alias[cmd] = aliases; + parseOptions.demand[cmd] = true; + }); + parsed.optional.forEach(o => { + const [cmd, ...aliases] = o.cmd; + if (o.variadic) { + parseOptions.array.push(cmd); + parseOptions.default[cmd] = []; + } + parseOptions.alias[cmd] = aliases; + }); + return parseOptions; + } + postProcessPositionals(argv, positionalMap, parseOptions, yargs) { + const options = Object.assign({}, yargs.getOptions()); + options.default = Object.assign(parseOptions.default, options.default); + for (const key of Object.keys(parseOptions.alias)) { + options.alias[key] = (options.alias[key] || []).concat(parseOptions.alias[key]); + } + options.array = options.array.concat(parseOptions.array); + options.config = {}; + const unparsed = []; + Object.keys(positionalMap).forEach(key => { + 
positionalMap[key].map(value => { + if (options.configuration['unknown-options-as-args']) + options.key[key] = true; + unparsed.push(`--${key}`); + unparsed.push(value); + }); + }); + if (!unparsed.length) + return; + const config = Object.assign({}, options.configuration, { + 'populate--': false, + }); + const parsed = this.shim.Parser.detailed(unparsed, Object.assign({}, options, { + configuration: config, + })); + if (parsed.error) { + yargs + .getInternalMethods() + .getUsageInstance() + .fail(parsed.error.message, parsed.error); + } + else { + const positionalKeys = Object.keys(positionalMap); + Object.keys(positionalMap).forEach(key => { + positionalKeys.push(...parsed.aliases[key]); + }); + Object.keys(parsed.argv).forEach(key => { + if (positionalKeys.includes(key)) { + if (!positionalMap[key]) + positionalMap[key] = parsed.argv[key]; + if (!this.isInConfigs(yargs, key) && + !this.isDefaulted(yargs, key) && + Object.prototype.hasOwnProperty.call(argv, key) && + Object.prototype.hasOwnProperty.call(parsed.argv, key) && + (Array.isArray(argv[key]) || Array.isArray(parsed.argv[key]))) { + argv[key] = [].concat(argv[key], parsed.argv[key]); + } + else { + argv[key] = parsed.argv[key]; + } + } + }); + } + } + isDefaulted(yargs, key) { + const { default: defaults } = yargs.getOptions(); + return (Object.prototype.hasOwnProperty.call(defaults, key) || + Object.prototype.hasOwnProperty.call(defaults, this.shim.Parser.camelCase(key))); + } + isInConfigs(yargs, key) { + const { configObjects } = yargs.getOptions(); + return (configObjects.some(c => Object.prototype.hasOwnProperty.call(c, key)) || + configObjects.some(c => Object.prototype.hasOwnProperty.call(c, this.shim.Parser.camelCase(key)))); + } + runDefaultBuilderOn(yargs) { + if (!this.defaultCommand) + return; + if (this.shouldUpdateUsage(yargs)) { + const commandString = DEFAULT_MARKER.test(this.defaultCommand.original) + ? 
this.defaultCommand.original + : this.defaultCommand.original.replace(/^[^[\]<>]*/, '$0 '); + yargs + .getInternalMethods() + .getUsageInstance() + .usage(commandString, this.defaultCommand.description); + } + const builder = this.defaultCommand.builder; + if (isCommandBuilderCallback(builder)) { + return builder(yargs, true); + } + else if (!isCommandBuilderDefinition(builder)) { + Object.keys(builder).forEach(key => { + yargs.option(key, builder[key]); + }); + } + return undefined; + } + moduleName(obj) { + const mod = whichModule(obj); + if (!mod) + throw new Error(`No command name given for module: ${this.shim.inspect(obj)}`); + return this.commandFromFilename(mod.filename); + } + commandFromFilename(filename) { + return this.shim.path.basename(filename, this.shim.path.extname(filename)); + } + extractDesc({ describe, description, desc }) { + for (const test of [describe, description, desc]) { + if (typeof test === 'string' || test === false) + return test; + assertNotStrictEqual(test, true, this.shim); + } + return false; + } + freeze() { + this.frozens.push({ + handlers: this.handlers, + aliasMap: this.aliasMap, + defaultCommand: this.defaultCommand, + }); + } + unfreeze() { + const frozen = this.frozens.pop(); + assertNotStrictEqual(frozen, undefined, this.shim); + ({ + handlers: this.handlers, + aliasMap: this.aliasMap, + defaultCommand: this.defaultCommand, + } = frozen); + } + reset() { + this.handlers = {}; + this.aliasMap = {}; + this.defaultCommand = undefined; + this.requireCache = new Set(); + return this; + } +} +export function command(usage, validation, globalMiddleware, shim) { + return new CommandInstance(usage, validation, globalMiddleware, shim); +} +export function isCommandBuilderDefinition(builder) { + return (typeof builder === 'object' && + !!builder.builder && + typeof builder.handler === 'function'); +} +function isCommandAndAliases(cmd) { + return cmd.every(c => typeof c === 'string'); +} +export function isCommandBuilderCallback(builder) { + return typeof builder === 'function'; +} +function isCommandBuilderOptionDefinitions(builder) { + return typeof builder === 'object'; +} +export function isCommandHandlerDefinition(cmd) { + return typeof cmd === 'object' && !Array.isArray(cmd); +} diff --git a/mybulma/node_modules/yargs/build/lib/completion-templates.js b/mybulma/node_modules/yargs/build/lib/completion-templates.js new file mode 100644 index 0000000..2c4dcb5 --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/completion-templates.js @@ -0,0 +1,48 @@ +export const completionShTemplate = `###-begin-{{app_name}}-completions-### +# +# yargs command completion script +# +# Installation: {{app_path}} {{completion_command}} >> ~/.bashrc +# or {{app_path}} {{completion_command}} >> ~/.bash_profile on OSX. +# +_{{app_name}}_yargs_completions() +{ + local cur_word args type_list + + cur_word="\${COMP_WORDS[COMP_CWORD]}" + args=("\${COMP_WORDS[@]}") + + # ask yargs to generate completions. 
+ type_list=$({{app_path}} --get-yargs-completions "\${args[@]}") + + COMPREPLY=( $(compgen -W "\${type_list}" -- \${cur_word}) ) + + # if no match was found, fall back to filename completion + if [ \${#COMPREPLY[@]} -eq 0 ]; then + COMPREPLY=() + fi + + return 0 +} +complete -o bashdefault -o default -F _{{app_name}}_yargs_completions {{app_name}} +###-end-{{app_name}}-completions-### +`; +export const completionZshTemplate = `#compdef {{app_name}} +###-begin-{{app_name}}-completions-### +# +# yargs command completion script +# +# Installation: {{app_path}} {{completion_command}} >> ~/.zshrc +# or {{app_path}} {{completion_command}} >> ~/.zprofile on OSX. +# +_{{app_name}}_yargs_completions() +{ + local reply + local si=$IFS + IFS=$'\n' reply=($(COMP_CWORD="$((CURRENT-1))" COMP_LINE="$BUFFER" COMP_POINT="$CURSOR" {{app_path}} --get-yargs-completions "\${words[@]}")) + IFS=$si + _describe 'values' reply +} +compdef _{{app_name}}_yargs_completions {{app_name}} +###-end-{{app_name}}-completions-### +`; diff --git a/mybulma/node_modules/yargs/build/lib/completion.js b/mybulma/node_modules/yargs/build/lib/completion.js new file mode 100644 index 0000000..1c59212 --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/completion.js @@ -0,0 +1,236 @@ +import { isCommandBuilderCallback } from './command.js'; +import { assertNotStrictEqual } from './typings/common-types.js'; +import * as templates from './completion-templates.js'; +import { isPromise } from './utils/is-promise.js'; +import { parseCommand } from './parse-command.js'; +export class Completion { + constructor(yargs, usage, command, shim) { + var _a, _b, _c; + this.yargs = yargs; + this.usage = usage; + this.command = command; + this.shim = shim; + this.completionKey = 'get-yargs-completions'; + this.aliases = null; + this.customCompletionFunction = null; + this.indexAfterLastReset = 0; + this.zshShell = + (_c = (((_a = this.shim.getEnv('SHELL')) === null || _a === void 0 ? void 0 : _a.includes('zsh')) || + ((_b = this.shim.getEnv('ZSH_NAME')) === null || _b === void 0 ? void 0 : _b.includes('zsh')))) !== null && _c !== void 0 ? 
_c : false; + } + defaultCompletion(args, argv, current, done) { + const handlers = this.command.getCommandHandlers(); + for (let i = 0, ii = args.length; i < ii; ++i) { + if (handlers[args[i]] && handlers[args[i]].builder) { + const builder = handlers[args[i]].builder; + if (isCommandBuilderCallback(builder)) { + this.indexAfterLastReset = i + 1; + const y = this.yargs.getInternalMethods().reset(); + builder(y, true); + return y.argv; + } + } + } + const completions = []; + this.commandCompletions(completions, args, current); + this.optionCompletions(completions, args, argv, current); + this.choicesFromOptionsCompletions(completions, args, argv, current); + this.choicesFromPositionalsCompletions(completions, args, argv, current); + done(null, completions); + } + commandCompletions(completions, args, current) { + const parentCommands = this.yargs + .getInternalMethods() + .getContext().commands; + if (!current.match(/^-/) && + parentCommands[parentCommands.length - 1] !== current && + !this.previousArgHasChoices(args)) { + this.usage.getCommands().forEach(usageCommand => { + const commandName = parseCommand(usageCommand[0]).cmd; + if (args.indexOf(commandName) === -1) { + if (!this.zshShell) { + completions.push(commandName); + } + else { + const desc = usageCommand[1] || ''; + completions.push(commandName.replace(/:/g, '\\:') + ':' + desc); + } + } + }); + } + } + optionCompletions(completions, args, argv, current) { + if ((current.match(/^-/) || (current === '' && completions.length === 0)) && + !this.previousArgHasChoices(args)) { + const options = this.yargs.getOptions(); + const positionalKeys = this.yargs.getGroups()[this.usage.getPositionalGroupName()] || []; + Object.keys(options.key).forEach(key => { + const negable = !!options.configuration['boolean-negation'] && + options.boolean.includes(key); + const isPositionalKey = positionalKeys.includes(key); + if (!isPositionalKey && + !options.hiddenOptions.includes(key) && + !this.argsContainKey(args, key, negable)) { + this.completeOptionKey(key, completions, current); + if (negable && !!options.default[key]) + this.completeOptionKey(`no-${key}`, completions, current); + } + }); + } + } + choicesFromOptionsCompletions(completions, args, argv, current) { + if (this.previousArgHasChoices(args)) { + const choices = this.getPreviousArgChoices(args); + if (choices && choices.length > 0) { + completions.push(...choices.map(c => c.replace(/:/g, '\\:'))); + } + } + } + choicesFromPositionalsCompletions(completions, args, argv, current) { + if (current === '' && + completions.length > 0 && + this.previousArgHasChoices(args)) { + return; + } + const positionalKeys = this.yargs.getGroups()[this.usage.getPositionalGroupName()] || []; + const offset = Math.max(this.indexAfterLastReset, this.yargs.getInternalMethods().getContext().commands.length + + 1); + const positionalKey = positionalKeys[argv._.length - offset - 1]; + if (!positionalKey) { + return; + } + const choices = this.yargs.getOptions().choices[positionalKey] || []; + for (const choice of choices) { + if (choice.startsWith(current)) { + completions.push(choice.replace(/:/g, '\\:')); + } + } + } + getPreviousArgChoices(args) { + if (args.length < 1) + return; + let previousArg = args[args.length - 1]; + let filter = ''; + if (!previousArg.startsWith('-') && args.length > 1) { + filter = previousArg; + previousArg = args[args.length - 2]; + } + if (!previousArg.startsWith('-')) + return; + const previousArgKey = previousArg.replace(/^-+/, ''); + const options = this.yargs.getOptions(); + 
const possibleAliases = [ + previousArgKey, + ...(this.yargs.getAliases()[previousArgKey] || []), + ]; + let choices; + for (const possibleAlias of possibleAliases) { + if (Object.prototype.hasOwnProperty.call(options.key, possibleAlias) && + Array.isArray(options.choices[possibleAlias])) { + choices = options.choices[possibleAlias]; + break; + } + } + if (choices) { + return choices.filter(choice => !filter || choice.startsWith(filter)); + } + } + previousArgHasChoices(args) { + const choices = this.getPreviousArgChoices(args); + return choices !== undefined && choices.length > 0; + } + argsContainKey(args, key, negable) { + const argsContains = (s) => args.indexOf((/^[^0-9]$/.test(s) ? '-' : '--') + s) !== -1; + if (argsContains(key)) + return true; + if (negable && argsContains(`no-${key}`)) + return true; + if (this.aliases) { + for (const alias of this.aliases[key]) { + if (argsContains(alias)) + return true; + } + } + return false; + } + completeOptionKey(key, completions, current) { + const descs = this.usage.getDescriptions(); + const startsByTwoDashes = (s) => /^--/.test(s); + const isShortOption = (s) => /^[^0-9]$/.test(s); + const dashes = !startsByTwoDashes(current) && isShortOption(key) ? '-' : '--'; + if (!this.zshShell) { + completions.push(dashes + key); + } + else { + const desc = descs[key] || ''; + completions.push(dashes + + `${key.replace(/:/g, '\\:')}:${desc.replace('__yargsString__:', '')}`); + } + } + customCompletion(args, argv, current, done) { + assertNotStrictEqual(this.customCompletionFunction, null, this.shim); + if (isSyncCompletionFunction(this.customCompletionFunction)) { + const result = this.customCompletionFunction(current, argv); + if (isPromise(result)) { + return result + .then(list => { + this.shim.process.nextTick(() => { + done(null, list); + }); + }) + .catch(err => { + this.shim.process.nextTick(() => { + done(err, undefined); + }); + }); + } + return done(null, result); + } + else if (isFallbackCompletionFunction(this.customCompletionFunction)) { + return this.customCompletionFunction(current, argv, (onCompleted = done) => this.defaultCompletion(args, argv, current, onCompleted), completions => { + done(null, completions); + }); + } + else { + return this.customCompletionFunction(current, argv, completions => { + done(null, completions); + }); + } + } + getCompletion(args, done) { + const current = args.length ? args[args.length - 1] : ''; + const argv = this.yargs.parse(args, true); + const completionFunction = this.customCompletionFunction + ? (argv) => this.customCompletion(args, argv, current, done) + : (argv) => this.defaultCompletion(args, argv, current, done); + return isPromise(argv) + ? argv.then(completionFunction) + : completionFunction(argv); + } + generateCompletionScript($0, cmd) { + let script = this.zshShell + ? 
templates.completionZshTemplate + : templates.completionShTemplate; + const name = this.shim.path.basename($0); + if ($0.match(/\.js$/)) + $0 = `./${$0}`; + script = script.replace(/{{app_name}}/g, name); + script = script.replace(/{{completion_command}}/g, cmd); + return script.replace(/{{app_path}}/g, $0); + } + registerFunction(fn) { + this.customCompletionFunction = fn; + } + setParsed(parsed) { + this.aliases = parsed.aliases; + } +} +export function completion(yargs, usage, command, shim) { + return new Completion(yargs, usage, command, shim); +} +function isSyncCompletionFunction(completionFunction) { + return completionFunction.length < 3; +} +function isFallbackCompletionFunction(completionFunction) { + return completionFunction.length > 3; +} diff --git a/mybulma/node_modules/yargs/build/lib/middleware.js b/mybulma/node_modules/yargs/build/lib/middleware.js new file mode 100644 index 0000000..4e561a7 --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/middleware.js @@ -0,0 +1,88 @@ +import { argsert } from './argsert.js'; +import { isPromise } from './utils/is-promise.js'; +export class GlobalMiddleware { + constructor(yargs) { + this.globalMiddleware = []; + this.frozens = []; + this.yargs = yargs; + } + addMiddleware(callback, applyBeforeValidation, global = true, mutates = false) { + argsert(' [boolean] [boolean] [boolean]', [callback, applyBeforeValidation, global], arguments.length); + if (Array.isArray(callback)) { + for (let i = 0; i < callback.length; i++) { + if (typeof callback[i] !== 'function') { + throw Error('middleware must be a function'); + } + const m = callback[i]; + m.applyBeforeValidation = applyBeforeValidation; + m.global = global; + } + Array.prototype.push.apply(this.globalMiddleware, callback); + } + else if (typeof callback === 'function') { + const m = callback; + m.applyBeforeValidation = applyBeforeValidation; + m.global = global; + m.mutates = mutates; + this.globalMiddleware.push(callback); + } + return this.yargs; + } + addCoerceMiddleware(callback, option) { + const aliases = this.yargs.getAliases(); + this.globalMiddleware = this.globalMiddleware.filter(m => { + const toCheck = [...(aliases[option] || []), option]; + if (!m.option) + return true; + else + return !toCheck.includes(m.option); + }); + callback.option = option; + return this.addMiddleware(callback, true, true, true); + } + getMiddleware() { + return this.globalMiddleware; + } + freeze() { + this.frozens.push([...this.globalMiddleware]); + } + unfreeze() { + const frozen = this.frozens.pop(); + if (frozen !== undefined) + this.globalMiddleware = frozen; + } + reset() { + this.globalMiddleware = this.globalMiddleware.filter(m => m.global); + } +} +export function commandMiddlewareFactory(commandMiddleware) { + if (!commandMiddleware) + return []; + return commandMiddleware.map(middleware => { + middleware.applyBeforeValidation = false; + return middleware; + }); +} +export function applyMiddleware(argv, yargs, middlewares, beforeValidation) { + return middlewares.reduce((acc, middleware) => { + if (middleware.applyBeforeValidation !== beforeValidation) { + return acc; + } + if (middleware.mutates) { + if (middleware.applied) + return acc; + middleware.applied = true; + } + if (isPromise(acc)) { + return acc + .then(initialObj => Promise.all([initialObj, middleware(initialObj, yargs)])) + .then(([initialObj, middlewareObj]) => Object.assign(initialObj, middlewareObj)); + } + else { + const result = middleware(acc, yargs); + return isPromise(result) + ? 
result.then(middlewareObj => Object.assign(acc, middlewareObj)) + : Object.assign(acc, result); + } + }, argv); +} diff --git a/mybulma/node_modules/yargs/build/lib/parse-command.js b/mybulma/node_modules/yargs/build/lib/parse-command.js new file mode 100644 index 0000000..4989f53 --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/parse-command.js @@ -0,0 +1,32 @@ +export function parseCommand(cmd) { + const extraSpacesStrippedCommand = cmd.replace(/\s{2,}/g, ' '); + const splitCommand = extraSpacesStrippedCommand.split(/\s+(?![^[]*]|[^<]*>)/); + const bregex = /\.*[\][<>]/g; + const firstCommand = splitCommand.shift(); + if (!firstCommand) + throw new Error(`No command found in: ${cmd}`); + const parsedCommand = { + cmd: firstCommand.replace(bregex, ''), + demanded: [], + optional: [], + }; + splitCommand.forEach((cmd, i) => { + let variadic = false; + cmd = cmd.replace(/\s/g, ''); + if (/\.+[\]>]/.test(cmd) && i === splitCommand.length - 1) + variadic = true; + if (/^\[/.test(cmd)) { + parsedCommand.optional.push({ + cmd: cmd.replace(bregex, '').split('|'), + variadic, + }); + } + else { + parsedCommand.demanded.push({ + cmd: cmd.replace(bregex, '').split('|'), + variadic, + }); + } + }); + return parsedCommand; +} diff --git a/mybulma/node_modules/yargs/build/lib/typings/common-types.js b/mybulma/node_modules/yargs/build/lib/typings/common-types.js new file mode 100644 index 0000000..73e1773 --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/typings/common-types.js @@ -0,0 +1,9 @@ +export function assertNotStrictEqual(actual, expected, shim, message) { + shim.assert.notStrictEqual(actual, expected, message); +} +export function assertSingleKey(actual, shim) { + shim.assert.strictEqual(typeof actual, 'string'); +} +export function objectKeys(object) { + return Object.keys(object); +} diff --git a/mybulma/node_modules/yargs/build/lib/typings/yargs-parser-types.js b/mybulma/node_modules/yargs/build/lib/typings/yargs-parser-types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/typings/yargs-parser-types.js @@ -0,0 +1 @@ +export {}; diff --git a/mybulma/node_modules/yargs/build/lib/usage.js b/mybulma/node_modules/yargs/build/lib/usage.js new file mode 100644 index 0000000..2e14073 --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/usage.js @@ -0,0 +1,582 @@ +import { objFilter } from './utils/obj-filter.js'; +import { YError } from './yerror.js'; +import setBlocking from './utils/set-blocking.js'; +function isBoolean(fail) { + return typeof fail === 'boolean'; +} +export function usage(yargs, shim) { + const __ = shim.y18n.__; + const self = {}; + const fails = []; + self.failFn = function failFn(f) { + fails.push(f); + }; + let failMessage = null; + let globalFailMessage = null; + let showHelpOnFail = true; + self.showHelpOnFail = function showHelpOnFailFn(arg1 = true, arg2) { + const [enabled, message] = typeof arg1 === 'string' ? 
[true, arg1] : [arg1, arg2]; + if (yargs.getInternalMethods().isGlobalContext()) { + globalFailMessage = message; + } + failMessage = message; + showHelpOnFail = enabled; + return self; + }; + let failureOutput = false; + self.fail = function fail(msg, err) { + const logger = yargs.getInternalMethods().getLoggerInstance(); + if (fails.length) { + for (let i = fails.length - 1; i >= 0; --i) { + const fail = fails[i]; + if (isBoolean(fail)) { + if (err) + throw err; + else if (msg) + throw Error(msg); + } + else { + fail(msg, err, self); + } + } + } + else { + if (yargs.getExitProcess()) + setBlocking(true); + if (!failureOutput) { + failureOutput = true; + if (showHelpOnFail) { + yargs.showHelp('error'); + logger.error(); + } + if (msg || err) + logger.error(msg || err); + const globalOrCommandFailMessage = failMessage || globalFailMessage; + if (globalOrCommandFailMessage) { + if (msg || err) + logger.error(''); + logger.error(globalOrCommandFailMessage); + } + } + err = err || new YError(msg); + if (yargs.getExitProcess()) { + return yargs.exit(1); + } + else if (yargs.getInternalMethods().hasParseCallback()) { + return yargs.exit(1, err); + } + else { + throw err; + } + } + }; + let usages = []; + let usageDisabled = false; + self.usage = (msg, description) => { + if (msg === null) { + usageDisabled = true; + usages = []; + return self; + } + usageDisabled = false; + usages.push([msg, description || '']); + return self; + }; + self.getUsage = () => { + return usages; + }; + self.getUsageDisabled = () => { + return usageDisabled; + }; + self.getPositionalGroupName = () => { + return __('Positionals:'); + }; + let examples = []; + self.example = (cmd, description) => { + examples.push([cmd, description || '']); + }; + let commands = []; + self.command = function command(cmd, description, isDefault, aliases, deprecated = false) { + if (isDefault) { + commands = commands.map(cmdArray => { + cmdArray[2] = false; + return cmdArray; + }); + } + commands.push([cmd, description || '', isDefault, aliases, deprecated]); + }; + self.getCommands = () => commands; + let descriptions = {}; + self.describe = function describe(keyOrKeys, desc) { + if (Array.isArray(keyOrKeys)) { + keyOrKeys.forEach(k => { + self.describe(k, desc); + }); + } + else if (typeof keyOrKeys === 'object') { + Object.keys(keyOrKeys).forEach(k => { + self.describe(k, keyOrKeys[k]); + }); + } + else { + descriptions[keyOrKeys] = desc; + } + }; + self.getDescriptions = () => descriptions; + let epilogs = []; + self.epilog = msg => { + epilogs.push(msg); + }; + let wrapSet = false; + let wrap; + self.wrap = cols => { + wrapSet = true; + wrap = cols; + }; + self.getWrap = () => { + if (shim.getEnv('YARGS_DISABLE_WRAP')) { + return null; + } + if (!wrapSet) { + wrap = windowWidth(); + wrapSet = true; + } + return wrap; + }; + const deferY18nLookupPrefix = '__yargsString__:'; + self.deferY18nLookup = str => deferY18nLookupPrefix + str; + self.help = function help() { + if (cachedHelpMessage) + return cachedHelpMessage; + normalizeAliases(); + const base$0 = yargs.customScriptName + ? 
yargs.$0 + : shim.path.basename(yargs.$0); + const demandedOptions = yargs.getDemandedOptions(); + const demandedCommands = yargs.getDemandedCommands(); + const deprecatedOptions = yargs.getDeprecatedOptions(); + const groups = yargs.getGroups(); + const options = yargs.getOptions(); + let keys = []; + keys = keys.concat(Object.keys(descriptions)); + keys = keys.concat(Object.keys(demandedOptions)); + keys = keys.concat(Object.keys(demandedCommands)); + keys = keys.concat(Object.keys(options.default)); + keys = keys.filter(filterHiddenOptions); + keys = Object.keys(keys.reduce((acc, key) => { + if (key !== '_') + acc[key] = true; + return acc; + }, {})); + const theWrap = self.getWrap(); + const ui = shim.cliui({ + width: theWrap, + wrap: !!theWrap, + }); + if (!usageDisabled) { + if (usages.length) { + usages.forEach(usage => { + ui.div({ text: `${usage[0].replace(/\$0/g, base$0)}` }); + if (usage[1]) { + ui.div({ text: `${usage[1]}`, padding: [1, 0, 0, 0] }); + } + }); + ui.div(); + } + else if (commands.length) { + let u = null; + if (demandedCommands._) { + u = `${base$0} <${__('command')}>\n`; + } + else { + u = `${base$0} [${__('command')}]\n`; + } + ui.div(`${u}`); + } + } + if (commands.length > 1 || (commands.length === 1 && !commands[0][2])) { + ui.div(__('Commands:')); + const context = yargs.getInternalMethods().getContext(); + const parentCommands = context.commands.length + ? `${context.commands.join(' ')} ` + : ''; + if (yargs.getInternalMethods().getParserConfiguration()['sort-commands'] === + true) { + commands = commands.sort((a, b) => a[0].localeCompare(b[0])); + } + const prefix = base$0 ? `${base$0} ` : ''; + commands.forEach(command => { + const commandString = `${prefix}${parentCommands}${command[0].replace(/^\$0 ?/, '')}`; + ui.span({ + text: commandString, + padding: [0, 2, 0, 2], + width: maxWidth(commands, theWrap, `${base$0}${parentCommands}`) + 4, + }, { text: command[1] }); + const hints = []; + if (command[2]) + hints.push(`[${__('default')}]`); + if (command[3] && command[3].length) { + hints.push(`[${__('aliases:')} ${command[3].join(', ')}]`); + } + if (command[4]) { + if (typeof command[4] === 'string') { + hints.push(`[${__('deprecated: %s', command[4])}]`); + } + else { + hints.push(`[${__('deprecated')}]`); + } + } + if (hints.length) { + ui.div({ + text: hints.join(' '), + padding: [0, 0, 0, 2], + align: 'right', + }); + } + else { + ui.div(); + } + }); + ui.div(); + } + const aliasKeys = (Object.keys(options.alias) || []).concat(Object.keys(yargs.parsed.newAliases) || []); + keys = keys.filter(key => !yargs.parsed.newAliases[key] && + aliasKeys.every(alias => (options.alias[alias] || []).indexOf(key) === -1)); + const defaultGroup = __('Options:'); + if (!groups[defaultGroup]) + groups[defaultGroup] = []; + addUngroupedKeys(keys, options.alias, groups, defaultGroup); + const isLongSwitch = (sw) => /^--/.test(getText(sw)); + const displayedGroups = Object.keys(groups) + .filter(groupName => groups[groupName].length > 0) + .map(groupName => { + const normalizedKeys = groups[groupName] + .filter(filterHiddenOptions) + .map(key => { + if (aliasKeys.includes(key)) + return key; + for (let i = 0, aliasKey; (aliasKey = aliasKeys[i]) !== undefined; i++) { + if ((options.alias[aliasKey] || []).includes(key)) + return aliasKey; + } + return key; + }); + return { groupName, normalizedKeys }; + }) + .filter(({ normalizedKeys }) => normalizedKeys.length > 0) + .map(({ groupName, normalizedKeys }) => { + const switches = normalizedKeys.reduce((acc, key) => { + 
acc[key] = [key] + .concat(options.alias[key] || []) + .map(sw => { + if (groupName === self.getPositionalGroupName()) + return sw; + else { + return ((/^[0-9]$/.test(sw) + ? options.boolean.includes(key) + ? '-' + : '--' + : sw.length > 1 + ? '--' + : '-') + sw); + } + }) + .sort((sw1, sw2) => isLongSwitch(sw1) === isLongSwitch(sw2) + ? 0 + : isLongSwitch(sw1) + ? 1 + : -1) + .join(', '); + return acc; + }, {}); + return { groupName, normalizedKeys, switches }; + }); + const shortSwitchesUsed = displayedGroups + .filter(({ groupName }) => groupName !== self.getPositionalGroupName()) + .some(({ normalizedKeys, switches }) => !normalizedKeys.every(key => isLongSwitch(switches[key]))); + if (shortSwitchesUsed) { + displayedGroups + .filter(({ groupName }) => groupName !== self.getPositionalGroupName()) + .forEach(({ normalizedKeys, switches }) => { + normalizedKeys.forEach(key => { + if (isLongSwitch(switches[key])) { + switches[key] = addIndentation(switches[key], '-x, '.length); + } + }); + }); + } + displayedGroups.forEach(({ groupName, normalizedKeys, switches }) => { + ui.div(groupName); + normalizedKeys.forEach(key => { + const kswitch = switches[key]; + let desc = descriptions[key] || ''; + let type = null; + if (desc.includes(deferY18nLookupPrefix)) + desc = __(desc.substring(deferY18nLookupPrefix.length)); + if (options.boolean.includes(key)) + type = `[${__('boolean')}]`; + if (options.count.includes(key)) + type = `[${__('count')}]`; + if (options.string.includes(key)) + type = `[${__('string')}]`; + if (options.normalize.includes(key)) + type = `[${__('string')}]`; + if (options.array.includes(key)) + type = `[${__('array')}]`; + if (options.number.includes(key)) + type = `[${__('number')}]`; + const deprecatedExtra = (deprecated) => typeof deprecated === 'string' + ? `[${__('deprecated: %s', deprecated)}]` + : `[${__('deprecated')}]`; + const extra = [ + key in deprecatedOptions + ? deprecatedExtra(deprecatedOptions[key]) + : null, + type, + key in demandedOptions ? `[${__('required')}]` : null, + options.choices && options.choices[key] + ? `[${__('choices:')} ${self.stringifiedValues(options.choices[key])}]` + : null, + defaultString(options.default[key], options.defaultDescription[key]), + ] + .filter(Boolean) + .join(' '); + ui.span({ + text: getText(kswitch), + padding: [0, 2, 0, 2 + getIndentation(kswitch)], + width: maxWidth(switches, theWrap) + 4, + }, desc); + if (extra) + ui.div({ text: extra, padding: [0, 0, 0, 2], align: 'right' }); + else + ui.div(); + }); + ui.div(); + }); + if (examples.length) { + ui.div(__('Examples:')); + examples.forEach(example => { + example[0] = example[0].replace(/\$0/g, base$0); + }); + examples.forEach(example => { + if (example[1] === '') { + ui.div({ + text: example[0], + padding: [0, 2, 0, 2], + }); + } + else { + ui.div({ + text: example[0], + padding: [0, 2, 0, 2], + width: maxWidth(examples, theWrap) + 4, + }, { + text: example[1], + }); + } + }); + ui.div(); + } + if (epilogs.length > 0) { + const e = epilogs + .map(epilog => epilog.replace(/\$0/g, base$0)) + .join('\n'); + ui.div(`${e}\n`); + } + return ui.toString().replace(/\s*$/, ''); + }; + function maxWidth(table, theWrap, modifier) { + let width = 0; + if (!Array.isArray(table)) { + table = Object.values(table).map(v => [v]); + } + table.forEach(v => { + width = Math.max(shim.stringWidth(modifier ? 
`${modifier} ${getText(v[0])}` : getText(v[0])) + getIndentation(v[0]), width); + }); + if (theWrap) + width = Math.min(width, parseInt((theWrap * 0.5).toString(), 10)); + return width; + } + function normalizeAliases() { + const demandedOptions = yargs.getDemandedOptions(); + const options = yargs.getOptions(); + (Object.keys(options.alias) || []).forEach(key => { + options.alias[key].forEach(alias => { + if (descriptions[alias]) + self.describe(key, descriptions[alias]); + if (alias in demandedOptions) + yargs.demandOption(key, demandedOptions[alias]); + if (options.boolean.includes(alias)) + yargs.boolean(key); + if (options.count.includes(alias)) + yargs.count(key); + if (options.string.includes(alias)) + yargs.string(key); + if (options.normalize.includes(alias)) + yargs.normalize(key); + if (options.array.includes(alias)) + yargs.array(key); + if (options.number.includes(alias)) + yargs.number(key); + }); + }); + } + let cachedHelpMessage; + self.cacheHelpMessage = function () { + cachedHelpMessage = this.help(); + }; + self.clearCachedHelpMessage = function () { + cachedHelpMessage = undefined; + }; + self.hasCachedHelpMessage = function () { + return !!cachedHelpMessage; + }; + function addUngroupedKeys(keys, aliases, groups, defaultGroup) { + let groupedKeys = []; + let toCheck = null; + Object.keys(groups).forEach(group => { + groupedKeys = groupedKeys.concat(groups[group]); + }); + keys.forEach(key => { + toCheck = [key].concat(aliases[key]); + if (!toCheck.some(k => groupedKeys.indexOf(k) !== -1)) { + groups[defaultGroup].push(key); + } + }); + return groupedKeys; + } + function filterHiddenOptions(key) { + return (yargs.getOptions().hiddenOptions.indexOf(key) < 0 || + yargs.parsed.argv[yargs.getOptions().showHiddenOpt]); + } + self.showHelp = (level) => { + const logger = yargs.getInternalMethods().getLoggerInstance(); + if (!level) + level = 'error'; + const emit = typeof level === 'function' ? level : logger[level]; + emit(self.help()); + }; + self.functionDescription = fn => { + const description = fn.name + ? shim.Parser.decamelize(fn.name, '-') + : __('generated-value'); + return ['(', description, ')'].join(''); + }; + self.stringifiedValues = function stringifiedValues(values, separator) { + let string = ''; + const sep = separator || ', '; + const array = [].concat(values); + if (!values || !array.length) + return string; + array.forEach(value => { + if (string.length) + string += sep; + string += JSON.stringify(value); + }); + return string; + }; + function defaultString(value, defaultDescription) { + let string = `[${__('default:')} `; + if (value === undefined && !defaultDescription) + return null; + if (defaultDescription) { + string += defaultDescription; + } + else { + switch (typeof value) { + case 'string': + string += `"${value}"`; + break; + case 'object': + string += JSON.stringify(value); + break; + default: + string += value; + } + } + return `${string}]`; + } + function windowWidth() { + const maxWidth = 80; + if (shim.process.stdColumns) { + return Math.min(maxWidth, shim.process.stdColumns); + } + else { + return maxWidth; + } + } + let version = null; + self.version = ver => { + version = ver; + }; + self.showVersion = level => { + const logger = yargs.getInternalMethods().getLoggerInstance(); + if (!level) + level = 'error'; + const emit = typeof level === 'function' ? 
level : logger[level]; + emit(version); + }; + self.reset = function reset(localLookup) { + failMessage = null; + failureOutput = false; + usages = []; + usageDisabled = false; + epilogs = []; + examples = []; + commands = []; + descriptions = objFilter(descriptions, k => !localLookup[k]); + return self; + }; + const frozens = []; + self.freeze = function freeze() { + frozens.push({ + failMessage, + failureOutput, + usages, + usageDisabled, + epilogs, + examples, + commands, + descriptions, + }); + }; + self.unfreeze = function unfreeze(defaultCommand = false) { + const frozen = frozens.pop(); + if (!frozen) + return; + if (defaultCommand) { + descriptions = { ...frozen.descriptions, ...descriptions }; + commands = [...frozen.commands, ...commands]; + usages = [...frozen.usages, ...usages]; + examples = [...frozen.examples, ...examples]; + epilogs = [...frozen.epilogs, ...epilogs]; + } + else { + ({ + failMessage, + failureOutput, + usages, + usageDisabled, + epilogs, + examples, + commands, + descriptions, + } = frozen); + } + }; + return self; +} +function isIndentedText(text) { + return typeof text === 'object'; +} +function addIndentation(text, indent) { + return isIndentedText(text) + ? { text: text.text, indentation: text.indentation + indent } + : { text, indentation: indent }; +} +function getIndentation(text) { + return isIndentedText(text) ? text.indentation : 0; +} +function getText(text) { + return isIndentedText(text) ? text.text : text; +} diff --git a/mybulma/node_modules/yargs/build/lib/utils/apply-extends.js b/mybulma/node_modules/yargs/build/lib/utils/apply-extends.js new file mode 100644 index 0000000..0e593b4 --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/utils/apply-extends.js @@ -0,0 +1,59 @@ +import { YError } from '../yerror.js'; +let previouslyVisitedConfigs = []; +let shim; +export function applyExtends(config, cwd, mergeExtends, _shim) { + shim = _shim; + let defaultConfig = {}; + if (Object.prototype.hasOwnProperty.call(config, 'extends')) { + if (typeof config.extends !== 'string') + return defaultConfig; + const isPath = /\.json|\..*rc$/.test(config.extends); + let pathToDefault = null; + if (!isPath) { + try { + pathToDefault = require.resolve(config.extends); + } + catch (_err) { + return config; + } + } + else { + pathToDefault = getPathToDefaultConfig(cwd, config.extends); + } + checkForCircularExtends(pathToDefault); + previouslyVisitedConfigs.push(pathToDefault); + defaultConfig = isPath + ? JSON.parse(shim.readFileSync(pathToDefault, 'utf8')) + : require(config.extends); + delete config.extends; + defaultConfig = applyExtends(defaultConfig, shim.path.dirname(pathToDefault), mergeExtends, shim); + } + previouslyVisitedConfigs = []; + return mergeExtends + ? 
mergeDeep(defaultConfig, config) + : Object.assign({}, defaultConfig, config); +} +function checkForCircularExtends(cfgPath) { + if (previouslyVisitedConfigs.indexOf(cfgPath) > -1) { + throw new YError(`Circular extended configurations: '${cfgPath}'.`); + } +} +function getPathToDefaultConfig(cwd, pathToExtend) { + return shim.path.resolve(cwd, pathToExtend); +} +function mergeDeep(config1, config2) { + const target = {}; + function isObject(obj) { + return obj && typeof obj === 'object' && !Array.isArray(obj); + } + Object.assign(target, config1); + for (const key of Object.keys(config2)) { + if (isObject(config2[key]) && isObject(target[key])) { + target[key] = mergeDeep(config1[key], config2[key]); + } + else { + target[key] = config2[key]; + } + } + return target; +} diff --git a/mybulma/node_modules/yargs/build/lib/utils/is-promise.js b/mybulma/node_modules/yargs/build/lib/utils/is-promise.js new file mode 100644 index 0000000..d250c08 --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/utils/is-promise.js @@ -0,0 +1,5 @@ +export function isPromise(maybePromise) { + return (!!maybePromise && + !!maybePromise.then && + typeof maybePromise.then === 'function'); +} diff --git a/mybulma/node_modules/yargs/build/lib/utils/levenshtein.js b/mybulma/node_modules/yargs/build/lib/utils/levenshtein.js new file mode 100644 index 0000000..60575ef --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/utils/levenshtein.js @@ -0,0 +1,34 @@ +export function levenshtein(a, b) { + if (a.length === 0) + return b.length; + if (b.length === 0) + return a.length; + const matrix = []; + let i; + for (i = 0; i <= b.length; i++) { + matrix[i] = [i]; + } + let j; + for (j = 0; j <= a.length; j++) { + matrix[0][j] = j; + } + for (i = 1; i <= b.length; i++) { + for (j = 1; j <= a.length; j++) { + if (b.charAt(i - 1) === a.charAt(j - 1)) { + matrix[i][j] = matrix[i - 1][j - 1]; + } + else { + if (i > 1 && + j > 1 && + b.charAt(i - 2) === a.charAt(j - 1) && + b.charAt(i - 1) === a.charAt(j - 2)) { + matrix[i][j] = matrix[i - 2][j - 2] + 1; + } + else { + matrix[i][j] = Math.min(matrix[i - 1][j - 1] + 1, Math.min(matrix[i][j - 1] + 1, matrix[i - 1][j] + 1)); + } + } + } + } + return matrix[b.length][a.length]; +} diff --git a/mybulma/node_modules/yargs/build/lib/utils/maybe-async-result.js b/mybulma/node_modules/yargs/build/lib/utils/maybe-async-result.js new file mode 100644 index 0000000..8c6a40c --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/utils/maybe-async-result.js @@ -0,0 +1,17 @@ +import { isPromise } from './is-promise.js'; +export function maybeAsyncResult(getResult, resultHandler, errorHandler = (err) => { + throw err; +}) { + try { + const result = isFunction(getResult) ? getResult() : getResult; + return isPromise(result) + ? 
result.then((result) => resultHandler(result)) + : resultHandler(result); + } + catch (err) { + return errorHandler(err); + } +} +function isFunction(arg) { + return typeof arg === 'function'; +} diff --git a/mybulma/node_modules/yargs/build/lib/utils/obj-filter.js b/mybulma/node_modules/yargs/build/lib/utils/obj-filter.js new file mode 100644 index 0000000..cd68ad2 --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/utils/obj-filter.js @@ -0,0 +1,10 @@ +import { objectKeys } from '../typings/common-types.js'; +export function objFilter(original = {}, filter = () => true) { + const obj = {}; + objectKeys(original).forEach(key => { + if (filter(key, original[key])) { + obj[key] = original[key]; + } + }); + return obj; +} diff --git a/mybulma/node_modules/yargs/build/lib/utils/process-argv.js b/mybulma/node_modules/yargs/build/lib/utils/process-argv.js new file mode 100644 index 0000000..74dc9e4 --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/utils/process-argv.js @@ -0,0 +1,17 @@ +function getProcessArgvBinIndex() { + if (isBundledElectronApp()) + return 0; + return 1; +} +function isBundledElectronApp() { + return isElectronApp() && !process.defaultApp; +} +function isElectronApp() { + return !!process.versions.electron; +} +export function hideBin(argv) { + return argv.slice(getProcessArgvBinIndex() + 1); +} +export function getProcessArgvBin() { + return process.argv[getProcessArgvBinIndex()]; +} diff --git a/mybulma/node_modules/yargs/build/lib/utils/set-blocking.js b/mybulma/node_modules/yargs/build/lib/utils/set-blocking.js new file mode 100644 index 0000000..88fb806 --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/utils/set-blocking.js @@ -0,0 +1,12 @@ +export default function setBlocking(blocking) { + if (typeof process === 'undefined') + return; + [process.stdout, process.stderr].forEach(_stream => { + const stream = _stream; + if (stream._handle && + stream.isTTY && + typeof stream._handle.setBlocking === 'function') { + stream._handle.setBlocking(blocking); + } + }); +} diff --git a/mybulma/node_modules/yargs/build/lib/utils/which-module.js b/mybulma/node_modules/yargs/build/lib/utils/which-module.js new file mode 100644 index 0000000..5974e22 --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/utils/which-module.js @@ -0,0 +1,10 @@ +export default function whichModule(exported) { + if (typeof require === 'undefined') + return null; + for (let i = 0, files = Object.keys(require.cache), mod; i < files.length; i++) { + mod = require.cache[files[i]]; + if (mod.exports === exported) + return mod; + } + return null; +} diff --git a/mybulma/node_modules/yargs/build/lib/validation.js b/mybulma/node_modules/yargs/build/lib/validation.js new file mode 100644 index 0000000..bd2e1b8 --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/validation.js @@ -0,0 +1,305 @@ +import { argsert } from './argsert.js'; +import { assertNotStrictEqual, } from './typings/common-types.js'; +import { levenshtein as distance } from './utils/levenshtein.js'; +import { objFilter } from './utils/obj-filter.js'; +const specialKeys = ['$0', '--', '_']; +export function validation(yargs, usage, shim) { + const __ = shim.y18n.__; + const __n = shim.y18n.__n; + const self = {}; + self.nonOptionCount = function nonOptionCount(argv) { + const demandedCommands = yargs.getDemandedCommands(); + const positionalCount = argv._.length + (argv['--'] ? 
argv['--'].length : 0); + const _s = positionalCount - yargs.getInternalMethods().getContext().commands.length; + if (demandedCommands._ && + (_s < demandedCommands._.min || _s > demandedCommands._.max)) { + if (_s < demandedCommands._.min) { + if (demandedCommands._.minMsg !== undefined) { + usage.fail(demandedCommands._.minMsg + ? demandedCommands._.minMsg + .replace(/\$0/g, _s.toString()) + .replace(/\$1/, demandedCommands._.min.toString()) + : null); + } + else { + usage.fail(__n('Not enough non-option arguments: got %s, need at least %s', 'Not enough non-option arguments: got %s, need at least %s', _s, _s.toString(), demandedCommands._.min.toString())); + } + } + else if (_s > demandedCommands._.max) { + if (demandedCommands._.maxMsg !== undefined) { + usage.fail(demandedCommands._.maxMsg + ? demandedCommands._.maxMsg + .replace(/\$0/g, _s.toString()) + .replace(/\$1/, demandedCommands._.max.toString()) + : null); + } + else { + usage.fail(__n('Too many non-option arguments: got %s, maximum of %s', 'Too many non-option arguments: got %s, maximum of %s', _s, _s.toString(), demandedCommands._.max.toString())); + } + } + } + }; + self.positionalCount = function positionalCount(required, observed) { + if (observed < required) { + usage.fail(__n('Not enough non-option arguments: got %s, need at least %s', 'Not enough non-option arguments: got %s, need at least %s', observed, observed + '', required + '')); + } + }; + self.requiredArguments = function requiredArguments(argv, demandedOptions) { + let missing = null; + for (const key of Object.keys(demandedOptions)) { + if (!Object.prototype.hasOwnProperty.call(argv, key) || + typeof argv[key] === 'undefined') { + missing = missing || {}; + missing[key] = demandedOptions[key]; + } + } + if (missing) { + const customMsgs = []; + for (const key of Object.keys(missing)) { + const msg = missing[key]; + if (msg && customMsgs.indexOf(msg) < 0) { + customMsgs.push(msg); + } + } + const customMsg = customMsgs.length ? `\n${customMsgs.join('\n')}` : ''; + usage.fail(__n('Missing required argument: %s', 'Missing required arguments: %s', Object.keys(missing).length, Object.keys(missing).join(', ') + customMsg)); + } + }; + self.unknownArguments = function unknownArguments(argv, aliases, positionalMap, isDefaultCommand, checkPositionals = true) { + var _a; + const commandKeys = yargs + .getInternalMethods() + .getCommandInstance() + .getCommands(); + const unknown = []; + const currentContext = yargs.getInternalMethods().getContext(); + Object.keys(argv).forEach(key => { + if (!specialKeys.includes(key) && + !Object.prototype.hasOwnProperty.call(positionalMap, key) && + !Object.prototype.hasOwnProperty.call(yargs.getInternalMethods().getParseContext(), key) && + !self.isValidAndSomeAliasIsNotNew(key, aliases)) { + unknown.push(key); + } + }); + if (checkPositionals && + (currentContext.commands.length > 0 || + commandKeys.length > 0 || + isDefaultCommand)) { + argv._.slice(currentContext.commands.length).forEach(key => { + if (!commandKeys.includes('' + key)) { + unknown.push('' + key); + } + }); + } + if (checkPositionals) { + const demandedCommands = yargs.getDemandedCommands(); + const maxNonOptDemanded = ((_a = demandedCommands._) === null || _a === void 0 ? 
void 0 : _a.max) || 0; + const expected = currentContext.commands.length + maxNonOptDemanded; + if (expected < argv._.length) { + argv._.slice(expected).forEach(key => { + key = String(key); + if (!currentContext.commands.includes(key) && + !unknown.includes(key)) { + unknown.push(key); + } + }); + } + } + if (unknown.length) { + usage.fail(__n('Unknown argument: %s', 'Unknown arguments: %s', unknown.length, unknown.map(s => (s.trim() ? s : `"${s}"`)).join(', '))); + } + }; + self.unknownCommands = function unknownCommands(argv) { + const commandKeys = yargs + .getInternalMethods() + .getCommandInstance() + .getCommands(); + const unknown = []; + const currentContext = yargs.getInternalMethods().getContext(); + if (currentContext.commands.length > 0 || commandKeys.length > 0) { + argv._.slice(currentContext.commands.length).forEach(key => { + if (!commandKeys.includes('' + key)) { + unknown.push('' + key); + } + }); + } + if (unknown.length > 0) { + usage.fail(__n('Unknown command: %s', 'Unknown commands: %s', unknown.length, unknown.join(', '))); + return true; + } + else { + return false; + } + }; + self.isValidAndSomeAliasIsNotNew = function isValidAndSomeAliasIsNotNew(key, aliases) { + if (!Object.prototype.hasOwnProperty.call(aliases, key)) { + return false; + } + const newAliases = yargs.parsed.newAliases; + return [key, ...aliases[key]].some(a => !Object.prototype.hasOwnProperty.call(newAliases, a) || !newAliases[key]); + }; + self.limitedChoices = function limitedChoices(argv) { + const options = yargs.getOptions(); + const invalid = {}; + if (!Object.keys(options.choices).length) + return; + Object.keys(argv).forEach(key => { + if (specialKeys.indexOf(key) === -1 && + Object.prototype.hasOwnProperty.call(options.choices, key)) { + [].concat(argv[key]).forEach(value => { + if (options.choices[key].indexOf(value) === -1 && + value !== undefined) { + invalid[key] = (invalid[key] || []).concat(value); + } + }); + } + }); + const invalidKeys = Object.keys(invalid); + if (!invalidKeys.length) + return; + let msg = __('Invalid values:'); + invalidKeys.forEach(key => { + msg += `\n ${__('Argument: %s, Given: %s, Choices: %s', key, usage.stringifiedValues(invalid[key]), usage.stringifiedValues(options.choices[key]))}`; + }); + usage.fail(msg); + }; + let implied = {}; + self.implies = function implies(key, value) { + argsert(' [array|number|string]', [key, value], arguments.length); + if (typeof key === 'object') { + Object.keys(key).forEach(k => { + self.implies(k, key[k]); + }); + } + else { + yargs.global(key); + if (!implied[key]) { + implied[key] = []; + } + if (Array.isArray(value)) { + value.forEach(i => self.implies(key, i)); + } + else { + assertNotStrictEqual(value, undefined, shim); + implied[key].push(value); + } + } + }; + self.getImplied = function getImplied() { + return implied; + }; + function keyExists(argv, val) { + const num = Number(val); + val = isNaN(num) ? 
val : num; + if (typeof val === 'number') { + val = argv._.length >= val; + } + else if (val.match(/^--no-.+/)) { + val = val.match(/^--no-(.+)/)[1]; + val = !Object.prototype.hasOwnProperty.call(argv, val); + } + else { + val = Object.prototype.hasOwnProperty.call(argv, val); + } + return val; + } + self.implications = function implications(argv) { + const implyFail = []; + Object.keys(implied).forEach(key => { + const origKey = key; + (implied[key] || []).forEach(value => { + let key = origKey; + const origValue = value; + key = keyExists(argv, key); + value = keyExists(argv, value); + if (key && !value) { + implyFail.push(` ${origKey} -> ${origValue}`); + } + }); + }); + if (implyFail.length) { + let msg = `${__('Implications failed:')}\n`; + implyFail.forEach(value => { + msg += value; + }); + usage.fail(msg); + } + }; + let conflicting = {}; + self.conflicts = function conflicts(key, value) { + argsert(' [array|string]', [key, value], arguments.length); + if (typeof key === 'object') { + Object.keys(key).forEach(k => { + self.conflicts(k, key[k]); + }); + } + else { + yargs.global(key); + if (!conflicting[key]) { + conflicting[key] = []; + } + if (Array.isArray(value)) { + value.forEach(i => self.conflicts(key, i)); + } + else { + conflicting[key].push(value); + } + } + }; + self.getConflicting = () => conflicting; + self.conflicting = function conflictingFn(argv) { + Object.keys(argv).forEach(key => { + if (conflicting[key]) { + conflicting[key].forEach(value => { + if (value && argv[key] !== undefined && argv[value] !== undefined) { + usage.fail(__('Arguments %s and %s are mutually exclusive', key, value)); + } + }); + } + }); + if (yargs.getInternalMethods().getParserConfiguration()['strip-dashed']) { + Object.keys(conflicting).forEach(key => { + conflicting[key].forEach(value => { + if (value && + argv[shim.Parser.camelCase(key)] !== undefined && + argv[shim.Parser.camelCase(value)] !== undefined) { + usage.fail(__('Arguments %s and %s are mutually exclusive', key, value)); + } + }); + }); + } + }; + self.recommendCommands = function recommendCommands(cmd, potentialCommands) { + const threshold = 3; + potentialCommands = potentialCommands.sort((a, b) => b.length - a.length); + let recommended = null; + let bestDistance = Infinity; + for (let i = 0, candidate; (candidate = potentialCommands[i]) !== undefined; i++) { + const d = distance(cmd, candidate); + if (d <= threshold && d < bestDistance) { + bestDistance = d; + recommended = candidate; + } + } + if (recommended) + usage.fail(__('Did you mean %s?', recommended)); + }; + self.reset = function reset(localLookup) { + implied = objFilter(implied, k => !localLookup[k]); + conflicting = objFilter(conflicting, k => !localLookup[k]); + return self; + }; + const frozens = []; + self.freeze = function freeze() { + frozens.push({ + implied, + conflicting, + }); + }; + self.unfreeze = function unfreeze() { + const frozen = frozens.pop(); + assertNotStrictEqual(frozen, undefined, shim); + ({ implied, conflicting } = frozen); + }; + return self; +} diff --git a/mybulma/node_modules/yargs/build/lib/yargs-factory.js b/mybulma/node_modules/yargs/build/lib/yargs-factory.js new file mode 100644 index 0000000..db8a96d --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/yargs-factory.js @@ -0,0 +1,1501 @@ +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new 
TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _YargsInstance_command, _YargsInstance_cwd, _YargsInstance_context, _YargsInstance_completion, _YargsInstance_completionCommand, _YargsInstance_defaultShowHiddenOpt, _YargsInstance_exitError, _YargsInstance_detectLocale, _YargsInstance_emittedWarnings, _YargsInstance_exitProcess, _YargsInstance_frozens, _YargsInstance_globalMiddleware, _YargsInstance_groups, _YargsInstance_hasOutput, _YargsInstance_helpOpt, _YargsInstance_isGlobalContext, _YargsInstance_logger, _YargsInstance_output, _YargsInstance_options, _YargsInstance_parentRequire, _YargsInstance_parserConfig, _YargsInstance_parseFn, _YargsInstance_parseContext, _YargsInstance_pkgs, _YargsInstance_preservedGroups, _YargsInstance_processArgs, _YargsInstance_recommendCommands, _YargsInstance_shim, _YargsInstance_strict, _YargsInstance_strictCommands, _YargsInstance_strictOptions, _YargsInstance_usage, _YargsInstance_versionOpt, _YargsInstance_validation; +import { command as Command, } from './command.js'; +import { assertNotStrictEqual, assertSingleKey, objectKeys, } from './typings/common-types.js'; +import { YError } from './yerror.js'; +import { usage as Usage } from './usage.js'; +import { argsert } from './argsert.js'; +import { completion as Completion, } from './completion.js'; +import { validation as Validation, } from './validation.js'; +import { objFilter } from './utils/obj-filter.js'; +import { applyExtends } from './utils/apply-extends.js'; +import { applyMiddleware, GlobalMiddleware, } from './middleware.js'; +import { isPromise } from './utils/is-promise.js'; +import { maybeAsyncResult } from './utils/maybe-async-result.js'; +import setBlocking from './utils/set-blocking.js'; +export function YargsFactory(_shim) { + return (processArgs = [], cwd = _shim.process.cwd(), parentRequire) => { + const yargs = new YargsInstance(processArgs, cwd, parentRequire, _shim); + Object.defineProperty(yargs, 'argv', { + get: () => { + return yargs.parse(); + }, + enumerable: true, + }); + yargs.help(); + yargs.version(); + return yargs; + }; +} +const kCopyDoubleDash = Symbol('copyDoubleDash'); +const kCreateLogger = Symbol('copyDoubleDash'); +const kDeleteFromParserHintObject = Symbol('deleteFromParserHintObject'); +const kEmitWarning = Symbol('emitWarning'); +const kFreeze = Symbol('freeze'); +const kGetDollarZero = Symbol('getDollarZero'); +const kGetParserConfiguration = Symbol('getParserConfiguration'); +const kGuessLocale = Symbol('guessLocale'); +const kGuessVersion = Symbol('guessVersion'); +const kParsePositionalNumbers = Symbol('parsePositionalNumbers'); +const kPkgUp = Symbol('pkgUp'); +const kPopulateParserHintArray = Symbol('populateParserHintArray'); +const 
kPopulateParserHintSingleValueDictionary = Symbol('populateParserHintSingleValueDictionary'); +const kPopulateParserHintArrayDictionary = Symbol('populateParserHintArrayDictionary'); +const kPopulateParserHintDictionary = Symbol('populateParserHintDictionary'); +const kSanitizeKey = Symbol('sanitizeKey'); +const kSetKey = Symbol('setKey'); +const kUnfreeze = Symbol('unfreeze'); +const kValidateAsync = Symbol('validateAsync'); +const kGetCommandInstance = Symbol('getCommandInstance'); +const kGetContext = Symbol('getContext'); +const kGetHasOutput = Symbol('getHasOutput'); +const kGetLoggerInstance = Symbol('getLoggerInstance'); +const kGetParseContext = Symbol('getParseContext'); +const kGetUsageInstance = Symbol('getUsageInstance'); +const kGetValidationInstance = Symbol('getValidationInstance'); +const kHasParseCallback = Symbol('hasParseCallback'); +const kIsGlobalContext = Symbol('isGlobalContext'); +const kPostProcess = Symbol('postProcess'); +const kRebase = Symbol('rebase'); +const kReset = Symbol('reset'); +const kRunYargsParserAndExecuteCommands = Symbol('runYargsParserAndExecuteCommands'); +const kRunValidation = Symbol('runValidation'); +const kSetHasOutput = Symbol('setHasOutput'); +const kTrackManuallySetKeys = Symbol('kTrackManuallySetKeys'); +export class YargsInstance { + constructor(processArgs = [], cwd, parentRequire, shim) { + this.customScriptName = false; + this.parsed = false; + _YargsInstance_command.set(this, void 0); + _YargsInstance_cwd.set(this, void 0); + _YargsInstance_context.set(this, { commands: [], fullCommands: [] }); + _YargsInstance_completion.set(this, null); + _YargsInstance_completionCommand.set(this, null); + _YargsInstance_defaultShowHiddenOpt.set(this, 'show-hidden'); + _YargsInstance_exitError.set(this, null); + _YargsInstance_detectLocale.set(this, true); + _YargsInstance_emittedWarnings.set(this, {}); + _YargsInstance_exitProcess.set(this, true); + _YargsInstance_frozens.set(this, []); + _YargsInstance_globalMiddleware.set(this, void 0); + _YargsInstance_groups.set(this, {}); + _YargsInstance_hasOutput.set(this, false); + _YargsInstance_helpOpt.set(this, null); + _YargsInstance_isGlobalContext.set(this, true); + _YargsInstance_logger.set(this, void 0); + _YargsInstance_output.set(this, ''); + _YargsInstance_options.set(this, void 0); + _YargsInstance_parentRequire.set(this, void 0); + _YargsInstance_parserConfig.set(this, {}); + _YargsInstance_parseFn.set(this, null); + _YargsInstance_parseContext.set(this, null); + _YargsInstance_pkgs.set(this, {}); + _YargsInstance_preservedGroups.set(this, {}); + _YargsInstance_processArgs.set(this, void 0); + _YargsInstance_recommendCommands.set(this, false); + _YargsInstance_shim.set(this, void 0); + _YargsInstance_strict.set(this, false); + _YargsInstance_strictCommands.set(this, false); + _YargsInstance_strictOptions.set(this, false); + _YargsInstance_usage.set(this, void 0); + _YargsInstance_versionOpt.set(this, null); + _YargsInstance_validation.set(this, void 0); + __classPrivateFieldSet(this, _YargsInstance_shim, shim, "f"); + __classPrivateFieldSet(this, _YargsInstance_processArgs, processArgs, "f"); + __classPrivateFieldSet(this, _YargsInstance_cwd, cwd, "f"); + __classPrivateFieldSet(this, _YargsInstance_parentRequire, parentRequire, "f"); + __classPrivateFieldSet(this, _YargsInstance_globalMiddleware, new GlobalMiddleware(this), "f"); + this.$0 = this[kGetDollarZero](); + this[kReset](); + __classPrivateFieldSet(this, _YargsInstance_command, __classPrivateFieldGet(this, _YargsInstance_command, 
"f"), "f"); + __classPrivateFieldSet(this, _YargsInstance_usage, __classPrivateFieldGet(this, _YargsInstance_usage, "f"), "f"); + __classPrivateFieldSet(this, _YargsInstance_validation, __classPrivateFieldGet(this, _YargsInstance_validation, "f"), "f"); + __classPrivateFieldSet(this, _YargsInstance_options, __classPrivateFieldGet(this, _YargsInstance_options, "f"), "f"); + __classPrivateFieldGet(this, _YargsInstance_options, "f").showHiddenOpt = __classPrivateFieldGet(this, _YargsInstance_defaultShowHiddenOpt, "f"); + __classPrivateFieldSet(this, _YargsInstance_logger, this[kCreateLogger](), "f"); + } + addHelpOpt(opt, msg) { + const defaultHelpOpt = 'help'; + argsert('[string|boolean] [string]', [opt, msg], arguments.length); + if (__classPrivateFieldGet(this, _YargsInstance_helpOpt, "f")) { + this[kDeleteFromParserHintObject](__classPrivateFieldGet(this, _YargsInstance_helpOpt, "f")); + __classPrivateFieldSet(this, _YargsInstance_helpOpt, null, "f"); + } + if (opt === false && msg === undefined) + return this; + __classPrivateFieldSet(this, _YargsInstance_helpOpt, typeof opt === 'string' ? opt : defaultHelpOpt, "f"); + this.boolean(__classPrivateFieldGet(this, _YargsInstance_helpOpt, "f")); + this.describe(__classPrivateFieldGet(this, _YargsInstance_helpOpt, "f"), msg || __classPrivateFieldGet(this, _YargsInstance_usage, "f").deferY18nLookup('Show help')); + return this; + } + help(opt, msg) { + return this.addHelpOpt(opt, msg); + } + addShowHiddenOpt(opt, msg) { + argsert('[string|boolean] [string]', [opt, msg], arguments.length); + if (opt === false && msg === undefined) + return this; + const showHiddenOpt = typeof opt === 'string' ? opt : __classPrivateFieldGet(this, _YargsInstance_defaultShowHiddenOpt, "f"); + this.boolean(showHiddenOpt); + this.describe(showHiddenOpt, msg || __classPrivateFieldGet(this, _YargsInstance_usage, "f").deferY18nLookup('Show hidden options')); + __classPrivateFieldGet(this, _YargsInstance_options, "f").showHiddenOpt = showHiddenOpt; + return this; + } + showHidden(opt, msg) { + return this.addShowHiddenOpt(opt, msg); + } + alias(key, value) { + argsert(' [string|array]', [key, value], arguments.length); + this[kPopulateParserHintArrayDictionary](this.alias.bind(this), 'alias', key, value); + return this; + } + array(keys) { + argsert('', [keys], arguments.length); + this[kPopulateParserHintArray]('array', keys); + this[kTrackManuallySetKeys](keys); + return this; + } + boolean(keys) { + argsert('', [keys], arguments.length); + this[kPopulateParserHintArray]('boolean', keys); + this[kTrackManuallySetKeys](keys); + return this; + } + check(f, global) { + argsert(' [boolean]', [f, global], arguments.length); + this.middleware((argv, _yargs) => { + return maybeAsyncResult(() => { + return f(argv, _yargs.getOptions()); + }, (result) => { + if (!result) { + __classPrivateFieldGet(this, _YargsInstance_usage, "f").fail(__classPrivateFieldGet(this, _YargsInstance_shim, "f").y18n.__('Argument check failed: %s', f.toString())); + } + else if (typeof result === 'string' || result instanceof Error) { + __classPrivateFieldGet(this, _YargsInstance_usage, "f").fail(result.toString(), result); + } + return argv; + }, (err) => { + __classPrivateFieldGet(this, _YargsInstance_usage, "f").fail(err.message ? 
err.message : err.toString(), err); + return argv; + }); + }, false, global); + return this; + } + choices(key, value) { + argsert(' [string|array]', [key, value], arguments.length); + this[kPopulateParserHintArrayDictionary](this.choices.bind(this), 'choices', key, value); + return this; + } + coerce(keys, value) { + argsert(' [function]', [keys, value], arguments.length); + if (Array.isArray(keys)) { + if (!value) { + throw new YError('coerce callback must be provided'); + } + for (const key of keys) { + this.coerce(key, value); + } + return this; + } + else if (typeof keys === 'object') { + for (const key of Object.keys(keys)) { + this.coerce(key, keys[key]); + } + return this; + } + if (!value) { + throw new YError('coerce callback must be provided'); + } + __classPrivateFieldGet(this, _YargsInstance_options, "f").key[keys] = true; + __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").addCoerceMiddleware((argv, yargs) => { + let aliases; + const shouldCoerce = Object.prototype.hasOwnProperty.call(argv, keys); + if (!shouldCoerce) { + return argv; + } + return maybeAsyncResult(() => { + aliases = yargs.getAliases(); + return value(argv[keys]); + }, (result) => { + argv[keys] = result; + const stripAliased = yargs + .getInternalMethods() + .getParserConfiguration()['strip-aliased']; + if (aliases[keys] && stripAliased !== true) { + for (const alias of aliases[keys]) { + argv[alias] = result; + } + } + return argv; + }, (err) => { + throw new YError(err.message); + }); + }, keys); + return this; + } + conflicts(key1, key2) { + argsert(' [string|array]', [key1, key2], arguments.length); + __classPrivateFieldGet(this, _YargsInstance_validation, "f").conflicts(key1, key2); + return this; + } + config(key = 'config', msg, parseFn) { + argsert('[object|string] [string|function] [function]', [key, msg, parseFn], arguments.length); + if (typeof key === 'object' && !Array.isArray(key)) { + key = applyExtends(key, __classPrivateFieldGet(this, _YargsInstance_cwd, "f"), this[kGetParserConfiguration]()['deep-merge-config'] || false, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); + __classPrivateFieldGet(this, _YargsInstance_options, "f").configObjects = (__classPrivateFieldGet(this, _YargsInstance_options, "f").configObjects || []).concat(key); + return this; + } + if (typeof msg === 'function') { + parseFn = msg; + msg = undefined; + } + this.describe(key, msg || __classPrivateFieldGet(this, _YargsInstance_usage, "f").deferY18nLookup('Path to JSON config file')); + (Array.isArray(key) ? 
key : [key]).forEach(k => { + __classPrivateFieldGet(this, _YargsInstance_options, "f").config[k] = parseFn || true; + }); + return this; + } + completion(cmd, desc, fn) { + argsert('[string] [string|boolean|function] [function]', [cmd, desc, fn], arguments.length); + if (typeof desc === 'function') { + fn = desc; + desc = undefined; + } + __classPrivateFieldSet(this, _YargsInstance_completionCommand, cmd || __classPrivateFieldGet(this, _YargsInstance_completionCommand, "f") || 'completion', "f"); + if (!desc && desc !== false) { + desc = 'generate completion script'; + } + this.command(__classPrivateFieldGet(this, _YargsInstance_completionCommand, "f"), desc); + if (fn) + __classPrivateFieldGet(this, _YargsInstance_completion, "f").registerFunction(fn); + return this; + } + command(cmd, description, builder, handler, middlewares, deprecated) { + argsert(' [string|boolean] [function|object] [function] [array] [boolean|string]', [cmd, description, builder, handler, middlewares, deprecated], arguments.length); + __classPrivateFieldGet(this, _YargsInstance_command, "f").addHandler(cmd, description, builder, handler, middlewares, deprecated); + return this; + } + commands(cmd, description, builder, handler, middlewares, deprecated) { + return this.command(cmd, description, builder, handler, middlewares, deprecated); + } + commandDir(dir, opts) { + argsert(' [object]', [dir, opts], arguments.length); + const req = __classPrivateFieldGet(this, _YargsInstance_parentRequire, "f") || __classPrivateFieldGet(this, _YargsInstance_shim, "f").require; + __classPrivateFieldGet(this, _YargsInstance_command, "f").addDirectory(dir, req, __classPrivateFieldGet(this, _YargsInstance_shim, "f").getCallerFile(), opts); + return this; + } + count(keys) { + argsert('', [keys], arguments.length); + this[kPopulateParserHintArray]('count', keys); + this[kTrackManuallySetKeys](keys); + return this; + } + default(key, value, defaultDescription) { + argsert(' [*] [string]', [key, value, defaultDescription], arguments.length); + if (defaultDescription) { + assertSingleKey(key, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); + __classPrivateFieldGet(this, _YargsInstance_options, "f").defaultDescription[key] = defaultDescription; + } + if (typeof value === 'function') { + assertSingleKey(key, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); + if (!__classPrivateFieldGet(this, _YargsInstance_options, "f").defaultDescription[key]) + __classPrivateFieldGet(this, _YargsInstance_options, "f").defaultDescription[key] = + __classPrivateFieldGet(this, _YargsInstance_usage, "f").functionDescription(value); + value = value.call(); + } + this[kPopulateParserHintSingleValueDictionary](this.default.bind(this), 'default', key, value); + return this; + } + defaults(key, value, defaultDescription) { + return this.default(key, value, defaultDescription); + } + demandCommand(min = 1, max, minMsg, maxMsg) { + argsert('[number] [number|string] [string|null|undefined] [string|null|undefined]', [min, max, minMsg, maxMsg], arguments.length); + if (typeof max !== 'number') { + minMsg = max; + max = Infinity; + } + this.global('_', false); + __classPrivateFieldGet(this, _YargsInstance_options, "f").demandedCommands._ = { + min, + max, + minMsg, + maxMsg, + }; + return this; + } + demand(keys, max, msg) { + if (Array.isArray(max)) { + max.forEach(key => { + assertNotStrictEqual(msg, true, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); + this.demandOption(key, msg); + }); + max = Infinity; + } + else if (typeof max !== 
'number') { + msg = max; + max = Infinity; + } + if (typeof keys === 'number') { + assertNotStrictEqual(msg, true, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); + this.demandCommand(keys, max, msg, msg); + } + else if (Array.isArray(keys)) { + keys.forEach(key => { + assertNotStrictEqual(msg, true, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); + this.demandOption(key, msg); + }); + } + else { + if (typeof msg === 'string') { + this.demandOption(keys, msg); + } + else if (msg === true || typeof msg === 'undefined') { + this.demandOption(keys); + } + } + return this; + } + demandOption(keys, msg) { + argsert(' [string]', [keys, msg], arguments.length); + this[kPopulateParserHintSingleValueDictionary](this.demandOption.bind(this), 'demandedOptions', keys, msg); + return this; + } + deprecateOption(option, message) { + argsert(' [string|boolean]', [option, message], arguments.length); + __classPrivateFieldGet(this, _YargsInstance_options, "f").deprecatedOptions[option] = message; + return this; + } + describe(keys, description) { + argsert(' [string]', [keys, description], arguments.length); + this[kSetKey](keys, true); + __classPrivateFieldGet(this, _YargsInstance_usage, "f").describe(keys, description); + return this; + } + detectLocale(detect) { + argsert('', [detect], arguments.length); + __classPrivateFieldSet(this, _YargsInstance_detectLocale, detect, "f"); + return this; + } + env(prefix) { + argsert('[string|boolean]', [prefix], arguments.length); + if (prefix === false) + delete __classPrivateFieldGet(this, _YargsInstance_options, "f").envPrefix; + else + __classPrivateFieldGet(this, _YargsInstance_options, "f").envPrefix = prefix || ''; + return this; + } + epilogue(msg) { + argsert('', [msg], arguments.length); + __classPrivateFieldGet(this, _YargsInstance_usage, "f").epilog(msg); + return this; + } + epilog(msg) { + return this.epilogue(msg); + } + example(cmd, description) { + argsert(' [string]', [cmd, description], arguments.length); + if (Array.isArray(cmd)) { + cmd.forEach(exampleParams => this.example(...exampleParams)); + } + else { + __classPrivateFieldGet(this, _YargsInstance_usage, "f").example(cmd, description); + } + return this; + } + exit(code, err) { + __classPrivateFieldSet(this, _YargsInstance_hasOutput, true, "f"); + __classPrivateFieldSet(this, _YargsInstance_exitError, err, "f"); + if (__classPrivateFieldGet(this, _YargsInstance_exitProcess, "f")) + __classPrivateFieldGet(this, _YargsInstance_shim, "f").process.exit(code); + } + exitProcess(enabled = true) { + argsert('[boolean]', [enabled], arguments.length); + __classPrivateFieldSet(this, _YargsInstance_exitProcess, enabled, "f"); + return this; + } + fail(f) { + argsert('', [f], arguments.length); + if (typeof f === 'boolean' && f !== false) { + throw new YError("Invalid first argument. Expected function or boolean 'false'"); + } + __classPrivateFieldGet(this, _YargsInstance_usage, "f").failFn(f); + return this; + } + getAliases() { + return this.parsed ? 
this.parsed.aliases : {}; + } + async getCompletion(args, done) { + argsert(' [function]', [args, done], arguments.length); + if (!done) { + return new Promise((resolve, reject) => { + __classPrivateFieldGet(this, _YargsInstance_completion, "f").getCompletion(args, (err, completions) => { + if (err) + reject(err); + else + resolve(completions); + }); + }); + } + else { + return __classPrivateFieldGet(this, _YargsInstance_completion, "f").getCompletion(args, done); + } + } + getDemandedOptions() { + argsert([], 0); + return __classPrivateFieldGet(this, _YargsInstance_options, "f").demandedOptions; + } + getDemandedCommands() { + argsert([], 0); + return __classPrivateFieldGet(this, _YargsInstance_options, "f").demandedCommands; + } + getDeprecatedOptions() { + argsert([], 0); + return __classPrivateFieldGet(this, _YargsInstance_options, "f").deprecatedOptions; + } + getDetectLocale() { + return __classPrivateFieldGet(this, _YargsInstance_detectLocale, "f"); + } + getExitProcess() { + return __classPrivateFieldGet(this, _YargsInstance_exitProcess, "f"); + } + getGroups() { + return Object.assign({}, __classPrivateFieldGet(this, _YargsInstance_groups, "f"), __classPrivateFieldGet(this, _YargsInstance_preservedGroups, "f")); + } + getHelp() { + __classPrivateFieldSet(this, _YargsInstance_hasOutput, true, "f"); + if (!__classPrivateFieldGet(this, _YargsInstance_usage, "f").hasCachedHelpMessage()) { + if (!this.parsed) { + const parse = this[kRunYargsParserAndExecuteCommands](__classPrivateFieldGet(this, _YargsInstance_processArgs, "f"), undefined, undefined, 0, true); + if (isPromise(parse)) { + return parse.then(() => { + return __classPrivateFieldGet(this, _YargsInstance_usage, "f").help(); + }); + } + } + const builderResponse = __classPrivateFieldGet(this, _YargsInstance_command, "f").runDefaultBuilderOn(this); + if (isPromise(builderResponse)) { + return builderResponse.then(() => { + return __classPrivateFieldGet(this, _YargsInstance_usage, "f").help(); + }); + } + } + return Promise.resolve(__classPrivateFieldGet(this, _YargsInstance_usage, "f").help()); + } + getOptions() { + return __classPrivateFieldGet(this, _YargsInstance_options, "f"); + } + getStrict() { + return __classPrivateFieldGet(this, _YargsInstance_strict, "f"); + } + getStrictCommands() { + return __classPrivateFieldGet(this, _YargsInstance_strictCommands, "f"); + } + getStrictOptions() { + return __classPrivateFieldGet(this, _YargsInstance_strictOptions, "f"); + } + global(globals, global) { + argsert(' [boolean]', [globals, global], arguments.length); + globals = [].concat(globals); + if (global !== false) { + __classPrivateFieldGet(this, _YargsInstance_options, "f").local = __classPrivateFieldGet(this, _YargsInstance_options, "f").local.filter(l => globals.indexOf(l) === -1); + } + else { + globals.forEach(g => { + if (!__classPrivateFieldGet(this, _YargsInstance_options, "f").local.includes(g)) + __classPrivateFieldGet(this, _YargsInstance_options, "f").local.push(g); + }); + } + return this; + } + group(opts, groupName) { + argsert(' ', [opts, groupName], arguments.length); + const existing = __classPrivateFieldGet(this, _YargsInstance_preservedGroups, "f")[groupName] || __classPrivateFieldGet(this, _YargsInstance_groups, "f")[groupName]; + if (__classPrivateFieldGet(this, _YargsInstance_preservedGroups, "f")[groupName]) { + delete __classPrivateFieldGet(this, _YargsInstance_preservedGroups, "f")[groupName]; + } + const seen = {}; + __classPrivateFieldGet(this, _YargsInstance_groups, "f")[groupName] = (existing || 
[]).concat(opts).filter(key => { + if (seen[key]) + return false; + return (seen[key] = true); + }); + return this; + } + hide(key) { + argsert('', [key], arguments.length); + __classPrivateFieldGet(this, _YargsInstance_options, "f").hiddenOptions.push(key); + return this; + } + implies(key, value) { + argsert(' [number|string|array]', [key, value], arguments.length); + __classPrivateFieldGet(this, _YargsInstance_validation, "f").implies(key, value); + return this; + } + locale(locale) { + argsert('[string]', [locale], arguments.length); + if (locale === undefined) { + this[kGuessLocale](); + return __classPrivateFieldGet(this, _YargsInstance_shim, "f").y18n.getLocale(); + } + __classPrivateFieldSet(this, _YargsInstance_detectLocale, false, "f"); + __classPrivateFieldGet(this, _YargsInstance_shim, "f").y18n.setLocale(locale); + return this; + } + middleware(callback, applyBeforeValidation, global) { + return __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").addMiddleware(callback, !!applyBeforeValidation, global); + } + nargs(key, value) { + argsert(' [number]', [key, value], arguments.length); + this[kPopulateParserHintSingleValueDictionary](this.nargs.bind(this), 'narg', key, value); + return this; + } + normalize(keys) { + argsert('', [keys], arguments.length); + this[kPopulateParserHintArray]('normalize', keys); + return this; + } + number(keys) { + argsert('', [keys], arguments.length); + this[kPopulateParserHintArray]('number', keys); + this[kTrackManuallySetKeys](keys); + return this; + } + option(key, opt) { + argsert(' [object]', [key, opt], arguments.length); + if (typeof key === 'object') { + Object.keys(key).forEach(k => { + this.options(k, key[k]); + }); + } + else { + if (typeof opt !== 'object') { + opt = {}; + } + this[kTrackManuallySetKeys](key); + if (__classPrivateFieldGet(this, _YargsInstance_versionOpt, "f") && (key === 'version' || (opt === null || opt === void 0 ? void 0 : opt.alias) === 'version')) { + this[kEmitWarning]([ + '"version" is a reserved word.', + 'Please do one of the following:', + '- Disable version with `yargs.version(false)` if using "version" as an option', + '- Use the built-in `yargs.version` method instead (if applicable)', + '- Use a different option key', + 'https://yargs.js.org/docs/#api-reference-version', + ].join('\n'), undefined, 'versionWarning'); + } + __classPrivateFieldGet(this, _YargsInstance_options, "f").key[key] = true; + if (opt.alias) + this.alias(key, opt.alias); + const deprecate = opt.deprecate || opt.deprecated; + if (deprecate) { + this.deprecateOption(key, deprecate); + } + const demand = opt.demand || opt.required || opt.require; + if (demand) { + this.demand(key, demand); + } + if (opt.demandOption) { + this.demandOption(key, typeof opt.demandOption === 'string' ? 
opt.demandOption : undefined); + } + if (opt.conflicts) { + this.conflicts(key, opt.conflicts); + } + if ('default' in opt) { + this.default(key, opt.default); + } + if (opt.implies !== undefined) { + this.implies(key, opt.implies); + } + if (opt.nargs !== undefined) { + this.nargs(key, opt.nargs); + } + if (opt.config) { + this.config(key, opt.configParser); + } + if (opt.normalize) { + this.normalize(key); + } + if (opt.choices) { + this.choices(key, opt.choices); + } + if (opt.coerce) { + this.coerce(key, opt.coerce); + } + if (opt.group) { + this.group(key, opt.group); + } + if (opt.boolean || opt.type === 'boolean') { + this.boolean(key); + if (opt.alias) + this.boolean(opt.alias); + } + if (opt.array || opt.type === 'array') { + this.array(key); + if (opt.alias) + this.array(opt.alias); + } + if (opt.number || opt.type === 'number') { + this.number(key); + if (opt.alias) + this.number(opt.alias); + } + if (opt.string || opt.type === 'string') { + this.string(key); + if (opt.alias) + this.string(opt.alias); + } + if (opt.count || opt.type === 'count') { + this.count(key); + } + if (typeof opt.global === 'boolean') { + this.global(key, opt.global); + } + if (opt.defaultDescription) { + __classPrivateFieldGet(this, _YargsInstance_options, "f").defaultDescription[key] = opt.defaultDescription; + } + if (opt.skipValidation) { + this.skipValidation(key); + } + const desc = opt.describe || opt.description || opt.desc; + const descriptions = __classPrivateFieldGet(this, _YargsInstance_usage, "f").getDescriptions(); + if (!Object.prototype.hasOwnProperty.call(descriptions, key) || + typeof desc === 'string') { + this.describe(key, desc); + } + if (opt.hidden) { + this.hide(key); + } + if (opt.requiresArg) { + this.requiresArg(key); + } + } + return this; + } + options(key, opt) { + return this.option(key, opt); + } + parse(args, shortCircuit, _parseFn) { + argsert('[string|array] [function|boolean|object] [function]', [args, shortCircuit, _parseFn], arguments.length); + this[kFreeze](); + if (typeof args === 'undefined') { + args = __classPrivateFieldGet(this, _YargsInstance_processArgs, "f"); + } + if (typeof shortCircuit === 'object') { + __classPrivateFieldSet(this, _YargsInstance_parseContext, shortCircuit, "f"); + shortCircuit = _parseFn; + } + if (typeof shortCircuit === 'function') { + __classPrivateFieldSet(this, _YargsInstance_parseFn, shortCircuit, "f"); + shortCircuit = false; + } + if (!shortCircuit) + __classPrivateFieldSet(this, _YargsInstance_processArgs, args, "f"); + if (__classPrivateFieldGet(this, _YargsInstance_parseFn, "f")) + __classPrivateFieldSet(this, _YargsInstance_exitProcess, false, "f"); + const parsed = this[kRunYargsParserAndExecuteCommands](args, !!shortCircuit); + const tmpParsed = this.parsed; + __classPrivateFieldGet(this, _YargsInstance_completion, "f").setParsed(this.parsed); + if (isPromise(parsed)) { + return parsed + .then(argv => { + if (__classPrivateFieldGet(this, _YargsInstance_parseFn, "f")) + __classPrivateFieldGet(this, _YargsInstance_parseFn, "f").call(this, __classPrivateFieldGet(this, _YargsInstance_exitError, "f"), argv, __classPrivateFieldGet(this, _YargsInstance_output, "f")); + return argv; + }) + .catch(err => { + if (__classPrivateFieldGet(this, _YargsInstance_parseFn, "f")) { + __classPrivateFieldGet(this, _YargsInstance_parseFn, "f")(err, this.parsed.argv, __classPrivateFieldGet(this, _YargsInstance_output, "f")); + } + throw err; + }) + .finally(() => { + this[kUnfreeze](); + this.parsed = tmpParsed; + }); + } + else { + if 
(__classPrivateFieldGet(this, _YargsInstance_parseFn, "f")) + __classPrivateFieldGet(this, _YargsInstance_parseFn, "f").call(this, __classPrivateFieldGet(this, _YargsInstance_exitError, "f"), parsed, __classPrivateFieldGet(this, _YargsInstance_output, "f")); + this[kUnfreeze](); + this.parsed = tmpParsed; + } + return parsed; + } + parseAsync(args, shortCircuit, _parseFn) { + const maybePromise = this.parse(args, shortCircuit, _parseFn); + return !isPromise(maybePromise) + ? Promise.resolve(maybePromise) + : maybePromise; + } + parseSync(args, shortCircuit, _parseFn) { + const maybePromise = this.parse(args, shortCircuit, _parseFn); + if (isPromise(maybePromise)) { + throw new YError('.parseSync() must not be used with asynchronous builders, handlers, or middleware'); + } + return maybePromise; + } + parserConfiguration(config) { + argsert('', [config], arguments.length); + __classPrivateFieldSet(this, _YargsInstance_parserConfig, config, "f"); + return this; + } + pkgConf(key, rootPath) { + argsert(' [string]', [key, rootPath], arguments.length); + let conf = null; + const obj = this[kPkgUp](rootPath || __classPrivateFieldGet(this, _YargsInstance_cwd, "f")); + if (obj[key] && typeof obj[key] === 'object') { + conf = applyExtends(obj[key], rootPath || __classPrivateFieldGet(this, _YargsInstance_cwd, "f"), this[kGetParserConfiguration]()['deep-merge-config'] || false, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); + __classPrivateFieldGet(this, _YargsInstance_options, "f").configObjects = (__classPrivateFieldGet(this, _YargsInstance_options, "f").configObjects || []).concat(conf); + } + return this; + } + positional(key, opts) { + argsert(' ', [key, opts], arguments.length); + const supportedOpts = [ + 'default', + 'defaultDescription', + 'implies', + 'normalize', + 'choices', + 'conflicts', + 'coerce', + 'type', + 'describe', + 'desc', + 'description', + 'alias', + ]; + opts = objFilter(opts, (k, v) => { + if (k === 'type' && !['string', 'number', 'boolean'].includes(v)) + return false; + return supportedOpts.includes(k); + }); + const fullCommand = __classPrivateFieldGet(this, _YargsInstance_context, "f").fullCommands[__classPrivateFieldGet(this, _YargsInstance_context, "f").fullCommands.length - 1]; + const parseOptions = fullCommand + ? 
__classPrivateFieldGet(this, _YargsInstance_command, "f").cmdToParseOptions(fullCommand) + : { + array: [], + alias: {}, + default: {}, + demand: {}, + }; + objectKeys(parseOptions).forEach(pk => { + const parseOption = parseOptions[pk]; + if (Array.isArray(parseOption)) { + if (parseOption.indexOf(key) !== -1) + opts[pk] = true; + } + else { + if (parseOption[key] && !(pk in opts)) + opts[pk] = parseOption[key]; + } + }); + this.group(key, __classPrivateFieldGet(this, _YargsInstance_usage, "f").getPositionalGroupName()); + return this.option(key, opts); + } + recommendCommands(recommend = true) { + argsert('[boolean]', [recommend], arguments.length); + __classPrivateFieldSet(this, _YargsInstance_recommendCommands, recommend, "f"); + return this; + } + required(keys, max, msg) { + return this.demand(keys, max, msg); + } + require(keys, max, msg) { + return this.demand(keys, max, msg); + } + requiresArg(keys) { + argsert(' [number]', [keys], arguments.length); + if (typeof keys === 'string' && __classPrivateFieldGet(this, _YargsInstance_options, "f").narg[keys]) { + return this; + } + else { + this[kPopulateParserHintSingleValueDictionary](this.requiresArg.bind(this), 'narg', keys, NaN); + } + return this; + } + showCompletionScript($0, cmd) { + argsert('[string] [string]', [$0, cmd], arguments.length); + $0 = $0 || this.$0; + __classPrivateFieldGet(this, _YargsInstance_logger, "f").log(__classPrivateFieldGet(this, _YargsInstance_completion, "f").generateCompletionScript($0, cmd || __classPrivateFieldGet(this, _YargsInstance_completionCommand, "f") || 'completion')); + return this; + } + showHelp(level) { + argsert('[string|function]', [level], arguments.length); + __classPrivateFieldSet(this, _YargsInstance_hasOutput, true, "f"); + if (!__classPrivateFieldGet(this, _YargsInstance_usage, "f").hasCachedHelpMessage()) { + if (!this.parsed) { + const parse = this[kRunYargsParserAndExecuteCommands](__classPrivateFieldGet(this, _YargsInstance_processArgs, "f"), undefined, undefined, 0, true); + if (isPromise(parse)) { + parse.then(() => { + __classPrivateFieldGet(this, _YargsInstance_usage, "f").showHelp(level); + }); + return this; + } + } + const builderResponse = __classPrivateFieldGet(this, _YargsInstance_command, "f").runDefaultBuilderOn(this); + if (isPromise(builderResponse)) { + builderResponse.then(() => { + __classPrivateFieldGet(this, _YargsInstance_usage, "f").showHelp(level); + }); + return this; + } + } + __classPrivateFieldGet(this, _YargsInstance_usage, "f").showHelp(level); + return this; + } + scriptName(scriptName) { + this.customScriptName = true; + this.$0 = scriptName; + return this; + } + showHelpOnFail(enabled, message) { + argsert('[boolean|string] [string]', [enabled, message], arguments.length); + __classPrivateFieldGet(this, _YargsInstance_usage, "f").showHelpOnFail(enabled, message); + return this; + } + showVersion(level) { + argsert('[string|function]', [level], arguments.length); + __classPrivateFieldGet(this, _YargsInstance_usage, "f").showVersion(level); + return this; + } + skipValidation(keys) { + argsert('', [keys], arguments.length); + this[kPopulateParserHintArray]('skipValidation', keys); + return this; + } + strict(enabled) { + argsert('[boolean]', [enabled], arguments.length); + __classPrivateFieldSet(this, _YargsInstance_strict, enabled !== false, "f"); + return this; + } + strictCommands(enabled) { + argsert('[boolean]', [enabled], arguments.length); + __classPrivateFieldSet(this, _YargsInstance_strictCommands, enabled !== false, "f"); + return this; + 
} + strictOptions(enabled) { + argsert('[boolean]', [enabled], arguments.length); + __classPrivateFieldSet(this, _YargsInstance_strictOptions, enabled !== false, "f"); + return this; + } + string(keys) { + argsert('', [keys], arguments.length); + this[kPopulateParserHintArray]('string', keys); + this[kTrackManuallySetKeys](keys); + return this; + } + terminalWidth() { + argsert([], 0); + return __classPrivateFieldGet(this, _YargsInstance_shim, "f").process.stdColumns; + } + updateLocale(obj) { + return this.updateStrings(obj); + } + updateStrings(obj) { + argsert('', [obj], arguments.length); + __classPrivateFieldSet(this, _YargsInstance_detectLocale, false, "f"); + __classPrivateFieldGet(this, _YargsInstance_shim, "f").y18n.updateLocale(obj); + return this; + } + usage(msg, description, builder, handler) { + argsert(' [string|boolean] [function|object] [function]', [msg, description, builder, handler], arguments.length); + if (description !== undefined) { + assertNotStrictEqual(msg, null, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); + if ((msg || '').match(/^\$0( |$)/)) { + return this.command(msg, description, builder, handler); + } + else { + throw new YError('.usage() description must start with $0 if being used as alias for .command()'); + } + } + else { + __classPrivateFieldGet(this, _YargsInstance_usage, "f").usage(msg); + return this; + } + } + version(opt, msg, ver) { + const defaultVersionOpt = 'version'; + argsert('[boolean|string] [string] [string]', [opt, msg, ver], arguments.length); + if (__classPrivateFieldGet(this, _YargsInstance_versionOpt, "f")) { + this[kDeleteFromParserHintObject](__classPrivateFieldGet(this, _YargsInstance_versionOpt, "f")); + __classPrivateFieldGet(this, _YargsInstance_usage, "f").version(undefined); + __classPrivateFieldSet(this, _YargsInstance_versionOpt, null, "f"); + } + if (arguments.length === 0) { + ver = this[kGuessVersion](); + opt = defaultVersionOpt; + } + else if (arguments.length === 1) { + if (opt === false) { + return this; + } + ver = opt; + opt = defaultVersionOpt; + } + else if (arguments.length === 2) { + ver = msg; + msg = undefined; + } + __classPrivateFieldSet(this, _YargsInstance_versionOpt, typeof opt === 'string' ? 
opt : defaultVersionOpt, "f"); + msg = msg || __classPrivateFieldGet(this, _YargsInstance_usage, "f").deferY18nLookup('Show version number'); + __classPrivateFieldGet(this, _YargsInstance_usage, "f").version(ver || undefined); + this.boolean(__classPrivateFieldGet(this, _YargsInstance_versionOpt, "f")); + this.describe(__classPrivateFieldGet(this, _YargsInstance_versionOpt, "f"), msg); + return this; + } + wrap(cols) { + argsert('', [cols], arguments.length); + __classPrivateFieldGet(this, _YargsInstance_usage, "f").wrap(cols); + return this; + } + [(_YargsInstance_command = new WeakMap(), _YargsInstance_cwd = new WeakMap(), _YargsInstance_context = new WeakMap(), _YargsInstance_completion = new WeakMap(), _YargsInstance_completionCommand = new WeakMap(), _YargsInstance_defaultShowHiddenOpt = new WeakMap(), _YargsInstance_exitError = new WeakMap(), _YargsInstance_detectLocale = new WeakMap(), _YargsInstance_emittedWarnings = new WeakMap(), _YargsInstance_exitProcess = new WeakMap(), _YargsInstance_frozens = new WeakMap(), _YargsInstance_globalMiddleware = new WeakMap(), _YargsInstance_groups = new WeakMap(), _YargsInstance_hasOutput = new WeakMap(), _YargsInstance_helpOpt = new WeakMap(), _YargsInstance_isGlobalContext = new WeakMap(), _YargsInstance_logger = new WeakMap(), _YargsInstance_output = new WeakMap(), _YargsInstance_options = new WeakMap(), _YargsInstance_parentRequire = new WeakMap(), _YargsInstance_parserConfig = new WeakMap(), _YargsInstance_parseFn = new WeakMap(), _YargsInstance_parseContext = new WeakMap(), _YargsInstance_pkgs = new WeakMap(), _YargsInstance_preservedGroups = new WeakMap(), _YargsInstance_processArgs = new WeakMap(), _YargsInstance_recommendCommands = new WeakMap(), _YargsInstance_shim = new WeakMap(), _YargsInstance_strict = new WeakMap(), _YargsInstance_strictCommands = new WeakMap(), _YargsInstance_strictOptions = new WeakMap(), _YargsInstance_usage = new WeakMap(), _YargsInstance_versionOpt = new WeakMap(), _YargsInstance_validation = new WeakMap(), kCopyDoubleDash)](argv) { + if (!argv._ || !argv['--']) + return argv; + argv._.push.apply(argv._, argv['--']); + try { + delete argv['--']; + } + catch (_err) { } + return argv; + } + [kCreateLogger]() { + return { + log: (...args) => { + if (!this[kHasParseCallback]()) + console.log(...args); + __classPrivateFieldSet(this, _YargsInstance_hasOutput, true, "f"); + if (__classPrivateFieldGet(this, _YargsInstance_output, "f").length) + __classPrivateFieldSet(this, _YargsInstance_output, __classPrivateFieldGet(this, _YargsInstance_output, "f") + '\n', "f"); + __classPrivateFieldSet(this, _YargsInstance_output, __classPrivateFieldGet(this, _YargsInstance_output, "f") + args.join(' '), "f"); + }, + error: (...args) => { + if (!this[kHasParseCallback]()) + console.error(...args); + __classPrivateFieldSet(this, _YargsInstance_hasOutput, true, "f"); + if (__classPrivateFieldGet(this, _YargsInstance_output, "f").length) + __classPrivateFieldSet(this, _YargsInstance_output, __classPrivateFieldGet(this, _YargsInstance_output, "f") + '\n', "f"); + __classPrivateFieldSet(this, _YargsInstance_output, __classPrivateFieldGet(this, _YargsInstance_output, "f") + args.join(' '), "f"); + }, + }; + } + [kDeleteFromParserHintObject](optionKey) { + objectKeys(__classPrivateFieldGet(this, _YargsInstance_options, "f")).forEach((hintKey) => { + if (((key) => key === 'configObjects')(hintKey)) + return; + const hint = __classPrivateFieldGet(this, _YargsInstance_options, "f")[hintKey]; + if (Array.isArray(hint)) { + if 
(hint.includes(optionKey)) + hint.splice(hint.indexOf(optionKey), 1); + } + else if (typeof hint === 'object') { + delete hint[optionKey]; + } + }); + delete __classPrivateFieldGet(this, _YargsInstance_usage, "f").getDescriptions()[optionKey]; + } + [kEmitWarning](warning, type, deduplicationId) { + if (!__classPrivateFieldGet(this, _YargsInstance_emittedWarnings, "f")[deduplicationId]) { + __classPrivateFieldGet(this, _YargsInstance_shim, "f").process.emitWarning(warning, type); + __classPrivateFieldGet(this, _YargsInstance_emittedWarnings, "f")[deduplicationId] = true; + } + } + [kFreeze]() { + __classPrivateFieldGet(this, _YargsInstance_frozens, "f").push({ + options: __classPrivateFieldGet(this, _YargsInstance_options, "f"), + configObjects: __classPrivateFieldGet(this, _YargsInstance_options, "f").configObjects.slice(0), + exitProcess: __classPrivateFieldGet(this, _YargsInstance_exitProcess, "f"), + groups: __classPrivateFieldGet(this, _YargsInstance_groups, "f"), + strict: __classPrivateFieldGet(this, _YargsInstance_strict, "f"), + strictCommands: __classPrivateFieldGet(this, _YargsInstance_strictCommands, "f"), + strictOptions: __classPrivateFieldGet(this, _YargsInstance_strictOptions, "f"), + completionCommand: __classPrivateFieldGet(this, _YargsInstance_completionCommand, "f"), + output: __classPrivateFieldGet(this, _YargsInstance_output, "f"), + exitError: __classPrivateFieldGet(this, _YargsInstance_exitError, "f"), + hasOutput: __classPrivateFieldGet(this, _YargsInstance_hasOutput, "f"), + parsed: this.parsed, + parseFn: __classPrivateFieldGet(this, _YargsInstance_parseFn, "f"), + parseContext: __classPrivateFieldGet(this, _YargsInstance_parseContext, "f"), + }); + __classPrivateFieldGet(this, _YargsInstance_usage, "f").freeze(); + __classPrivateFieldGet(this, _YargsInstance_validation, "f").freeze(); + __classPrivateFieldGet(this, _YargsInstance_command, "f").freeze(); + __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").freeze(); + } + [kGetDollarZero]() { + let $0 = ''; + let default$0; + if (/\b(node|iojs|electron)(\.exe)?$/.test(__classPrivateFieldGet(this, _YargsInstance_shim, "f").process.argv()[0])) { + default$0 = __classPrivateFieldGet(this, _YargsInstance_shim, "f").process.argv().slice(1, 2); + } + else { + default$0 = __classPrivateFieldGet(this, _YargsInstance_shim, "f").process.argv().slice(0, 1); + } + $0 = default$0 + .map(x => { + const b = this[kRebase](__classPrivateFieldGet(this, _YargsInstance_cwd, "f"), x); + return x.match(/^(\/|([a-zA-Z]:)?\\)/) && b.length < x.length ? 
b : x; + }) + .join(' ') + .trim(); + if (__classPrivateFieldGet(this, _YargsInstance_shim, "f").getEnv('_') && + __classPrivateFieldGet(this, _YargsInstance_shim, "f").getProcessArgvBin() === __classPrivateFieldGet(this, _YargsInstance_shim, "f").getEnv('_')) { + $0 = __classPrivateFieldGet(this, _YargsInstance_shim, "f") + .getEnv('_') + .replace(`${__classPrivateFieldGet(this, _YargsInstance_shim, "f").path.dirname(__classPrivateFieldGet(this, _YargsInstance_shim, "f").process.execPath())}/`, ''); + } + return $0; + } + [kGetParserConfiguration]() { + return __classPrivateFieldGet(this, _YargsInstance_parserConfig, "f"); + } + [kGuessLocale]() { + if (!__classPrivateFieldGet(this, _YargsInstance_detectLocale, "f")) + return; + const locale = __classPrivateFieldGet(this, _YargsInstance_shim, "f").getEnv('LC_ALL') || + __classPrivateFieldGet(this, _YargsInstance_shim, "f").getEnv('LC_MESSAGES') || + __classPrivateFieldGet(this, _YargsInstance_shim, "f").getEnv('LANG') || + __classPrivateFieldGet(this, _YargsInstance_shim, "f").getEnv('LANGUAGE') || + 'en_US'; + this.locale(locale.replace(/[.:].*/, '')); + } + [kGuessVersion]() { + const obj = this[kPkgUp](); + return obj.version || 'unknown'; + } + [kParsePositionalNumbers](argv) { + const args = argv['--'] ? argv['--'] : argv._; + for (let i = 0, arg; (arg = args[i]) !== undefined; i++) { + if (__classPrivateFieldGet(this, _YargsInstance_shim, "f").Parser.looksLikeNumber(arg) && + Number.isSafeInteger(Math.floor(parseFloat(`${arg}`)))) { + args[i] = Number(arg); + } + } + return argv; + } + [kPkgUp](rootPath) { + const npath = rootPath || '*'; + if (__classPrivateFieldGet(this, _YargsInstance_pkgs, "f")[npath]) + return __classPrivateFieldGet(this, _YargsInstance_pkgs, "f")[npath]; + let obj = {}; + try { + let startDir = rootPath || __classPrivateFieldGet(this, _YargsInstance_shim, "f").mainFilename; + if (!rootPath && __classPrivateFieldGet(this, _YargsInstance_shim, "f").path.extname(startDir)) { + startDir = __classPrivateFieldGet(this, _YargsInstance_shim, "f").path.dirname(startDir); + } + const pkgJsonPath = __classPrivateFieldGet(this, _YargsInstance_shim, "f").findUp(startDir, (dir, names) => { + if (names.includes('package.json')) { + return 'package.json'; + } + else { + return undefined; + } + }); + assertNotStrictEqual(pkgJsonPath, undefined, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); + obj = JSON.parse(__classPrivateFieldGet(this, _YargsInstance_shim, "f").readFileSync(pkgJsonPath, 'utf8')); + } + catch (_noop) { } + __classPrivateFieldGet(this, _YargsInstance_pkgs, "f")[npath] = obj || {}; + return __classPrivateFieldGet(this, _YargsInstance_pkgs, "f")[npath]; + } + [kPopulateParserHintArray](type, keys) { + keys = [].concat(keys); + keys.forEach(key => { + key = this[kSanitizeKey](key); + __classPrivateFieldGet(this, _YargsInstance_options, "f")[type].push(key); + }); + } + [kPopulateParserHintSingleValueDictionary](builder, type, key, value) { + this[kPopulateParserHintDictionary](builder, type, key, value, (type, key, value) => { + __classPrivateFieldGet(this, _YargsInstance_options, "f")[type][key] = value; + }); + } + [kPopulateParserHintArrayDictionary](builder, type, key, value) { + this[kPopulateParserHintDictionary](builder, type, key, value, (type, key, value) => { + __classPrivateFieldGet(this, _YargsInstance_options, "f")[type][key] = (__classPrivateFieldGet(this, _YargsInstance_options, "f")[type][key] || []).concat(value); + }); + } + [kPopulateParserHintDictionary](builder, type, key, value, 
singleKeyHandler) { + if (Array.isArray(key)) { + key.forEach(k => { + builder(k, value); + }); + } + else if (((key) => typeof key === 'object')(key)) { + for (const k of objectKeys(key)) { + builder(k, key[k]); + } + } + else { + singleKeyHandler(type, this[kSanitizeKey](key), value); + } + } + [kSanitizeKey](key) { + if (key === '__proto__') + return '___proto___'; + return key; + } + [kSetKey](key, set) { + this[kPopulateParserHintSingleValueDictionary](this[kSetKey].bind(this), 'key', key, set); + return this; + } + [kUnfreeze]() { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m; + const frozen = __classPrivateFieldGet(this, _YargsInstance_frozens, "f").pop(); + assertNotStrictEqual(frozen, undefined, __classPrivateFieldGet(this, _YargsInstance_shim, "f")); + let configObjects; + (_a = this, _b = this, _c = this, _d = this, _e = this, _f = this, _g = this, _h = this, _j = this, _k = this, _l = this, _m = this, { + options: ({ set value(_o) { __classPrivateFieldSet(_a, _YargsInstance_options, _o, "f"); } }).value, + configObjects, + exitProcess: ({ set value(_o) { __classPrivateFieldSet(_b, _YargsInstance_exitProcess, _o, "f"); } }).value, + groups: ({ set value(_o) { __classPrivateFieldSet(_c, _YargsInstance_groups, _o, "f"); } }).value, + output: ({ set value(_o) { __classPrivateFieldSet(_d, _YargsInstance_output, _o, "f"); } }).value, + exitError: ({ set value(_o) { __classPrivateFieldSet(_e, _YargsInstance_exitError, _o, "f"); } }).value, + hasOutput: ({ set value(_o) { __classPrivateFieldSet(_f, _YargsInstance_hasOutput, _o, "f"); } }).value, + parsed: this.parsed, + strict: ({ set value(_o) { __classPrivateFieldSet(_g, _YargsInstance_strict, _o, "f"); } }).value, + strictCommands: ({ set value(_o) { __classPrivateFieldSet(_h, _YargsInstance_strictCommands, _o, "f"); } }).value, + strictOptions: ({ set value(_o) { __classPrivateFieldSet(_j, _YargsInstance_strictOptions, _o, "f"); } }).value, + completionCommand: ({ set value(_o) { __classPrivateFieldSet(_k, _YargsInstance_completionCommand, _o, "f"); } }).value, + parseFn: ({ set value(_o) { __classPrivateFieldSet(_l, _YargsInstance_parseFn, _o, "f"); } }).value, + parseContext: ({ set value(_o) { __classPrivateFieldSet(_m, _YargsInstance_parseContext, _o, "f"); } }).value, + } = frozen); + __classPrivateFieldGet(this, _YargsInstance_options, "f").configObjects = configObjects; + __classPrivateFieldGet(this, _YargsInstance_usage, "f").unfreeze(); + __classPrivateFieldGet(this, _YargsInstance_validation, "f").unfreeze(); + __classPrivateFieldGet(this, _YargsInstance_command, "f").unfreeze(); + __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").unfreeze(); + } + [kValidateAsync](validation, argv) { + return maybeAsyncResult(argv, result => { + validation(result); + return result; + }); + } + getInternalMethods() { + return { + getCommandInstance: this[kGetCommandInstance].bind(this), + getContext: this[kGetContext].bind(this), + getHasOutput: this[kGetHasOutput].bind(this), + getLoggerInstance: this[kGetLoggerInstance].bind(this), + getParseContext: this[kGetParseContext].bind(this), + getParserConfiguration: this[kGetParserConfiguration].bind(this), + getUsageInstance: this[kGetUsageInstance].bind(this), + getValidationInstance: this[kGetValidationInstance].bind(this), + hasParseCallback: this[kHasParseCallback].bind(this), + isGlobalContext: this[kIsGlobalContext].bind(this), + postProcess: this[kPostProcess].bind(this), + reset: this[kReset].bind(this), + runValidation: this[kRunValidation].bind(this), + 
runYargsParserAndExecuteCommands: this[kRunYargsParserAndExecuteCommands].bind(this), + setHasOutput: this[kSetHasOutput].bind(this), + }; + } + [kGetCommandInstance]() { + return __classPrivateFieldGet(this, _YargsInstance_command, "f"); + } + [kGetContext]() { + return __classPrivateFieldGet(this, _YargsInstance_context, "f"); + } + [kGetHasOutput]() { + return __classPrivateFieldGet(this, _YargsInstance_hasOutput, "f"); + } + [kGetLoggerInstance]() { + return __classPrivateFieldGet(this, _YargsInstance_logger, "f"); + } + [kGetParseContext]() { + return __classPrivateFieldGet(this, _YargsInstance_parseContext, "f") || {}; + } + [kGetUsageInstance]() { + return __classPrivateFieldGet(this, _YargsInstance_usage, "f"); + } + [kGetValidationInstance]() { + return __classPrivateFieldGet(this, _YargsInstance_validation, "f"); + } + [kHasParseCallback]() { + return !!__classPrivateFieldGet(this, _YargsInstance_parseFn, "f"); + } + [kIsGlobalContext]() { + return __classPrivateFieldGet(this, _YargsInstance_isGlobalContext, "f"); + } + [kPostProcess](argv, populateDoubleDash, calledFromCommand, runGlobalMiddleware) { + if (calledFromCommand) + return argv; + if (isPromise(argv)) + return argv; + if (!populateDoubleDash) { + argv = this[kCopyDoubleDash](argv); + } + const parsePositionalNumbers = this[kGetParserConfiguration]()['parse-positional-numbers'] || + this[kGetParserConfiguration]()['parse-positional-numbers'] === undefined; + if (parsePositionalNumbers) { + argv = this[kParsePositionalNumbers](argv); + } + if (runGlobalMiddleware) { + argv = applyMiddleware(argv, this, __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").getMiddleware(), false); + } + return argv; + } + [kReset](aliases = {}) { + __classPrivateFieldSet(this, _YargsInstance_options, __classPrivateFieldGet(this, _YargsInstance_options, "f") || {}, "f"); + const tmpOptions = {}; + tmpOptions.local = __classPrivateFieldGet(this, _YargsInstance_options, "f").local || []; + tmpOptions.configObjects = __classPrivateFieldGet(this, _YargsInstance_options, "f").configObjects || []; + const localLookup = {}; + tmpOptions.local.forEach(l => { + localLookup[l] = true; + (aliases[l] || []).forEach(a => { + localLookup[a] = true; + }); + }); + Object.assign(__classPrivateFieldGet(this, _YargsInstance_preservedGroups, "f"), Object.keys(__classPrivateFieldGet(this, _YargsInstance_groups, "f")).reduce((acc, groupName) => { + const keys = __classPrivateFieldGet(this, _YargsInstance_groups, "f")[groupName].filter(key => !(key in localLookup)); + if (keys.length > 0) { + acc[groupName] = keys; + } + return acc; + }, {})); + __classPrivateFieldSet(this, _YargsInstance_groups, {}, "f"); + const arrayOptions = [ + 'array', + 'boolean', + 'string', + 'skipValidation', + 'count', + 'normalize', + 'number', + 'hiddenOptions', + ]; + const objectOptions = [ + 'narg', + 'key', + 'alias', + 'default', + 'defaultDescription', + 'config', + 'choices', + 'demandedOptions', + 'demandedCommands', + 'deprecatedOptions', + ]; + arrayOptions.forEach(k => { + tmpOptions[k] = (__classPrivateFieldGet(this, _YargsInstance_options, "f")[k] || []).filter((k) => !localLookup[k]); + }); + objectOptions.forEach((k) => { + tmpOptions[k] = objFilter(__classPrivateFieldGet(this, _YargsInstance_options, "f")[k], k => !localLookup[k]); + }); + tmpOptions.envPrefix = __classPrivateFieldGet(this, _YargsInstance_options, "f").envPrefix; + __classPrivateFieldSet(this, _YargsInstance_options, tmpOptions, "f"); + __classPrivateFieldSet(this, _YargsInstance_usage, 
__classPrivateFieldGet(this, _YargsInstance_usage, "f") + ? __classPrivateFieldGet(this, _YargsInstance_usage, "f").reset(localLookup) + : Usage(this, __classPrivateFieldGet(this, _YargsInstance_shim, "f")), "f"); + __classPrivateFieldSet(this, _YargsInstance_validation, __classPrivateFieldGet(this, _YargsInstance_validation, "f") + ? __classPrivateFieldGet(this, _YargsInstance_validation, "f").reset(localLookup) + : Validation(this, __classPrivateFieldGet(this, _YargsInstance_usage, "f"), __classPrivateFieldGet(this, _YargsInstance_shim, "f")), "f"); + __classPrivateFieldSet(this, _YargsInstance_command, __classPrivateFieldGet(this, _YargsInstance_command, "f") + ? __classPrivateFieldGet(this, _YargsInstance_command, "f").reset() + : Command(__classPrivateFieldGet(this, _YargsInstance_usage, "f"), __classPrivateFieldGet(this, _YargsInstance_validation, "f"), __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f"), __classPrivateFieldGet(this, _YargsInstance_shim, "f")), "f"); + if (!__classPrivateFieldGet(this, _YargsInstance_completion, "f")) + __classPrivateFieldSet(this, _YargsInstance_completion, Completion(this, __classPrivateFieldGet(this, _YargsInstance_usage, "f"), __classPrivateFieldGet(this, _YargsInstance_command, "f"), __classPrivateFieldGet(this, _YargsInstance_shim, "f")), "f"); + __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").reset(); + __classPrivateFieldSet(this, _YargsInstance_completionCommand, null, "f"); + __classPrivateFieldSet(this, _YargsInstance_output, '', "f"); + __classPrivateFieldSet(this, _YargsInstance_exitError, null, "f"); + __classPrivateFieldSet(this, _YargsInstance_hasOutput, false, "f"); + this.parsed = false; + return this; + } + [kRebase](base, dir) { + return __classPrivateFieldGet(this, _YargsInstance_shim, "f").path.relative(base, dir); + } + [kRunYargsParserAndExecuteCommands](args, shortCircuit, calledFromCommand, commandIndex = 0, helpOnly = false) { + let skipValidation = !!calledFromCommand || helpOnly; + args = args || __classPrivateFieldGet(this, _YargsInstance_processArgs, "f"); + __classPrivateFieldGet(this, _YargsInstance_options, "f").__ = __classPrivateFieldGet(this, _YargsInstance_shim, "f").y18n.__; + __classPrivateFieldGet(this, _YargsInstance_options, "f").configuration = this[kGetParserConfiguration](); + const populateDoubleDash = !!__classPrivateFieldGet(this, _YargsInstance_options, "f").configuration['populate--']; + const config = Object.assign({}, __classPrivateFieldGet(this, _YargsInstance_options, "f").configuration, { + 'populate--': true, + }); + const parsed = __classPrivateFieldGet(this, _YargsInstance_shim, "f").Parser.detailed(args, Object.assign({}, __classPrivateFieldGet(this, _YargsInstance_options, "f"), { + configuration: { 'parse-positional-numbers': false, ...config }, + })); + const argv = Object.assign(parsed.argv, __classPrivateFieldGet(this, _YargsInstance_parseContext, "f")); + let argvPromise = undefined; + const aliases = parsed.aliases; + let helpOptSet = false; + let versionOptSet = false; + Object.keys(argv).forEach(key => { + if (key === __classPrivateFieldGet(this, _YargsInstance_helpOpt, "f") && argv[key]) { + helpOptSet = true; + } + else if (key === __classPrivateFieldGet(this, _YargsInstance_versionOpt, "f") && argv[key]) { + versionOptSet = true; + } + }); + argv.$0 = this.$0; + this.parsed = parsed; + if (commandIndex === 0) { + __classPrivateFieldGet(this, _YargsInstance_usage, "f").clearCachedHelpMessage(); + } + try { + this[kGuessLocale](); + if 
(shortCircuit) { + return this[kPostProcess](argv, populateDoubleDash, !!calledFromCommand, false); + } + if (__classPrivateFieldGet(this, _YargsInstance_helpOpt, "f")) { + const helpCmds = [__classPrivateFieldGet(this, _YargsInstance_helpOpt, "f")] + .concat(aliases[__classPrivateFieldGet(this, _YargsInstance_helpOpt, "f")] || []) + .filter(k => k.length > 1); + if (helpCmds.includes('' + argv._[argv._.length - 1])) { + argv._.pop(); + helpOptSet = true; + } + } + __classPrivateFieldSet(this, _YargsInstance_isGlobalContext, false, "f"); + const handlerKeys = __classPrivateFieldGet(this, _YargsInstance_command, "f").getCommands(); + const requestCompletions = __classPrivateFieldGet(this, _YargsInstance_completion, "f").completionKey in argv; + const skipRecommendation = helpOptSet || requestCompletions || helpOnly; + if (argv._.length) { + if (handlerKeys.length) { + let firstUnknownCommand; + for (let i = commandIndex || 0, cmd; argv._[i] !== undefined; i++) { + cmd = String(argv._[i]); + if (handlerKeys.includes(cmd) && cmd !== __classPrivateFieldGet(this, _YargsInstance_completionCommand, "f")) { + const innerArgv = __classPrivateFieldGet(this, _YargsInstance_command, "f").runCommand(cmd, this, parsed, i + 1, helpOnly, helpOptSet || versionOptSet || helpOnly); + return this[kPostProcess](innerArgv, populateDoubleDash, !!calledFromCommand, false); + } + else if (!firstUnknownCommand && + cmd !== __classPrivateFieldGet(this, _YargsInstance_completionCommand, "f")) { + firstUnknownCommand = cmd; + break; + } + } + if (!__classPrivateFieldGet(this, _YargsInstance_command, "f").hasDefaultCommand() && + __classPrivateFieldGet(this, _YargsInstance_recommendCommands, "f") && + firstUnknownCommand && + !skipRecommendation) { + __classPrivateFieldGet(this, _YargsInstance_validation, "f").recommendCommands(firstUnknownCommand, handlerKeys); + } + } + if (__classPrivateFieldGet(this, _YargsInstance_completionCommand, "f") && + argv._.includes(__classPrivateFieldGet(this, _YargsInstance_completionCommand, "f")) && + !requestCompletions) { + if (__classPrivateFieldGet(this, _YargsInstance_exitProcess, "f")) + setBlocking(true); + this.showCompletionScript(); + this.exit(0); + } + } + if (__classPrivateFieldGet(this, _YargsInstance_command, "f").hasDefaultCommand() && !skipRecommendation) { + const innerArgv = __classPrivateFieldGet(this, _YargsInstance_command, "f").runCommand(null, this, parsed, 0, helpOnly, helpOptSet || versionOptSet || helpOnly); + return this[kPostProcess](innerArgv, populateDoubleDash, !!calledFromCommand, false); + } + if (requestCompletions) { + if (__classPrivateFieldGet(this, _YargsInstance_exitProcess, "f")) + setBlocking(true); + args = [].concat(args); + const completionArgs = args.slice(args.indexOf(`--${__classPrivateFieldGet(this, _YargsInstance_completion, "f").completionKey}`) + 1); + __classPrivateFieldGet(this, _YargsInstance_completion, "f").getCompletion(completionArgs, (err, completions) => { + if (err) + throw new YError(err.message); + (completions || []).forEach(completion => { + __classPrivateFieldGet(this, _YargsInstance_logger, "f").log(completion); + }); + this.exit(0); + }); + return this[kPostProcess](argv, !populateDoubleDash, !!calledFromCommand, false); + } + if (!__classPrivateFieldGet(this, _YargsInstance_hasOutput, "f")) { + if (helpOptSet) { + if (__classPrivateFieldGet(this, _YargsInstance_exitProcess, "f")) + setBlocking(true); + skipValidation = true; + this.showHelp('log'); + this.exit(0); + } + else if (versionOptSet) { + if 
(__classPrivateFieldGet(this, _YargsInstance_exitProcess, "f")) + setBlocking(true); + skipValidation = true; + __classPrivateFieldGet(this, _YargsInstance_usage, "f").showVersion('log'); + this.exit(0); + } + } + if (!skipValidation && __classPrivateFieldGet(this, _YargsInstance_options, "f").skipValidation.length > 0) { + skipValidation = Object.keys(argv).some(key => __classPrivateFieldGet(this, _YargsInstance_options, "f").skipValidation.indexOf(key) >= 0 && argv[key] === true); + } + if (!skipValidation) { + if (parsed.error) + throw new YError(parsed.error.message); + if (!requestCompletions) { + const validation = this[kRunValidation](aliases, {}, parsed.error); + if (!calledFromCommand) { + argvPromise = applyMiddleware(argv, this, __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").getMiddleware(), true); + } + argvPromise = this[kValidateAsync](validation, argvPromise !== null && argvPromise !== void 0 ? argvPromise : argv); + if (isPromise(argvPromise) && !calledFromCommand) { + argvPromise = argvPromise.then(() => { + return applyMiddleware(argv, this, __classPrivateFieldGet(this, _YargsInstance_globalMiddleware, "f").getMiddleware(), false); + }); + } + } + } + } + catch (err) { + if (err instanceof YError) + __classPrivateFieldGet(this, _YargsInstance_usage, "f").fail(err.message, err); + else + throw err; + } + return this[kPostProcess](argvPromise !== null && argvPromise !== void 0 ? argvPromise : argv, populateDoubleDash, !!calledFromCommand, true); + } + [kRunValidation](aliases, positionalMap, parseErrors, isDefaultCommand) { + const demandedOptions = { ...this.getDemandedOptions() }; + return (argv) => { + if (parseErrors) + throw new YError(parseErrors.message); + __classPrivateFieldGet(this, _YargsInstance_validation, "f").nonOptionCount(argv); + __classPrivateFieldGet(this, _YargsInstance_validation, "f").requiredArguments(argv, demandedOptions); + let failedStrictCommands = false; + if (__classPrivateFieldGet(this, _YargsInstance_strictCommands, "f")) { + failedStrictCommands = __classPrivateFieldGet(this, _YargsInstance_validation, "f").unknownCommands(argv); + } + if (__classPrivateFieldGet(this, _YargsInstance_strict, "f") && !failedStrictCommands) { + __classPrivateFieldGet(this, _YargsInstance_validation, "f").unknownArguments(argv, aliases, positionalMap, !!isDefaultCommand); + } + else if (__classPrivateFieldGet(this, _YargsInstance_strictOptions, "f")) { + __classPrivateFieldGet(this, _YargsInstance_validation, "f").unknownArguments(argv, aliases, {}, false, false); + } + __classPrivateFieldGet(this, _YargsInstance_validation, "f").limitedChoices(argv); + __classPrivateFieldGet(this, _YargsInstance_validation, "f").implications(argv); + __classPrivateFieldGet(this, _YargsInstance_validation, "f").conflicting(argv); + }; + } + [kSetHasOutput]() { + __classPrivateFieldSet(this, _YargsInstance_hasOutput, true, "f"); + } + [kTrackManuallySetKeys](keys) { + if (typeof keys === 'string') { + __classPrivateFieldGet(this, _YargsInstance_options, "f").key[keys] = true; + } + else { + for (const k of keys) { + __classPrivateFieldGet(this, _YargsInstance_options, "f").key[k] = true; + } + } + } +} +export function isYargsInstance(y) { + return !!y && typeof y.getInternalMethods === 'function'; +} diff --git a/mybulma/node_modules/yargs/build/lib/yerror.js b/mybulma/node_modules/yargs/build/lib/yerror.js new file mode 100644 index 0000000..7a36684 --- /dev/null +++ b/mybulma/node_modules/yargs/build/lib/yerror.js @@ -0,0 +1,9 @@ +export class YError 
extends Error { + constructor(msg) { + super(msg || 'yargs error'); + this.name = 'YError'; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, YError); + } + } +} diff --git a/mybulma/node_modules/yargs/helpers/helpers.mjs b/mybulma/node_modules/yargs/helpers/helpers.mjs new file mode 100644 index 0000000..3f96b3d --- /dev/null +++ b/mybulma/node_modules/yargs/helpers/helpers.mjs @@ -0,0 +1,10 @@ +import {applyExtends as _applyExtends} from '../build/lib/utils/apply-extends.js'; +import {hideBin} from '../build/lib/utils/process-argv.js'; +import Parser from 'yargs-parser'; +import shim from '../lib/platform-shims/esm.mjs'; + +const applyExtends = (config, cwd, mergeExtends) => { + return _applyExtends(config, cwd, mergeExtends, shim); +}; + +export {applyExtends, hideBin, Parser}; diff --git a/mybulma/node_modules/yargs/helpers/index.js b/mybulma/node_modules/yargs/helpers/index.js new file mode 100644 index 0000000..8ab79a3 --- /dev/null +++ b/mybulma/node_modules/yargs/helpers/index.js @@ -0,0 +1,14 @@ +const { + applyExtends, + cjsPlatformShim, + Parser, + processArgv, +} = require('../build/index.cjs'); + +module.exports = { + applyExtends: (config, cwd, mergeExtends) => { + return applyExtends(config, cwd, mergeExtends, cjsPlatformShim); + }, + hideBin: processArgv.hideBin, + Parser, +}; diff --git a/mybulma/node_modules/yargs/helpers/package.json b/mybulma/node_modules/yargs/helpers/package.json new file mode 100644 index 0000000..5bbefff --- /dev/null +++ b/mybulma/node_modules/yargs/helpers/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/mybulma/node_modules/yargs/lib/platform-shims/browser.mjs b/mybulma/node_modules/yargs/lib/platform-shims/browser.mjs new file mode 100644 index 0000000..5f8ec61 --- /dev/null +++ b/mybulma/node_modules/yargs/lib/platform-shims/browser.mjs @@ -0,0 +1,95 @@ +/* eslint-disable no-unused-vars */ +'use strict'; + +import cliui from 'https://unpkg.com/cliui@7.0.1/index.mjs'; // eslint-disable-line +import Parser from 'https://unpkg.com/yargs-parser@19.0.0/browser.js'; // eslint-disable-line +import {getProcessArgvBin} from '../../build/lib/utils/process-argv.js'; +import {YError} from '../../build/lib/yerror.js'; + +const REQUIRE_ERROR = 'require is not supported in browser'; +const REQUIRE_DIRECTORY_ERROR = + 'loading a directory of commands is not supported in browser'; + +export default { + assert: { + notStrictEqual: (a, b) => { + // noop. + }, + strictEqual: (a, b) => { + // noop. 
+ }, + }, + cliui, + findUp: () => undefined, + getEnv: key => { + // There is no environment in browser: + return undefined; + }, + inspect: console.log, + getCallerFile: () => { + throw new YError(REQUIRE_DIRECTORY_ERROR); + }, + getProcessArgvBin, + mainFilename: 'yargs', + Parser, + path: { + basename: str => str, + dirname: str => str, + extname: str => str, + relative: str => str, + }, + process: { + argv: () => [], + cwd: () => '', + emitWarning: (warning, name) => {}, + execPath: () => '', + // exit is noop browser: + exit: () => {}, + nextTick: cb => { + // eslint-disable-next-line no-undef + window.setTimeout(cb, 1); + }, + stdColumns: 80, + }, + readFileSync: () => { + return ''; + }, + require: () => { + throw new YError(REQUIRE_ERROR); + }, + requireDirectory: () => { + throw new YError(REQUIRE_DIRECTORY_ERROR); + }, + stringWidth: str => { + return [...str].length; + }, + // TODO: replace this with y18n once it's ported to ESM: + y18n: { + __: (...str) => { + if (str.length === 0) return ''; + const args = str.slice(1); + return sprintf(str[0], ...args); + }, + __n: (str1, str2, count, ...args) => { + if (count === 1) { + return sprintf(str1, ...args); + } else { + return sprintf(str2, ...args); + } + }, + getLocale: () => { + return 'en_US'; + }, + setLocale: () => {}, + updateLocale: () => {}, + }, +}; + +function sprintf(_str, ...args) { + let str = ''; + const split = _str.split('%s'); + split.forEach((token, i) => { + str += `${token}${split[i + 1] !== undefined && args[i] ? args[i] : ''}`; + }); + return str; +} diff --git a/mybulma/node_modules/yargs/lib/platform-shims/esm.mjs b/mybulma/node_modules/yargs/lib/platform-shims/esm.mjs new file mode 100644 index 0000000..c25baa5 --- /dev/null +++ b/mybulma/node_modules/yargs/lib/platform-shims/esm.mjs @@ -0,0 +1,73 @@ +'use strict' + +import { notStrictEqual, strictEqual } from 'assert' +import cliui from 'cliui' +import escalade from 'escalade/sync' +import { inspect } from 'util' +import { readFileSync } from 'fs' +import { fileURLToPath } from 'url'; +import Parser from 'yargs-parser' +import { basename, dirname, extname, relative, resolve } from 'path' +import { getProcessArgvBin } from '../../build/lib/utils/process-argv.js' +import { YError } from '../../build/lib/yerror.js' +import y18n from 'y18n' + +const REQUIRE_ERROR = 'require is not supported by ESM' +const REQUIRE_DIRECTORY_ERROR = 'loading a directory of commands is not supported yet for ESM' + +let __dirname; +try { + __dirname = fileURLToPath(import.meta.url); +} catch (e) { + __dirname = process.cwd(); +} +const mainFilename = __dirname.substring(0, __dirname.lastIndexOf('node_modules')); + +export default { + assert: { + notStrictEqual, + strictEqual + }, + cliui, + findUp: escalade, + getEnv: (key) => { + return process.env[key] + }, + inspect, + getCallerFile: () => { + throw new YError(REQUIRE_DIRECTORY_ERROR) + }, + getProcessArgvBin, + mainFilename: mainFilename || process.cwd(), + Parser, + path: { + basename, + dirname, + extname, + relative, + resolve + }, + process: { + argv: () => process.argv, + cwd: process.cwd, + emitWarning: (warning, type) => process.emitWarning(warning, type), + execPath: () => process.execPath, + exit: process.exit, + nextTick: process.nextTick, + stdColumns: typeof process.stdout.columns !== 'undefined' ? 
process.stdout.columns : null + }, + readFileSync, + require: () => { + throw new YError(REQUIRE_ERROR) + }, + requireDirectory: () => { + throw new YError(REQUIRE_DIRECTORY_ERROR) + }, + stringWidth: (str) => { + return [...str].length + }, + y18n: y18n({ + directory: resolve(__dirname, '../../../locales'), + updateFiles: false + }) +} diff --git a/mybulma/node_modules/yargs/locales/be.json b/mybulma/node_modules/yargs/locales/be.json new file mode 100644 index 0000000..e28fa30 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/be.json @@ -0,0 +1,46 @@ +{ + "Commands:": "Каманды:", + "Options:": "Опцыі:", + "Examples:": "Прыклады:", + "boolean": "булевы тып", + "count": "падлік", + "string": "радковы тып", + "number": "лік", + "array": "масіў", + "required": "неабходна", + "default": "па змаўчанні", + "default:": "па змаўчанні:", + "choices:": "магчымасці:", + "aliases:": "аліасы:", + "generated-value": "згенераванае значэнне", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Недастаткова неапцыйных аргументаў: ёсць %s, трэба як мінімум %s", + "other": "Недастаткова неапцыйных аргументаў: ёсць %s, трэба як мінімум %s" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Занадта шмат неапцыйных аргументаў: ёсць %s, максімум дапушчальна %s", + "other": "Занадта шмат неапцыйных аргументаў: ёсць %s, максімум дапушчальна %s" + }, + "Missing argument value: %s": { + "one": "Не хапае значэння аргументу: %s", + "other": "Не хапае значэнняў аргументаў: %s" + }, + "Missing required argument: %s": { + "one": "Не хапае неабходнага аргументу: %s", + "other": "Не хапае неабходных аргументаў: %s" + }, + "Unknown argument: %s": { + "one": "Невядомы аргумент: %s", + "other": "Невядомыя аргументы: %s" + }, + "Invalid values:": "Несапраўдныя значэння:", + "Argument: %s, Given: %s, Choices: %s": "Аргумент: %s, Дадзенае значэнне: %s, Магчымасці: %s", + "Argument check failed: %s": "Праверка аргументаў не ўдалася: %s", + "Implications failed:": "Дадзены аргумент патрабуе наступны дадатковы аргумент:", + "Not enough arguments following: %s": "Недастаткова наступных аргументаў: %s", + "Invalid JSON config file: %s": "Несапраўдны файл канфігурацыі JSON: %s", + "Path to JSON config file": "Шлях да файла канфігурацыі JSON", + "Show help": "Паказаць дапамогу", + "Show version number": "Паказаць нумар версіі", + "Did you mean %s?": "Вы мелі на ўвазе %s?" 
+} diff --git a/mybulma/node_modules/yargs/locales/cs.json b/mybulma/node_modules/yargs/locales/cs.json new file mode 100644 index 0000000..6394875 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/cs.json @@ -0,0 +1,51 @@ +{ + "Commands:": "Příkazy:", + "Options:": "Možnosti:", + "Examples:": "Příklady:", + "boolean": "logická hodnota", + "count": "počet", + "string": "řetězec", + "number": "číslo", + "array": "pole", + "required": "povinné", + "default": "výchozí", + "default:": "výchozí:", + "choices:": "volby:", + "aliases:": "aliasy:", + "generated-value": "generovaná-hodnota", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Nedostatek argumentů: zadáno %s, je potřeba alespoň %s", + "other": "Nedostatek argumentů: zadáno %s, je potřeba alespoň %s" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Příliš mnoho argumentů: zadáno %s, maximálně %s", + "other": "Příliš mnoho argumentů: zadáno %s, maximálně %s" + }, + "Missing argument value: %s": { + "one": "Chybí hodnota argumentu: %s", + "other": "Chybí hodnoty argumentů: %s" + }, + "Missing required argument: %s": { + "one": "Chybí požadovaný argument: %s", + "other": "Chybí požadované argumenty: %s" + }, + "Unknown argument: %s": { + "one": "Neznámý argument: %s", + "other": "Neznámé argumenty: %s" + }, + "Invalid values:": "Neplatné hodnoty:", + "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Zadáno: %s, Možnosti: %s", + "Argument check failed: %s": "Kontrola argumentů se nezdařila: %s", + "Implications failed:": "Chybí závislé argumenty:", + "Not enough arguments following: %s": "Následuje nedostatek argumentů: %s", + "Invalid JSON config file: %s": "Neplatný konfigurační soubor JSON: %s", + "Path to JSON config file": "Cesta ke konfiguračnímu souboru JSON", + "Show help": "Zobrazit nápovědu", + "Show version number": "Zobrazit číslo verze", + "Did you mean %s?": "Měl jste na mysli %s?", + "Arguments %s and %s are mutually exclusive" : "Argumenty %s a %s se vzájemně vylučují", + "Positionals:": "Poziční:", + "command": "příkaz", + "deprecated": "zastaralé", + "deprecated: %s": "zastaralé: %s" +} diff --git a/mybulma/node_modules/yargs/locales/de.json b/mybulma/node_modules/yargs/locales/de.json new file mode 100644 index 0000000..dc73ec3 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/de.json @@ -0,0 +1,46 @@ +{ + "Commands:": "Kommandos:", + "Options:": "Optionen:", + "Examples:": "Beispiele:", + "boolean": "boolean", + "count": "Zähler", + "string": "string", + "number": "Zahl", + "array": "array", + "required": "erforderlich", + "default": "Standard", + "default:": "Standard:", + "choices:": "Möglichkeiten:", + "aliases:": "Aliase:", + "generated-value": "Generierter-Wert", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Nicht genügend Argumente ohne Optionen: %s vorhanden, mindestens %s benötigt", + "other": "Nicht genügend Argumente ohne Optionen: %s vorhanden, mindestens %s benötigt" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Zu viele Argumente ohne Optionen: %s vorhanden, maximal %s erlaubt", + "other": "Zu viele Argumente ohne Optionen: %s vorhanden, maximal %s erlaubt" + }, + "Missing argument value: %s": { + "one": "Fehlender Argumentwert: %s", + "other": "Fehlende Argumentwerte: %s" + }, + "Missing required argument: %s": { + "one": "Fehlendes Argument: %s", + "other": "Fehlende Argumente: %s" + }, + "Unknown argument: %s": { + "one": "Unbekanntes Argument: %s", + "other": "Unbekannte Argumente: 
%s" + }, + "Invalid values:": "Unzulässige Werte:", + "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Gegeben: %s, Möglichkeiten: %s", + "Argument check failed: %s": "Argumente-Check fehlgeschlagen: %s", + "Implications failed:": "Fehlende abhängige Argumente:", + "Not enough arguments following: %s": "Nicht genügend Argumente nach: %s", + "Invalid JSON config file: %s": "Fehlerhafte JSON-Config Datei: %s", + "Path to JSON config file": "Pfad zur JSON-Config Datei", + "Show help": "Hilfe anzeigen", + "Show version number": "Version anzeigen", + "Did you mean %s?": "Meintest du %s?" +} diff --git a/mybulma/node_modules/yargs/locales/en.json b/mybulma/node_modules/yargs/locales/en.json new file mode 100644 index 0000000..d794947 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/en.json @@ -0,0 +1,51 @@ +{ + "Commands:": "Commands:", + "Options:": "Options:", + "Examples:": "Examples:", + "boolean": "boolean", + "count": "count", + "string": "string", + "number": "number", + "array": "array", + "required": "required", + "default": "default", + "default:": "default:", + "choices:": "choices:", + "aliases:": "aliases:", + "generated-value": "generated-value", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Not enough non-option arguments: got %s, need at least %s", + "other": "Not enough non-option arguments: got %s, need at least %s" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Too many non-option arguments: got %s, maximum of %s", + "other": "Too many non-option arguments: got %s, maximum of %s" + }, + "Missing argument value: %s": { + "one": "Missing argument value: %s", + "other": "Missing argument values: %s" + }, + "Missing required argument: %s": { + "one": "Missing required argument: %s", + "other": "Missing required arguments: %s" + }, + "Unknown argument: %s": { + "one": "Unknown argument: %s", + "other": "Unknown arguments: %s" + }, + "Invalid values:": "Invalid values:", + "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Given: %s, Choices: %s", + "Argument check failed: %s": "Argument check failed: %s", + "Implications failed:": "Missing dependent arguments:", + "Not enough arguments following: %s": "Not enough arguments following: %s", + "Invalid JSON config file: %s": "Invalid JSON config file: %s", + "Path to JSON config file": "Path to JSON config file", + "Show help": "Show help", + "Show version number": "Show version number", + "Did you mean %s?": "Did you mean %s?", + "Arguments %s and %s are mutually exclusive" : "Arguments %s and %s are mutually exclusive", + "Positionals:": "Positionals:", + "command": "command", + "deprecated": "deprecated", + "deprecated: %s": "deprecated: %s" +} diff --git a/mybulma/node_modules/yargs/locales/es.json b/mybulma/node_modules/yargs/locales/es.json new file mode 100644 index 0000000..d77b461 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/es.json @@ -0,0 +1,46 @@ +{ + "Commands:": "Comandos:", + "Options:": "Opciones:", + "Examples:": "Ejemplos:", + "boolean": "booleano", + "count": "cuenta", + "string": "cadena de caracteres", + "number": "número", + "array": "tabla", + "required": "requerido", + "default": "defecto", + "default:": "defecto:", + "choices:": "selección:", + "aliases:": "alias:", + "generated-value": "valor-generado", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Hacen falta argumentos no-opcionales: Número recibido %s, necesita por lo menos %s", + "other": "Hacen falta argumentos no-opcionales: Número recibido 
%s, necesita por lo menos %s" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Demasiados argumentos no-opcionales: Número recibido %s, máximo es %s", + "other": "Demasiados argumentos no-opcionales: Número recibido %s, máximo es %s" + }, + "Missing argument value: %s": { + "one": "Falta argumento: %s", + "other": "Faltan argumentos: %s" + }, + "Missing required argument: %s": { + "one": "Falta argumento requerido: %s", + "other": "Faltan argumentos requeridos: %s" + }, + "Unknown argument: %s": { + "one": "Argumento desconocido: %s", + "other": "Argumentos desconocidos: %s" + }, + "Invalid values:": "Valores inválidos:", + "Argument: %s, Given: %s, Choices: %s": "Argumento: %s, Recibido: %s, Seleccionados: %s", + "Argument check failed: %s": "Verificación de argumento ha fallado: %s", + "Implications failed:": "Implicaciones fallidas:", + "Not enough arguments following: %s": "No hay suficientes argumentos después de: %s", + "Invalid JSON config file: %s": "Archivo de configuración JSON inválido: %s", + "Path to JSON config file": "Ruta al archivo de configuración JSON", + "Show help": "Muestra ayuda", + "Show version number": "Muestra número de versión", + "Did you mean %s?": "Quisiste decir %s?" +} diff --git a/mybulma/node_modules/yargs/locales/fi.json b/mybulma/node_modules/yargs/locales/fi.json new file mode 100644 index 0000000..481feb7 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/fi.json @@ -0,0 +1,49 @@ +{ + "Commands:": "Komennot:", + "Options:": "Valinnat:", + "Examples:": "Esimerkkejä:", + "boolean": "totuusarvo", + "count": "lukumäärä", + "string": "merkkijono", + "number": "numero", + "array": "taulukko", + "required": "pakollinen", + "default": "oletusarvo", + "default:": "oletusarvo:", + "choices:": "vaihtoehdot:", + "aliases:": "aliakset:", + "generated-value": "generoitu-arvo", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Liian vähän argumentteja, jotka eivät ole valintoja: annettu %s, vaaditaan vähintään %s", + "other": "Liian vähän argumentteja, jotka eivät ole valintoja: annettu %s, vaaditaan vähintään %s" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Liikaa argumentteja, jotka eivät ole valintoja: annettu %s, sallitaan enintään %s", + "other": "Liikaa argumentteja, jotka eivät ole valintoja: annettu %s, sallitaan enintään %s" + }, + "Missing argument value: %s": { + "one": "Argumentin arvo puuttuu: %s", + "other": "Argumentin arvot puuttuvat: %s" + }, + "Missing required argument: %s": { + "one": "Pakollinen argumentti puuttuu: %s", + "other": "Pakollisia argumentteja puuttuu: %s" + }, + "Unknown argument: %s": { + "one": "Tuntematon argumentti: %s", + "other": "Tuntemattomia argumentteja: %s" + }, + "Invalid values:": "Virheelliset arvot:", + "Argument: %s, Given: %s, Choices: %s": "Argumentti: %s, Annettu: %s, Vaihtoehdot: %s", + "Argument check failed: %s": "Argumentin tarkistus epäonnistui: %s", + "Implications failed:": "Riippuvia argumentteja puuttuu:", + "Not enough arguments following: %s": "Argumentin perässä ei ole tarpeeksi argumentteja: %s", + "Invalid JSON config file: %s": "Epävalidi JSON-asetustiedosto: %s", + "Path to JSON config file": "JSON-asetustiedoston polku", + "Show help": "Näytä ohje", + "Show version number": "Näytä versionumero", + "Did you mean %s?": "Tarkoititko %s?", + "Arguments %s and %s are mutually exclusive" : "Argumentit %s ja %s eivät ole yhteensopivat", + "Positionals:": "Sijaintiparametrit:", + "command": "komento" +} diff --git 
a/mybulma/node_modules/yargs/locales/fr.json b/mybulma/node_modules/yargs/locales/fr.json new file mode 100644 index 0000000..edd743f --- /dev/null +++ b/mybulma/node_modules/yargs/locales/fr.json @@ -0,0 +1,53 @@ +{ + "Commands:": "Commandes :", + "Options:": "Options :", + "Examples:": "Exemples :", + "boolean": "booléen", + "count": "compteur", + "string": "chaîne de caractères", + "number": "nombre", + "array": "tableau", + "required": "requis", + "default": "défaut", + "default:": "défaut :", + "choices:": "choix :", + "aliases:": "alias :", + "generated-value": "valeur générée", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Pas assez d'arguments (hors options) : reçu %s, besoin d'au moins %s", + "other": "Pas assez d'arguments (hors options) : reçus %s, besoin d'au moins %s" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Trop d'arguments (hors options) : reçu %s, maximum de %s", + "other": "Trop d'arguments (hors options) : reçus %s, maximum de %s" + }, + "Missing argument value: %s": { + "one": "Argument manquant : %s", + "other": "Arguments manquants : %s" + }, + "Missing required argument: %s": { + "one": "Argument requis manquant : %s", + "other": "Arguments requis manquants : %s" + }, + "Unknown argument: %s": { + "one": "Argument inconnu : %s", + "other": "Arguments inconnus : %s" + }, + "Unknown command: %s": { + "one": "Commande inconnue : %s", + "other": "Commandes inconnues : %s" + }, + "Invalid values:": "Valeurs invalides :", + "Argument: %s, Given: %s, Choices: %s": "Argument : %s, donné : %s, choix : %s", + "Argument check failed: %s": "Echec de la vérification de l'argument : %s", + "Implications failed:": "Arguments dépendants manquants :", + "Not enough arguments following: %s": "Pas assez d'arguments après : %s", + "Invalid JSON config file: %s": "Fichier de configuration JSON invalide : %s", + "Path to JSON config file": "Chemin du fichier de configuration JSON", + "Show help": "Affiche l'aide", + "Show version number": "Affiche le numéro de version", + "Did you mean %s?": "Vouliez-vous dire %s ?", + "Arguments %s and %s are mutually exclusive" : "Les arguments %s et %s sont mutuellement exclusifs", + "Positionals:": "Arguments positionnels :", + "command": "commande" +} diff --git a/mybulma/node_modules/yargs/locales/hi.json b/mybulma/node_modules/yargs/locales/hi.json new file mode 100644 index 0000000..a9de77c --- /dev/null +++ b/mybulma/node_modules/yargs/locales/hi.json @@ -0,0 +1,49 @@ +{ + "Commands:": "आदेश:", + "Options:": "विकल्प:", + "Examples:": "उदाहरण:", + "boolean": "सत्यता", + "count": "संख्या", + "string": "वर्णों का तार ", + "number": "अंक", + "array": "सरणी", + "required": "आवश्यक", + "default": "डिफॉल्ट", + "default:": "डिफॉल्ट:", + "choices:": "विकल्प:", + "aliases:": "उपनाम:", + "generated-value": "उत्पन्न-मूल्य", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "पर्याप्त गैर-विकल्प तर्क प्राप्त नहीं: %s प्राप्त, कम से कम %s की आवश्यकता है", + "other": "पर्याप्त गैर-विकल्प तर्क प्राप्त नहीं: %s प्राप्त, कम से कम %s की आवश्यकता है" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "बहुत सारे गैर-विकल्प तर्क: %s प्राप्त, अधिकतम %s मान्य", + "other": "बहुत सारे गैर-विकल्प तर्क: %s प्राप्त, अधिकतम %s मान्य" + }, + "Missing argument value: %s": { + "one": "कुछ तर्को के मूल्य गुम हैं: %s", + "other": "कुछ तर्को के मूल्य गुम हैं: %s" + }, + "Missing required argument: %s": { + "one": "आवश्यक तर्क गुम हैं: %s", + "other": "आवश्यक तर्क गुम हैं: %s" 
+ }, + "Unknown argument: %s": { + "one": "अज्ञात तर्क प्राप्त: %s", + "other": "अज्ञात तर्क प्राप्त: %s" + }, + "Invalid values:": "अमान्य मूल्य:", + "Argument: %s, Given: %s, Choices: %s": "तर्क: %s, प्राप्त: %s, विकल्प: %s", + "Argument check failed: %s": "तर्क जांच विफल: %s", + "Implications failed:": "दिए गए तर्क के लिए अतिरिक्त तर्क की अपेक्षा है:", + "Not enough arguments following: %s": "निम्नलिखित के बाद पर्याप्त तर्क नहीं प्राप्त: %s", + "Invalid JSON config file: %s": "अमान्य JSON config फाइल: %s", + "Path to JSON config file": "JSON config फाइल का पथ", + "Show help": "सहायता दिखाएँ", + "Show version number": "Version संख्या दिखाएँ", + "Did you mean %s?": "क्या आपका मतलब है %s?", + "Arguments %s and %s are mutually exclusive" : "तर्क %s और %s परस्पर अनन्य हैं", + "Positionals:": "स्थानीय:", + "command": "आदेश" +} diff --git a/mybulma/node_modules/yargs/locales/hu.json b/mybulma/node_modules/yargs/locales/hu.json new file mode 100644 index 0000000..21492d0 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/hu.json @@ -0,0 +1,46 @@ +{ + "Commands:": "Parancsok:", + "Options:": "Opciók:", + "Examples:": "Példák:", + "boolean": "boolean", + "count": "számláló", + "string": "szöveg", + "number": "szám", + "array": "tömb", + "required": "kötelező", + "default": "alapértelmezett", + "default:": "alapértelmezett:", + "choices:": "lehetőségek:", + "aliases:": "aliaszok:", + "generated-value": "generált-érték", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Nincs elég nem opcionális argumentum: %s van, legalább %s kell", + "other": "Nincs elég nem opcionális argumentum: %s van, legalább %s kell" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Túl sok nem opciánlis argumentum van: %s van, maximum %s lehet", + "other": "Túl sok nem opciánlis argumentum van: %s van, maximum %s lehet" + }, + "Missing argument value: %s": { + "one": "Hiányzó argumentum érték: %s", + "other": "Hiányzó argumentum értékek: %s" + }, + "Missing required argument: %s": { + "one": "Hiányzó kötelező argumentum: %s", + "other": "Hiányzó kötelező argumentumok: %s" + }, + "Unknown argument: %s": { + "one": "Ismeretlen argumentum: %s", + "other": "Ismeretlen argumentumok: %s" + }, + "Invalid values:": "Érvénytelen érték:", + "Argument: %s, Given: %s, Choices: %s": "Argumentum: %s, Megadott: %s, Lehetőségek: %s", + "Argument check failed: %s": "Argumentum ellenőrzés sikertelen: %s", + "Implications failed:": "Implikációk sikertelenek:", + "Not enough arguments following: %s": "Nem elég argumentum követi: %s", + "Invalid JSON config file: %s": "Érvénytelen JSON konfigurációs file: %s", + "Path to JSON config file": "JSON konfigurációs file helye", + "Show help": "Súgo megjelenítése", + "Show version number": "Verziószám megjelenítése", + "Did you mean %s?": "Erre gondoltál %s?" 
+} diff --git a/mybulma/node_modules/yargs/locales/id.json b/mybulma/node_modules/yargs/locales/id.json new file mode 100644 index 0000000..125867c --- /dev/null +++ b/mybulma/node_modules/yargs/locales/id.json @@ -0,0 +1,50 @@ + +{ + "Commands:": "Perintah:", + "Options:": "Pilihan:", + "Examples:": "Contoh:", + "boolean": "boolean", + "count": "jumlah", + "number": "nomor", + "string": "string", + "array": "larik", + "required": "diperlukan", + "default": "bawaan", + "default:": "bawaan:", + "aliases:": "istilah lain:", + "choices:": "pilihan:", + "generated-value": "nilai-yang-dihasilkan", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Argumen wajib kurang: hanya %s, minimal %s", + "other": "Argumen wajib kurang: hanya %s, minimal %s" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Terlalu banyak argumen wajib: ada %s, maksimal %s", + "other": "Terlalu banyak argumen wajib: ada %s, maksimal %s" + }, + "Missing argument value: %s": { + "one": "Kurang argumen: %s", + "other": "Kurang argumen: %s" + }, + "Missing required argument: %s": { + "one": "Kurang argumen wajib: %s", + "other": "Kurang argumen wajib: %s" + }, + "Unknown argument: %s": { + "one": "Argumen tak diketahui: %s", + "other": "Argumen tak diketahui: %s" + }, + "Invalid values:": "Nilai-nilai tidak valid:", + "Argument: %s, Given: %s, Choices: %s": "Argumen: %s, Diberikan: %s, Pilihan: %s", + "Argument check failed: %s": "Pemeriksaan argument gagal: %s", + "Implications failed:": "Implikasi gagal:", + "Not enough arguments following: %s": "Kurang argumen untuk: %s", + "Invalid JSON config file: %s": "Berkas konfigurasi JSON tidak valid: %s", + "Path to JSON config file": "Alamat berkas konfigurasi JSON", + "Show help": "Lihat bantuan", + "Show version number": "Lihat nomor versi", + "Did you mean %s?": "Maksud Anda: %s?", + "Arguments %s and %s are mutually exclusive" : "Argumen %s dan %s saling eksklusif", + "Positionals:": "Posisional-posisional:", + "command": "perintah" +} diff --git a/mybulma/node_modules/yargs/locales/it.json b/mybulma/node_modules/yargs/locales/it.json new file mode 100644 index 0000000..fde5756 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/it.json @@ -0,0 +1,46 @@ +{ + "Commands:": "Comandi:", + "Options:": "Opzioni:", + "Examples:": "Esempi:", + "boolean": "booleano", + "count": "contatore", + "string": "stringa", + "number": "numero", + "array": "vettore", + "required": "richiesto", + "default": "predefinito", + "default:": "predefinito:", + "choices:": "scelte:", + "aliases:": "alias:", + "generated-value": "valore generato", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Numero insufficiente di argomenti non opzione: inseriti %s, richiesti almeno %s", + "other": "Numero insufficiente di argomenti non opzione: inseriti %s, richiesti almeno %s" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Troppi argomenti non opzione: inseriti %s, massimo possibile %s", + "other": "Troppi argomenti non opzione: inseriti %s, massimo possibile %s" + }, + "Missing argument value: %s": { + "one": "Argomento mancante: %s", + "other": "Argomenti mancanti: %s" + }, + "Missing required argument: %s": { + "one": "Argomento richiesto mancante: %s", + "other": "Argomenti richiesti mancanti: %s" + }, + "Unknown argument: %s": { + "one": "Argomento sconosciuto: %s", + "other": "Argomenti sconosciuti: %s" + }, + "Invalid values:": "Valori non validi:", + "Argument: %s, Given: %s, Choices: %s": "Argomento: 
%s, Richiesto: %s, Scelte: %s", + "Argument check failed: %s": "Controllo dell'argomento fallito: %s", + "Implications failed:": "Argomenti dipendenti mancanti:", + "Not enough arguments following: %s": "Argomenti insufficienti dopo: %s", + "Invalid JSON config file: %s": "File di configurazione JSON non valido: %s", + "Path to JSON config file": "Percorso del file di configurazione JSON", + "Show help": "Mostra la schermata di aiuto", + "Show version number": "Mostra il numero di versione", + "Did you mean %s?": "Intendi forse %s?" +} diff --git a/mybulma/node_modules/yargs/locales/ja.json b/mybulma/node_modules/yargs/locales/ja.json new file mode 100644 index 0000000..3954ae6 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/ja.json @@ -0,0 +1,51 @@ +{ + "Commands:": "コマンド:", + "Options:": "オプション:", + "Examples:": "例:", + "boolean": "真偽", + "count": "カウント", + "string": "文字列", + "number": "数値", + "array": "配列", + "required": "必須", + "default": "デフォルト", + "default:": "デフォルト:", + "choices:": "選択してください:", + "aliases:": "エイリアス:", + "generated-value": "生成された値", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "オプションではない引数が %s 個では不足しています。少なくとも %s 個の引数が必要です:", + "other": "オプションではない引数が %s 個では不足しています。少なくとも %s 個の引数が必要です:" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "オプションではない引数が %s 個では多すぎます。最大で %s 個までです:", + "other": "オプションではない引数が %s 個では多すぎます。最大で %s 個までです:" + }, + "Missing argument value: %s": { + "one": "引数の値が見つかりません: %s", + "other": "引数の値が見つかりません: %s" + }, + "Missing required argument: %s": { + "one": "必須の引数が見つかりません: %s", + "other": "必須の引数が見つかりません: %s" + }, + "Unknown argument: %s": { + "one": "未知の引数です: %s", + "other": "未知の引数です: %s" + }, + "Invalid values:": "不正な値です:", + "Argument: %s, Given: %s, Choices: %s": "引数は %s です。与えられた値: %s, 選択してください: %s", + "Argument check failed: %s": "引数のチェックに失敗しました: %s", + "Implications failed:": "オプションの組み合わせで不正が生じました:", + "Not enough arguments following: %s": "次の引数が不足しています。: %s", + "Invalid JSON config file: %s": "JSONの設定ファイルが不正です: %s", + "Path to JSON config file": "JSONの設定ファイルまでのpath", + "Show help": "ヘルプを表示", + "Show version number": "バージョンを表示", + "Did you mean %s?": "もしかして %s?", + "Arguments %s and %s are mutually exclusive" : "引数 %s と %s は同時に指定できません", + "Positionals:": "位置:", + "command": "コマンド", + "deprecated": "非推奨", + "deprecated: %s": "非推奨: %s" +} diff --git a/mybulma/node_modules/yargs/locales/ko.json b/mybulma/node_modules/yargs/locales/ko.json new file mode 100644 index 0000000..746bc89 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/ko.json @@ -0,0 +1,49 @@ +{ + "Commands:": "명령:", + "Options:": "옵션:", + "Examples:": "예시:", + "boolean": "불리언", + "count": "개수", + "string": "문자열", + "number": "숫자", + "array": "배열", + "required": "필수", + "default": "기본값", + "default:": "기본값:", + "choices:": "선택지:", + "aliases:": "별칭:", + "generated-value": "생성된 값", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "옵션이 아닌 인수가 충분하지 않습니다: %s개 입력받음, 최소 %s개 입력 필요", + "other": "옵션이 아닌 인수가 충분하지 않습니다: %s개 입력받음, 최소 %s개 입력 필요" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "옵션이 아닌 인수가 너무 많습니다: %s개 입력받음, 최대 %s개 입력 가능", + "other": "옵션이 아닌 인수가 너무 많습니다: %s개 입력받음, 최대 %s개 입력 가능" + }, + "Missing argument value: %s": { + "one": "인수가 주어지지 않았습니다: %s", + "other": "인수가 주어지지 않았습니다: %s" + }, + "Missing required argument: %s": { + "one": "필수 인수가 주어지지 않았습니다: %s", + "other": "필수 인수가 주어지지 않았습니다: %s" + }, + "Unknown argument: %s": { + "one": "알 수 없는 인수입니다: %s", + "other": "알 수 없는 
인수입니다: %s" + }, + "Invalid values:": "유효하지 않은 값:", + "Argument: %s, Given: %s, Choices: %s": "인수: %s, 주어진 값: %s, 선택지: %s", + "Argument check failed: %s": "인수 체크에 실패했습니다: %s", + "Implications failed:": "주어진 인수에 필요한 추가 인수가 주어지지 않았습니다:", + "Not enough arguments following: %s": "다음 인수가 주어지지 않았습니다: %s", + "Invalid JSON config file: %s": "유효하지 않은 JSON 설정 파일: %s", + "Path to JSON config file": "JSON 설정 파일 경로", + "Show help": "도움말 표시", + "Show version number": "버전 표시", + "Did you mean %s?": "%s을(를) 찾으시나요?", + "Arguments %s and %s are mutually exclusive" : "인수 %s과(와) %s은(는) 동시에 지정할 수 없습니다", + "Positionals:": "위치:", + "command": "명령" +} diff --git a/mybulma/node_modules/yargs/locales/nb.json b/mybulma/node_modules/yargs/locales/nb.json new file mode 100644 index 0000000..6f410ed --- /dev/null +++ b/mybulma/node_modules/yargs/locales/nb.json @@ -0,0 +1,44 @@ +{ + "Commands:": "Kommandoer:", + "Options:": "Alternativer:", + "Examples:": "Eksempler:", + "boolean": "boolsk", + "count": "antall", + "string": "streng", + "number": "nummer", + "array": "matrise", + "required": "obligatorisk", + "default": "standard", + "default:": "standard:", + "choices:": "valg:", + "generated-value": "generert-verdi", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Ikke nok ikke-alternativ argumenter: fikk %s, trenger minst %s", + "other": "Ikke nok ikke-alternativ argumenter: fikk %s, trenger minst %s" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "For mange ikke-alternativ argumenter: fikk %s, maksimum %s", + "other": "For mange ikke-alternativ argumenter: fikk %s, maksimum %s" + }, + "Missing argument value: %s": { + "one": "Mangler argument verdi: %s", + "other": "Mangler argument verdier: %s" + }, + "Missing required argument: %s": { + "one": "Mangler obligatorisk argument: %s", + "other": "Mangler obligatoriske argumenter: %s" + }, + "Unknown argument: %s": { + "one": "Ukjent argument: %s", + "other": "Ukjente argumenter: %s" + }, + "Invalid values:": "Ugyldige verdier:", + "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Gitt: %s, Valg: %s", + "Argument check failed: %s": "Argumentsjekk mislyktes: %s", + "Implications failed:": "Konsekvensene mislyktes:", + "Not enough arguments following: %s": "Ikke nok følgende argumenter: %s", + "Invalid JSON config file: %s": "Ugyldig JSON konfigurasjonsfil: %s", + "Path to JSON config file": "Bane til JSON konfigurasjonsfil", + "Show help": "Vis hjelp", + "Show version number": "Vis versjonsnummer" +} diff --git a/mybulma/node_modules/yargs/locales/nl.json b/mybulma/node_modules/yargs/locales/nl.json new file mode 100644 index 0000000..9ff95c5 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/nl.json @@ -0,0 +1,49 @@ +{ + "Commands:": "Commando's:", + "Options:": "Opties:", + "Examples:": "Voorbeelden:", + "boolean": "booleaans", + "count": "aantal", + "string": "string", + "number": "getal", + "array": "lijst", + "required": "verplicht", + "default": "standaard", + "default:": "standaard:", + "choices:": "keuzes:", + "aliases:": "aliassen:", + "generated-value": "gegenereerde waarde", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Niet genoeg niet-optie-argumenten: %s gekregen, minstens %s nodig", + "other": "Niet genoeg niet-optie-argumenten: %s gekregen, minstens %s nodig" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Te veel niet-optie-argumenten: %s gekregen, maximum is %s", + "other": "Te veel niet-optie-argumenten: %s gekregen, maximum is %s" + }, 
+ "Missing argument value: %s": { + "one": "Missende argumentwaarde: %s", + "other": "Missende argumentwaarden: %s" + }, + "Missing required argument: %s": { + "one": "Missend verplicht argument: %s", + "other": "Missende verplichte argumenten: %s" + }, + "Unknown argument: %s": { + "one": "Onbekend argument: %s", + "other": "Onbekende argumenten: %s" + }, + "Invalid values:": "Ongeldige waarden:", + "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Gegeven: %s, Keuzes: %s", + "Argument check failed: %s": "Argumentcontrole mislukt: %s", + "Implications failed:": "Ontbrekende afhankelijke argumenten:", + "Not enough arguments following: %s": "Niet genoeg argumenten na: %s", + "Invalid JSON config file: %s": "Ongeldig JSON-config-bestand: %s", + "Path to JSON config file": "Pad naar JSON-config-bestand", + "Show help": "Toon help", + "Show version number": "Toon versienummer", + "Did you mean %s?": "Bedoelde u misschien %s?", + "Arguments %s and %s are mutually exclusive": "Argumenten %s en %s kunnen niet tegelijk gebruikt worden", + "Positionals:": "Positie-afhankelijke argumenten", + "command": "commando" +} diff --git a/mybulma/node_modules/yargs/locales/nn.json b/mybulma/node_modules/yargs/locales/nn.json new file mode 100644 index 0000000..24479ac --- /dev/null +++ b/mybulma/node_modules/yargs/locales/nn.json @@ -0,0 +1,44 @@ +{ + "Commands:": "Kommandoar:", + "Options:": "Alternativ:", + "Examples:": "Døme:", + "boolean": "boolsk", + "count": "mengd", + "string": "streng", + "number": "nummer", + "array": "matrise", + "required": "obligatorisk", + "default": "standard", + "default:": "standard:", + "choices:": "val:", + "generated-value": "generert-verdi", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Ikkje nok ikkje-alternativ argument: fekk %s, treng minst %s", + "other": "Ikkje nok ikkje-alternativ argument: fekk %s, treng minst %s" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "For mange ikkje-alternativ argument: fekk %s, maksimum %s", + "other": "For mange ikkje-alternativ argument: fekk %s, maksimum %s" + }, + "Missing argument value: %s": { + "one": "Manglar argumentverdi: %s", + "other": "Manglar argumentverdiar: %s" + }, + "Missing required argument: %s": { + "one": "Manglar obligatorisk argument: %s", + "other": "Manglar obligatoriske argument: %s" + }, + "Unknown argument: %s": { + "one": "Ukjent argument: %s", + "other": "Ukjende argument: %s" + }, + "Invalid values:": "Ugyldige verdiar:", + "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Gjeve: %s, Val: %s", + "Argument check failed: %s": "Argumentsjekk mislukkast: %s", + "Implications failed:": "Konsekvensane mislukkast:", + "Not enough arguments following: %s": "Ikkje nok fylgjande argument: %s", + "Invalid JSON config file: %s": "Ugyldig JSON konfigurasjonsfil: %s", + "Path to JSON config file": "Bane til JSON konfigurasjonsfil", + "Show help": "Vis hjelp", + "Show version number": "Vis versjonsnummer" +} diff --git a/mybulma/node_modules/yargs/locales/pirate.json b/mybulma/node_modules/yargs/locales/pirate.json new file mode 100644 index 0000000..dcb5cb7 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/pirate.json @@ -0,0 +1,13 @@ +{ + "Commands:": "Choose yer command:", + "Options:": "Options for me hearties!", + "Examples:": "Ex. 
marks the spot:", + "required": "requi-yar-ed", + "Missing required argument: %s": { + "one": "Ye be havin' to set the followin' argument land lubber: %s", + "other": "Ye be havin' to set the followin' arguments land lubber: %s" + }, + "Show help": "Parlay this here code of conduct", + "Show version number": "'Tis the version ye be askin' fer", + "Arguments %s and %s are mutually exclusive" : "Yon scurvy dogs %s and %s be as bad as rum and a prudish wench" +} diff --git a/mybulma/node_modules/yargs/locales/pl.json b/mybulma/node_modules/yargs/locales/pl.json new file mode 100644 index 0000000..a41d4bd --- /dev/null +++ b/mybulma/node_modules/yargs/locales/pl.json @@ -0,0 +1,49 @@ +{ + "Commands:": "Polecenia:", + "Options:": "Opcje:", + "Examples:": "Przykłady:", + "boolean": "boolean", + "count": "ilość", + "string": "ciąg znaków", + "number": "liczba", + "array": "tablica", + "required": "wymagany", + "default": "domyślny", + "default:": "domyślny:", + "choices:": "dostępne:", + "aliases:": "aliasy:", + "generated-value": "wygenerowana-wartość", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Niewystarczająca ilość argumentów: otrzymano %s, wymagane co najmniej %s", + "other": "Niewystarczająca ilość argumentów: otrzymano %s, wymagane co najmniej %s" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Zbyt duża ilość argumentów: otrzymano %s, wymagane co najwyżej %s", + "other": "Zbyt duża ilość argumentów: otrzymano %s, wymagane co najwyżej %s" + }, + "Missing argument value: %s": { + "one": "Brak wartości dla argumentu: %s", + "other": "Brak wartości dla argumentów: %s" + }, + "Missing required argument: %s": { + "one": "Brak wymaganego argumentu: %s", + "other": "Brak wymaganych argumentów: %s" + }, + "Unknown argument: %s": { + "one": "Nieznany argument: %s", + "other": "Nieznane argumenty: %s" + }, + "Invalid values:": "Nieprawidłowe wartości:", + "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Otrzymano: %s, Dostępne: %s", + "Argument check failed: %s": "Weryfikacja argumentów nie powiodła się: %s", + "Implications failed:": "Założenia nie zostały spełnione:", + "Not enough arguments following: %s": "Niewystarczająca ilość argumentów następujących po: %s", + "Invalid JSON config file: %s": "Nieprawidłowy plik konfiguracyjny JSON: %s", + "Path to JSON config file": "Ścieżka do pliku konfiguracyjnego JSON", + "Show help": "Pokaż pomoc", + "Show version number": "Pokaż numer wersji", + "Did you mean %s?": "Czy chodziło Ci o %s?", + "Arguments %s and %s are mutually exclusive": "Argumenty %s i %s wzajemnie się wykluczają", + "Positionals:": "Pozycyjne:", + "command": "polecenie" +} diff --git a/mybulma/node_modules/yargs/locales/pt.json b/mybulma/node_modules/yargs/locales/pt.json new file mode 100644 index 0000000..0c8ac99 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/pt.json @@ -0,0 +1,45 @@ +{ + "Commands:": "Comandos:", + "Options:": "Opções:", + "Examples:": "Exemplos:", + "boolean": "boolean", + "count": "contagem", + "string": "cadeia de caracteres", + "number": "número", + "array": "arranjo", + "required": "requerido", + "default": "padrão", + "default:": "padrão:", + "choices:": "escolhas:", + "generated-value": "valor-gerado", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Argumentos insuficientes não opcionais: Argumento %s, necessário pelo menos %s", + "other": "Argumentos insuficientes não opcionais: Argumento %s, necessário pelo menos %s" + }, + "Too many non-option arguments: 
got %s, maximum of %s": { + "one": "Excesso de argumentos não opcionais: recebido %s, máximo de %s", + "other": "Excesso de argumentos não opcionais: recebido %s, máximo de %s" + }, + "Missing argument value: %s": { + "one": "Falta valor de argumento: %s", + "other": "Falta valores de argumento: %s" + }, + "Missing required argument: %s": { + "one": "Falta argumento obrigatório: %s", + "other": "Faltando argumentos obrigatórios: %s" + }, + "Unknown argument: %s": { + "one": "Argumento desconhecido: %s", + "other": "Argumentos desconhecidos: %s" + }, + "Invalid values:": "Valores inválidos:", + "Argument: %s, Given: %s, Choices: %s": "Argumento: %s, Dado: %s, Escolhas: %s", + "Argument check failed: %s": "Verificação de argumento falhou: %s", + "Implications failed:": "Implicações falharam:", + "Not enough arguments following: %s": "Insuficientes argumentos a seguir: %s", + "Invalid JSON config file: %s": "Arquivo de configuração em JSON esta inválido: %s", + "Path to JSON config file": "Caminho para o arquivo de configuração em JSON", + "Show help": "Mostra ajuda", + "Show version number": "Mostra número de versão", + "Arguments %s and %s are mutually exclusive" : "Argumentos %s e %s são mutualmente exclusivos" +} diff --git a/mybulma/node_modules/yargs/locales/pt_BR.json b/mybulma/node_modules/yargs/locales/pt_BR.json new file mode 100644 index 0000000..eae1ec6 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/pt_BR.json @@ -0,0 +1,48 @@ +{ + "Commands:": "Comandos:", + "Options:": "Opções:", + "Examples:": "Exemplos:", + "boolean": "booleano", + "count": "contagem", + "string": "string", + "number": "número", + "array": "array", + "required": "obrigatório", + "default:": "padrão:", + "choices:": "opções:", + "aliases:": "sinônimos:", + "generated-value": "valor-gerado", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Argumentos insuficientes: Argumento %s, necessário pelo menos %s", + "other": "Argumentos insuficientes: Argumento %s, necessário pelo menos %s" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Excesso de argumentos: recebido %s, máximo de %s", + "other": "Excesso de argumentos: recebido %s, máximo de %s" + }, + "Missing argument value: %s": { + "one": "Falta valor de argumento: %s", + "other": "Falta valores de argumento: %s" + }, + "Missing required argument: %s": { + "one": "Falta argumento obrigatório: %s", + "other": "Faltando argumentos obrigatórios: %s" + }, + "Unknown argument: %s": { + "one": "Argumento desconhecido: %s", + "other": "Argumentos desconhecidos: %s" + }, + "Invalid values:": "Valores inválidos:", + "Argument: %s, Given: %s, Choices: %s": "Argumento: %s, Dado: %s, Opções: %s", + "Argument check failed: %s": "Verificação de argumento falhou: %s", + "Implications failed:": "Implicações falharam:", + "Not enough arguments following: %s": "Argumentos insuficientes a seguir: %s", + "Invalid JSON config file: %s": "Arquivo JSON de configuração inválido: %s", + "Path to JSON config file": "Caminho para o arquivo JSON de configuração", + "Show help": "Exibe ajuda", + "Show version number": "Exibe a versão", + "Did you mean %s?": "Você quis dizer %s?", + "Arguments %s and %s are mutually exclusive" : "Argumentos %s e %s são mutualmente exclusivos", + "Positionals:": "Posicionais:", + "command": "comando" +} diff --git a/mybulma/node_modules/yargs/locales/ru.json b/mybulma/node_modules/yargs/locales/ru.json new file mode 100644 index 0000000..d5c9e32 --- /dev/null +++ 
b/mybulma/node_modules/yargs/locales/ru.json @@ -0,0 +1,51 @@ +{ + "Commands:": "Команды:", + "Options:": "Опции:", + "Examples:": "Примеры:", + "boolean": "булевый тип", + "count": "подсчет", + "string": "строковой тип", + "number": "число", + "array": "массив", + "required": "необходимо", + "default": "по умолчанию", + "default:": "по умолчанию:", + "choices:": "возможности:", + "aliases:": "алиасы:", + "generated-value": "генерированное значение", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Недостаточно неопционных аргументов: есть %s, нужно как минимум %s", + "other": "Недостаточно неопционных аргументов: есть %s, нужно как минимум %s" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Слишком много неопционных аргументов: есть %s, максимум допустимо %s", + "other": "Слишком много неопционных аргументов: есть %s, максимум допустимо %s" + }, + "Missing argument value: %s": { + "one": "Не хватает значения аргумента: %s", + "other": "Не хватает значений аргументов: %s" + }, + "Missing required argument: %s": { + "one": "Не хватает необходимого аргумента: %s", + "other": "Не хватает необходимых аргументов: %s" + }, + "Unknown argument: %s": { + "one": "Неизвестный аргумент: %s", + "other": "Неизвестные аргументы: %s" + }, + "Invalid values:": "Недействительные значения:", + "Argument: %s, Given: %s, Choices: %s": "Аргумент: %s, Данное значение: %s, Возможности: %s", + "Argument check failed: %s": "Проверка аргументов не удалась: %s", + "Implications failed:": "Данный аргумент требует следующий дополнительный аргумент:", + "Not enough arguments following: %s": "Недостаточно следующих аргументов: %s", + "Invalid JSON config file: %s": "Недействительный файл конфигурации JSON: %s", + "Path to JSON config file": "Путь к файлу конфигурации JSON", + "Show help": "Показать помощь", + "Show version number": "Показать номер версии", + "Did you mean %s?": "Вы имели в виду %s?", + "Arguments %s and %s are mutually exclusive": "Аргументы %s и %s являются взаимоисключающими", + "Positionals:": "Позиционные аргументы:", + "command": "команда", + "deprecated": "устар.", + "deprecated: %s": "устар.: %s" +} diff --git a/mybulma/node_modules/yargs/locales/th.json b/mybulma/node_modules/yargs/locales/th.json new file mode 100644 index 0000000..33b048e --- /dev/null +++ b/mybulma/node_modules/yargs/locales/th.json @@ -0,0 +1,46 @@ +{ + "Commands:": "คอมมาน", + "Options:": "ออฟชั่น", + "Examples:": "ตัวอย่าง", + "boolean": "บูลีน", + "count": "นับ", + "string": "สตริง", + "number": "ตัวเลข", + "array": "อาเรย์", + "required": "จำเป็น", + "default": "ค่าเริ่มต้", + "default:": "ค่าเริ่มต้น", + "choices:": "ตัวเลือก", + "aliases:": "เอเลียส", + "generated-value": "ค่าที่ถูกสร้างขึ้น", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "ใส่อาร์กิวเมนต์ไม่ครบตามจำนวนที่กำหนด: ใส่ค่ามาจำนวน %s ค่า, แต่ต้องการอย่างน้อย %s ค่า", + "other": "ใส่อาร์กิวเมนต์ไม่ครบตามจำนวนที่กำหนด: ใส่ค่ามาจำนวน %s ค่า, แต่ต้องการอย่างน้อย %s ค่า" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "ใส่อาร์กิวเมนต์เกินจำนวนที่กำหนด: ใส่ค่ามาจำนวน %s ค่า, แต่ต้องการมากที่สุด %s ค่า", + "other": "ใส่อาร์กิวเมนต์เกินจำนวนที่กำหนด: ใส่ค่ามาจำนวน %s ค่า, แต่ต้องการมากที่สุด %s ค่า" + }, + "Missing argument value: %s": { + "one": "ค่าอาร์กิวเมนต์ที่ขาดไป: %s", + "other": "ค่าอาร์กิวเมนต์ที่ขาดไป: %s" + }, + "Missing required argument: %s": { + "one": "อาร์กิวเมนต์จำเป็นที่ขาดไป: %s", + "other": "อาร์กิวเมนต์จำเป็นที่ขาดไป: %s" + }, + "Unknown 
argument: %s": { + "one": "อาร์กิวเมนต์ที่ไม่รู้จัก: %s", + "other": "อาร์กิวเมนต์ที่ไม่รู้จัก: %s" + }, + "Invalid values:": "ค่าไม่ถูกต้อง:", + "Argument: %s, Given: %s, Choices: %s": "อาร์กิวเมนต์: %s, ได้รับ: %s, ตัวเลือก: %s", + "Argument check failed: %s": "ตรวจสอบพบอาร์กิวเมนต์ที่ไม่ถูกต้อง: %s", + "Implications failed:": "Implications ไม่สำเร็จ:", + "Not enough arguments following: %s": "ใส่อาร์กิวเมนต์ไม่ครบ: %s", + "Invalid JSON config file: %s": "ไฟล์คอนฟิค JSON ไม่ถูกต้อง: %s", + "Path to JSON config file": "พาทไฟล์คอนฟิค JSON", + "Show help": "ขอความช่วยเหลือ", + "Show version number": "แสดงตัวเลขเวอร์ชั่น", + "Did you mean %s?": "คุณหมายถึง %s?" +} diff --git a/mybulma/node_modules/yargs/locales/tr.json b/mybulma/node_modules/yargs/locales/tr.json new file mode 100644 index 0000000..0d0d2cc --- /dev/null +++ b/mybulma/node_modules/yargs/locales/tr.json @@ -0,0 +1,48 @@ +{ + "Commands:": "Komutlar:", + "Options:": "Seçenekler:", + "Examples:": "Örnekler:", + "boolean": "boolean", + "count": "sayı", + "string": "string", + "number": "numara", + "array": "array", + "required": "zorunlu", + "default": "varsayılan", + "default:": "varsayılan:", + "choices:": "seçimler:", + "aliases:": "takma adlar:", + "generated-value": "oluşturulan-değer", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Seçenek dışı argümanlar yetersiz: %s bulundu, %s gerekli", + "other": "Seçenek dışı argümanlar yetersiz: %s bulundu, %s gerekli" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Seçenek dışı argümanlar gereğinden fazla: %s bulundu, azami %s", + "other": "Seçenek dışı argümanlar gereğinden fazla: %s bulundu, azami %s" + }, + "Missing argument value: %s": { + "one": "Eksik argüman değeri: %s", + "other": "Eksik argüman değerleri: %s" + }, + "Missing required argument: %s": { + "one": "Eksik zorunlu argüman: %s", + "other": "Eksik zorunlu argümanlar: %s" + }, + "Unknown argument: %s": { + "one": "Bilinmeyen argüman: %s", + "other": "Bilinmeyen argümanlar: %s" + }, + "Invalid values:": "Geçersiz değerler:", + "Argument: %s, Given: %s, Choices: %s": "Argüman: %s, Verilen: %s, Seçimler: %s", + "Argument check failed: %s": "Argüman kontrolü başarısız oldu: %s", + "Implications failed:": "Sonuçlar başarısız oldu:", + "Not enough arguments following: %s": "%s için yeterli argüman bulunamadı", + "Invalid JSON config file: %s": "Geçersiz JSON yapılandırma dosyası: %s", + "Path to JSON config file": "JSON yapılandırma dosya konumu", + "Show help": "Yardım detaylarını göster", + "Show version number": "Versiyon detaylarını göster", + "Did you mean %s?": "Bunu mu demek istediniz: %s?", + "Positionals:": "Sıralılar:", + "command": "komut" +} diff --git a/mybulma/node_modules/yargs/locales/uk_UA.json b/mybulma/node_modules/yargs/locales/uk_UA.json new file mode 100644 index 0000000..0af0e99 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/uk_UA.json @@ -0,0 +1,51 @@ +{ + "Commands:": "Команди:", + "Options:": "Опції:", + "Examples:": "Приклади:", + "boolean": "boolean", + "count": "кількість", + "string": "строка", + "number": "число", + "array": "масива", + "required": "обов'язково", + "default": "за замовчуванням", + "default:": "за замовчуванням:", + "choices:": "доступні варіанти:", + "aliases:": "псевдоніми:", + "generated-value": "згенероване значення", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "Недостатньо аргументів: наразі %s, потрібно %s або більше", + "other": "Недостатньо аргументів: наразі %s, потрібно %s або 
більше" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "Забагато аргументів: наразі %s, максимум %s", + "other": "Too many non-option arguments: наразі %s, максимум of %s" + }, + "Missing argument value: %s": { + "one": "Відсутнє значення для аргументу: %s", + "other": "Відсутні значення для аргументу: %s" + }, + "Missing required argument: %s": { + "one": "Відсутній обов'язковий аргумент: %s", + "other": "Відсутні обов'язкові аргументи: %s" + }, + "Unknown argument: %s": { + "one": "Аргумент %s не підтримується", + "other": "Аргументи %s не підтримуються" + }, + "Invalid values:": "Некоректні значення:", + "Argument: %s, Given: %s, Choices: %s": "Аргумент: %s, Введено: %s, Доступні варіанти: %s", + "Argument check failed: %s": "Аргумент не пройшов перевірку: %s", + "Implications failed:": "Відсутні залежні аргументи:", + "Not enough arguments following: %s": "Не достатньо аргументів після: %s", + "Invalid JSON config file: %s": "Некоректний JSON-файл конфігурації: %s", + "Path to JSON config file": "Шлях до JSON-файлу конфігурації", + "Show help": "Показати довідку", + "Show version number": "Показати версію", + "Did you mean %s?": "Можливо, ви мали на увазі %s?", + "Arguments %s and %s are mutually exclusive" : "Аргументи %s та %s взаємовиключні", + "Positionals:": "Позиційні:", + "command": "команда", + "deprecated": "застарілий", + "deprecated: %s": "застарілий: %s" +} diff --git a/mybulma/node_modules/yargs/locales/uz.json b/mybulma/node_modules/yargs/locales/uz.json new file mode 100644 index 0000000..0d07168 --- /dev/null +++ b/mybulma/node_modules/yargs/locales/uz.json @@ -0,0 +1,52 @@ +{ + "Commands:": "Buyruqlar:", + "Options:": "Imkoniyatlar:", + "Examples:": "Misollar:", + "boolean": "boolean", + "count": "sanoq", + "string": "satr", + "number": "raqam", + "array": "massiv", + "required": "majburiy", + "default": "boshlang'ich", + "default:": "boshlang'ich:", + "choices:": "tanlovlar:", + "aliases:": "taxalluslar:", + "generated-value": "yaratilgan-qiymat", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "No-imkoniyat argumentlar yetarli emas: berilgan %s, minimum %s", + "other": "No-imkoniyat argumentlar yetarli emas: berilgan %s, minimum %s" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "No-imkoniyat argumentlar juda ko'p: berilgan %s, maksimum %s", + "other": "No-imkoniyat argumentlar juda ko'p: got %s, maksimum %s" + }, + "Missing argument value: %s": { + "one": "Argument qiymati berilmagan: %s", + "other": "Argument qiymatlari berilmagan: %s" + }, + "Missing required argument: %s": { + "one": "Majburiy argument berilmagan: %s", + "other": "Majburiy argumentlar berilmagan: %s" + }, + "Unknown argument: %s": { + "one": "Noma'lum argument berilmagan: %s", + "other": "Noma'lum argumentlar berilmagan: %s" + }, + "Invalid values:": "Nosoz qiymatlar:", + "Argument: %s, Given: %s, Choices: %s": "Argument: %s, Berilgan: %s, Tanlovlar: %s", + "Argument check failed: %s": "Muvaffaqiyatsiz argument tekshiruvi: %s", + "Implications failed:": "Bog'liq argumentlar berilmagan:", + "Not enough arguments following: %s": "Quyidagi argumentlar yetarli emas: %s", + "Invalid JSON config file: %s": "Nosoz JSON konfiguratsiya fayli: %s", + "Path to JSON config file": "JSON konfiguratsiya fayli joylashuvi", + "Show help": "Yordam ko'rsatish", + "Show version number": "Versiyani ko'rsatish", + "Did you mean %s?": "%s ni nazarda tutyapsizmi?", + "Arguments %s and %s are mutually exclusive" : "%s va %s argumentlari 
alohida", + "Positionals:": "Positsionallar:", + "command": "buyruq", + "deprecated": "eskirgan", + "deprecated: %s": "eskirgan: %s" + } + \ No newline at end of file diff --git a/mybulma/node_modules/yargs/locales/zh_CN.json b/mybulma/node_modules/yargs/locales/zh_CN.json new file mode 100644 index 0000000..257d26b --- /dev/null +++ b/mybulma/node_modules/yargs/locales/zh_CN.json @@ -0,0 +1,48 @@ +{ + "Commands:": "命令:", + "Options:": "选项:", + "Examples:": "示例:", + "boolean": "布尔", + "count": "计数", + "string": "字符串", + "number": "数字", + "array": "数组", + "required": "必需", + "default": "默认值", + "default:": "默认值:", + "choices:": "可选值:", + "generated-value": "生成的值", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "缺少 non-option 参数:传入了 %s 个, 至少需要 %s 个", + "other": "缺少 non-option 参数:传入了 %s 个, 至少需要 %s 个" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "non-option 参数过多:传入了 %s 个, 最大允许 %s 个", + "other": "non-option 参数过多:传入了 %s 个, 最大允许 %s 个" + }, + "Missing argument value: %s": { + "one": "没有给此选项指定值:%s", + "other": "没有给这些选项指定值:%s" + }, + "Missing required argument: %s": { + "one": "缺少必须的选项:%s", + "other": "缺少这些必须的选项:%s" + }, + "Unknown argument: %s": { + "one": "无法识别的选项:%s", + "other": "无法识别这些选项:%s" + }, + "Invalid values:": "无效的选项值:", + "Argument: %s, Given: %s, Choices: %s": "选项名称: %s, 传入的值: %s, 可选的值:%s", + "Argument check failed: %s": "选项值验证失败:%s", + "Implications failed:": "缺少依赖的选项:", + "Not enough arguments following: %s": "没有提供足够的值给此选项:%s", + "Invalid JSON config file: %s": "无效的 JSON 配置文件:%s", + "Path to JSON config file": "JSON 配置文件的路径", + "Show help": "显示帮助信息", + "Show version number": "显示版本号", + "Did you mean %s?": "是指 %s?", + "Arguments %s and %s are mutually exclusive" : "选项 %s 和 %s 是互斥的", + "Positionals:": "位置:", + "command": "命令" +} diff --git a/mybulma/node_modules/yargs/locales/zh_TW.json b/mybulma/node_modules/yargs/locales/zh_TW.json new file mode 100644 index 0000000..e38495d --- /dev/null +++ b/mybulma/node_modules/yargs/locales/zh_TW.json @@ -0,0 +1,51 @@ +{ + "Commands:": "命令:", + "Options:": "選項:", + "Examples:": "範例:", + "boolean": "布林", + "count": "次數", + "string": "字串", + "number": "數字", + "array": "陣列", + "required": "必填", + "default": "預設值", + "default:": "預設值:", + "choices:": "可選值:", + "aliases:": "別名:", + "generated-value": "生成的值", + "Not enough non-option arguments: got %s, need at least %s": { + "one": "non-option 引數不足:只傳入了 %s 個, 至少要 %s 個", + "other": "non-option 引數不足:只傳入了 %s 個, 至少要 %s 個" + }, + "Too many non-option arguments: got %s, maximum of %s": { + "one": "non-option 引數過多:傳入了 %s 個, 但最多 %s 個", + "other": "non-option 引數過多:傳入了 %s 個, 但最多 %s 個" + }, + "Missing argument value: %s": { + "one": "此引數無指定值:%s", + "other": "這些引數無指定值:%s" + }, + "Missing required argument: %s": { + "one": "缺少必須的引數:%s", + "other": "缺少這些必須的引數:%s" + }, + "Unknown argument: %s": { + "one": "未知的引數:%s", + "other": "未知的引數:%s" + }, + "Invalid values:": "無效的選項值:", + "Argument: %s, Given: %s, Choices: %s": "引數名稱: %s, 傳入的值: %s, 可選的值:%s", + "Argument check failed: %s": "引數驗證失敗:%s", + "Implications failed:": "缺少依賴引數:", + "Not enough arguments following: %s": "沒有提供足夠的值給此引數:%s", + "Invalid JSON config file: %s": "無效的 JSON 設置文件:%s", + "Path to JSON config file": "JSON 設置文件的路徑", + "Show help": "顯示說明", + "Show version number": "顯示版本", + "Did you mean %s?": "您是指 %s 嗎?", + "Arguments %s and %s are mutually exclusive" : "引數 %s 和 %s 互斥", + "Positionals:": "位置:", + "command": "命令", + "deprecated": "已淘汰", + "deprecated: %s": "已淘汰:%s" + } diff --git 
a/mybulma/node_modules/yargs/node_modules/yargs-parser/CHANGELOG.md b/mybulma/node_modules/yargs/node_modules/yargs-parser/CHANGELOG.md new file mode 100644 index 0000000..584eb86 --- /dev/null +++ b/mybulma/node_modules/yargs/node_modules/yargs-parser/CHANGELOG.md @@ -0,0 +1,308 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [21.1.1](https://github.com/yargs/yargs-parser/compare/yargs-parser-v21.1.0...yargs-parser-v21.1.1) (2022-08-04) + + +### Bug Fixes + +* **typescript:** ignore .cts files during publish ([#454](https://github.com/yargs/yargs-parser/issues/454)) ([d69f9c3](https://github.com/yargs/yargs-parser/commit/d69f9c3a91c3ad2f9494d0a94e29a8b76c41b81b)), closes [#452](https://github.com/yargs/yargs-parser/issues/452) + +## [21.1.0](https://github.com/yargs/yargs-parser/compare/yargs-parser-v21.0.1...yargs-parser-v21.1.0) (2022-08-03) + + +### Features + +* allow the browser build to be imported ([#443](https://github.com/yargs/yargs-parser/issues/443)) ([a89259f](https://github.com/yargs/yargs-parser/commit/a89259ff41d6f5312b3ce8a30bef343a993f395a)) + + +### Bug Fixes + +* **halt-at-non-option:** prevent known args from being parsed when "unknown-options-as-args" is enabled ([#438](https://github.com/yargs/yargs-parser/issues/438)) ([c474bc1](https://github.com/yargs/yargs-parser/commit/c474bc10c3aa0ae864b95e5722730114ef15f573)) +* node version check now uses process.versions.node ([#450](https://github.com/yargs/yargs-parser/issues/450)) ([d07bcdb](https://github.com/yargs/yargs-parser/commit/d07bcdbe43075f7201fbe8a08e491217247fe1f1)) +* parse options ending with 3+ hyphens ([#434](https://github.com/yargs/yargs-parser/issues/434)) ([4f1060b](https://github.com/yargs/yargs-parser/commit/4f1060b50759fadbac3315c5117b0c3d65b0a7d8)) + +### [21.0.1](https://github.com/yargs/yargs-parser/compare/yargs-parser-v21.0.0...yargs-parser-v21.0.1) (2022-02-27) + + +### Bug Fixes + +* return deno env object ([#432](https://github.com/yargs/yargs-parser/issues/432)) ([b00eb87](https://github.com/yargs/yargs-parser/commit/b00eb87b4860a890dd2dab0d6058241bbfd2b3ec)) + +## [21.0.0](https://www.github.com/yargs/yargs-parser/compare/yargs-parser-v20.2.9...yargs-parser-v21.0.0) (2021-11-15) + + +### ⚠ BREAKING CHANGES + +* drops support for 10 (#421) + +### Bug Fixes + +* esm json import ([#416](https://www.github.com/yargs/yargs-parser/issues/416)) ([90f970a](https://www.github.com/yargs/yargs-parser/commit/90f970a6482dd4f5b5eb18d38596dd6f02d73edf)) +* parser should preserve inner quotes ([#407](https://www.github.com/yargs/yargs-parser/issues/407)) ([ae11f49](https://www.github.com/yargs/yargs-parser/commit/ae11f496a8318ea8885aa25015d429b33713c314)) + + +### Code Refactoring + +* drops support for 10 ([#421](https://www.github.com/yargs/yargs-parser/issues/421)) ([3aaf878](https://www.github.com/yargs/yargs-parser/commit/3aaf8784f5c7f2aec6108c1c6a55537fa7e3b5c1)) + +### [20.2.9](https://www.github.com/yargs/yargs-parser/compare/yargs-parser-v20.2.8...yargs-parser-v20.2.9) (2021-06-20) + + +### Bug Fixes + +* **build:** fixed automated release pipeline ([1fe9135](https://www.github.com/yargs/yargs-parser/commit/1fe9135884790a083615419b2861683e2597dac3)) + +### [20.2.8](https://www.github.com/yargs/yargs-parser/compare/yargs-parser-v20.2.7...yargs-parser-v20.2.8) (2021-06-20) + + +### Bug Fixes + +* **locale:** Turkish camelize and decamelize issues 
with toLocaleLowerCase/toLocaleUpperCase ([2617303](https://www.github.com/yargs/yargs-parser/commit/261730383e02448562f737b94bbd1f164aed5143)) +* **perf:** address slow parse when using unknown-options-as-args ([#394](https://www.github.com/yargs/yargs-parser/issues/394)) ([441f059](https://www.github.com/yargs/yargs-parser/commit/441f059d585d446551068ad213db79ac91daf83a)) +* **string-utils:** detect [0,1] ranged values as numbers ([#388](https://www.github.com/yargs/yargs-parser/issues/388)) ([efcc32c](https://www.github.com/yargs/yargs-parser/commit/efcc32c2d6b09aba31abfa2db9bd947befe5586b)) + +### [20.2.7](https://www.github.com/yargs/yargs-parser/compare/v20.2.6...v20.2.7) (2021-03-10) + + +### Bug Fixes + +* **deno:** force release for Deno ([6687c97](https://www.github.com/yargs/yargs-parser/commit/6687c972d0f3ca7865a97908dde3080b05f8b026)) + +### [20.2.6](https://www.github.com/yargs/yargs-parser/compare/v20.2.5...v20.2.6) (2021-02-22) + + +### Bug Fixes + +* **populate--:** -- should always be array ([#354](https://www.github.com/yargs/yargs-parser/issues/354)) ([585ae8f](https://www.github.com/yargs/yargs-parser/commit/585ae8ffad74cc02974f92d788e750137fd65146)) + +### [20.2.5](https://www.github.com/yargs/yargs-parser/compare/v20.2.4...v20.2.5) (2021-02-13) + + +### Bug Fixes + +* do not lowercase camel cased string ([#348](https://www.github.com/yargs/yargs-parser/issues/348)) ([5f4da1f](https://www.github.com/yargs/yargs-parser/commit/5f4da1f17d9d50542d2aaa206c9806ce3e320335)) + +### [20.2.4](https://www.github.com/yargs/yargs-parser/compare/v20.2.3...v20.2.4) (2020-11-09) + + +### Bug Fixes + +* **deno:** address import issues in Deno ([#339](https://www.github.com/yargs/yargs-parser/issues/339)) ([3b54e5e](https://www.github.com/yargs/yargs-parser/commit/3b54e5eef6e9a7b7c6eec7c12bab3ba3b8ba8306)) + +### [20.2.3](https://www.github.com/yargs/yargs-parser/compare/v20.2.2...v20.2.3) (2020-10-16) + + +### Bug Fixes + +* **exports:** node 13.0 and 13.1 require the dotted object form _with_ a string fallback ([#336](https://www.github.com/yargs/yargs-parser/issues/336)) ([3ae7242](https://www.github.com/yargs/yargs-parser/commit/3ae7242040ff876d28dabded60ac226e00150c88)) + +### [20.2.2](https://www.github.com/yargs/yargs-parser/compare/v20.2.1...v20.2.2) (2020-10-14) + + +### Bug Fixes + +* **exports:** node 13.0-13.6 require a string fallback ([#333](https://www.github.com/yargs/yargs-parser/issues/333)) ([291aeda](https://www.github.com/yargs/yargs-parser/commit/291aeda06b685b7a015d83bdf2558e180b37388d)) + +### [20.2.1](https://www.github.com/yargs/yargs-parser/compare/v20.2.0...v20.2.1) (2020-10-01) + + +### Bug Fixes + +* **deno:** update types for deno ^1.4.0 ([#330](https://www.github.com/yargs/yargs-parser/issues/330)) ([0ab92e5](https://www.github.com/yargs/yargs-parser/commit/0ab92e50b090f11196334c048c9c92cecaddaf56)) + +## [20.2.0](https://www.github.com/yargs/yargs-parser/compare/v20.1.0...v20.2.0) (2020-09-21) + + +### Features + +* **string-utils:** export looksLikeNumber helper ([#324](https://www.github.com/yargs/yargs-parser/issues/324)) ([c8580a2](https://www.github.com/yargs/yargs-parser/commit/c8580a2327b55f6342acecb6e72b62963d506750)) + + +### Bug Fixes + +* **unknown-options-as-args:** convert positionals that look like numbers ([#326](https://www.github.com/yargs/yargs-parser/issues/326)) ([f85ebb4](https://www.github.com/yargs/yargs-parser/commit/f85ebb4face9d4b0f56147659404cbe0002f3dad)) + +## 
[20.1.0](https://www.github.com/yargs/yargs-parser/compare/v20.0.0...v20.1.0) (2020-09-20) + + +### Features + +* adds parse-positional-numbers configuration ([#321](https://www.github.com/yargs/yargs-parser/issues/321)) ([9cec00a](https://www.github.com/yargs/yargs-parser/commit/9cec00a622251292ffb7dce6f78f5353afaa0d4c)) + + +### Bug Fixes + +* **build:** update release-please; make labels kick off builds ([#323](https://www.github.com/yargs/yargs-parser/issues/323)) ([09f448b](https://www.github.com/yargs/yargs-parser/commit/09f448b4cd66e25d2872544718df46dab8af062a)) + +## [20.0.0](https://www.github.com/yargs/yargs-parser/compare/v19.0.4...v20.0.0) (2020-09-09) + + +### ⚠ BREAKING CHANGES + +* do not ship type definitions (#318) + +### Bug Fixes + +* only strip camel case if hyphenated ([#316](https://www.github.com/yargs/yargs-parser/issues/316)) ([95a9e78](https://www.github.com/yargs/yargs-parser/commit/95a9e785127b9bbf2d1db1f1f808ca1fb100e82a)), closes [#315](https://www.github.com/yargs/yargs-parser/issues/315) + + +### Code Refactoring + +* do not ship type definitions ([#318](https://www.github.com/yargs/yargs-parser/issues/318)) ([8fbd56f](https://www.github.com/yargs/yargs-parser/commit/8fbd56f1d0b6c44c30fca62708812151ca0ce330)) + +### [19.0.4](https://www.github.com/yargs/yargs-parser/compare/v19.0.3...v19.0.4) (2020-08-27) + + +### Bug Fixes + +* **build:** fixing publication ([#310](https://www.github.com/yargs/yargs-parser/issues/310)) ([5d3c6c2](https://www.github.com/yargs/yargs-parser/commit/5d3c6c29a9126248ba601920d9cf87c78e161ff5)) + +### [19.0.3](https://www.github.com/yargs/yargs-parser/compare/v19.0.2...v19.0.3) (2020-08-27) + + +### Bug Fixes + +* **build:** switch to action for publish ([#308](https://www.github.com/yargs/yargs-parser/issues/308)) ([5c2f305](https://www.github.com/yargs/yargs-parser/commit/5c2f30585342bcd8aaf926407c863099d256d174)) + +### [19.0.2](https://www.github.com/yargs/yargs-parser/compare/v19.0.1...v19.0.2) (2020-08-27) + + +### Bug Fixes + +* **types:** envPrefix should be optional ([#305](https://www.github.com/yargs/yargs-parser/issues/305)) ([ae3f180](https://www.github.com/yargs/yargs-parser/commit/ae3f180e14df2de2fd962145f4518f9aa0e76523)) + +### [19.0.1](https://www.github.com/yargs/yargs-parser/compare/v19.0.0...v19.0.1) (2020-08-09) + + +### Bug Fixes + +* **build:** push tag created for deno ([2186a14](https://www.github.com/yargs/yargs-parser/commit/2186a14989749887d56189867602e39e6679f8b0)) + +## [19.0.0](https://www.github.com/yargs/yargs-parser/compare/v18.1.3...v19.0.0) (2020-08-09) + + +### ⚠ BREAKING CHANGES + +* adds support for ESM and Deno (#295) +* **ts:** projects using `@types/yargs-parser` may see variations in type definitions. +* drops Node 6. 
begin following Node.js LTS schedule (#278) + +### Features + +* adds support for ESM and Deno ([#295](https://www.github.com/yargs/yargs-parser/issues/295)) ([195bc4a](https://www.github.com/yargs/yargs-parser/commit/195bc4a7f20c2a8f8e33fbb6ba96ef6e9a0120a1)) +* expose camelCase and decamelize helpers ([#296](https://www.github.com/yargs/yargs-parser/issues/296)) ([39154ce](https://www.github.com/yargs/yargs-parser/commit/39154ceb5bdcf76b5f59a9219b34cedb79b67f26)) +* **deps:** update to latest camelcase/decamelize ([#281](https://www.github.com/yargs/yargs-parser/issues/281)) ([8931ab0](https://www.github.com/yargs/yargs-parser/commit/8931ab08f686cc55286f33a95a83537da2be5516)) + + +### Bug Fixes + +* boolean numeric short option ([#294](https://www.github.com/yargs/yargs-parser/issues/294)) ([f600082](https://www.github.com/yargs/yargs-parser/commit/f600082c959e092076caf420bbbc9d7a231e2418)) +* raise permission error for Deno if config load fails ([#298](https://www.github.com/yargs/yargs-parser/issues/298)) ([1174e2b](https://www.github.com/yargs/yargs-parser/commit/1174e2b3f0c845a1cd64e14ffc3703e730567a84)) +* **deps:** update dependency decamelize to v3 ([#274](https://www.github.com/yargs/yargs-parser/issues/274)) ([4d98698](https://www.github.com/yargs/yargs-parser/commit/4d98698bc6767e84ec54a0842908191739be73b7)) +* **types:** switch back to using Partial types ([#293](https://www.github.com/yargs/yargs-parser/issues/293)) ([bdc80ba](https://www.github.com/yargs/yargs-parser/commit/bdc80ba59fa13bc3025ce0a85e8bad9f9da24ea7)) + + +### Build System + +* drops Node 6. begin following Node.js LTS schedule ([#278](https://www.github.com/yargs/yargs-parser/issues/278)) ([9014ed7](https://www.github.com/yargs/yargs-parser/commit/9014ed722a32768b96b829e65a31705db5c1458a)) + + +### Code Refactoring + +* **ts:** move index.js to TypeScript ([#292](https://www.github.com/yargs/yargs-parser/issues/292)) ([f78d2b9](https://www.github.com/yargs/yargs-parser/commit/f78d2b97567ac4828624406e420b4047c710b789)) + +### [18.1.3](https://www.github.com/yargs/yargs-parser/compare/v18.1.2...v18.1.3) (2020-04-16) + + +### Bug Fixes + +* **setArg:** options using camel-case and dot-notation populated twice ([#268](https://www.github.com/yargs/yargs-parser/issues/268)) ([f7e15b9](https://www.github.com/yargs/yargs-parser/commit/f7e15b9800900b9856acac1a830a5f35847be73e)) + +### [18.1.2](https://www.github.com/yargs/yargs-parser/compare/v18.1.1...v18.1.2) (2020-03-26) + + +### Bug Fixes + +* **array, nargs:** support -o=--value and --option=--value format ([#262](https://www.github.com/yargs/yargs-parser/issues/262)) ([41d3f81](https://www.github.com/yargs/yargs-parser/commit/41d3f8139e116706b28de9b0de3433feb08d2f13)) + +### [18.1.1](https://www.github.com/yargs/yargs-parser/compare/v18.1.0...v18.1.1) (2020-03-16) + + +### Bug Fixes + +* \_\_proto\_\_ will now be replaced with \_\_\_proto\_\_\_ in parse ([#258](https://www.github.com/yargs/yargs-parser/issues/258)), patching a potential +prototype pollution vulnerability. 
This was reported by the Snyk Security Research Team.([63810ca](https://www.github.com/yargs/yargs-parser/commit/63810ca1ae1a24b08293a4d971e70e058c7a41e2)) + +## [18.1.0](https://www.github.com/yargs/yargs-parser/compare/v18.0.0...v18.1.0) (2020-03-07) + + +### Features + +* introduce single-digit boolean aliases ([#255](https://www.github.com/yargs/yargs-parser/issues/255)) ([9c60265](https://www.github.com/yargs/yargs-parser/commit/9c60265fd7a03cb98e6df3e32c8c5e7508d9f56f)) + +## [18.0.0](https://www.github.com/yargs/yargs-parser/compare/v17.1.0...v18.0.0) (2020-03-02) + + +### ⚠ BREAKING CHANGES + +* the narg count is now enforced when parsing arrays. + +### Features + +* NaN can now be provided as a value for nargs, indicating "at least" one value is expected for array ([#251](https://www.github.com/yargs/yargs-parser/issues/251)) ([9db4be8](https://www.github.com/yargs/yargs-parser/commit/9db4be81417a2c7097128db34d86fe70ef4af70c)) + +## [17.1.0](https://www.github.com/yargs/yargs-parser/compare/v17.0.1...v17.1.0) (2020-03-01) + + +### Features + +* introduce greedy-arrays config, for specifying whether arrays consume multiple positionals ([#249](https://www.github.com/yargs/yargs-parser/issues/249)) ([60e880a](https://www.github.com/yargs/yargs-parser/commit/60e880a837046314d89fa4725f923837fd33a9eb)) + +### [17.0.1](https://www.github.com/yargs/yargs-parser/compare/v17.0.0...v17.0.1) (2020-02-29) + + +### Bug Fixes + +* normalized keys were not enumerable ([#247](https://www.github.com/yargs/yargs-parser/issues/247)) ([57119f9](https://www.github.com/yargs/yargs-parser/commit/57119f9f17cf27499bd95e61c2f72d18314f11ba)) + +## [17.0.0](https://www.github.com/yargs/yargs-parser/compare/v16.1.0...v17.0.0) (2020-02-10) + + +### ⚠ BREAKING CHANGES + +* this reverts parsing behavior of booleans to that of yargs@14 +* objects used during parsing are now created with a null +prototype. There may be some scenarios where this change in behavior +leaks externally. 
+ +### Features + +* boolean arguments will not be collected into an implicit array ([#236](https://www.github.com/yargs/yargs-parser/issues/236)) ([34c4e19](https://www.github.com/yargs/yargs-parser/commit/34c4e19bae4e7af63e3cb6fa654a97ed476e5eb5)) +* introduce nargs-eats-options config option ([#246](https://www.github.com/yargs/yargs-parser/issues/246)) ([d50822a](https://www.github.com/yargs/yargs-parser/commit/d50822ac10e1b05f2e9643671ca131ac251b6732)) + + +### Bug Fixes + +* address bugs with "uknown-options-as-args" ([bc023e3](https://www.github.com/yargs/yargs-parser/commit/bc023e3b13e20a118353f9507d1c999bf388a346)) +* array should take precedence over nargs, but enforce nargs ([#243](https://www.github.com/yargs/yargs-parser/issues/243)) ([4cbc188](https://www.github.com/yargs/yargs-parser/commit/4cbc188b7abb2249529a19c090338debdad2fe6c)) +* support keys that collide with object prototypes ([#234](https://www.github.com/yargs/yargs-parser/issues/234)) ([1587b6d](https://www.github.com/yargs/yargs-parser/commit/1587b6d91db853a9109f1be6b209077993fee4de)) +* unknown options terminated with digits now handled by unknown-options-as-args ([#238](https://www.github.com/yargs/yargs-parser/issues/238)) ([d36cdfa](https://www.github.com/yargs/yargs-parser/commit/d36cdfa854254d7c7e0fe1d583818332ac46c2a5)) + +## [16.1.0](https://www.github.com/yargs/yargs-parser/compare/v16.0.0...v16.1.0) (2019-11-01) + + +### ⚠ BREAKING CHANGES + +* populate error if incompatible narg/count or array/count options are used (#191) + +### Features + +* options that have had their default value used are now tracked ([#211](https://www.github.com/yargs/yargs-parser/issues/211)) ([a525234](https://www.github.com/yargs/yargs-parser/commit/a525234558c847deedd73f8792e0a3b77b26e2c0)) +* populate error if incompatible narg/count or array/count options are used ([#191](https://www.github.com/yargs/yargs-parser/issues/191)) ([84a401f](https://www.github.com/yargs/yargs-parser/commit/84a401f0fa3095e0a19661670d1570d0c3b9d3c9)) + + +### Reverts + +* revert 16.0.0 CHANGELOG entry ([920320a](https://www.github.com/yargs/yargs-parser/commit/920320ad9861bbfd58eda39221ae211540fc1daf)) diff --git a/mybulma/node_modules/yargs/node_modules/yargs-parser/LICENSE.txt b/mybulma/node_modules/yargs/node_modules/yargs-parser/LICENSE.txt new file mode 100644 index 0000000..836440b --- /dev/null +++ b/mybulma/node_modules/yargs/node_modules/yargs-parser/LICENSE.txt @@ -0,0 +1,14 @@ +Copyright (c) 2016, Contributors + +Permission to use, copy, modify, and/or distribute this software +for any purpose with or without fee is hereby granted, provided +that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE +LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/mybulma/node_modules/yargs/node_modules/yargs-parser/README.md b/mybulma/node_modules/yargs/node_modules/yargs-parser/README.md new file mode 100644 index 0000000..2614840 --- /dev/null +++ b/mybulma/node_modules/yargs/node_modules/yargs-parser/README.md @@ -0,0 +1,518 @@ +# yargs-parser + +![ci](https://github.com/yargs/yargs-parser/workflows/ci/badge.svg) +[![NPM version](https://img.shields.io/npm/v/yargs-parser.svg)](https://www.npmjs.com/package/yargs-parser) +[![Conventional Commits](https://img.shields.io/badge/Conventional%20Commits-1.0.0-yellow.svg)](https://conventionalcommits.org) +![nycrc config on GitHub](https://img.shields.io/nycrc/yargs/yargs-parser) + +The mighty option parser used by [yargs](https://github.com/yargs/yargs). + +visit the [yargs website](http://yargs.js.org/) for more examples, and thorough usage instructions. + + + +## Example + +```sh +npm i yargs-parser --save +``` + +```js +const argv = require('yargs-parser')(process.argv.slice(2)) +console.log(argv) +``` + +```console +$ node example.js --foo=33 --bar hello +{ _: [], foo: 33, bar: 'hello' } +``` + +_or parse a string!_ + +```js +const argv = require('yargs-parser')('--foo=99 --bar=33') +console.log(argv) +``` + +```console +{ _: [], foo: 99, bar: 33 } +``` + +Convert an array of mixed types before passing to `yargs-parser`: + +```js +const parse = require('yargs-parser') +parse(['-f', 11, '--zoom', 55].join(' ')) // <-- array to string +parse(['-f', 11, '--zoom', 55].map(String)) // <-- array of strings +``` + +## Deno Example + +As of `v19` `yargs-parser` supports [Deno](https://github.com/denoland/deno): + +```typescript +import parser from "https://deno.land/x/yargs_parser/deno.ts"; + +const argv = parser('--foo=99 --bar=9987930', { + string: ['bar'] +}) +console.log(argv) +``` + +## ESM Example + +As of `v19` `yargs-parser` supports ESM (_both in Node.js and in the browser_): + +**Node.js:** + +```js +import parser from 'yargs-parser' + +const argv = parser('--foo=99 --bar=9987930', { + string: ['bar'] +}) +console.log(argv) +``` + +**Browsers:** + +```html + + + + +``` + +## API + +### parser(args, opts={}) + +Parses command line arguments returning a simple mapping of keys and values. + +**expects:** + +* `args`: a string or array of strings representing the options to parse. +* `opts`: provide a set of hints indicating how `args` should be parsed: + * `opts.alias`: an object representing the set of aliases for a key: `{alias: {foo: ['f']}}`. + * `opts.array`: indicate that keys should be parsed as an array: `{array: ['foo', 'bar']}`.
+ Indicate that keys should be parsed as an array and coerced to booleans / numbers:
+ `{array: [{ key: 'foo', boolean: true }, {key: 'bar', number: true}]}`. + * `opts.boolean`: arguments should be parsed as booleans: `{boolean: ['x', 'y']}`. + * `opts.coerce`: provide a custom synchronous function that returns a coerced value from the argument provided + (or throws an error). For arrays the function is called only once for the entire array:
+ `{coerce: {foo: function (arg) {return modifiedArg}}}`. + * `opts.config`: indicate a key that represents a path to a configuration file (this file will be loaded and parsed). + * `opts.configObjects`: configuration objects to parse, their properties will be set as arguments:
+ `{configObjects: [{'x': 5, 'y': 33}, {'z': 44}]}`. + * `opts.configuration`: provide configuration options to the yargs-parser (see: [configuration](#configuration)). + * `opts.count`: indicate a key that should be used as a counter, e.g., `-vvv` = `{v: 3}`. + * `opts.default`: provide default values for keys: `{default: {x: 33, y: 'hello world!'}}`. + * `opts.envPrefix`: environment variables (`process.env`) with the prefix provided should be parsed. + * `opts.narg`: specify that a key requires `n` arguments: `{narg: {x: 2}}`. + * `opts.normalize`: `path.normalize()` will be applied to values set to this key. + * `opts.number`: keys should be treated as numbers. + * `opts.string`: keys should be treated as strings (even if they resemble a number `-x 33`). + +**returns:** + +* `obj`: an object representing the parsed value of `args` + * `key/value`: key value pairs for each argument and their aliases. + * `_`: an array representing the positional arguments. + * [optional] `--`: an array with arguments after the end-of-options flag `--`. + +### require('yargs-parser').detailed(args, opts={}) + +Parses a command line string, returning detailed information required by the +yargs engine. + +**expects:** + +* `args`: a string or array of strings representing options to parse. +* `opts`: provide a set of hints indicating how `args`, inputs are identical to `require('yargs-parser')(args, opts={})`. + +**returns:** + +* `argv`: an object representing the parsed value of `args` + * `key/value`: key value pairs for each argument and their aliases. + * `_`: an array representing the positional arguments. + * [optional] `--`: an array with arguments after the end-of-options flag `--`. +* `error`: populated with an error object if an exception occurred during parsing. +* `aliases`: the inferred list of aliases built by combining lists in `opts.alias`. +* `newAliases`: any new aliases added via camel-case expansion: + * `boolean`: `{ fooBar: true }` +* `defaulted`: any new argument created by `opts.default`, no aliases included. + * `boolean`: `{ foo: true }` +* `configuration`: given by default settings and `opts.configuration`. + + + +### Configuration + +The yargs-parser applies several automated transformations on the keys provided +in `args`. These features can be turned on and off using the `configuration` field +of `opts`. + +```js +var parsed = parser(['--no-dice'], { + configuration: { + 'boolean-negation': false + } +}) +``` + +### short option groups + +* default: `true`. +* key: `short-option-groups`. + +Should a group of short-options be treated as boolean flags? + +```console +$ node example.js -abc +{ _: [], a: true, b: true, c: true } +``` + +_if disabled:_ + +```console +$ node example.js -abc +{ _: [], abc: true } +``` + +### camel-case expansion + +* default: `true`. +* key: `camel-case-expansion`. + +Should hyphenated arguments be expanded into camel-case aliases? + +```console +$ node example.js --foo-bar +{ _: [], 'foo-bar': true, fooBar: true } +``` + +_if disabled:_ + +```console +$ node example.js --foo-bar +{ _: [], 'foo-bar': true } +``` + +### dot-notation + +* default: `true` +* key: `dot-notation` + +Should keys that contain `.` be treated as objects? + +```console +$ node example.js --foo.bar +{ _: [], foo: { bar: true } } +``` + +_if disabled:_ + +```console +$ node example.js --foo.bar +{ _: [], "foo.bar": true } +``` + +### parse numbers + +* default: `true` +* key: `parse-numbers` + +Should keys that look like numbers be treated as such? 
+ +```console +$ node example.js --foo=99.3 +{ _: [], foo: 99.3 } +``` + +_if disabled:_ + +```console +$ node example.js --foo=99.3 +{ _: [], foo: "99.3" } +``` + +### parse positional numbers + +* default: `true` +* key: `parse-positional-numbers` + +Should positional keys that look like numbers be treated as such. + +```console +$ node example.js 99.3 +{ _: [99.3] } +``` + +_if disabled:_ + +```console +$ node example.js 99.3 +{ _: ['99.3'] } +``` + +### boolean negation + +* default: `true` +* key: `boolean-negation` + +Should variables prefixed with `--no` be treated as negations? + +```console +$ node example.js --no-foo +{ _: [], foo: false } +``` + +_if disabled:_ + +```console +$ node example.js --no-foo +{ _: [], "no-foo": true } +``` + +### combine arrays + +* default: `false` +* key: `combine-arrays` + +Should arrays be combined when provided by both command line arguments and +a configuration file. + +### duplicate arguments array + +* default: `true` +* key: `duplicate-arguments-array` + +Should arguments be coerced into an array when duplicated: + +```console +$ node example.js -x 1 -x 2 +{ _: [], x: [1, 2] } +``` + +_if disabled:_ + +```console +$ node example.js -x 1 -x 2 +{ _: [], x: 2 } +``` + +### flatten duplicate arrays + +* default: `true` +* key: `flatten-duplicate-arrays` + +Should array arguments be coerced into a single array when duplicated: + +```console +$ node example.js -x 1 2 -x 3 4 +{ _: [], x: [1, 2, 3, 4] } +``` + +_if disabled:_ + +```console +$ node example.js -x 1 2 -x 3 4 +{ _: [], x: [[1, 2], [3, 4]] } +``` + +### greedy arrays + +* default: `true` +* key: `greedy-arrays` + +Should arrays consume more than one positional argument following their flag. + +```console +$ node example --arr 1 2 +{ _: [], arr: [1, 2] } +``` + +_if disabled:_ + +```console +$ node example --arr 1 2 +{ _: [2], arr: [1] } +``` + +**Note: in `v18.0.0` we are considering defaulting greedy arrays to `false`.** + +### nargs eats options + +* default: `false` +* key: `nargs-eats-options` + +Should nargs consume dash options as well as positional arguments. + +### negation prefix + +* default: `no-` +* key: `negation-prefix` + +The prefix to use for negated boolean variables. + +```console +$ node example.js --no-foo +{ _: [], foo: false } +``` + +_if set to `quux`:_ + +```console +$ node example.js --quuxfoo +{ _: [], foo: false } +``` + +### populate -- + +* default: `false`. +* key: `populate--` + +Should unparsed flags be stored in `--` or `_`. + +_If disabled:_ + +```console +$ node example.js a -b -- x y +{ _: [ 'a', 'x', 'y' ], b: true } +``` + +_If enabled:_ + +```console +$ node example.js a -b -- x y +{ _: [ 'a' ], '--': [ 'x', 'y' ], b: true } +``` + +### set placeholder key + +* default: `false`. +* key: `set-placeholder-key`. + +Should a placeholder be added for keys not set via the corresponding CLI argument? + +_If disabled:_ + +```console +$ node example.js -a 1 -c 2 +{ _: [], a: 1, c: 2 } +``` + +_If enabled:_ + +```console +$ node example.js -a 1 -c 2 +{ _: [], a: 1, b: undefined, c: 2 } +``` + +### halt at non-option + +* default: `false`. +* key: `halt-at-non-option`. + +Should parsing stop at the first positional argument? This is similar to how e.g. `ssh` parses its command line. 
+ +_If disabled:_ + +```console +$ node example.js -a run b -x y +{ _: [ 'b' ], a: 'run', x: 'y' } +``` + +_If enabled:_ + +```console +$ node example.js -a run b -x y +{ _: [ 'b', '-x', 'y' ], a: 'run' } +``` + +### strip aliased + +* default: `false` +* key: `strip-aliased` + +Should aliases be removed before returning results? + +_If disabled:_ + +```console +$ node example.js --test-field 1 +{ _: [], 'test-field': 1, testField: 1, 'test-alias': 1, testAlias: 1 } +``` + +_If enabled:_ + +```console +$ node example.js --test-field 1 +{ _: [], 'test-field': 1, testField: 1 } +``` + +### strip dashed + +* default: `false` +* key: `strip-dashed` + +Should dashed keys be removed before returning results? This option has no effect if +`camel-case-expansion` is disabled. + +_If disabled:_ + +```console +$ node example.js --test-field 1 +{ _: [], 'test-field': 1, testField: 1 } +``` + +_If enabled:_ + +```console +$ node example.js --test-field 1 +{ _: [], testField: 1 } +``` + +### unknown options as args + +* default: `false` +* key: `unknown-options-as-args` + +Should unknown options be treated like regular arguments? An unknown option is one that is not +configured in `opts`. + +_If disabled_ + +```console +$ node example.js --unknown-option --known-option 2 --string-option --unknown-option2 +{ _: [], unknownOption: true, knownOption: 2, stringOption: '', unknownOption2: true } +``` + +_If enabled_ + +```console +$ node example.js --unknown-option --known-option 2 --string-option --unknown-option2 +{ _: ['--unknown-option'], knownOption: 2, stringOption: '--unknown-option2' } +``` + +## Supported Node.js Versions + +Libraries in this ecosystem make a best effort to track +[Node.js' release schedule](https://nodejs.org/en/about/releases/). Here's [a +post on why we think this is important](https://medium.com/the-node-js-collection/maintainers-should-consider-following-node-js-release-schedule-ab08ed4de71a). + +## Special Thanks + +The yargs project evolves from optimist and minimist. It owes its +existence to a lot of James Halliday's hard work. Thanks [substack](https://github.com/substack) **beep** **boop** \o/ + +## License + +ISC diff --git a/mybulma/node_modules/yargs/node_modules/yargs-parser/browser.js b/mybulma/node_modules/yargs/node_modules/yargs-parser/browser.js new file mode 100644 index 0000000..241202c --- /dev/null +++ b/mybulma/node_modules/yargs/node_modules/yargs-parser/browser.js @@ -0,0 +1,29 @@ +// Main entrypoint for ESM web browser environments. Avoids using Node.js +// specific libraries, such as "path". +// +// TODO: figure out reasonable web equivalents for "resolve", "normalize", etc. 
+import { camelCase, decamelize, looksLikeNumber } from './build/lib/string-utils.js' +import { YargsParser } from './build/lib/yargs-parser.js' +const parser = new YargsParser({ + cwd: () => { return '' }, + format: (str, arg) => { return str.replace('%s', arg) }, + normalize: (str) => { return str }, + resolve: (str) => { return str }, + require: () => { + throw Error('loading config from files not currently supported in browser') + }, + env: () => {} +}) + +const yargsParser = function Parser (args, opts) { + const result = parser.parse(args.slice(), opts) + return result.argv +} +yargsParser.detailed = function (args, opts) { + return parser.parse(args.slice(), opts) +} +yargsParser.camelCase = camelCase +yargsParser.decamelize = decamelize +yargsParser.looksLikeNumber = looksLikeNumber + +export default yargsParser diff --git a/mybulma/node_modules/yargs/node_modules/yargs-parser/build/index.cjs b/mybulma/node_modules/yargs/node_modules/yargs-parser/build/index.cjs new file mode 100644 index 0000000..cf6f50f --- /dev/null +++ b/mybulma/node_modules/yargs/node_modules/yargs-parser/build/index.cjs @@ -0,0 +1,1050 @@ +'use strict'; + +var util = require('util'); +var path = require('path'); +var fs = require('fs'); + +function camelCase(str) { + const isCamelCase = str !== str.toLowerCase() && str !== str.toUpperCase(); + if (!isCamelCase) { + str = str.toLowerCase(); + } + if (str.indexOf('-') === -1 && str.indexOf('_') === -1) { + return str; + } + else { + let camelcase = ''; + let nextChrUpper = false; + const leadingHyphens = str.match(/^-+/); + for (let i = leadingHyphens ? leadingHyphens[0].length : 0; i < str.length; i++) { + let chr = str.charAt(i); + if (nextChrUpper) { + nextChrUpper = false; + chr = chr.toUpperCase(); + } + if (i !== 0 && (chr === '-' || chr === '_')) { + nextChrUpper = true; + } + else if (chr !== '-' && chr !== '_') { + camelcase += chr; + } + } + return camelcase; + } +} +function decamelize(str, joinString) { + const lowercase = str.toLowerCase(); + joinString = joinString || '-'; + let notCamelcase = ''; + for (let i = 0; i < str.length; i++) { + const chrLower = lowercase.charAt(i); + const chrString = str.charAt(i); + if (chrLower !== chrString && i > 0) { + notCamelcase += `${joinString}${lowercase.charAt(i)}`; + } + else { + notCamelcase += chrString; + } + } + return notCamelcase; +} +function looksLikeNumber(x) { + if (x === null || x === undefined) + return false; + if (typeof x === 'number') + return true; + if (/^0x[0-9a-f]+$/i.test(x)) + return true; + if (/^0[^.]/.test(x)) + return false; + return /^[-]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x); +} + +function tokenizeArgString(argString) { + if (Array.isArray(argString)) { + return argString.map(e => typeof e !== 'string' ? 
e + '' : e); + } + argString = argString.trim(); + let i = 0; + let prevC = null; + let c = null; + let opening = null; + const args = []; + for (let ii = 0; ii < argString.length; ii++) { + prevC = c; + c = argString.charAt(ii); + if (c === ' ' && !opening) { + if (!(prevC === ' ')) { + i++; + } + continue; + } + if (c === opening) { + opening = null; + } + else if ((c === "'" || c === '"') && !opening) { + opening = c; + } + if (!args[i]) + args[i] = ''; + args[i] += c; + } + return args; +} + +var DefaultValuesForTypeKey; +(function (DefaultValuesForTypeKey) { + DefaultValuesForTypeKey["BOOLEAN"] = "boolean"; + DefaultValuesForTypeKey["STRING"] = "string"; + DefaultValuesForTypeKey["NUMBER"] = "number"; + DefaultValuesForTypeKey["ARRAY"] = "array"; +})(DefaultValuesForTypeKey || (DefaultValuesForTypeKey = {})); + +let mixin; +class YargsParser { + constructor(_mixin) { + mixin = _mixin; + } + parse(argsInput, options) { + const opts = Object.assign({ + alias: undefined, + array: undefined, + boolean: undefined, + config: undefined, + configObjects: undefined, + configuration: undefined, + coerce: undefined, + count: undefined, + default: undefined, + envPrefix: undefined, + narg: undefined, + normalize: undefined, + string: undefined, + number: undefined, + __: undefined, + key: undefined + }, options); + const args = tokenizeArgString(argsInput); + const inputIsString = typeof argsInput === 'string'; + const aliases = combineAliases(Object.assign(Object.create(null), opts.alias)); + const configuration = Object.assign({ + 'boolean-negation': true, + 'camel-case-expansion': true, + 'combine-arrays': false, + 'dot-notation': true, + 'duplicate-arguments-array': true, + 'flatten-duplicate-arrays': true, + 'greedy-arrays': true, + 'halt-at-non-option': false, + 'nargs-eats-options': false, + 'negation-prefix': 'no-', + 'parse-numbers': true, + 'parse-positional-numbers': true, + 'populate--': false, + 'set-placeholder-key': false, + 'short-option-groups': true, + 'strip-aliased': false, + 'strip-dashed': false, + 'unknown-options-as-args': false + }, opts.configuration); + const defaults = Object.assign(Object.create(null), opts.default); + const configObjects = opts.configObjects || []; + const envPrefix = opts.envPrefix; + const notFlagsOption = configuration['populate--']; + const notFlagsArgv = notFlagsOption ? '--' : '_'; + const newAliases = Object.create(null); + const defaulted = Object.create(null); + const __ = opts.__ || mixin.format; + const flags = { + aliases: Object.create(null), + arrays: Object.create(null), + bools: Object.create(null), + strings: Object.create(null), + numbers: Object.create(null), + counts: Object.create(null), + normalize: Object.create(null), + configs: Object.create(null), + nargs: Object.create(null), + coercions: Object.create(null), + keys: [] + }; + const negative = /^-([0-9]+(\.[0-9]+)?|\.[0-9]+)$/; + const negatedBoolean = new RegExp('^--' + configuration['negation-prefix'] + '(.+)'); + [].concat(opts.array || []).filter(Boolean).forEach(function (opt) { + const key = typeof opt === 'object' ? 
opt.key : opt; + const assignment = Object.keys(opt).map(function (key) { + const arrayFlagKeys = { + boolean: 'bools', + string: 'strings', + number: 'numbers' + }; + return arrayFlagKeys[key]; + }).filter(Boolean).pop(); + if (assignment) { + flags[assignment][key] = true; + } + flags.arrays[key] = true; + flags.keys.push(key); + }); + [].concat(opts.boolean || []).filter(Boolean).forEach(function (key) { + flags.bools[key] = true; + flags.keys.push(key); + }); + [].concat(opts.string || []).filter(Boolean).forEach(function (key) { + flags.strings[key] = true; + flags.keys.push(key); + }); + [].concat(opts.number || []).filter(Boolean).forEach(function (key) { + flags.numbers[key] = true; + flags.keys.push(key); + }); + [].concat(opts.count || []).filter(Boolean).forEach(function (key) { + flags.counts[key] = true; + flags.keys.push(key); + }); + [].concat(opts.normalize || []).filter(Boolean).forEach(function (key) { + flags.normalize[key] = true; + flags.keys.push(key); + }); + if (typeof opts.narg === 'object') { + Object.entries(opts.narg).forEach(([key, value]) => { + if (typeof value === 'number') { + flags.nargs[key] = value; + flags.keys.push(key); + } + }); + } + if (typeof opts.coerce === 'object') { + Object.entries(opts.coerce).forEach(([key, value]) => { + if (typeof value === 'function') { + flags.coercions[key] = value; + flags.keys.push(key); + } + }); + } + if (typeof opts.config !== 'undefined') { + if (Array.isArray(opts.config) || typeof opts.config === 'string') { + [].concat(opts.config).filter(Boolean).forEach(function (key) { + flags.configs[key] = true; + }); + } + else if (typeof opts.config === 'object') { + Object.entries(opts.config).forEach(([key, value]) => { + if (typeof value === 'boolean' || typeof value === 'function') { + flags.configs[key] = value; + } + }); + } + } + extendAliases(opts.key, aliases, opts.default, flags.arrays); + Object.keys(defaults).forEach(function (key) { + (flags.aliases[key] || []).forEach(function (alias) { + defaults[alias] = defaults[key]; + }); + }); + let error = null; + checkConfiguration(); + let notFlags = []; + const argv = Object.assign(Object.create(null), { _: [] }); + const argvReturn = {}; + for (let i = 0; i < args.length; i++) { + const arg = args[i]; + const truncatedArg = arg.replace(/^-{3,}/, '---'); + let broken; + let key; + let letters; + let m; + let next; + let value; + if (arg !== '--' && /^-/.test(arg) && isUnknownOptionAsArg(arg)) { + pushPositional(arg); + } + else if (truncatedArg.match(/^---+(=|$)/)) { + pushPositional(arg); + continue; + } + else if (arg.match(/^--.+=/) || (!configuration['short-option-groups'] && arg.match(/^-.+=/))) { + m = arg.match(/^--?([^=]+)=([\s\S]*)$/); + if (m !== null && Array.isArray(m) && m.length >= 3) { + if (checkAllAliases(m[1], flags.arrays)) { + i = eatArray(i, m[1], args, m[2]); + } + else if (checkAllAliases(m[1], flags.nargs) !== false) { + i = eatNargs(i, m[1], args, m[2]); + } + else { + setArg(m[1], m[2], true); + } + } + } + else if (arg.match(negatedBoolean) && configuration['boolean-negation']) { + m = arg.match(negatedBoolean); + if (m !== null && Array.isArray(m) && m.length >= 2) { + key = m[1]; + setArg(key, checkAllAliases(key, flags.arrays) ? 
[false] : false); + } + } + else if (arg.match(/^--.+/) || (!configuration['short-option-groups'] && arg.match(/^-[^-]+/))) { + m = arg.match(/^--?(.+)/); + if (m !== null && Array.isArray(m) && m.length >= 2) { + key = m[1]; + if (checkAllAliases(key, flags.arrays)) { + i = eatArray(i, key, args); + } + else if (checkAllAliases(key, flags.nargs) !== false) { + i = eatNargs(i, key, args); + } + else { + next = args[i + 1]; + if (next !== undefined && (!next.match(/^-/) || + next.match(negative)) && + !checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts)) { + setArg(key, next); + i++; + } + else if (/^(true|false)$/.test(next)) { + setArg(key, next); + i++; + } + else { + setArg(key, defaultValue(key)); + } + } + } + } + else if (arg.match(/^-.\..+=/)) { + m = arg.match(/^-([^=]+)=([\s\S]*)$/); + if (m !== null && Array.isArray(m) && m.length >= 3) { + setArg(m[1], m[2]); + } + } + else if (arg.match(/^-.\..+/) && !arg.match(negative)) { + next = args[i + 1]; + m = arg.match(/^-(.\..+)/); + if (m !== null && Array.isArray(m) && m.length >= 2) { + key = m[1]; + if (next !== undefined && !next.match(/^-/) && + !checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts)) { + setArg(key, next); + i++; + } + else { + setArg(key, defaultValue(key)); + } + } + } + else if (arg.match(/^-[^-]+/) && !arg.match(negative)) { + letters = arg.slice(1, -1).split(''); + broken = false; + for (let j = 0; j < letters.length; j++) { + next = arg.slice(j + 2); + if (letters[j + 1] && letters[j + 1] === '=') { + value = arg.slice(j + 3); + key = letters[j]; + if (checkAllAliases(key, flags.arrays)) { + i = eatArray(i, key, args, value); + } + else if (checkAllAliases(key, flags.nargs) !== false) { + i = eatNargs(i, key, args, value); + } + else { + setArg(key, value); + } + broken = true; + break; + } + if (next === '-') { + setArg(letters[j], next); + continue; + } + if (/[A-Za-z]/.test(letters[j]) && + /^-?\d+(\.\d*)?(e-?\d+)?$/.test(next) && + checkAllAliases(next, flags.bools) === false) { + setArg(letters[j], next); + broken = true; + break; + } + if (letters[j + 1] && letters[j + 1].match(/\W/)) { + setArg(letters[j], next); + broken = true; + break; + } + else { + setArg(letters[j], defaultValue(letters[j])); + } + } + key = arg.slice(-1)[0]; + if (!broken && key !== '-') { + if (checkAllAliases(key, flags.arrays)) { + i = eatArray(i, key, args); + } + else if (checkAllAliases(key, flags.nargs) !== false) { + i = eatNargs(i, key, args); + } + else { + next = args[i + 1]; + if (next !== undefined && (!/^(-|--)[^-]/.test(next) || + next.match(negative)) && + !checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts)) { + setArg(key, next); + i++; + } + else if (/^(true|false)$/.test(next)) { + setArg(key, next); + i++; + } + else { + setArg(key, defaultValue(key)); + } + } + } + } + else if (arg.match(/^-[0-9]$/) && + arg.match(negative) && + checkAllAliases(arg.slice(1), flags.bools)) { + key = arg.slice(1); + setArg(key, defaultValue(key)); + } + else if (arg === '--') { + notFlags = args.slice(i + 1); + break; + } + else if (configuration['halt-at-non-option']) { + notFlags = args.slice(i); + break; + } + else { + pushPositional(arg); + } + } + applyEnvVars(argv, true); + applyEnvVars(argv, false); + setConfig(argv); + setConfigObjects(); + applyDefaultsAndAliases(argv, flags.aliases, defaults, true); + applyCoercions(argv); + if (configuration['set-placeholder-key']) + setPlaceholderKeys(argv); + Object.keys(flags.counts).forEach(function (key) { + 
if (!hasKey(argv, key.split('.'))) + setArg(key, 0); + }); + if (notFlagsOption && notFlags.length) + argv[notFlagsArgv] = []; + notFlags.forEach(function (key) { + argv[notFlagsArgv].push(key); + }); + if (configuration['camel-case-expansion'] && configuration['strip-dashed']) { + Object.keys(argv).filter(key => key !== '--' && key.includes('-')).forEach(key => { + delete argv[key]; + }); + } + if (configuration['strip-aliased']) { + [].concat(...Object.keys(aliases).map(k => aliases[k])).forEach(alias => { + if (configuration['camel-case-expansion'] && alias.includes('-')) { + delete argv[alias.split('.').map(prop => camelCase(prop)).join('.')]; + } + delete argv[alias]; + }); + } + function pushPositional(arg) { + const maybeCoercedNumber = maybeCoerceNumber('_', arg); + if (typeof maybeCoercedNumber === 'string' || typeof maybeCoercedNumber === 'number') { + argv._.push(maybeCoercedNumber); + } + } + function eatNargs(i, key, args, argAfterEqualSign) { + let ii; + let toEat = checkAllAliases(key, flags.nargs); + toEat = typeof toEat !== 'number' || isNaN(toEat) ? 1 : toEat; + if (toEat === 0) { + if (!isUndefined(argAfterEqualSign)) { + error = Error(__('Argument unexpected for: %s', key)); + } + setArg(key, defaultValue(key)); + return i; + } + let available = isUndefined(argAfterEqualSign) ? 0 : 1; + if (configuration['nargs-eats-options']) { + if (args.length - (i + 1) + available < toEat) { + error = Error(__('Not enough arguments following: %s', key)); + } + available = toEat; + } + else { + for (ii = i + 1; ii < args.length; ii++) { + if (!args[ii].match(/^-[^0-9]/) || args[ii].match(negative) || isUnknownOptionAsArg(args[ii])) + available++; + else + break; + } + if (available < toEat) + error = Error(__('Not enough arguments following: %s', key)); + } + let consumed = Math.min(available, toEat); + if (!isUndefined(argAfterEqualSign) && consumed > 0) { + setArg(key, argAfterEqualSign); + consumed--; + } + for (ii = i + 1; ii < (consumed + i + 1); ii++) { + setArg(key, args[ii]); + } + return (i + consumed); + } + function eatArray(i, key, args, argAfterEqualSign) { + let argsToSet = []; + let next = argAfterEqualSign || args[i + 1]; + const nargsCount = checkAllAliases(key, flags.nargs); + if (checkAllAliases(key, flags.bools) && !(/^(true|false)$/.test(next))) { + argsToSet.push(true); + } + else if (isUndefined(next) || + (isUndefined(argAfterEqualSign) && /^-/.test(next) && !negative.test(next) && !isUnknownOptionAsArg(next))) { + if (defaults[key] !== undefined) { + const defVal = defaults[key]; + argsToSet = Array.isArray(defVal) ? 
defVal : [defVal]; + } + } + else { + if (!isUndefined(argAfterEqualSign)) { + argsToSet.push(processValue(key, argAfterEqualSign, true)); + } + for (let ii = i + 1; ii < args.length; ii++) { + if ((!configuration['greedy-arrays'] && argsToSet.length > 0) || + (nargsCount && typeof nargsCount === 'number' && argsToSet.length >= nargsCount)) + break; + next = args[ii]; + if (/^-/.test(next) && !negative.test(next) && !isUnknownOptionAsArg(next)) + break; + i = ii; + argsToSet.push(processValue(key, next, inputIsString)); + } + } + if (typeof nargsCount === 'number' && ((nargsCount && argsToSet.length < nargsCount) || + (isNaN(nargsCount) && argsToSet.length === 0))) { + error = Error(__('Not enough arguments following: %s', key)); + } + setArg(key, argsToSet); + return i; + } + function setArg(key, val, shouldStripQuotes = inputIsString) { + if (/-/.test(key) && configuration['camel-case-expansion']) { + const alias = key.split('.').map(function (prop) { + return camelCase(prop); + }).join('.'); + addNewAlias(key, alias); + } + const value = processValue(key, val, shouldStripQuotes); + const splitKey = key.split('.'); + setKey(argv, splitKey, value); + if (flags.aliases[key]) { + flags.aliases[key].forEach(function (x) { + const keyProperties = x.split('.'); + setKey(argv, keyProperties, value); + }); + } + if (splitKey.length > 1 && configuration['dot-notation']) { + (flags.aliases[splitKey[0]] || []).forEach(function (x) { + let keyProperties = x.split('.'); + const a = [].concat(splitKey); + a.shift(); + keyProperties = keyProperties.concat(a); + if (!(flags.aliases[key] || []).includes(keyProperties.join('.'))) { + setKey(argv, keyProperties, value); + } + }); + } + if (checkAllAliases(key, flags.normalize) && !checkAllAliases(key, flags.arrays)) { + const keys = [key].concat(flags.aliases[key] || []); + keys.forEach(function (key) { + Object.defineProperty(argvReturn, key, { + enumerable: true, + get() { + return val; + }, + set(value) { + val = typeof value === 'string' ? mixin.normalize(value) : value; + } + }); + }); + } + } + function addNewAlias(key, alias) { + if (!(flags.aliases[key] && flags.aliases[key].length)) { + flags.aliases[key] = [alias]; + newAliases[alias] = true; + } + if (!(flags.aliases[alias] && flags.aliases[alias].length)) { + addNewAlias(alias, key); + } + } + function processValue(key, val, shouldStripQuotes) { + if (shouldStripQuotes) { + val = stripQuotes(val); + } + if (checkAllAliases(key, flags.bools) || checkAllAliases(key, flags.counts)) { + if (typeof val === 'string') + val = val === 'true'; + } + let value = Array.isArray(val) + ? 
val.map(function (v) { return maybeCoerceNumber(key, v); }) + : maybeCoerceNumber(key, val); + if (checkAllAliases(key, flags.counts) && (isUndefined(value) || typeof value === 'boolean')) { + value = increment(); + } + if (checkAllAliases(key, flags.normalize) && checkAllAliases(key, flags.arrays)) { + if (Array.isArray(val)) + value = val.map((val) => { return mixin.normalize(val); }); + else + value = mixin.normalize(val); + } + return value; + } + function maybeCoerceNumber(key, value) { + if (!configuration['parse-positional-numbers'] && key === '_') + return value; + if (!checkAllAliases(key, flags.strings) && !checkAllAliases(key, flags.bools) && !Array.isArray(value)) { + const shouldCoerceNumber = looksLikeNumber(value) && configuration['parse-numbers'] && (Number.isSafeInteger(Math.floor(parseFloat(`${value}`)))); + if (shouldCoerceNumber || (!isUndefined(value) && checkAllAliases(key, flags.numbers))) { + value = Number(value); + } + } + return value; + } + function setConfig(argv) { + const configLookup = Object.create(null); + applyDefaultsAndAliases(configLookup, flags.aliases, defaults); + Object.keys(flags.configs).forEach(function (configKey) { + const configPath = argv[configKey] || configLookup[configKey]; + if (configPath) { + try { + let config = null; + const resolvedConfigPath = mixin.resolve(mixin.cwd(), configPath); + const resolveConfig = flags.configs[configKey]; + if (typeof resolveConfig === 'function') { + try { + config = resolveConfig(resolvedConfigPath); + } + catch (e) { + config = e; + } + if (config instanceof Error) { + error = config; + return; + } + } + else { + config = mixin.require(resolvedConfigPath); + } + setConfigObject(config); + } + catch (ex) { + if (ex.name === 'PermissionDenied') + error = ex; + else if (argv[configKey]) + error = Error(__('Invalid JSON config file: %s', configPath)); + } + } + }); + } + function setConfigObject(config, prev) { + Object.keys(config).forEach(function (key) { + const value = config[key]; + const fullKey = prev ? prev + '.' + key : key; + if (typeof value === 'object' && value !== null && !Array.isArray(value) && configuration['dot-notation']) { + setConfigObject(value, fullKey); + } + else { + if (!hasKey(argv, fullKey.split('.')) || (checkAllAliases(fullKey, flags.arrays) && configuration['combine-arrays'])) { + setArg(fullKey, value); + } + } + }); + } + function setConfigObjects() { + if (typeof configObjects !== 'undefined') { + configObjects.forEach(function (configObject) { + setConfigObject(configObject); + }); + } + } + function applyEnvVars(argv, configOnly) { + if (typeof envPrefix === 'undefined') + return; + const prefix = typeof envPrefix === 'string' ? 
envPrefix : ''; + const env = mixin.env(); + Object.keys(env).forEach(function (envVar) { + if (prefix === '' || envVar.lastIndexOf(prefix, 0) === 0) { + const keys = envVar.split('__').map(function (key, i) { + if (i === 0) { + key = key.substring(prefix.length); + } + return camelCase(key); + }); + if (((configOnly && flags.configs[keys.join('.')]) || !configOnly) && !hasKey(argv, keys)) { + setArg(keys.join('.'), env[envVar]); + } + } + }); + } + function applyCoercions(argv) { + let coerce; + const applied = new Set(); + Object.keys(argv).forEach(function (key) { + if (!applied.has(key)) { + coerce = checkAllAliases(key, flags.coercions); + if (typeof coerce === 'function') { + try { + const value = maybeCoerceNumber(key, coerce(argv[key])); + ([].concat(flags.aliases[key] || [], key)).forEach(ali => { + applied.add(ali); + argv[ali] = value; + }); + } + catch (err) { + error = err; + } + } + } + }); + } + function setPlaceholderKeys(argv) { + flags.keys.forEach((key) => { + if (~key.indexOf('.')) + return; + if (typeof argv[key] === 'undefined') + argv[key] = undefined; + }); + return argv; + } + function applyDefaultsAndAliases(obj, aliases, defaults, canLog = false) { + Object.keys(defaults).forEach(function (key) { + if (!hasKey(obj, key.split('.'))) { + setKey(obj, key.split('.'), defaults[key]); + if (canLog) + defaulted[key] = true; + (aliases[key] || []).forEach(function (x) { + if (hasKey(obj, x.split('.'))) + return; + setKey(obj, x.split('.'), defaults[key]); + }); + } + }); + } + function hasKey(obj, keys) { + let o = obj; + if (!configuration['dot-notation']) + keys = [keys.join('.')]; + keys.slice(0, -1).forEach(function (key) { + o = (o[key] || {}); + }); + const key = keys[keys.length - 1]; + if (typeof o !== 'object') + return false; + else + return key in o; + } + function setKey(obj, keys, value) { + let o = obj; + if (!configuration['dot-notation']) + keys = [keys.join('.')]; + keys.slice(0, -1).forEach(function (key) { + key = sanitizeKey(key); + if (typeof o === 'object' && o[key] === undefined) { + o[key] = {}; + } + if (typeof o[key] !== 'object' || Array.isArray(o[key])) { + if (Array.isArray(o[key])) { + o[key].push({}); + } + else { + o[key] = [o[key], {}]; + } + o = o[key][o[key].length - 1]; + } + else { + o = o[key]; + } + }); + const key = sanitizeKey(keys[keys.length - 1]); + const isTypeArray = checkAllAliases(keys.join('.'), flags.arrays); + const isValueArray = Array.isArray(value); + let duplicate = configuration['duplicate-arguments-array']; + if (!duplicate && checkAllAliases(key, flags.nargs)) { + duplicate = true; + if ((!isUndefined(o[key]) && flags.nargs[key] === 1) || (Array.isArray(o[key]) && o[key].length === flags.nargs[key])) { + o[key] = undefined; + } + } + if (value === increment()) { + o[key] = increment(o[key]); + } + else if (Array.isArray(o[key])) { + if (duplicate && isTypeArray && isValueArray) { + o[key] = configuration['flatten-duplicate-arrays'] ? o[key].concat(value) : (Array.isArray(o[key][0]) ? o[key] : [o[key]]).concat([value]); + } + else if (!duplicate && Boolean(isTypeArray) === Boolean(isValueArray)) { + o[key] = value; + } + else { + o[key] = o[key].concat([value]); + } + } + else if (o[key] === undefined && isTypeArray) { + o[key] = isValueArray ? 
value : [value]; + } + else if (duplicate && !(o[key] === undefined || + checkAllAliases(key, flags.counts) || + checkAllAliases(key, flags.bools))) { + o[key] = [o[key], value]; + } + else { + o[key] = value; + } + } + function extendAliases(...args) { + args.forEach(function (obj) { + Object.keys(obj || {}).forEach(function (key) { + if (flags.aliases[key]) + return; + flags.aliases[key] = [].concat(aliases[key] || []); + flags.aliases[key].concat(key).forEach(function (x) { + if (/-/.test(x) && configuration['camel-case-expansion']) { + const c = camelCase(x); + if (c !== key && flags.aliases[key].indexOf(c) === -1) { + flags.aliases[key].push(c); + newAliases[c] = true; + } + } + }); + flags.aliases[key].concat(key).forEach(function (x) { + if (x.length > 1 && /[A-Z]/.test(x) && configuration['camel-case-expansion']) { + const c = decamelize(x, '-'); + if (c !== key && flags.aliases[key].indexOf(c) === -1) { + flags.aliases[key].push(c); + newAliases[c] = true; + } + } + }); + flags.aliases[key].forEach(function (x) { + flags.aliases[x] = [key].concat(flags.aliases[key].filter(function (y) { + return x !== y; + })); + }); + }); + }); + } + function checkAllAliases(key, flag) { + const toCheck = [].concat(flags.aliases[key] || [], key); + const keys = Object.keys(flag); + const setAlias = toCheck.find(key => keys.includes(key)); + return setAlias ? flag[setAlias] : false; + } + function hasAnyFlag(key) { + const flagsKeys = Object.keys(flags); + const toCheck = [].concat(flagsKeys.map(k => flags[k])); + return toCheck.some(function (flag) { + return Array.isArray(flag) ? flag.includes(key) : flag[key]; + }); + } + function hasFlagsMatching(arg, ...patterns) { + const toCheck = [].concat(...patterns); + return toCheck.some(function (pattern) { + const match = arg.match(pattern); + return match && hasAnyFlag(match[1]); + }); + } + function hasAllShortFlags(arg) { + if (arg.match(negative) || !arg.match(/^-[^-]+/)) { + return false; + } + let hasAllFlags = true; + let next; + const letters = arg.slice(1).split(''); + for (let j = 0; j < letters.length; j++) { + next = arg.slice(j + 2); + if (!hasAnyFlag(letters[j])) { + hasAllFlags = false; + break; + } + if ((letters[j + 1] && letters[j + 1] === '=') || + next === '-' || + (/[A-Za-z]/.test(letters[j]) && /^-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) || + (letters[j + 1] && letters[j + 1].match(/\W/))) { + break; + } + } + return hasAllFlags; + } + function isUnknownOptionAsArg(arg) { + return configuration['unknown-options-as-args'] && isUnknownOption(arg); + } + function isUnknownOption(arg) { + arg = arg.replace(/^-{3,}/, '--'); + if (arg.match(negative)) { + return false; + } + if (hasAllShortFlags(arg)) { + return false; + } + const flagWithEquals = /^-+([^=]+?)=[\s\S]*$/; + const normalFlag = /^-+([^=]+?)$/; + const flagEndingInHyphen = /^-+([^=]+?)-$/; + const flagEndingInDigits = /^-+([^=]+?\d+)$/; + const flagEndingInNonWordCharacters = /^-+([^=]+?)\W+.*$/; + return !hasFlagsMatching(arg, flagWithEquals, negatedBoolean, normalFlag, flagEndingInHyphen, flagEndingInDigits, flagEndingInNonWordCharacters); + } + function defaultValue(key) { + if (!checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts) && + `${key}` in defaults) { + return defaults[key]; + } + else { + return defaultForType(guessType(key)); + } + } + function defaultForType(type) { + const def = { + [DefaultValuesForTypeKey.BOOLEAN]: true, + [DefaultValuesForTypeKey.STRING]: '', + [DefaultValuesForTypeKey.NUMBER]: undefined, + 
[DefaultValuesForTypeKey.ARRAY]: [] + }; + return def[type]; + } + function guessType(key) { + let type = DefaultValuesForTypeKey.BOOLEAN; + if (checkAllAliases(key, flags.strings)) + type = DefaultValuesForTypeKey.STRING; + else if (checkAllAliases(key, flags.numbers)) + type = DefaultValuesForTypeKey.NUMBER; + else if (checkAllAliases(key, flags.bools)) + type = DefaultValuesForTypeKey.BOOLEAN; + else if (checkAllAliases(key, flags.arrays)) + type = DefaultValuesForTypeKey.ARRAY; + return type; + } + function isUndefined(num) { + return num === undefined; + } + function checkConfiguration() { + Object.keys(flags.counts).find(key => { + if (checkAllAliases(key, flags.arrays)) { + error = Error(__('Invalid configuration: %s, opts.count excludes opts.array.', key)); + return true; + } + else if (checkAllAliases(key, flags.nargs)) { + error = Error(__('Invalid configuration: %s, opts.count excludes opts.narg.', key)); + return true; + } + return false; + }); + } + return { + aliases: Object.assign({}, flags.aliases), + argv: Object.assign(argvReturn, argv), + configuration: configuration, + defaulted: Object.assign({}, defaulted), + error: error, + newAliases: Object.assign({}, newAliases) + }; + } +} +function combineAliases(aliases) { + const aliasArrays = []; + const combined = Object.create(null); + let change = true; + Object.keys(aliases).forEach(function (key) { + aliasArrays.push([].concat(aliases[key], key)); + }); + while (change) { + change = false; + for (let i = 0; i < aliasArrays.length; i++) { + for (let ii = i + 1; ii < aliasArrays.length; ii++) { + const intersect = aliasArrays[i].filter(function (v) { + return aliasArrays[ii].indexOf(v) !== -1; + }); + if (intersect.length) { + aliasArrays[i] = aliasArrays[i].concat(aliasArrays[ii]); + aliasArrays.splice(ii, 1); + change = true; + break; + } + } + } + } + aliasArrays.forEach(function (aliasArray) { + aliasArray = aliasArray.filter(function (v, i, self) { + return self.indexOf(v) === i; + }); + const lastAlias = aliasArray.pop(); + if (lastAlias !== undefined && typeof lastAlias === 'string') { + combined[lastAlias] = aliasArray; + } + }); + return combined; +} +function increment(orig) { + return orig !== undefined ? orig + 1 : 1; +} +function sanitizeKey(key) { + if (key === '__proto__') + return '___proto___'; + return key; +} +function stripQuotes(val) { + return (typeof val === 'string' && + (val[0] === "'" || val[0] === '"') && + val[val.length - 1] === val[0]) + ? val.substring(1, val.length - 1) + : val; +} + +var _a, _b, _c; +const minNodeVersion = (process && process.env && process.env.YARGS_MIN_NODE_VERSION) + ? Number(process.env.YARGS_MIN_NODE_VERSION) + : 12; +const nodeVersion = (_b = (_a = process === null || process === void 0 ? void 0 : process.versions) === null || _a === void 0 ? void 0 : _a.node) !== null && _b !== void 0 ? _b : (_c = process === null || process === void 0 ? void 0 : process.version) === null || _c === void 0 ? void 0 : _c.slice(1); +if (nodeVersion) { + const major = Number(nodeVersion.match(/^([^.]+)/)[1]); + if (major < minNodeVersion) { + throw Error(`yargs parser supports a minimum Node.js version of ${minNodeVersion}. Read our version support policy: https://github.com/yargs/yargs-parser#supported-nodejs-versions`); + } +} +const env = process ? 
process.env : {}; +const parser = new YargsParser({ + cwd: process.cwd, + env: () => { + return env; + }, + format: util.format, + normalize: path.normalize, + resolve: path.resolve, + require: (path) => { + if (typeof require !== 'undefined') { + return require(path); + } + else if (path.match(/\.json$/)) { + return JSON.parse(fs.readFileSync(path, 'utf8')); + } + else { + throw Error('only .json config files are supported in ESM'); + } + } +}); +const yargsParser = function Parser(args, opts) { + const result = parser.parse(args.slice(), opts); + return result.argv; +}; +yargsParser.detailed = function (args, opts) { + return parser.parse(args.slice(), opts); +}; +yargsParser.camelCase = camelCase; +yargsParser.decamelize = decamelize; +yargsParser.looksLikeNumber = looksLikeNumber; + +module.exports = yargsParser; diff --git a/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/index.js b/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/index.js new file mode 100644 index 0000000..43ef485 --- /dev/null +++ b/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/index.js @@ -0,0 +1,62 @@ +/** + * @fileoverview Main entrypoint for libraries using yargs-parser in Node.js + * CJS and ESM environments. + * + * @license + * Copyright (c) 2016, Contributors + * SPDX-License-Identifier: ISC + */ +var _a, _b, _c; +import { format } from 'util'; +import { normalize, resolve } from 'path'; +import { camelCase, decamelize, looksLikeNumber } from './string-utils.js'; +import { YargsParser } from './yargs-parser.js'; +import { readFileSync } from 'fs'; +// See https://github.com/yargs/yargs-parser#supported-nodejs-versions for our +// version support policy. The YARGS_MIN_NODE_VERSION is used for testing only. +const minNodeVersion = (process && process.env && process.env.YARGS_MIN_NODE_VERSION) + ? Number(process.env.YARGS_MIN_NODE_VERSION) + : 12; +const nodeVersion = (_b = (_a = process === null || process === void 0 ? void 0 : process.versions) === null || _a === void 0 ? void 0 : _a.node) !== null && _b !== void 0 ? _b : (_c = process === null || process === void 0 ? void 0 : process.version) === null || _c === void 0 ? void 0 : _c.slice(1); +if (nodeVersion) { + const major = Number(nodeVersion.match(/^([^.]+)/)[1]); + if (major < minNodeVersion) { + throw Error(`yargs parser supports a minimum Node.js version of ${minNodeVersion}. Read our version support policy: https://github.com/yargs/yargs-parser#supported-nodejs-versions`); + } +} +// Creates a yargs-parser instance using Node.js standard libraries: +const env = process ? 
process.env : {}; +const parser = new YargsParser({ + cwd: process.cwd, + env: () => { + return env; + }, + format, + normalize, + resolve, + // TODO: figure out a way to combine ESM and CJS coverage, such that + // we can exercise all the lines below: + require: (path) => { + if (typeof require !== 'undefined') { + return require(path); + } + else if (path.match(/\.json$/)) { + // Addresses: https://github.com/yargs/yargs/issues/2040 + return JSON.parse(readFileSync(path, 'utf8')); + } + else { + throw Error('only .json config files are supported in ESM'); + } + } +}); +const yargsParser = function Parser(args, opts) { + const result = parser.parse(args.slice(), opts); + return result.argv; +}; +yargsParser.detailed = function (args, opts) { + return parser.parse(args.slice(), opts); +}; +yargsParser.camelCase = camelCase; +yargsParser.decamelize = decamelize; +yargsParser.looksLikeNumber = looksLikeNumber; +export default yargsParser; diff --git a/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/string-utils.js b/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/string-utils.js new file mode 100644 index 0000000..4e8bd99 --- /dev/null +++ b/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/string-utils.js @@ -0,0 +1,65 @@ +/** + * @license + * Copyright (c) 2016, Contributors + * SPDX-License-Identifier: ISC + */ +export function camelCase(str) { + // Handle the case where an argument is provided as camel case, e.g., fooBar. + // by ensuring that the string isn't already mixed case: + const isCamelCase = str !== str.toLowerCase() && str !== str.toUpperCase(); + if (!isCamelCase) { + str = str.toLowerCase(); + } + if (str.indexOf('-') === -1 && str.indexOf('_') === -1) { + return str; + } + else { + let camelcase = ''; + let nextChrUpper = false; + const leadingHyphens = str.match(/^-+/); + for (let i = leadingHyphens ? leadingHyphens[0].length : 0; i < str.length; i++) { + let chr = str.charAt(i); + if (nextChrUpper) { + nextChrUpper = false; + chr = chr.toUpperCase(); + } + if (i !== 0 && (chr === '-' || chr === '_')) { + nextChrUpper = true; + } + else if (chr !== '-' && chr !== '_') { + camelcase += chr; + } + } + return camelcase; + } +} +export function decamelize(str, joinString) { + const lowercase = str.toLowerCase(); + joinString = joinString || '-'; + let notCamelcase = ''; + for (let i = 0; i < str.length; i++) { + const chrLower = lowercase.charAt(i); + const chrString = str.charAt(i); + if (chrLower !== chrString && i > 0) { + notCamelcase += `${joinString}${lowercase.charAt(i)}`; + } + else { + notCamelcase += chrString; + } + } + return notCamelcase; +} +export function looksLikeNumber(x) { + if (x === null || x === undefined) + return false; + // if loaded from config, may already be a number. + if (typeof x === 'number') + return true; + // hexadecimal. + if (/^0x[0-9a-f]+$/i.test(x)) + return true; + // don't treat 0123 as a number; as it drops the leading '0'. 
+ if (/^0[^.]/.test(x)) + return false; + return /^[-]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x); +} diff --git a/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/tokenize-arg-string.js b/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/tokenize-arg-string.js new file mode 100644 index 0000000..5e732ef --- /dev/null +++ b/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/tokenize-arg-string.js @@ -0,0 +1,40 @@ +/** + * @license + * Copyright (c) 2016, Contributors + * SPDX-License-Identifier: ISC + */ +// take an un-split argv string and tokenize it. +export function tokenizeArgString(argString) { + if (Array.isArray(argString)) { + return argString.map(e => typeof e !== 'string' ? e + '' : e); + } + argString = argString.trim(); + let i = 0; + let prevC = null; + let c = null; + let opening = null; + const args = []; + for (let ii = 0; ii < argString.length; ii++) { + prevC = c; + c = argString.charAt(ii); + // split on spaces unless we're in quotes. + if (c === ' ' && !opening) { + if (!(prevC === ' ')) { + i++; + } + continue; + } + // don't split the string if we're in matching + // opening or closing single and double quotes. + if (c === opening) { + opening = null; + } + else if ((c === "'" || c === '"') && !opening) { + opening = c; + } + if (!args[i]) + args[i] = ''; + args[i] += c; + } + return args; +} diff --git a/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/yargs-parser-types.js b/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/yargs-parser-types.js new file mode 100644 index 0000000..63b7c31 --- /dev/null +++ b/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/yargs-parser-types.js @@ -0,0 +1,12 @@ +/** + * @license + * Copyright (c) 2016, Contributors + * SPDX-License-Identifier: ISC + */ +export var DefaultValuesForTypeKey; +(function (DefaultValuesForTypeKey) { + DefaultValuesForTypeKey["BOOLEAN"] = "boolean"; + DefaultValuesForTypeKey["STRING"] = "string"; + DefaultValuesForTypeKey["NUMBER"] = "number"; + DefaultValuesForTypeKey["ARRAY"] = "array"; +})(DefaultValuesForTypeKey || (DefaultValuesForTypeKey = {})); diff --git a/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/yargs-parser.js b/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/yargs-parser.js new file mode 100644 index 0000000..415d4bc --- /dev/null +++ b/mybulma/node_modules/yargs/node_modules/yargs-parser/build/lib/yargs-parser.js @@ -0,0 +1,1045 @@ +/** + * @license + * Copyright (c) 2016, Contributors + * SPDX-License-Identifier: ISC + */ +import { tokenizeArgString } from './tokenize-arg-string.js'; +import { DefaultValuesForTypeKey } from './yargs-parser-types.js'; +import { camelCase, decamelize, looksLikeNumber } from './string-utils.js'; +let mixin; +export class YargsParser { + constructor(_mixin) { + mixin = _mixin; + } + parse(argsInput, options) { + const opts = Object.assign({ + alias: undefined, + array: undefined, + boolean: undefined, + config: undefined, + configObjects: undefined, + configuration: undefined, + coerce: undefined, + count: undefined, + default: undefined, + envPrefix: undefined, + narg: undefined, + normalize: undefined, + string: undefined, + number: undefined, + __: undefined, + key: undefined + }, options); + // allow a string argument to be passed in rather + // than an argv array. 
+ const args = tokenizeArgString(argsInput); + // tokenizeArgString adds extra quotes to args if argsInput is a string + // only strip those extra quotes in processValue if argsInput is a string + const inputIsString = typeof argsInput === 'string'; + // aliases might have transitive relationships, normalize this. + const aliases = combineAliases(Object.assign(Object.create(null), opts.alias)); + const configuration = Object.assign({ + 'boolean-negation': true, + 'camel-case-expansion': true, + 'combine-arrays': false, + 'dot-notation': true, + 'duplicate-arguments-array': true, + 'flatten-duplicate-arrays': true, + 'greedy-arrays': true, + 'halt-at-non-option': false, + 'nargs-eats-options': false, + 'negation-prefix': 'no-', + 'parse-numbers': true, + 'parse-positional-numbers': true, + 'populate--': false, + 'set-placeholder-key': false, + 'short-option-groups': true, + 'strip-aliased': false, + 'strip-dashed': false, + 'unknown-options-as-args': false + }, opts.configuration); + const defaults = Object.assign(Object.create(null), opts.default); + const configObjects = opts.configObjects || []; + const envPrefix = opts.envPrefix; + const notFlagsOption = configuration['populate--']; + const notFlagsArgv = notFlagsOption ? '--' : '_'; + const newAliases = Object.create(null); + const defaulted = Object.create(null); + // allow a i18n handler to be passed in, default to a fake one (util.format). + const __ = opts.__ || mixin.format; + const flags = { + aliases: Object.create(null), + arrays: Object.create(null), + bools: Object.create(null), + strings: Object.create(null), + numbers: Object.create(null), + counts: Object.create(null), + normalize: Object.create(null), + configs: Object.create(null), + nargs: Object.create(null), + coercions: Object.create(null), + keys: [] + }; + const negative = /^-([0-9]+(\.[0-9]+)?|\.[0-9]+)$/; + const negatedBoolean = new RegExp('^--' + configuration['negation-prefix'] + '(.+)'); + [].concat(opts.array || []).filter(Boolean).forEach(function (opt) { + const key = typeof opt === 'object' ? 
opt.key : opt; + // assign to flags[bools|strings|numbers] + const assignment = Object.keys(opt).map(function (key) { + const arrayFlagKeys = { + boolean: 'bools', + string: 'strings', + number: 'numbers' + }; + return arrayFlagKeys[key]; + }).filter(Boolean).pop(); + // assign key to be coerced + if (assignment) { + flags[assignment][key] = true; + } + flags.arrays[key] = true; + flags.keys.push(key); + }); + [].concat(opts.boolean || []).filter(Boolean).forEach(function (key) { + flags.bools[key] = true; + flags.keys.push(key); + }); + [].concat(opts.string || []).filter(Boolean).forEach(function (key) { + flags.strings[key] = true; + flags.keys.push(key); + }); + [].concat(opts.number || []).filter(Boolean).forEach(function (key) { + flags.numbers[key] = true; + flags.keys.push(key); + }); + [].concat(opts.count || []).filter(Boolean).forEach(function (key) { + flags.counts[key] = true; + flags.keys.push(key); + }); + [].concat(opts.normalize || []).filter(Boolean).forEach(function (key) { + flags.normalize[key] = true; + flags.keys.push(key); + }); + if (typeof opts.narg === 'object') { + Object.entries(opts.narg).forEach(([key, value]) => { + if (typeof value === 'number') { + flags.nargs[key] = value; + flags.keys.push(key); + } + }); + } + if (typeof opts.coerce === 'object') { + Object.entries(opts.coerce).forEach(([key, value]) => { + if (typeof value === 'function') { + flags.coercions[key] = value; + flags.keys.push(key); + } + }); + } + if (typeof opts.config !== 'undefined') { + if (Array.isArray(opts.config) || typeof opts.config === 'string') { + ; + [].concat(opts.config).filter(Boolean).forEach(function (key) { + flags.configs[key] = true; + }); + } + else if (typeof opts.config === 'object') { + Object.entries(opts.config).forEach(([key, value]) => { + if (typeof value === 'boolean' || typeof value === 'function') { + flags.configs[key] = value; + } + }); + } + } + // create a lookup table that takes into account all + // combinations of aliases: {f: ['foo'], foo: ['f']} + extendAliases(opts.key, aliases, opts.default, flags.arrays); + // apply default values to all aliases. + Object.keys(defaults).forEach(function (key) { + (flags.aliases[key] || []).forEach(function (alias) { + defaults[alias] = defaults[key]; + }); + }); + let error = null; + checkConfiguration(); + let notFlags = []; + const argv = Object.assign(Object.create(null), { _: [] }); + // TODO(bcoe): for the first pass at removing object prototype we didn't + // remove all prototypes from objects returned by this API, we might want + // to gradually move towards doing so. + const argvReturn = {}; + for (let i = 0; i < args.length; i++) { + const arg = args[i]; + const truncatedArg = arg.replace(/^-{3,}/, '---'); + let broken; + let key; + let letters; + let m; + let next; + let value; + // any unknown option (except for end-of-options, "--") + if (arg !== '--' && /^-/.test(arg) && isUnknownOptionAsArg(arg)) { + pushPositional(arg); + // ---, ---=, ----, etc, + } + else if (truncatedArg.match(/^---+(=|$)/)) { + // options without key name are invalid. + pushPositional(arg); + continue; + // -- separated by = + } + else if (arg.match(/^--.+=/) || (!configuration['short-option-groups'] && arg.match(/^-.+=/))) { + // Using [\s\S] instead of . because js doesn't support the + // 'dotall' regex modifier. 
See: + // http://stackoverflow.com/a/1068308/13216 + m = arg.match(/^--?([^=]+)=([\s\S]*)$/); + // arrays format = '--f=a b c' + if (m !== null && Array.isArray(m) && m.length >= 3) { + if (checkAllAliases(m[1], flags.arrays)) { + i = eatArray(i, m[1], args, m[2]); + } + else if (checkAllAliases(m[1], flags.nargs) !== false) { + // nargs format = '--f=monkey washing cat' + i = eatNargs(i, m[1], args, m[2]); + } + else { + setArg(m[1], m[2], true); + } + } + } + else if (arg.match(negatedBoolean) && configuration['boolean-negation']) { + m = arg.match(negatedBoolean); + if (m !== null && Array.isArray(m) && m.length >= 2) { + key = m[1]; + setArg(key, checkAllAliases(key, flags.arrays) ? [false] : false); + } + // -- separated by space. + } + else if (arg.match(/^--.+/) || (!configuration['short-option-groups'] && arg.match(/^-[^-]+/))) { + m = arg.match(/^--?(.+)/); + if (m !== null && Array.isArray(m) && m.length >= 2) { + key = m[1]; + if (checkAllAliases(key, flags.arrays)) { + // array format = '--foo a b c' + i = eatArray(i, key, args); + } + else if (checkAllAliases(key, flags.nargs) !== false) { + // nargs format = '--foo a b c' + // should be truthy even if: flags.nargs[key] === 0 + i = eatNargs(i, key, args); + } + else { + next = args[i + 1]; + if (next !== undefined && (!next.match(/^-/) || + next.match(negative)) && + !checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts)) { + setArg(key, next); + i++; + } + else if (/^(true|false)$/.test(next)) { + setArg(key, next); + i++; + } + else { + setArg(key, defaultValue(key)); + } + } + } + // dot-notation flag separated by '='. + } + else if (arg.match(/^-.\..+=/)) { + m = arg.match(/^-([^=]+)=([\s\S]*)$/); + if (m !== null && Array.isArray(m) && m.length >= 3) { + setArg(m[1], m[2]); + } + // dot-notation flag separated by space. 
+ } + else if (arg.match(/^-.\..+/) && !arg.match(negative)) { + next = args[i + 1]; + m = arg.match(/^-(.\..+)/); + if (m !== null && Array.isArray(m) && m.length >= 2) { + key = m[1]; + if (next !== undefined && !next.match(/^-/) && + !checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts)) { + setArg(key, next); + i++; + } + else { + setArg(key, defaultValue(key)); + } + } + } + else if (arg.match(/^-[^-]+/) && !arg.match(negative)) { + letters = arg.slice(1, -1).split(''); + broken = false; + for (let j = 0; j < letters.length; j++) { + next = arg.slice(j + 2); + if (letters[j + 1] && letters[j + 1] === '=') { + value = arg.slice(j + 3); + key = letters[j]; + if (checkAllAliases(key, flags.arrays)) { + // array format = '-f=a b c' + i = eatArray(i, key, args, value); + } + else if (checkAllAliases(key, flags.nargs) !== false) { + // nargs format = '-f=monkey washing cat' + i = eatNargs(i, key, args, value); + } + else { + setArg(key, value); + } + broken = true; + break; + } + if (next === '-') { + setArg(letters[j], next); + continue; + } + // current letter is an alphabetic character and next value is a number + if (/[A-Za-z]/.test(letters[j]) && + /^-?\d+(\.\d*)?(e-?\d+)?$/.test(next) && + checkAllAliases(next, flags.bools) === false) { + setArg(letters[j], next); + broken = true; + break; + } + if (letters[j + 1] && letters[j + 1].match(/\W/)) { + setArg(letters[j], next); + broken = true; + break; + } + else { + setArg(letters[j], defaultValue(letters[j])); + } + } + key = arg.slice(-1)[0]; + if (!broken && key !== '-') { + if (checkAllAliases(key, flags.arrays)) { + // array format = '-f a b c' + i = eatArray(i, key, args); + } + else if (checkAllAliases(key, flags.nargs) !== false) { + // nargs format = '-f a b c' + // should be truthy even if: flags.nargs[key] === 0 + i = eatNargs(i, key, args); + } + else { + next = args[i + 1]; + if (next !== undefined && (!/^(-|--)[^-]/.test(next) || + next.match(negative)) && + !checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts)) { + setArg(key, next); + i++; + } + else if (/^(true|false)$/.test(next)) { + setArg(key, next); + i++; + } + else { + setArg(key, defaultValue(key)); + } + } + } + } + else if (arg.match(/^-[0-9]$/) && + arg.match(negative) && + checkAllAliases(arg.slice(1), flags.bools)) { + // single-digit boolean alias, e.g: xargs -0 + key = arg.slice(1); + setArg(key, defaultValue(key)); + } + else if (arg === '--') { + notFlags = args.slice(i + 1); + break; + } + else if (configuration['halt-at-non-option']) { + notFlags = args.slice(i); + break; + } + else { + pushPositional(arg); + } + } + // order of precedence: + // 1. command line arg + // 2. value from env var + // 3. value from config file + // 4. value from config objects + // 5. configured default value + applyEnvVars(argv, true); // special case: check env vars that point to config file + applyEnvVars(argv, false); + setConfig(argv); + setConfigObjects(); + applyDefaultsAndAliases(argv, flags.aliases, defaults, true); + applyCoercions(argv); + if (configuration['set-placeholder-key']) + setPlaceholderKeys(argv); + // for any counts either not in args or without an explicit default, set to 0 + Object.keys(flags.counts).forEach(function (key) { + if (!hasKey(argv, key.split('.'))) + setArg(key, 0); + }); + // '--' defaults to undefined. 
+ if (notFlagsOption && notFlags.length) + argv[notFlagsArgv] = []; + notFlags.forEach(function (key) { + argv[notFlagsArgv].push(key); + }); + if (configuration['camel-case-expansion'] && configuration['strip-dashed']) { + Object.keys(argv).filter(key => key !== '--' && key.includes('-')).forEach(key => { + delete argv[key]; + }); + } + if (configuration['strip-aliased']) { + ; + [].concat(...Object.keys(aliases).map(k => aliases[k])).forEach(alias => { + if (configuration['camel-case-expansion'] && alias.includes('-')) { + delete argv[alias.split('.').map(prop => camelCase(prop)).join('.')]; + } + delete argv[alias]; + }); + } + // Push argument into positional array, applying numeric coercion: + function pushPositional(arg) { + const maybeCoercedNumber = maybeCoerceNumber('_', arg); + if (typeof maybeCoercedNumber === 'string' || typeof maybeCoercedNumber === 'number') { + argv._.push(maybeCoercedNumber); + } + } + // how many arguments should we consume, based + // on the nargs option? + function eatNargs(i, key, args, argAfterEqualSign) { + let ii; + let toEat = checkAllAliases(key, flags.nargs); + // NaN has a special meaning for the array type, indicating that one or + // more values are expected. + toEat = typeof toEat !== 'number' || isNaN(toEat) ? 1 : toEat; + if (toEat === 0) { + if (!isUndefined(argAfterEqualSign)) { + error = Error(__('Argument unexpected for: %s', key)); + } + setArg(key, defaultValue(key)); + return i; + } + let available = isUndefined(argAfterEqualSign) ? 0 : 1; + if (configuration['nargs-eats-options']) { + // classic behavior, yargs eats positional and dash arguments. + if (args.length - (i + 1) + available < toEat) { + error = Error(__('Not enough arguments following: %s', key)); + } + available = toEat; + } + else { + // nargs will not consume flag arguments, e.g., -abc, --foo, + // and terminates when one is observed. + for (ii = i + 1; ii < args.length; ii++) { + if (!args[ii].match(/^-[^0-9]/) || args[ii].match(negative) || isUnknownOptionAsArg(args[ii])) + available++; + else + break; + } + if (available < toEat) + error = Error(__('Not enough arguments following: %s', key)); + } + let consumed = Math.min(available, toEat); + if (!isUndefined(argAfterEqualSign) && consumed > 0) { + setArg(key, argAfterEqualSign); + consumed--; + } + for (ii = i + 1; ii < (consumed + i + 1); ii++) { + setArg(key, args[ii]); + } + return (i + consumed); + } + // if an option is an array, eat all non-hyphenated arguments + // following it... YUM! + // e.g., --foo apple banana cat becomes ["apple", "banana", "cat"] + function eatArray(i, key, args, argAfterEqualSign) { + let argsToSet = []; + let next = argAfterEqualSign || args[i + 1]; + // If both array and nargs are configured, enforce the nargs count: + const nargsCount = checkAllAliases(key, flags.nargs); + if (checkAllAliases(key, flags.bools) && !(/^(true|false)$/.test(next))) { + argsToSet.push(true); + } + else if (isUndefined(next) || + (isUndefined(argAfterEqualSign) && /^-/.test(next) && !negative.test(next) && !isUnknownOptionAsArg(next))) { + // for keys without value ==> argsToSet remains an empty [] + // set user default value, if available + if (defaults[key] !== undefined) { + const defVal = defaults[key]; + argsToSet = Array.isArray(defVal) ? 
defVal : [defVal]; + } + } + else { + // value in --option=value is eaten as is + if (!isUndefined(argAfterEqualSign)) { + argsToSet.push(processValue(key, argAfterEqualSign, true)); + } + for (let ii = i + 1; ii < args.length; ii++) { + if ((!configuration['greedy-arrays'] && argsToSet.length > 0) || + (nargsCount && typeof nargsCount === 'number' && argsToSet.length >= nargsCount)) + break; + next = args[ii]; + if (/^-/.test(next) && !negative.test(next) && !isUnknownOptionAsArg(next)) + break; + i = ii; + argsToSet.push(processValue(key, next, inputIsString)); + } + } + // If both array and nargs are configured, create an error if less than + // nargs positionals were found. NaN has special meaning, indicating + // that at least one value is required (more are okay). + if (typeof nargsCount === 'number' && ((nargsCount && argsToSet.length < nargsCount) || + (isNaN(nargsCount) && argsToSet.length === 0))) { + error = Error(__('Not enough arguments following: %s', key)); + } + setArg(key, argsToSet); + return i; + } + function setArg(key, val, shouldStripQuotes = inputIsString) { + if (/-/.test(key) && configuration['camel-case-expansion']) { + const alias = key.split('.').map(function (prop) { + return camelCase(prop); + }).join('.'); + addNewAlias(key, alias); + } + const value = processValue(key, val, shouldStripQuotes); + const splitKey = key.split('.'); + setKey(argv, splitKey, value); + // handle populating aliases of the full key + if (flags.aliases[key]) { + flags.aliases[key].forEach(function (x) { + const keyProperties = x.split('.'); + setKey(argv, keyProperties, value); + }); + } + // handle populating aliases of the first element of the dot-notation key + if (splitKey.length > 1 && configuration['dot-notation']) { + ; + (flags.aliases[splitKey[0]] || []).forEach(function (x) { + let keyProperties = x.split('.'); + // expand alias with nested objects in key + const a = [].concat(splitKey); + a.shift(); // nuke the old key. + keyProperties = keyProperties.concat(a); + // populate alias only if is not already an alias of the full key + // (already populated above) + if (!(flags.aliases[key] || []).includes(keyProperties.join('.'))) { + setKey(argv, keyProperties, value); + } + }); + } + // Set normalize getter and setter when key is in 'normalize' but isn't an array + if (checkAllAliases(key, flags.normalize) && !checkAllAliases(key, flags.arrays)) { + const keys = [key].concat(flags.aliases[key] || []); + keys.forEach(function (key) { + Object.defineProperty(argvReturn, key, { + enumerable: true, + get() { + return val; + }, + set(value) { + val = typeof value === 'string' ? mixin.normalize(value) : value; + } + }); + }); + } + } + function addNewAlias(key, alias) { + if (!(flags.aliases[key] && flags.aliases[key].length)) { + flags.aliases[key] = [alias]; + newAliases[alias] = true; + } + if (!(flags.aliases[alias] && flags.aliases[alias].length)) { + addNewAlias(alias, key); + } + } + function processValue(key, val, shouldStripQuotes) { + // strings may be quoted, clean this up as we assign values. + if (shouldStripQuotes) { + val = stripQuotes(val); + } + // handle parsing boolean arguments --foo=true --bar false. + if (checkAllAliases(key, flags.bools) || checkAllAliases(key, flags.counts)) { + if (typeof val === 'string') + val = val === 'true'; + } + let value = Array.isArray(val) + ? 
val.map(function (v) { return maybeCoerceNumber(key, v); }) + : maybeCoerceNumber(key, val); + // increment a count given as arg (either no value or value parsed as boolean) + if (checkAllAliases(key, flags.counts) && (isUndefined(value) || typeof value === 'boolean')) { + value = increment(); + } + // Set normalized value when key is in 'normalize' and in 'arrays' + if (checkAllAliases(key, flags.normalize) && checkAllAliases(key, flags.arrays)) { + if (Array.isArray(val)) + value = val.map((val) => { return mixin.normalize(val); }); + else + value = mixin.normalize(val); + } + return value; + } + function maybeCoerceNumber(key, value) { + if (!configuration['parse-positional-numbers'] && key === '_') + return value; + if (!checkAllAliases(key, flags.strings) && !checkAllAliases(key, flags.bools) && !Array.isArray(value)) { + const shouldCoerceNumber = looksLikeNumber(value) && configuration['parse-numbers'] && (Number.isSafeInteger(Math.floor(parseFloat(`${value}`)))); + if (shouldCoerceNumber || (!isUndefined(value) && checkAllAliases(key, flags.numbers))) { + value = Number(value); + } + } + return value; + } + // set args from config.json file, this should be + // applied last so that defaults can be applied. + function setConfig(argv) { + const configLookup = Object.create(null); + // expand defaults/aliases, in-case any happen to reference + // the config.json file. + applyDefaultsAndAliases(configLookup, flags.aliases, defaults); + Object.keys(flags.configs).forEach(function (configKey) { + const configPath = argv[configKey] || configLookup[configKey]; + if (configPath) { + try { + let config = null; + const resolvedConfigPath = mixin.resolve(mixin.cwd(), configPath); + const resolveConfig = flags.configs[configKey]; + if (typeof resolveConfig === 'function') { + try { + config = resolveConfig(resolvedConfigPath); + } + catch (e) { + config = e; + } + if (config instanceof Error) { + error = config; + return; + } + } + else { + config = mixin.require(resolvedConfigPath); + } + setConfigObject(config); + } + catch (ex) { + // Deno will receive a PermissionDenied error if an attempt is + // made to load config without the --allow-read flag: + if (ex.name === 'PermissionDenied') + error = ex; + else if (argv[configKey]) + error = Error(__('Invalid JSON config file: %s', configPath)); + } + } + }); + } + // set args from config object. + // it recursively checks nested objects. + function setConfigObject(config, prev) { + Object.keys(config).forEach(function (key) { + const value = config[key]; + const fullKey = prev ? prev + '.' + key : key; + // if the value is an inner object and we have dot-notation + // enabled, treat inner objects in config the same as + // heavily nested dot notations (foo.bar.apple). + if (typeof value === 'object' && value !== null && !Array.isArray(value) && configuration['dot-notation']) { + // if the value is an object but not an array, check nested object + setConfigObject(value, fullKey); + } + else { + // setting arguments via CLI takes precedence over + // values within the config file. 
+ if (!hasKey(argv, fullKey.split('.')) || (checkAllAliases(fullKey, flags.arrays) && configuration['combine-arrays'])) { + setArg(fullKey, value); + } + } + }); + } + // set all config objects passed in opts + function setConfigObjects() { + if (typeof configObjects !== 'undefined') { + configObjects.forEach(function (configObject) { + setConfigObject(configObject); + }); + } + } + function applyEnvVars(argv, configOnly) { + if (typeof envPrefix === 'undefined') + return; + const prefix = typeof envPrefix === 'string' ? envPrefix : ''; + const env = mixin.env(); + Object.keys(env).forEach(function (envVar) { + if (prefix === '' || envVar.lastIndexOf(prefix, 0) === 0) { + // get array of nested keys and convert them to camel case + const keys = envVar.split('__').map(function (key, i) { + if (i === 0) { + key = key.substring(prefix.length); + } + return camelCase(key); + }); + if (((configOnly && flags.configs[keys.join('.')]) || !configOnly) && !hasKey(argv, keys)) { + setArg(keys.join('.'), env[envVar]); + } + } + }); + } + function applyCoercions(argv) { + let coerce; + const applied = new Set(); + Object.keys(argv).forEach(function (key) { + if (!applied.has(key)) { // If we haven't already coerced this option via one of its aliases + coerce = checkAllAliases(key, flags.coercions); + if (typeof coerce === 'function') { + try { + const value = maybeCoerceNumber(key, coerce(argv[key])); + ([].concat(flags.aliases[key] || [], key)).forEach(ali => { + applied.add(ali); + argv[ali] = value; + }); + } + catch (err) { + error = err; + } + } + } + }); + } + function setPlaceholderKeys(argv) { + flags.keys.forEach((key) => { + // don't set placeholder keys for dot notation options 'foo.bar'. + if (~key.indexOf('.')) + return; + if (typeof argv[key] === 'undefined') + argv[key] = undefined; + }); + return argv; + } + function applyDefaultsAndAliases(obj, aliases, defaults, canLog = false) { + Object.keys(defaults).forEach(function (key) { + if (!hasKey(obj, key.split('.'))) { + setKey(obj, key.split('.'), defaults[key]); + if (canLog) + defaulted[key] = true; + (aliases[key] || []).forEach(function (x) { + if (hasKey(obj, x.split('.'))) + return; + setKey(obj, x.split('.'), defaults[key]); + }); + } + }); + } + function hasKey(obj, keys) { + let o = obj; + if (!configuration['dot-notation']) + keys = [keys.join('.')]; + keys.slice(0, -1).forEach(function (key) { + o = (o[key] || {}); + }); + const key = keys[keys.length - 1]; + if (typeof o !== 'object') + return false; + else + return key in o; + } + function setKey(obj, keys, value) { + let o = obj; + if (!configuration['dot-notation']) + keys = [keys.join('.')]; + keys.slice(0, -1).forEach(function (key) { + // TODO(bcoe): in the next major version of yargs, switch to + // Object.create(null) for dot notation: + key = sanitizeKey(key); + if (typeof o === 'object' && o[key] === undefined) { + o[key] = {}; + } + if (typeof o[key] !== 'object' || Array.isArray(o[key])) { + // ensure that o[key] is an array, and that the last item is an empty object. 
+ if (Array.isArray(o[key])) { + o[key].push({}); + } + else { + o[key] = [o[key], {}]; + } + // we want to update the empty object at the end of the o[key] array, so set o to that object + o = o[key][o[key].length - 1]; + } + else { + o = o[key]; + } + }); + // TODO(bcoe): in the next major version of yargs, switch to + // Object.create(null) for dot notation: + const key = sanitizeKey(keys[keys.length - 1]); + const isTypeArray = checkAllAliases(keys.join('.'), flags.arrays); + const isValueArray = Array.isArray(value); + let duplicate = configuration['duplicate-arguments-array']; + // nargs has higher priority than duplicate + if (!duplicate && checkAllAliases(key, flags.nargs)) { + duplicate = true; + if ((!isUndefined(o[key]) && flags.nargs[key] === 1) || (Array.isArray(o[key]) && o[key].length === flags.nargs[key])) { + o[key] = undefined; + } + } + if (value === increment()) { + o[key] = increment(o[key]); + } + else if (Array.isArray(o[key])) { + if (duplicate && isTypeArray && isValueArray) { + o[key] = configuration['flatten-duplicate-arrays'] ? o[key].concat(value) : (Array.isArray(o[key][0]) ? o[key] : [o[key]]).concat([value]); + } + else if (!duplicate && Boolean(isTypeArray) === Boolean(isValueArray)) { + o[key] = value; + } + else { + o[key] = o[key].concat([value]); + } + } + else if (o[key] === undefined && isTypeArray) { + o[key] = isValueArray ? value : [value]; + } + else if (duplicate && !(o[key] === undefined || + checkAllAliases(key, flags.counts) || + checkAllAliases(key, flags.bools))) { + o[key] = [o[key], value]; + } + else { + o[key] = value; + } + } + // extend the aliases list with inferred aliases. + function extendAliases(...args) { + args.forEach(function (obj) { + Object.keys(obj || {}).forEach(function (key) { + // short-circuit if we've already added a key + // to the aliases array, for example it might + // exist in both 'opts.default' and 'opts.key'. + if (flags.aliases[key]) + return; + flags.aliases[key] = [].concat(aliases[key] || []); + // For "--option-name", also set argv.optionName + flags.aliases[key].concat(key).forEach(function (x) { + if (/-/.test(x) && configuration['camel-case-expansion']) { + const c = camelCase(x); + if (c !== key && flags.aliases[key].indexOf(c) === -1) { + flags.aliases[key].push(c); + newAliases[c] = true; + } + } + }); + // For "--optionName", also set argv['option-name'] + flags.aliases[key].concat(key).forEach(function (x) { + if (x.length > 1 && /[A-Z]/.test(x) && configuration['camel-case-expansion']) { + const c = decamelize(x, '-'); + if (c !== key && flags.aliases[key].indexOf(c) === -1) { + flags.aliases[key].push(c); + newAliases[c] = true; + } + } + }); + flags.aliases[key].forEach(function (x) { + flags.aliases[x] = [key].concat(flags.aliases[key].filter(function (y) { + return x !== y; + })); + }); + }); + }); + } + function checkAllAliases(key, flag) { + const toCheck = [].concat(flags.aliases[key] || [], key); + const keys = Object.keys(flag); + const setAlias = toCheck.find(key => keys.includes(key)); + return setAlias ? flag[setAlias] : false; + } + function hasAnyFlag(key) { + const flagsKeys = Object.keys(flags); + const toCheck = [].concat(flagsKeys.map(k => flags[k])); + return toCheck.some(function (flag) { + return Array.isArray(flag) ? 
flag.includes(key) : flag[key]; + }); + } + function hasFlagsMatching(arg, ...patterns) { + const toCheck = [].concat(...patterns); + return toCheck.some(function (pattern) { + const match = arg.match(pattern); + return match && hasAnyFlag(match[1]); + }); + } + // based on a simplified version of the short flag group parsing logic + function hasAllShortFlags(arg) { + // if this is a negative number, or doesn't start with a single hyphen, it's not a short flag group + if (arg.match(negative) || !arg.match(/^-[^-]+/)) { + return false; + } + let hasAllFlags = true; + let next; + const letters = arg.slice(1).split(''); + for (let j = 0; j < letters.length; j++) { + next = arg.slice(j + 2); + if (!hasAnyFlag(letters[j])) { + hasAllFlags = false; + break; + } + if ((letters[j + 1] && letters[j + 1] === '=') || + next === '-' || + (/[A-Za-z]/.test(letters[j]) && /^-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) || + (letters[j + 1] && letters[j + 1].match(/\W/))) { + break; + } + } + return hasAllFlags; + } + function isUnknownOptionAsArg(arg) { + return configuration['unknown-options-as-args'] && isUnknownOption(arg); + } + function isUnknownOption(arg) { + arg = arg.replace(/^-{3,}/, '--'); + // ignore negative numbers + if (arg.match(negative)) { + return false; + } + // if this is a short option group and all of them are configured, it isn't unknown + if (hasAllShortFlags(arg)) { + return false; + } + // e.g. '--count=2' + const flagWithEquals = /^-+([^=]+?)=[\s\S]*$/; + // e.g. '-a' or '--arg' + const normalFlag = /^-+([^=]+?)$/; + // e.g. '-a-' + const flagEndingInHyphen = /^-+([^=]+?)-$/; + // e.g. '-abc123' + const flagEndingInDigits = /^-+([^=]+?\d+)$/; + // e.g. '-a/usr/local' + const flagEndingInNonWordCharacters = /^-+([^=]+?)\W+.*$/; + // check the different types of flag styles, including negatedBoolean, a pattern defined near the start of the parse method + return !hasFlagsMatching(arg, flagWithEquals, negatedBoolean, normalFlag, flagEndingInHyphen, flagEndingInDigits, flagEndingInNonWordCharacters); + } + // make a best effort to pick a default value + // for an option based on name and type. + function defaultValue(key) { + if (!checkAllAliases(key, flags.bools) && + !checkAllAliases(key, flags.counts) && + `${key}` in defaults) { + return defaults[key]; + } + else { + return defaultForType(guessType(key)); + } + } + // return a default value, given the type of a flag., + function defaultForType(type) { + const def = { + [DefaultValuesForTypeKey.BOOLEAN]: true, + [DefaultValuesForTypeKey.STRING]: '', + [DefaultValuesForTypeKey.NUMBER]: undefined, + [DefaultValuesForTypeKey.ARRAY]: [] + }; + return def[type]; + } + // given a flag, enforce a default type. 
+ function guessType(key) { + let type = DefaultValuesForTypeKey.BOOLEAN; + if (checkAllAliases(key, flags.strings)) + type = DefaultValuesForTypeKey.STRING; + else if (checkAllAliases(key, flags.numbers)) + type = DefaultValuesForTypeKey.NUMBER; + else if (checkAllAliases(key, flags.bools)) + type = DefaultValuesForTypeKey.BOOLEAN; + else if (checkAllAliases(key, flags.arrays)) + type = DefaultValuesForTypeKey.ARRAY; + return type; + } + function isUndefined(num) { + return num === undefined; + } + // check user configuration settings for inconsistencies + function checkConfiguration() { + // count keys should not be set as array/narg + Object.keys(flags.counts).find(key => { + if (checkAllAliases(key, flags.arrays)) { + error = Error(__('Invalid configuration: %s, opts.count excludes opts.array.', key)); + return true; + } + else if (checkAllAliases(key, flags.nargs)) { + error = Error(__('Invalid configuration: %s, opts.count excludes opts.narg.', key)); + return true; + } + return false; + }); + } + return { + aliases: Object.assign({}, flags.aliases), + argv: Object.assign(argvReturn, argv), + configuration: configuration, + defaulted: Object.assign({}, defaulted), + error: error, + newAliases: Object.assign({}, newAliases) + }; + } +} +// if any aliases reference each other, we should +// merge them together. +function combineAliases(aliases) { + const aliasArrays = []; + const combined = Object.create(null); + let change = true; + // turn alias lookup hash {key: ['alias1', 'alias2']} into + // a simple array ['key', 'alias1', 'alias2'] + Object.keys(aliases).forEach(function (key) { + aliasArrays.push([].concat(aliases[key], key)); + }); + // combine arrays until zero changes are + // made in an iteration. + while (change) { + change = false; + for (let i = 0; i < aliasArrays.length; i++) { + for (let ii = i + 1; ii < aliasArrays.length; ii++) { + const intersect = aliasArrays[i].filter(function (v) { + return aliasArrays[ii].indexOf(v) !== -1; + }); + if (intersect.length) { + aliasArrays[i] = aliasArrays[i].concat(aliasArrays[ii]); + aliasArrays.splice(ii, 1); + change = true; + break; + } + } + } + } + // map arrays back to the hash-lookup (de-dupe while + // we're at it). + aliasArrays.forEach(function (aliasArray) { + aliasArray = aliasArray.filter(function (v, i, self) { + return self.indexOf(v) === i; + }); + const lastAlias = aliasArray.pop(); + if (lastAlias !== undefined && typeof lastAlias === 'string') { + combined[lastAlias] = aliasArray; + } + }); + return combined; +} +// this function should only be called when a count is given as an arg +// it is NOT called to set a default value +// thus we can start the count at 1 instead of 0 +function increment(orig) { + return orig !== undefined ? orig + 1 : 1; +} +// TODO(bcoe): in the next major version of yargs, switch to +// Object.create(null) for dot notation: +function sanitizeKey(key) { + if (key === '__proto__') + return '___proto___'; + return key; +} +function stripQuotes(val) { + return (typeof val === 'string' && + (val[0] === "'" || val[0] === '"') && + val[val.length - 1] === val[0]) + ? 
val.substring(1, val.length - 1) + : val; +} diff --git a/mybulma/node_modules/yargs/node_modules/yargs-parser/package.json b/mybulma/node_modules/yargs/node_modules/yargs-parser/package.json new file mode 100644 index 0000000..decd0c3 --- /dev/null +++ b/mybulma/node_modules/yargs/node_modules/yargs-parser/package.json @@ -0,0 +1,92 @@ +{ + "name": "yargs-parser", + "version": "21.1.1", + "description": "the mighty option parser used by yargs", + "main": "build/index.cjs", + "exports": { + ".": [ + { + "import": "./build/lib/index.js", + "require": "./build/index.cjs" + }, + "./build/index.cjs" + ], + "./browser": [ + "./browser.js" + ] + }, + "type": "module", + "module": "./build/lib/index.js", + "scripts": { + "check": "standardx '**/*.ts' && standardx '**/*.js' && standardx '**/*.cjs'", + "fix": "standardx --fix '**/*.ts' && standardx --fix '**/*.js' && standardx --fix '**/*.cjs'", + "pretest": "rimraf build && tsc -p tsconfig.test.json && cross-env NODE_ENV=test npm run build:cjs", + "test": "c8 --reporter=text --reporter=html mocha test/*.cjs", + "test:esm": "c8 --reporter=text --reporter=html mocha test/*.mjs", + "test:browser": "start-server-and-test 'serve ./ -p 8080' http://127.0.0.1:8080/package.json 'node ./test/browser/yargs-test.cjs'", + "pretest:typescript": "npm run pretest", + "test:typescript": "c8 mocha ./build/test/typescript/*.js", + "coverage": "c8 report --check-coverage", + "precompile": "rimraf build", + "compile": "tsc", + "postcompile": "npm run build:cjs", + "build:cjs": "rollup -c", + "prepare": "npm run compile" + }, + "repository": { + "type": "git", + "url": "https://github.com/yargs/yargs-parser.git" + }, + "keywords": [ + "argument", + "parser", + "yargs", + "command", + "cli", + "parsing", + "option", + "args", + "argument" + ], + "author": "Ben Coe ", + "license": "ISC", + "devDependencies": { + "@types/chai": "^4.2.11", + "@types/mocha": "^9.0.0", + "@types/node": "^16.11.4", + "@typescript-eslint/eslint-plugin": "^3.10.1", + "@typescript-eslint/parser": "^3.10.1", + "c8": "^7.3.0", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "eslint": "^7.0.0", + "eslint-plugin-import": "^2.20.1", + "eslint-plugin-node": "^11.0.0", + "gts": "^3.0.0", + "mocha": "^10.0.0", + "puppeteer": "^16.0.0", + "rimraf": "^3.0.2", + "rollup": "^2.22.1", + "rollup-plugin-cleanup": "^3.1.1", + "rollup-plugin-ts": "^3.0.2", + "serve": "^14.0.0", + "standardx": "^7.0.0", + "start-server-and-test": "^1.11.2", + "ts-transform-default-export": "^1.0.2", + "typescript": "^4.0.0" + }, + "files": [ + "browser.js", + "build", + "!*.d.ts", + "!*.d.cts" + ], + "engines": { + "node": ">=12" + }, + "standardx": { + "ignore": [ + "build" + ] + } +} diff --git a/mybulma/package-lock.json b/mybulma/package-lock.json new file mode 100644 index 0000000..3a6d3db --- /dev/null +++ b/mybulma/package-lock.json @@ -0,0 +1,4218 @@ +{ + "name": "mybulma", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "mybulma", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "bulma-pageloader": "^0.3.0" + }, + "devDependencies": { + "bulma": "^0.9.4", + "node-sass": "^8.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", + "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + } + 
}, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", + "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", + "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.18.6", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/@babel/highlight/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@gar/promisify": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", + "integrity": "sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==", + "dev": true + }, + "node_modules/@npmcli/fs": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-2.1.2.tgz", + "integrity": "sha512-yOJKRvohFOaLqipNtwYB9WugyZKhC/DZC4VYPmpaCzDBrA8YpK3qHZ8/HGscMnE4GqbkLNuVcCnxkeQEdGt6LQ==", + "dev": true, + "dependencies": { + "@gar/promisify": 
"^1.1.3", + "semver": "^7.3.5" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/@npmcli/move-file": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-2.0.1.tgz", + "integrity": "sha512-mJd2Z5TjYWq/ttPLLGqArdtnC74J6bOzg4rMDnN+p1xTacZ2yPRCk2y0oSWQtygLR9YVQXgOcONrwtnk3JupxQ==", + "deprecated": "This functionality has been moved to @npmcli/fs", + "dev": true, + "dependencies": { + "mkdirp": "^1.0.4", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "dev": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/@types/minimist": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.2.tgz", + "integrity": "sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==", + "dev": true + }, + "node_modules/@types/normalize-package-data": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz", + "integrity": "sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==", + "dev": true + }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "dev": true + }, + "node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/agentkeepalive": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.2.1.tgz", + "integrity": "sha512-Zn4cw2NEqd+9fiSVWMscnjyQ1a8Yfoc5oBajLeo5w+YBHgDUcEBY2hS4YpTz6iN5f/2zQiktcuM6tS8x1p9dpA==", + "dev": true, + "dependencies": { + "debug": "^4.1.0", + "depd": "^1.1.2", + "humanize-ms": "^1.2.1" + }, + "engines": { + "node": ">= 8.0.0" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": 
"https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/aproba": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", + "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==", + "dev": true + }, + "node_modules/are-we-there-yet": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", + "integrity": "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==", + "dev": true, + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/async-foreach": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/async-foreach/-/async-foreach-0.1.3.tgz", + "integrity": "sha512-VUeSMD8nEGBWaZK4lizI1sf3yEC7pnAQ/mrI7pC2fBz2s/tq5jWWEngTwaf0Gruu/OoXRGLGg1XFqpYBiGTYJA==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/bulma": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/bulma/-/bulma-0.9.4.tgz", + "integrity": "sha512-86FlT5+1GrsgKbPLRRY7cGDg8fsJiP/jzTqXXVqiUZZ2aZT8uemEOHlU1CDU+TxklPEZ11HZNNWclRBBecP4CQ==", + "dev": true + }, + "node_modules/bulma-pageloader": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/bulma-pageloader/-/bulma-pageloader-0.3.0.tgz", + "integrity": "sha512-lbahiqhBCov5AYdziHFnC5/JOhCrJWFTpdRiAkwW49IM/mf0whCWHVe8MuejZFu2PEs1mtH8Gnz8exEks4Q+7g==" + }, + "node_modules/cacache": { + "version": "16.1.3", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-16.1.3.tgz", + "integrity": "sha512-/+Emcj9DAXxX4cwlLmRI9c166RuL3w30zp4R7Joiv2cQTtTtA+jeuCAjH3ZlGnYS3tKENSrKhAzVVP9GVyzeYQ==", + "dev": true, + "dependencies": { + "@npmcli/fs": "^2.1.0", + "@npmcli/move-file": "^2.0.0", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^8.0.1", + "infer-owner": "^1.0.4", + "lru-cache": "^7.7.1", + "minipass": "^3.1.6", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^9.0.0", + "tar": "^6.1.11", + "unique-filename": "^2.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/cacache/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": 
"sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/cacache/node_modules/glob": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.0.3.tgz", + "integrity": "sha512-ull455NHSHI/Y1FqGaaYFaLGkNMMJbavMrEGFXG/PGrg6y7sutWHUHrz6gy6WEBH6akM1M414dWKCNs+IhKdiQ==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/cacache/node_modules/minimatch": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.2.tgz", + "integrity": "sha512-bNH9mmM9qsJ2X4r2Nat1B//1dJVcn3+iBLa3IgqJ7EbGaDNepL9QSHOxN4ng33s52VMMhhIfgCYDk3C4ZmlDAg==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase-keys": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz", + "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", + "dev": true, + "dependencies": { + "camelcase": "^5.3.1", + "map-obj": "^4.0.0", + "quick-lru": "^4.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + 
"color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "dev": true, + "bin": { + "color-support": "bin.js" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", + "dev": true + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decamelize-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.1.tgz", + "integrity": "sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==", + "dev": true, + "dependencies": { + "decamelize": "^1.1.0", + "map-obj": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/decamelize-keys/node_modules/map-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", + "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": 
"sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", + "dev": true + }, + "node_modules/depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/encoding": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", + "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "dev": true, + "optional": true, + "dependencies": { + "iconv-lite": "^0.6.2" + } + }, + "node_modules/env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/err-code": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", + "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", + "dev": true + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/gauge": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz", + "integrity": "sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==", + "dev": true, + "dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.3", + "console-control-strings": "^1.1.0", + "has-unicode": "^2.0.1", + "signal-exit": "^3.0.7", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.5" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/gaze": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/gaze/-/gaze-1.1.3.tgz", + "integrity": "sha512-BRdNm8hbWzFzWHERTrejLqwHDfS4GibPoq5wjTPIoJHoBtKGPg3xAFfxmM+9ztbXelxcf2hwQcaz1PtmFeue8g==", + "dev": true, + "dependencies": { + "globule": "^1.0.0" + }, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-stdin": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz", + "integrity": "sha512-F5aQMywwJ2n85s4hJPTT9RPxGmubonuB10MNYo17/xph174n2MIR33HRguhzVag10O/npM7SPk73LMZNP+FaWw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globule": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/globule/-/globule-1.3.4.tgz", + "integrity": "sha512-OPTIfhMBh7JbBYDpa5b+Q5ptmMWKwcNcFSR/0c6t8V4f3ZAVBEsKNY37QdVqmLRYSMhOUGYrY0QhSoEpzGr/Eg==", + "dev": true, + "dependencies": { + "glob": "~7.1.1", + "lodash": "^4.17.21", + "minimatch": "~3.0.2" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/globule/node_modules/glob": { + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globule/node_modules/minimatch": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.8.tgz", + "integrity": "sha512-6FsRAQsxQ61mw+qP1ZzbL9Bc78x2p5OqNgNpnoAFLTrX8n5Kxph0CsnhmKKNXTWjXqU5L0pGPR7hYk+XWZr60Q==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + 
}, + "node_modules/graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "dev": true + }, + "node_modules/hard-rejection": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", + "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", + "dev": true + }, + "node_modules/hosted-git-info": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/hosted-git-info/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/http-cache-semantics": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", + "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", + "dev": true + }, + "node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dev": true, + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/humanize-ms": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", + "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", 
+ "dev": true, + "dependencies": { + "ms": "^2.0.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "optional": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/infer-owner": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz", + "integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==", + "dev": true + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/ip": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz", + "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==", + "dev": true + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true + }, + "node_modules/is-core-module": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz", + "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-lambda": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz", + "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==", + "dev": true + }, + "node_modules/is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": 
"sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/js-base64": { + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/js-base64/-/js-base64-2.6.4.tgz", + "integrity": "sha512-pZe//GGmwJndub7ZghVHz7vjb2LgC1m8B07Au3eYqeqv9emhESByMXxaEgkUkEqJe87oBbSniGYoQNIBklc7IQ==", + "dev": true + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "node_modules/lru-cache": { + "version": "7.14.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.14.1.tgz", + "integrity": "sha512-ysxwsnTKdAx96aTRdhDOCQfDgbHnt8SK0KY8SEjO0wHinhWOFTESbjVCMPbU1uGXg/ch4lifqx0wfjOawU2+WA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/make-fetch-happen": { + "version": "10.2.1", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.2.1.tgz", + "integrity": "sha512-NgOPbRiaQM10DYXvN3/hhGVI2M5MtITFryzBGxHM5p4wnFxsVCbxkrBrDsk+EZ5OB4jEOT7AjDxtdF+KVEFT7w==", + "dev": true, + "dependencies": { + "agentkeepalive": "^4.2.1", + "cacache": "^16.1.0", + "http-cache-semantics": "^4.1.0", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^3.1.6", + "minipass-collect": 
"^1.0.2", + "minipass-fetch": "^2.0.3", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + "ssri": "^9.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/map-obj": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz", + "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/meow": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-9.0.0.tgz", + "integrity": "sha512-+obSblOQmRhcyBt62furQqRAQpNyWXo8BuQ5bN7dG8wmwQ+vwHKp/rCFD4CrTP8CsDQD1sjoZ94K417XEUk8IQ==", + "dev": true, + "dependencies": { + "@types/minimist": "^1.2.0", + "camelcase-keys": "^6.2.2", + "decamelize": "^1.2.0", + "decamelize-keys": "^1.1.0", + "hard-rejection": "^2.1.0", + "minimist-options": "4.1.0", + "normalize-package-data": "^3.0.0", + "read-pkg-up": "^7.0.1", + "redent": "^3.0.0", + "trim-newlines": "^3.0.0", + "type-fest": "^0.18.0", + "yargs-parser": "^20.2.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist-options": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz", + "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", + "dev": true, + "dependencies": { + "arrify": "^1.0.1", + "is-plain-obj": "^1.1.0", + "kind-of": "^6.0.3" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-collect": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-1.0.2.tgz", + "integrity": "sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minipass-fetch": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-2.1.2.tgz", + "integrity": "sha512-LT49Zi2/WMROHYoqGgdlQIZh8mLPZmOrN2NdJjMXxYe4nkN6FUyuPuOAOedNJDrx0IRGg9+4guZewtp8hE6TxA==", + "dev": true, + "dependencies": { + "minipass": "^3.1.6", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "engines": { + "node": 
"^12.13.0 || ^14.15.0 || >=16.0.0" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + } + }, + "node_modules/minipass-flush": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", + "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minipass-pipeline": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", + "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-sized": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", + "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true, + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/nan": { + "version": "2.17.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.17.0.tgz", + "integrity": "sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ==", + "dev": true + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-gyp": { + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-8.4.1.tgz", + "integrity": "sha512-olTJRgUtAb/hOXG0E93wZDs5YiJlgbXxTwQAFHyNlRsXQnYzUaF2aGgujZbw+hR8aF4ZG/rST57bWMWD16jr9w==", + "dev": true, + "dependencies": { + "env-paths": "^2.2.0", + "glob": "^7.1.4", + "graceful-fs": "^4.2.6", + "make-fetch-happen": "^9.1.0", + "nopt": "^5.0.0", + "npmlog": "^6.0.0", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "tar": "^6.1.2", + "which": "^2.0.2" + }, + "bin": { + "node-gyp": "bin/node-gyp.js" + }, + "engines": { + "node": ">= 10.12.0" + } + }, + "node_modules/node-gyp/node_modules/@npmcli/fs": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-1.1.1.tgz", + "integrity": "sha512-8KG5RD0GVP4ydEzRn/I4BNDuxDtqVbOdm8675T49OIG/NGhaK0pjPX7ZcDlvKYbA+ulvVK3ztfcF4uBdOxuJbQ==", + "dev": true, + 
"dependencies": { + "@gar/promisify": "^1.0.1", + "semver": "^7.3.5" + } + }, + "node_modules/node-gyp/node_modules/@npmcli/move-file": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-1.1.2.tgz", + "integrity": "sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==", + "deprecated": "This functionality has been moved to @npmcli/fs", + "dev": true, + "dependencies": { + "mkdirp": "^1.0.4", + "rimraf": "^3.0.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-gyp/node_modules/@tootallnate/once": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/node-gyp/node_modules/cacache": { + "version": "15.3.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.3.0.tgz", + "integrity": "sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==", + "dev": true, + "dependencies": { + "@npmcli/fs": "^1.0.0", + "@npmcli/move-file": "^1.0.1", + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "glob": "^7.1.4", + "infer-owner": "^1.0.4", + "lru-cache": "^6.0.0", + "minipass": "^3.1.1", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.2", + "mkdirp": "^1.0.3", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^8.0.1", + "tar": "^6.0.2", + "unique-filename": "^1.1.1" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/node-gyp/node_modules/http-proxy-agent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "dev": true, + "dependencies": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/node-gyp/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-gyp/node_modules/make-fetch-happen": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz", + "integrity": "sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==", + "dev": true, + "dependencies": { + "agentkeepalive": "^4.1.3", + "cacache": "^15.2.0", + "http-cache-semantics": "^4.1.0", + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^6.0.0", + "minipass": "^3.1.3", + "minipass-collect": "^1.0.2", + "minipass-fetch": "^1.3.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.2", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^6.0.0", + "ssri": "^8.0.0" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/node-gyp/node_modules/minipass-fetch": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.4.1.tgz", + "integrity": 
"sha512-CGH1eblLq26Y15+Azk7ey4xh0J/XfJfrCox5LDJiKqI2Q2iwOLOKrlmIaODiSQS8d18jalF6y2K2ePUm0CmShw==", + "dev": true, + "dependencies": { + "minipass": "^3.1.0", + "minipass-sized": "^1.0.3", + "minizlib": "^2.0.0" + }, + "engines": { + "node": ">=8" + }, + "optionalDependencies": { + "encoding": "^0.1.12" + } + }, + "node_modules/node-gyp/node_modules/socks-proxy-agent": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-6.2.1.tgz", + "integrity": "sha512-a6KW9G+6B3nWZ1yB8G7pJwL3ggLy1uTzKAgCb7ttblwqdz9fMGJUuTy3uFzEP48FAs9FLILlmzDlE2JJhVQaXQ==", + "dev": true, + "dependencies": { + "agent-base": "^6.0.2", + "debug": "^4.3.3", + "socks": "^2.6.2" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/node-gyp/node_modules/ssri": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-8.0.1.tgz", + "integrity": "sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==", + "dev": true, + "dependencies": { + "minipass": "^3.1.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/node-gyp/node_modules/unique-filename": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-1.1.1.tgz", + "integrity": "sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==", + "dev": true, + "dependencies": { + "unique-slug": "^2.0.0" + } + }, + "node_modules/node-gyp/node_modules/unique-slug": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-2.0.2.tgz", + "integrity": "sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4" + } + }, + "node_modules/node-sass": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/node-sass/-/node-sass-8.0.0.tgz", + "integrity": "sha512-jPzqCF2/e6JXw6r3VxfIqYc8tKQdkj5Z/BDATYyG6FL6b/LuYBNFGFVhus0mthcWifHm/JzBpKAd+3eXsWeK/A==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "async-foreach": "^0.1.3", + "chalk": "^4.1.2", + "cross-spawn": "^7.0.3", + "gaze": "^1.0.0", + "get-stdin": "^4.0.1", + "glob": "^7.0.3", + "lodash": "^4.17.15", + "make-fetch-happen": "^10.0.4", + "meow": "^9.0.0", + "nan": "^2.17.0", + "node-gyp": "^8.4.1", + "sass-graph": "^4.0.1", + "stdout-stream": "^1.4.0", + "true-case-path": "^2.2.1" + }, + "bin": { + "node-sass": "bin/node-sass" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/nopt": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", + "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", + "dev": true, + "dependencies": { + "abbrev": "1" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/normalize-package-data": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", + "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^4.0.1", + "is-core-module": "^2.5.0", + "semver": "^7.3.4", + "validate-npm-package-license": "^3.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/npmlog": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz", + "integrity": 
"sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==", + "dev": true, + "dependencies": { + "are-we-there-yet": "^3.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^4.0.3", + "set-blocking": "^2.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-map": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dev": true, + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + 
"version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "node_modules/promise-inflight": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", + "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==", + "dev": true + }, + "node_modules/promise-retry": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", + "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", + "dev": true, + "dependencies": { + "err-code": "^2.0.2", + "retry": "^0.12.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/quick-lru": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", + "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", + "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", + "dev": true, + "dependencies": { + "@types/normalize-package-data": "^2.4.0", + "normalize-package-data": "^2.5.0", + "parse-json": "^5.0.0", + "type-fest": "^0.6.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", + "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", + "dev": true, + "dependencies": { + "find-up": "^4.1.0", + "read-pkg": "^5.2.0", + "type-fest": "^0.8.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg-up/node_modules/type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg/node_modules/hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true + }, + "node_modules/read-pkg/node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/read-pkg/node_modules/semver": 
{ + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/read-pkg/node_modules/type-fest": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", + "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, + "dependencies": { + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", + "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.9.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/retry": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", + "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": 
"sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true, + "optional": true + }, + "node_modules/sass-graph": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/sass-graph/-/sass-graph-4.0.1.tgz", + "integrity": "sha512-5YCfmGBmxoIRYHnKK2AKzrAkCoQ8ozO+iumT8K4tXJXRVCPf+7s1/9KxTSW3Rbvf+7Y7b4FR3mWyLnQr3PHocA==", + "dev": true, + "dependencies": { + "glob": "^7.0.0", + "lodash": "^4.17.11", + "scss-tokenizer": "^0.4.3", + "yargs": "^17.2.1" + }, + "bin": { + "sassgraph": "bin/sassgraph" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/scss-tokenizer": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/scss-tokenizer/-/scss-tokenizer-0.4.3.tgz", + "integrity": "sha512-raKLgf1LI5QMQnG+RxHz6oK0sL3x3I4FN2UDLqgLOGO8hodECNnNh5BXn7fAyBxrA8zVzdQizQ6XjNJQ+uBwMw==", + "dev": true, + "dependencies": { + "js-base64": "^2.4.9", + "source-map": "^0.7.3" + } + }, + "node_modules/semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/semver/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "dev": true + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/smart-buffer": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", + "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", + "dev": true, + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.7.1.tgz", + "integrity": "sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==", + "dev": true, + "dependencies": { + "ip": "^2.0.0", + "smart-buffer": 
"^4.2.0" + }, + "engines": { + "node": ">= 10.13.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks-proxy-agent": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-7.0.0.tgz", + "integrity": "sha512-Fgl0YPZ902wEsAyiQ+idGd1A7rSFx/ayC1CQVMw5P+EQx2V0SgpGtf6OKFhVjPflPUl9YMmEOnmfjCdMUsygww==", + "dev": true, + "dependencies": { + "agent-base": "^6.0.2", + "debug": "^4.3.3", + "socks": "^2.6.2" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/source-map": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/spdx-correct": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", + "dev": true, + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", + "dev": true + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.12.tgz", + "integrity": "sha512-rr+VVSXtRhO4OHbXUiAF7xW3Bo9DuuF6C5jH+q/x15j2jniycgKbxU09Hr0WqlSLUs4i4ltHGXqTe7VHclYWyA==", + "dev": true + }, + "node_modules/ssri": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-9.0.1.tgz", + "integrity": "sha512-o57Wcn66jMQvfHG1FlYbWeZWW/dHZhJXjpIcTfXldXEk5nz5lStPo3mK0OJQfGR3RbZUlbISexbljkJzuEj/8Q==", + "dev": true, + "dependencies": { + "minipass": "^3.1.1" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/stdout-stream": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/stdout-stream/-/stdout-stream-1.4.1.tgz", + "integrity": "sha512-j4emi03KXqJWcIeF8eIXkjMFN1Cmb8gUlDYGeBALLPo5qdyTfA9bOtl8m33lRoC+vFMkP3gl0WsDr6+gzxbbTA==", + "dev": true, + "dependencies": { + "readable-stream": "^2.0.1" + } + }, + "node_modules/stdout-stream/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/stdout-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": 
"sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/stdout-stream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/tar": { + "version": "6.1.13", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.13.tgz", + "integrity": "sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==", + "dev": true, + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^4.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tar/node_modules/minipass": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-4.0.0.tgz", + "integrity": "sha512-g2Uuh2jEKoht+zvO6vJqXmYpflPqzRBT+Th2h01DKh5z7wbY/AZ2gCQ78cP70YoHPyFdY30YBV5WxgLOEwOykw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/trim-newlines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz", + "integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/true-case-path": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/true-case-path/-/true-case-path-2.2.1.tgz", + "integrity": "sha512-0z3j8R7MCjy10kc/g+qg7Ln3alJTodw9aDuVWZa3uiWqfuBMKeAeP2ocWcxoyM3D73yz3Jt/Pu4qPr4wHSdB/Q==", + "dev": true + }, + "node_modules/type-fest": { + "version": "0.18.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz", + "integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/unique-filename": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-2.0.1.tgz", + "integrity": "sha512-ODWHtkkdx3IAR+veKxFV+VBkUMcN+FaqzUUd7IZzt+0zhDZFPFxhlqwPF3YQvMHx1TD0tdgYl+kuPnJ8E6ql7A==", + "dev": true, + "dependencies": { + "unique-slug": "^3.0.0" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/unique-slug": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-3.0.0.tgz", + "integrity": "sha512-8EyMynh679x/0gqE9fT9oilG+qEt+ibFyqjuVTsZn1+CMxH+XLlpvr2UZx4nVcCwTpx81nICr2JQFkM+HPLq4w==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "dev": true, + "dependencies": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/yargs": { + "version": "17.6.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.2.tgz", + "integrity": "sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==", + "dev": true, + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + } + }, + "dependencies": { + "@babel/code-frame": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", + "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", + "dev": true, + "requires": { + "@babel/highlight": "^7.18.6" + } + }, + "@babel/helper-validator-identifier": { + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", + "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", + "dev": true + }, + "@babel/highlight": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", + "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.18.6", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": 
"sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "@gar/promisify": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", + "integrity": "sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==", + "dev": true + }, + "@npmcli/fs": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-2.1.2.tgz", + "integrity": "sha512-yOJKRvohFOaLqipNtwYB9WugyZKhC/DZC4VYPmpaCzDBrA8YpK3qHZ8/HGscMnE4GqbkLNuVcCnxkeQEdGt6LQ==", + "dev": true, + "requires": { + "@gar/promisify": "^1.1.3", + "semver": "^7.3.5" + } + }, + "@npmcli/move-file": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-2.0.1.tgz", + "integrity": "sha512-mJd2Z5TjYWq/ttPLLGqArdtnC74J6bOzg4rMDnN+p1xTacZ2yPRCk2y0oSWQtygLR9YVQXgOcONrwtnk3JupxQ==", + "dev": true, + "requires": { + "mkdirp": "^1.0.4", + "rimraf": "^3.0.2" + } + }, + "@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "dev": true + }, + "@types/minimist": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.2.tgz", + "integrity": "sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==", + "dev": true + }, + "@types/normalize-package-data": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz", + "integrity": "sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==", + "dev": true + }, + "abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "dev": true + }, + "agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dev": true, + 
"requires": { + "debug": "4" + } + }, + "agentkeepalive": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.2.1.tgz", + "integrity": "sha512-Zn4cw2NEqd+9fiSVWMscnjyQ1a8Yfoc5oBajLeo5w+YBHgDUcEBY2hS4YpTz6iN5f/2zQiktcuM6tS8x1p9dpA==", + "dev": true, + "requires": { + "debug": "^4.1.0", + "depd": "^1.1.2", + "humanize-ms": "^1.2.1" + } + }, + "aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, + "requires": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + } + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "aproba": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", + "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==", + "dev": true + }, + "are-we-there-yet": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", + "integrity": "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==", + "dev": true, + "requires": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + } + }, + "arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==", + "dev": true + }, + "async-foreach": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/async-foreach/-/async-foreach-0.1.3.tgz", + "integrity": "sha512-VUeSMD8nEGBWaZK4lizI1sf3yEC7pnAQ/mrI7pC2fBz2s/tq5jWWEngTwaf0Gruu/OoXRGLGg1XFqpYBiGTYJA==", + "dev": true + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "bulma": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/bulma/-/bulma-0.9.4.tgz", + "integrity": "sha512-86FlT5+1GrsgKbPLRRY7cGDg8fsJiP/jzTqXXVqiUZZ2aZT8uemEOHlU1CDU+TxklPEZ11HZNNWclRBBecP4CQ==", + "dev": true + }, + "bulma-pageloader": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/bulma-pageloader/-/bulma-pageloader-0.3.0.tgz", + "integrity": "sha512-lbahiqhBCov5AYdziHFnC5/JOhCrJWFTpdRiAkwW49IM/mf0whCWHVe8MuejZFu2PEs1mtH8Gnz8exEks4Q+7g==" + }, + "cacache": { + "version": "16.1.3", + "resolved": 
"https://registry.npmjs.org/cacache/-/cacache-16.1.3.tgz", + "integrity": "sha512-/+Emcj9DAXxX4cwlLmRI9c166RuL3w30zp4R7Joiv2cQTtTtA+jeuCAjH3ZlGnYS3tKENSrKhAzVVP9GVyzeYQ==", + "dev": true, + "requires": { + "@npmcli/fs": "^2.1.0", + "@npmcli/move-file": "^2.0.0", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "glob": "^8.0.1", + "infer-owner": "^1.0.4", + "lru-cache": "^7.7.1", + "minipass": "^3.1.6", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "mkdirp": "^1.0.4", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^9.0.0", + "tar": "^6.1.11", + "unique-filename": "^2.0.0" + }, + "dependencies": { + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0" + } + }, + "glob": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.0.3.tgz", + "integrity": "sha512-ull455NHSHI/Y1FqGaaYFaLGkNMMJbavMrEGFXG/PGrg6y7sutWHUHrz6gy6WEBH6akM1M414dWKCNs+IhKdiQ==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + } + }, + "minimatch": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.2.tgz", + "integrity": "sha512-bNH9mmM9qsJ2X4r2Nat1B//1dJVcn3+iBLa3IgqJ7EbGaDNepL9QSHOxN4ng33s52VMMhhIfgCYDk3C4ZmlDAg==", + "dev": true, + "requires": { + "brace-expansion": "^2.0.1" + } + } + } + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "camelcase-keys": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz", + "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", + "dev": true, + "requires": { + "camelcase": "^5.3.1", + "map-obj": "^4.0.0", + "quick-lru": "^4.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "dev": true + }, + "clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true + }, + "cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", + "dev": true + }, + "core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, + "cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "requires": { + "ms": "2.1.2" + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", + "dev": true + }, + "decamelize-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.1.tgz", + "integrity": "sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==", + "dev": true, + "requires": { + "decamelize": "^1.1.0", + "map-obj": "^1.0.0" + }, + "dependencies": { + "map-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", + "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==", + "dev": true + } + } + }, + "delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", + "dev": true + }, + "depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==", + "dev": true + }, + "emoji-regex": { + "version": "8.0.0", 
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "encoding": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", + "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", + "dev": true, + "optional": true, + "requires": { + "iconv-lite": "^0.6.2" + } + }, + "env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "dev": true + }, + "err-code": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", + "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", + "dev": true + }, + "error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "requires": { + "is-arrayish": "^0.2.1" + } + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true + }, + "find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "requires": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + } + }, + "fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dev": true, + "requires": { + "minipass": "^3.0.0" + } + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "gauge": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz", + "integrity": "sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==", + "dev": true, + "requires": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.3", + "console-control-strings": "^1.1.0", + "has-unicode": "^2.0.1", + "signal-exit": "^3.0.7", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.5" + } + }, + "gaze": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/gaze/-/gaze-1.1.3.tgz", 
+ "integrity": "sha512-BRdNm8hbWzFzWHERTrejLqwHDfS4GibPoq5wjTPIoJHoBtKGPg3xAFfxmM+9ztbXelxcf2hwQcaz1PtmFeue8g==", + "dev": true, + "requires": { + "globule": "^1.0.0" + } + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "get-stdin": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz", + "integrity": "sha512-F5aQMywwJ2n85s4hJPTT9RPxGmubonuB10MNYo17/xph174n2MIR33HRguhzVag10O/npM7SPk73LMZNP+FaWw==", + "dev": true + }, + "glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "globule": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/globule/-/globule-1.3.4.tgz", + "integrity": "sha512-OPTIfhMBh7JbBYDpa5b+Q5ptmMWKwcNcFSR/0c6t8V4f3ZAVBEsKNY37QdVqmLRYSMhOUGYrY0QhSoEpzGr/Eg==", + "dev": true, + "requires": { + "glob": "~7.1.1", + "lodash": "^4.17.21", + "minimatch": "~3.0.2" + }, + "dependencies": { + "glob": { + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "minimatch": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.8.tgz", + "integrity": "sha512-6FsRAQsxQ61mw+qP1ZzbL9Bc78x2p5OqNgNpnoAFLTrX8n5Kxph0CsnhmKKNXTWjXqU5L0pGPR7hYk+XWZr60Q==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + } + } + }, + "graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "dev": true + }, + "hard-rejection": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz", + "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==", + "dev": true + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", + "dev": true + }, + "hosted-git-info": { + "version": "4.1.0", + "resolved": 
"https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + }, + "dependencies": { + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + } + } + }, + "http-cache-semantics": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", + "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", + "dev": true + }, + "http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dev": true, + "requires": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + } + }, + "https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "requires": { + "agent-base": "6", + "debug": "4" + } + }, + "humanize-ms": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", + "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", + "dev": true, + "requires": { + "ms": "^2.0.0" + } + }, + "iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "optional": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + } + }, + "imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true + }, + "indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true + }, + "infer-owner": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz", + "integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "ip": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/ip/-/ip-2.0.0.tgz", + "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==", + "dev": true + }, + "is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true + }, + "is-core-module": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz", + "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "is-lambda": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz", + "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==", + "dev": true + }, + "is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", + "dev": true + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "js-base64": { + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/js-base64/-/js-base64-2.6.4.tgz", + "integrity": "sha512-pZe//GGmwJndub7ZghVHz7vjb2LgC1m8B07Au3eYqeqv9emhESByMXxaEgkUkEqJe87oBbSniGYoQNIBklc7IQ==", + "dev": true + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true + }, + "lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": 
"sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "requires": { + "p-locate": "^4.1.0" + } + }, + "lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "lru-cache": { + "version": "7.14.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.14.1.tgz", + "integrity": "sha512-ysxwsnTKdAx96aTRdhDOCQfDgbHnt8SK0KY8SEjO0wHinhWOFTESbjVCMPbU1uGXg/ch4lifqx0wfjOawU2+WA==", + "dev": true + }, + "make-fetch-happen": { + "version": "10.2.1", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.2.1.tgz", + "integrity": "sha512-NgOPbRiaQM10DYXvN3/hhGVI2M5MtITFryzBGxHM5p4wnFxsVCbxkrBrDsk+EZ5OB4jEOT7AjDxtdF+KVEFT7w==", + "dev": true, + "requires": { + "agentkeepalive": "^4.2.1", + "cacache": "^16.1.0", + "http-cache-semantics": "^4.1.0", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^7.7.1", + "minipass": "^3.1.6", + "minipass-collect": "^1.0.2", + "minipass-fetch": "^2.0.3", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^7.0.0", + "ssri": "^9.0.0" + } + }, + "map-obj": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz", + "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==", + "dev": true + }, + "meow": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-9.0.0.tgz", + "integrity": "sha512-+obSblOQmRhcyBt62furQqRAQpNyWXo8BuQ5bN7dG8wmwQ+vwHKp/rCFD4CrTP8CsDQD1sjoZ94K417XEUk8IQ==", + "dev": true, + "requires": { + "@types/minimist": "^1.2.0", + "camelcase-keys": "^6.2.2", + "decamelize": "^1.2.0", + "decamelize-keys": "^1.1.0", + "hard-rejection": "^2.1.0", + "minimist-options": "4.1.0", + "normalize-package-data": "^3.0.0", + "read-pkg-up": "^7.0.1", + "redent": "^3.0.0", + "trim-newlines": "^3.0.0", + "type-fest": "^0.18.0", + "yargs-parser": "^20.2.3" + } + }, + "min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true + }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist-options": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz", + "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==", + "dev": true, + "requires": { + "arrify": "^1.0.1", + "is-plain-obj": "^1.1.0", + "kind-of": "^6.0.3" + } + }, + "minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "minipass-collect": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/minipass-collect/-/minipass-collect-1.0.2.tgz", + "integrity": "sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==", + "dev": true, + "requires": { + "minipass": "^3.0.0" + } + }, + "minipass-fetch": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-2.1.2.tgz", + "integrity": "sha512-LT49Zi2/WMROHYoqGgdlQIZh8mLPZmOrN2NdJjMXxYe4nkN6FUyuPuOAOedNJDrx0IRGg9+4guZewtp8hE6TxA==", + "dev": true, + "requires": { + "encoding": "^0.1.13", + "minipass": "^3.1.6", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + } + }, + "minipass-flush": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", + "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", + "dev": true, + "requires": { + "minipass": "^3.0.0" + } + }, + "minipass-pipeline": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", + "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", + "dev": true, + "requires": { + "minipass": "^3.0.0" + } + }, + "minipass-sized": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", + "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", + "dev": true, + "requires": { + "minipass": "^3.0.0" + } + }, + "minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "dev": true, + "requires": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + } + }, + "mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "nan": { + "version": "2.17.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.17.0.tgz", + "integrity": "sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ==", + "dev": true + }, + "negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "dev": true + }, + "node-gyp": { + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-8.4.1.tgz", + "integrity": "sha512-olTJRgUtAb/hOXG0E93wZDs5YiJlgbXxTwQAFHyNlRsXQnYzUaF2aGgujZbw+hR8aF4ZG/rST57bWMWD16jr9w==", + "dev": true, + "requires": { + "env-paths": "^2.2.0", + "glob": "^7.1.4", + "graceful-fs": "^4.2.6", + "make-fetch-happen": "^9.1.0", + "nopt": "^5.0.0", + "npmlog": "^6.0.0", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "tar": "^6.1.2", + "which": "^2.0.2" + }, + "dependencies": { + "@npmcli/fs": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-1.1.1.tgz", + "integrity": 
"sha512-8KG5RD0GVP4ydEzRn/I4BNDuxDtqVbOdm8675T49OIG/NGhaK0pjPX7ZcDlvKYbA+ulvVK3ztfcF4uBdOxuJbQ==", + "dev": true, + "requires": { + "@gar/promisify": "^1.0.1", + "semver": "^7.3.5" + } + }, + "@npmcli/move-file": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-1.1.2.tgz", + "integrity": "sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==", + "dev": true, + "requires": { + "mkdirp": "^1.0.4", + "rimraf": "^3.0.2" + } + }, + "@tootallnate/once": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", + "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", + "dev": true + }, + "cacache": { + "version": "15.3.0", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.3.0.tgz", + "integrity": "sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==", + "dev": true, + "requires": { + "@npmcli/fs": "^1.0.0", + "@npmcli/move-file": "^1.0.1", + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "glob": "^7.1.4", + "infer-owner": "^1.0.4", + "lru-cache": "^6.0.0", + "minipass": "^3.1.1", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.2", + "mkdirp": "^1.0.3", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^8.0.1", + "tar": "^6.0.2", + "unique-filename": "^1.1.1" + } + }, + "http-proxy-agent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", + "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", + "dev": true, + "requires": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "make-fetch-happen": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz", + "integrity": "sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==", + "dev": true, + "requires": { + "agentkeepalive": "^4.1.3", + "cacache": "^15.2.0", + "http-cache-semantics": "^4.1.0", + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^6.0.0", + "minipass": "^3.1.3", + "minipass-collect": "^1.0.2", + "minipass-fetch": "^1.3.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.2", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^6.0.0", + "ssri": "^8.0.0" + } + }, + "minipass-fetch": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.4.1.tgz", + "integrity": "sha512-CGH1eblLq26Y15+Azk7ey4xh0J/XfJfrCox5LDJiKqI2Q2iwOLOKrlmIaODiSQS8d18jalF6y2K2ePUm0CmShw==", + "dev": true, + "requires": { + "encoding": "^0.1.12", + "minipass": "^3.1.0", + "minipass-sized": "^1.0.3", + "minizlib": "^2.0.0" + } + }, + "socks-proxy-agent": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-6.2.1.tgz", + "integrity": "sha512-a6KW9G+6B3nWZ1yB8G7pJwL3ggLy1uTzKAgCb7ttblwqdz9fMGJUuTy3uFzEP48FAs9FLILlmzDlE2JJhVQaXQ==", + "dev": 
true, + "requires": { + "agent-base": "^6.0.2", + "debug": "^4.3.3", + "socks": "^2.6.2" + } + }, + "ssri": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-8.0.1.tgz", + "integrity": "sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==", + "dev": true, + "requires": { + "minipass": "^3.1.1" + } + }, + "unique-filename": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-1.1.1.tgz", + "integrity": "sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==", + "dev": true, + "requires": { + "unique-slug": "^2.0.0" + } + }, + "unique-slug": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-2.0.2.tgz", + "integrity": "sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==", + "dev": true, + "requires": { + "imurmurhash": "^0.1.4" + } + } + } + }, + "node-sass": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/node-sass/-/node-sass-8.0.0.tgz", + "integrity": "sha512-jPzqCF2/e6JXw6r3VxfIqYc8tKQdkj5Z/BDATYyG6FL6b/LuYBNFGFVhus0mthcWifHm/JzBpKAd+3eXsWeK/A==", + "dev": true, + "requires": { + "async-foreach": "^0.1.3", + "chalk": "^4.1.2", + "cross-spawn": "^7.0.3", + "gaze": "^1.0.0", + "get-stdin": "^4.0.1", + "glob": "^7.0.3", + "lodash": "^4.17.15", + "make-fetch-happen": "^10.0.4", + "meow": "^9.0.0", + "nan": "^2.17.0", + "node-gyp": "^8.4.1", + "sass-graph": "^4.0.1", + "stdout-stream": "^1.4.0", + "true-case-path": "^2.2.1" + } + }, + "nopt": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", + "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", + "dev": true, + "requires": { + "abbrev": "1" + } + }, + "normalize-package-data": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", + "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", + "dev": true, + "requires": { + "hosted-git-info": "^4.0.1", + "is-core-module": "^2.5.0", + "semver": "^7.3.4", + "validate-npm-package-license": "^3.0.1" + } + }, + "npmlog": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz", + "integrity": "sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==", + "dev": true, + "requires": { + "are-we-there-yet": "^3.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^4.0.3", + "set-blocking": "^2.0.0" + } + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "p-map": { + 
"version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dev": true, + "requires": { + "aggregate-error": "^3.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true + }, + "path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "promise-inflight": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", + "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==", + "dev": true + }, + "promise-retry": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", + "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", + "dev": true, + "requires": { + "err-code": "^2.0.2", + "retry": "^0.12.0" + } + }, + "quick-lru": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", + "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", + "dev": true + }, + "read-pkg": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", + "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", + "dev": true, + "requires": { + "@types/normalize-package-data": "^2.4.0", + "normalize-package-data": "^2.5.0", + "parse-json": "^5.0.0", + "type-fest": "^0.6.0" + }, + "dependencies": { + "hosted-git-info": { + "version": "2.8.9", + "resolved": 
"https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true + }, + "normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "requires": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + }, + "type-fest": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", + "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", + "dev": true + } + } + }, + "read-pkg-up": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", + "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", + "dev": true, + "requires": { + "find-up": "^4.1.0", + "read-pkg": "^5.2.0", + "type-fest": "^0.8.1" + }, + "dependencies": { + "type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true + } + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, + "requires": { + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true + }, + "resolve": { + "version": "1.22.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", + "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==", + "dev": true, + "requires": { + "is-core-module": "^2.9.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } + }, + "retry": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", + "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", + "dev": true + }, + "rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": 
true, + "requires": { + "glob": "^7.1.3" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true + }, + "safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true, + "optional": true + }, + "sass-graph": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/sass-graph/-/sass-graph-4.0.1.tgz", + "integrity": "sha512-5YCfmGBmxoIRYHnKK2AKzrAkCoQ8ozO+iumT8K4tXJXRVCPf+7s1/9KxTSW3Rbvf+7Y7b4FR3mWyLnQr3PHocA==", + "dev": true, + "requires": { + "glob": "^7.0.0", + "lodash": "^4.17.11", + "scss-tokenizer": "^0.4.3", + "yargs": "^17.2.1" + } + }, + "scss-tokenizer": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/scss-tokenizer/-/scss-tokenizer-0.4.3.tgz", + "integrity": "sha512-raKLgf1LI5QMQnG+RxHz6oK0sL3x3I4FN2UDLqgLOGO8hodECNnNh5BXn7fAyBxrA8zVzdQizQ6XjNJQ+uBwMw==", + "dev": true, + "requires": { + "js-base64": "^2.4.9", + "source-map": "^0.7.3" + } + }, + "semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + }, + "dependencies": { + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + } + } + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "dev": true + }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true + }, + "signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "smart-buffer": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", + "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", + "dev": true + }, + "socks": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.7.1.tgz", + "integrity": "sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==", + "dev": true, + "requires": { + "ip": "^2.0.0", + "smart-buffer": "^4.2.0" + } + }, + "socks-proxy-agent": { + 
"version": "7.0.0", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-7.0.0.tgz", + "integrity": "sha512-Fgl0YPZ902wEsAyiQ+idGd1A7rSFx/ayC1CQVMw5P+EQx2V0SgpGtf6OKFhVjPflPUl9YMmEOnmfjCdMUsygww==", + "dev": true, + "requires": { + "agent-base": "^6.0.2", + "debug": "^4.3.3", + "socks": "^2.6.2" + } + }, + "source-map": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "dev": true + }, + "spdx-correct": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", + "dev": true, + "requires": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-exceptions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", + "dev": true + }, + "spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "requires": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "spdx-license-ids": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.12.tgz", + "integrity": "sha512-rr+VVSXtRhO4OHbXUiAF7xW3Bo9DuuF6C5jH+q/x15j2jniycgKbxU09Hr0WqlSLUs4i4ltHGXqTe7VHclYWyA==", + "dev": true + }, + "ssri": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-9.0.1.tgz", + "integrity": "sha512-o57Wcn66jMQvfHG1FlYbWeZWW/dHZhJXjpIcTfXldXEk5nz5lStPo3mK0OJQfGR3RbZUlbISexbljkJzuEj/8Q==", + "dev": true, + "requires": { + "minipass": "^3.1.1" + } + }, + "stdout-stream": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/stdout-stream/-/stdout-stream-1.4.1.tgz", + "integrity": "sha512-j4emi03KXqJWcIeF8eIXkjMFN1Cmb8gUlDYGeBALLPo5qdyTfA9bOtl8m33lRoC+vFMkP3gl0WsDr6+gzxbbTA==", + "dev": true, + "requires": { + "readable-stream": "^2.0.1" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + } + } + } + }, + "string_decoder": { + "version": "1.3.0", + "resolved": 
"https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, + "requires": { + "safe-buffer": "~5.2.0" + } + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "requires": { + "min-indent": "^1.0.0" + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, + "tar": { + "version": "6.1.13", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.13.tgz", + "integrity": "sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==", + "dev": true, + "requires": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^4.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "dependencies": { + "minipass": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-4.0.0.tgz", + "integrity": "sha512-g2Uuh2jEKoht+zvO6vJqXmYpflPqzRBT+Th2h01DKh5z7wbY/AZ2gCQ78cP70YoHPyFdY30YBV5WxgLOEwOykw==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + } + } + }, + "trim-newlines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz", + "integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==", + "dev": true + }, + "true-case-path": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/true-case-path/-/true-case-path-2.2.1.tgz", + "integrity": "sha512-0z3j8R7MCjy10kc/g+qg7Ln3alJTodw9aDuVWZa3uiWqfuBMKeAeP2ocWcxoyM3D73yz3Jt/Pu4qPr4wHSdB/Q==", + "dev": true + }, + "type-fest": { + "version": "0.18.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz", + "integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==", + "dev": true + }, + "unique-filename": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-2.0.1.tgz", + "integrity": 
"sha512-ODWHtkkdx3IAR+veKxFV+VBkUMcN+FaqzUUd7IZzt+0zhDZFPFxhlqwPF3YQvMHx1TD0tdgYl+kuPnJ8E6ql7A==", + "dev": true, + "requires": { + "unique-slug": "^3.0.0" + } + }, + "unique-slug": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-3.0.0.tgz", + "integrity": "sha512-8EyMynh679x/0gqE9fT9oilG+qEt+ibFyqjuVTsZn1+CMxH+XLlpvr2UZx4nVcCwTpx81nICr2JQFkM+HPLq4w==", + "dev": true, + "requires": { + "imurmurhash": "^0.1.4" + } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "requires": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "dev": true, + "requires": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "yargs": { + "version": "17.6.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.2.tgz", + "integrity": "sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==", + "dev": true, + "requires": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "dependencies": { + "yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true + } + } + }, + "yargs-parser": { + "version": "20.2.9", + 
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true + } + } +} diff --git a/mybulma/package.json b/mybulma/package.json new file mode 100644 index 0000000..c496f8c --- /dev/null +++ b/mybulma/package.json @@ -0,0 +1,20 @@ +{ + "name": "mybulma", + "version": "1.0.0", + "description": "", + "main": "sass/mystyles.scss", + "scripts": { + "css-build": "node-sass --omit-source-map-url sass/mystyles.scss css/mystyles.css", + "css-watch": "npm run css-build -- --watch", + "start": "npm run css-watch" + }, + "author": "", + "license": "ISC", + "devDependencies": { + "bulma": "^0.9.4", + "node-sass": "^8.0.0" + }, + "dependencies": { + "bulma-pageloader": "^0.3.0" + } +} diff --git a/mybulma/sass/mystyles.scss b/mybulma/sass/mystyles.scss new file mode 100644 index 0000000..389b511 --- /dev/null +++ b/mybulma/sass/mystyles.scss @@ -0,0 +1,58 @@ +@charset "utf-8"; + +// Import a Google Font +@import url('https://fonts.googleapis.com/css?family=Nunito:400,700'); + +// Set your brand colors +$lime:#00fdbe; +$purple: #a45e8d; +$trans-pink: #F5A9B8; +$beige-light: #eaeaea; +$trans-blue: #5bcefa; +$greyish: #1d1d1d; +$mauve: #81c8be; + +// Update Bulma's global variables +$family-sans-serif: "Nunito", sans-serif; +$grey-dark: #414559; +$grey-light: #51576d; +$primary: #cdd6f4; +$link: $mauve; +$widescreen-enabled: false; +$fullhd-enabled: false; + +// Update some of Bulma's component variables +$body-background-color: #303446; +$control-border-width: 2px; +$input-border-color: transparent; +$input-shadow: none; + + +//my shit +$title-color: $mauve; +$input-color: #b5bfe2; +$primary: #c6d0f5; +$text-strong: #b5bfe2; +$text: #c6d0f5; +/////navbar +$navbar-background-color: #303446; +$navbar-item-color: #81c8be; +$navbar-item-hover-background-color: $grey-dark; +$navbar-dropdown-background-color: $grey-dark; +$navbar-dropdown-item-hover-color: #81c8be; +$navbar-dropdown-item-hover-background-color: $grey-dark; +$navbar-divider-background-color: $grey-light; +//$navbar-dropdown-item-active-color: $off-off-off-dark; +//$navbar-dropdown-item-active-background-color: $off-off-dark; + + +// Import only what you need from Bulma +@import "../node_modules/bulma/sass/utilities/_all.sass"; +@import "../node_modules/bulma/sass/base/_all.sass"; +@import "../node_modules/bulma/sass/elements/button.sass"; +@import "../node_modules/bulma/sass/elements/container.sass"; +@import "../node_modules/bulma/sass/elements/title.sass"; +@import "../node_modules/bulma/sass/form/_all.sass"; +@import "../node_modules/bulma/sass/components/navbar.sass"; +@import "../node_modules/bulma/sass/layout/hero.sass"; +@import "../node_modules/bulma/sass/layout/section.sass"; \ No newline at end of file diff --git a/old.html b/old.html new file mode 100644 index 0000000..ed2d0ea --- /dev/null +++ b/old.html @@ -0,0 +1,63 @@ + + + + + +
+
+
+
+⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⣀⣀⣀⣀⣀⣀⣄⣀⠀⠀⠀⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⠀⢀⣠⣴⡶⢿⣟⡛⣿⢉⣿⠛⢿⣯⡈⠙⣿⣦⡀⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⣠⡾⠻⣧⣬⣿⣿⣿⣿⣿⡟⠉⣠⣾⣿⠿⠿⠿⢿⣿⣦⠀⠀⠀
+⠀⠀⠀⠀⣠⣾⡋⣻⣾⣿⣿⣿⠿⠟⠛⠛⠛⠀⢻⣿⡇⢀⣴⡶⡄⠈⠛⠀⠀⠀
+⠀⠀⠀⣸⣿⣉⣿⣿⣿⡿⠋⠀⠀⠀⠀⠀⠀⠀⠈⢿⣇⠈⢿⣤⡿⣦⠀⠀⠀⠀
+⠀⠀⢰⣿⣉⣿⣿⣿⠏⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠙⠦⠀⢻⣦⠾⣆⠀⠀⠀
+⠀⠀⣾⣏⣿⣿⣿⡟⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠈⣿⡶⢾⡀⠀⠀
+⠀⠀⣿⠉⣿⣿⣿⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣿⣧⣼⡇⠀⠀
+⠀⠀⣿⡛⣿⣿⣿⡇⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⣿⣧⣼⡇⠀⠀
+⠀⠀⠸⡿⢻⣿⣿⣿⡄⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⣼⣿⣥⣽⠁⠀⠀
+⠀⠀⠀⢻⡟⢙⣿⣿⣿⣦⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣠⣾⣿⣧⣸⡏⠀⠀⠀
+⠀⠀⠀⠀⠻⣿⡋⣻⣿⣿⣿⣦⣤⣀⣀⣀⣀⣀⣠⣴⣿⣿⢿⣥⣼⠟⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠈⠻⣯⣤⣿⠻⣿⣿⣿⣿⣿⣿⣿⣿⣿⠛⣷⣴⡿⠋⠀⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⠈⠙⠛⠾⣧⣼⣟⣉⣿⣉⣻⣧⡿⠟⠋⠁⠀⠀⠀⠀⠀⠀⠀
+⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠉⠉⠉⠉⠁⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
+
+
+brain genuinely not working with me so instead of thinking of a design, im just gonna write the links down for now:
+
+ForgeJo
+
+Ouroboros.Gay - Fediverse
+
+Ouroboros.Gay - Fediverse (Akkoma Version)
+
+Uptime Kuma
+
+limepot.xyz - Personal Website
+
\ No newline at end of file