/******/ // webpackBootstrap: minimal CommonJS-style loader generated by webpack.
module.exports =
/******/ (function (modules, runtime) { // webpackBootstrap
/******/ 	"use strict";
/******/ 	// The module cache
/******/ 	var installedModules = {};
/******/
/******/ 	// The require function
/******/ 	function __webpack_require__(moduleId) {
/******/
/******/ 		// Check if module is in cache
/******/ 		if (installedModules[moduleId]) {
/******/ 			return installedModules[moduleId].exports;
/******/ 		}
/******/ 		// Create a new module (and put it into the cache)
/******/ 		var module = installedModules[moduleId] = {
/******/ 			i: moduleId,
/******/ 			l: false,
/******/ 			exports: {}
/******/ 		};
/******/
/******/ 		// Execute the module function
/******/ 		var threw = true;
/******/ 		try {
/******/ 			modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ 			threw = false;
/******/ 		} finally {
/******/ 			// Drop the half-initialized cache entry when the factory throws,
/******/ 			// so a later require can retry instead of seeing broken exports.
/******/ 			if (threw) delete installedModules[moduleId];
/******/ 		}
/******/
/******/ 		// Flag the module as loaded
/******/ 		module.l = true;
/******/
/******/ 		// Return the exports of the module
/******/ 		return module.exports;
/******/ 	}
/******/
/******/
/******/ 	// Base path for bundled asset lookups (used as __webpack_require__.ab + "file").
/******/ 	__webpack_require__.ab = __dirname + "/";
/******/
/******/ 	// the startup function
/******/ 	function startup() {
/******/ 		// Load entry module and return exports
/******/ 		return __webpack_require__(109);
/******/ 	};
/******/
/******/ 	// run startup
/******/ 	return startup();
/******/ })
/************************************************************************/
2020-08-17 18:35:15 +02:00
/******/ ( {
2020-08-23 03:31:38 +02:00
/***/ 8 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const Range = _ _webpack _require _ _ ( 828 )
const intersects = ( r1 , r2 , options ) => {
r1 = new Range ( r1 , options )
r2 = new Range ( r2 , options )
return r1 . intersects ( r2 )
}
module . exports = intersects
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 10 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
module . exports = globSync
globSync . GlobSync = GlobSync
var fs = _ _webpack _require _ _ ( 747 )
var rp = _ _webpack _require _ _ ( 863 )
var minimatch = _ _webpack _require _ _ ( 973 )
var Minimatch = minimatch . Minimatch
var Glob = _ _webpack _require _ _ ( 957 ) . Glob
var util = _ _webpack _require _ _ ( 669 )
var path = _ _webpack _require _ _ ( 622 )
var assert = _ _webpack _require _ _ ( 357 )
var isAbsolute = _ _webpack _require _ _ ( 714 )
var common = _ _webpack _require _ _ ( 625 )
var alphasort = common . alphasort
var alphasorti = common . alphasorti
var setopts = common . setopts
var ownProp = common . ownProp
var childrenIgnored = common . childrenIgnored
var isIgnored = common . isIgnored
function globSync ( pattern , options ) {
if ( typeof options === 'function' || arguments . length === 3 )
throw new TypeError ( 'callback provided to sync glob\n' +
'See: https://github.com/isaacs/node-glob/issues/167' )
return new GlobSync ( pattern , options ) . found
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
function GlobSync ( pattern , options ) {
if ( ! pattern )
throw new Error ( 'must provide pattern' )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
if ( typeof options === 'function' || arguments . length === 3 )
throw new TypeError ( 'callback provided to sync glob\n' +
'See: https://github.com/isaacs/node-glob/issues/167' )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
if ( ! ( this instanceof GlobSync ) )
return new GlobSync ( pattern , options )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
setopts ( this , pattern , options )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
if ( this . noprocess )
return this
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
var n = this . minimatch . set . length
this . matches = new Array ( n )
for ( var i = 0 ; i < n ; i ++ ) {
this . _process ( this . minimatch . set [ i ] , i , false )
}
this . _finish ( )
}
// Post-processing: optionally resolve each match through realpathSync
// (falling back to the absolute path on stat errors), then hand off to
// common.finish() to build the final result set.
GlobSync.prototype._finish = function () {
  assert(this instanceof GlobSync)
  if (this.realpath) {
    var self = this
    this.matches.forEach(function (matchset, index) {
      var set = self.matches[index] = Object.create(null)
      for (var p in matchset) {
        try {
          p = self._makeAbs(p)
          var real = rp.realpathSync(p, self.realpathCache)
          set[real] = true
        } catch (er) {
          // A 'stat' failure means the path exists but can't be resolved;
          // keep the absolute form. Anything else is unexpected: rethrow.
          if (er.syscall === 'stat')
            set[self._makeAbs(p)] = true
          else
            throw er
        }
      }
    })
  }
  common.finish(this)
}
// Walk one pattern set: split off the leading literal (string) segments,
// decide which directory to read, and dispatch to the readdir or globstar
// handler for the first wildcard segment.
GlobSync.prototype._process = function (pattern, index, inGlobStar) {
  assert(this instanceof GlobSync)

  // Get the first [n] parts of pattern that are all strings.
  var n = 0
  while (typeof pattern[n] === 'string') {
    n++
  }
  // now n is the index of the first one that is *not* a string.

  // See if there's anything else
  var prefix
  switch (n) {
    // if not, then this is rather simple
    case pattern.length:
      this._processSimple(pattern.join('/'), index)
      return

    case 0:
      // pattern *starts* with some non-trivial item.
      // going to readdir(cwd), but not include the prefix in matches.
      prefix = null
      break

    default:
      // pattern has some string bits in the front.
      // whatever it starts with, whether that's 'absolute' like /foo/bar,
      // or 'relative' like '../baz'
      prefix = pattern.slice(0, n).join('/')
      break
  }

  var remain = pattern.slice(n)

  // get the list of entries.
  var read
  if (prefix === null)
    read = '.'
  else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
    if (!prefix || !isAbsolute(prefix))
      prefix = '/' + prefix
    read = prefix
  } else
    read = prefix

  var abs = this._makeAbs(read)

  // if ignored, skip processing
  if (childrenIgnored(this, read))
    return

  var isGlobStar = remain[0] === minimatch.GLOBSTAR
  if (isGlobStar)
    this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
  else
    this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
}
// Expand one non-globstar wildcard segment against the entries of `abs`:
// filter entries through the segment's regexp, emit matches when this is
// the last segment, otherwise recurse with each match substituted in.
GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
  var entries = this._readdir(abs, inGlobStar)

  // if the abs isn't a dir, then nothing can match!
  if (!entries)
    return

  // It will only match dot entries if it starts with a dot, or if
  // dot is set.  Stuff like @(.foo|.bar) isn't allowed.
  var pn = remain[0]
  var negate = !!this.minimatch.negate
  var rawGlob = pn._glob
  var dotOk = this.dot || rawGlob.charAt(0) === '.'

  var matchedEntries = []
  for (var i = 0; i < entries.length; i++) {
    var e = entries[i]
    if (e.charAt(0) !== '.' || dotOk) {
      var m
      if (negate && !prefix) {
        m = !e.match(pn)
      } else {
        m = e.match(pn)
      }
      if (m)
        matchedEntries.push(e)
    }
  }

  var len = matchedEntries.length
  // If there are no matched entries, then nothing matches.
  if (len === 0)
    return

  // if this is the last remaining pattern bit, then no need for
  // an additional stat *unless* the user has specified mark or
  // stat explicitly.  We know they exist, since readdir returned
  // them.
  if (remain.length === 1 && !this.mark && !this.stat) {
    if (!this.matches[index])
      this.matches[index] = Object.create(null)

    for (var i = 0; i < len; i++) {
      var e = matchedEntries[i]
      if (prefix) {
        if (prefix.slice(-1) !== '/')
          e = prefix + '/' + e
        else
          e = prefix + e
      }

      if (e.charAt(0) === '/' && !this.nomount) {
        e = path.join(this.root, e)
      }
      this._emitMatch(index, e)
    }
    // This was the last one, and no stats were needed
    return
  }

  // now test all matched entries as stand-ins for that part
  // of the pattern.
  remain.shift()
  for (var i = 0; i < len; i++) {
    var e = matchedEntries[i]
    var newPattern
    if (prefix)
      newPattern = [prefix, e]
    else
      newPattern = [e]
    this._process(newPattern.concat(remain), index, inGlobStar)
  }
}
// Record a single match for pattern set `index`, honoring the ignore
// rules and the mark/absolute/nodir/stat options.
GlobSync.prototype._emitMatch = function (index, e) {
  if (isIgnored(this, e))
    return

  var abs = this._makeAbs(e)

  if (this.mark)
    e = this._mark(e)

  if (this.absolute) {
    e = abs
  }

  // Already recorded: nothing to do.
  if (this.matches[index][e])
    return

  // nodir: skip anything the cache says is a directory.
  if (this.nodir) {
    var c = this.cache[abs]
    if (c === 'DIR' || Array.isArray(c))
      return
  }

  this.matches[index][e] = true

  if (this.stat)
    this._stat(e)
}
// Readdir variant used while inside a '**': lstat first so symlinked
// directories are recorded (and not descended into again) unless the
// `follow` option is set.
GlobSync.prototype._readdirInGlobStar = function (abs) {
  // follow all symlinked directories forever
  // just proceed as if this is a non-globstar situation
  if (this.follow)
    return this._readdir(abs, false)

  var entries
  var lstat
  try {
    lstat = fs.lstatSync(abs)
  } catch (er) {
    if (er.code === 'ENOENT') {
      // lstat failed, doesn't exist
      return null
    }
  }

  var isSym = lstat && lstat.isSymbolicLink()
  this.symlinks[abs] = isSym

  // If it's not a symlink or a dir, then it's definitely a regular file.
  // don't bother doing a readdir in that case.
  if (!isSym && lstat && !lstat.isDirectory())
    this.cache[abs] = 'FILE'
  else
    entries = this._readdir(abs, false)

  return entries
}
// Read directory entries for `abs`, consulting the symlink and contents
// caches first. Returns null when `abs` is not a readable directory.
GlobSync.prototype._readdir = function (abs, inGlobStar) {
  if (inGlobStar && !ownProp(this.symlinks, abs))
    return this._readdirInGlobStar(abs)

  if (ownProp(this.cache, abs)) {
    var c = this.cache[abs]
    if (!c || c === 'FILE')
      return null

    if (Array.isArray(c))
      return c
  }

  try {
    return this._readdirEntries(abs, fs.readdirSync(abs))
  } catch (er) {
    this._readdirError(abs, er)
    return null
  }
}
// Cache the results of a successful readdir: mark each child as existing
// (when no stat is needed) and record the entry list for `abs` itself.
GlobSync.prototype._readdirEntries = function (abs, entries) {
  // if we haven't asked to stat everything, then just
  // assume that everything in there exists, so we can avoid
  // having to stat it a second time.
  if (!this.mark && !this.stat) {
    for (var i = 0; i < entries.length; i++) {
      var e = entries[i]
      if (abs === '/')
        e = abs + e
      else
        e = abs + '/' + e
      this.cache[e] = true
    }
  }

  // mark and cache dir-ness
  this.cache[abs] = entries
  return entries
}
// Classify a readdir failure and cache the outcome; throws for an invalid
// cwd or (in strict mode) any unexpected error.
GlobSync.prototype._readdirError = function (f, er) {
  // handle errors, and cache the information
  switch (er.code) {
    case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
    case 'ENOTDIR': // totally normal. means it *does* exist.
      var abs = this._makeAbs(f)
      this.cache[abs] = 'FILE'
      if (abs === this.cwdAbs) {
        var error = new Error(er.code + ' invalid cwd ' + this.cwd)
        error.path = this.cwd
        error.code = er.code
        throw error
      }
      break

    case 'ENOENT': // not terribly unusual
    case 'ELOOP':
    case 'ENAMETOOLONG':
    case 'UNKNOWN':
      this.cache[this._makeAbs(f)] = false
      break

    default: // some unusual error.  Treat as failure.
      this.cache[this._makeAbs(f)] = false
      if (this.strict)
        throw er
      if (!this.silent)
        console.error('glob error', er)
      break
  }
}
GlobSync . prototype . _processGlobStar = function ( prefix , read , abs , remain , index , inGlobStar ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
var entries = this . _readdir ( abs , inGlobStar )
// no entries means not a dir, so it can never have matches
// foo.txt/** doesn't match foo.txt
if ( ! entries )
return
// test without the globstar, and with every child both below
// and replacing the globstar.
var remainWithoutGlobStar = remain . slice ( 1 )
var gspref = prefix ? [ prefix ] : [ ]
var noGlobStar = gspref . concat ( remainWithoutGlobStar )
// the noGlobStar pattern exits the inGlobStar state
this . _process ( noGlobStar , index , false )
var len = entries . length
var isSym = this . symlinks [ abs ]
// If it's a symlink, and we're in a globstar, then stop
if ( isSym && inGlobStar )
return
for ( var i = 0 ; i < len ; i ++ ) {
var e = entries [ i ]
if ( e . charAt ( 0 ) === '.' && ! this . dot )
continue
// these two cases enter the inGlobStar state
var instead = gspref . concat ( entries [ i ] , remainWithoutGlobStar )
this . _process ( instead , index , true )
var below = gspref . concat ( entries [ i ] , remain )
this . _process ( below , index , true )
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
GlobSync . prototype . _processSimple = function ( prefix , index ) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var exists = this . _stat ( prefix )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
if ( ! this . matches [ index ] )
this . matches [ index ] = Object . create ( null )
// If it doesn't exist, then just mark the lack of results
if ( ! exists )
return
if ( prefix && isAbsolute ( prefix ) && ! this . nomount ) {
var trail = /[\/\\]$/ . test ( prefix )
if ( prefix . charAt ( 0 ) === '/' ) {
prefix = path . join ( this . root , prefix )
} else {
prefix = path . resolve ( this . root , prefix )
if ( trail )
prefix += '/'
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
if ( process . platform === 'win32' )
prefix = prefix . replace ( /\\/g , '/' )
// Mark this as a match
this . _emitMatch ( index , prefix )
}
// Returns either 'DIR', 'FILE', or false
// Uses the stat cache; lstats first and follows symlinks with statSync,
// falling back to the lstat result when the link target is unreadable.
GlobSync.prototype._stat = function (f) {
  var abs = this._makeAbs(f)
  // A trailing slash means the caller requires a directory.
  var needDir = f.slice(-1) === '/'

  if (f.length > this.maxLength)
    return false

  if (!this.stat && ownProp(this.cache, abs)) {
    var c = this.cache[abs]

    if (Array.isArray(c))
      c = 'DIR'

    // It exists, but maybe not how we need it
    if (!needDir || c === 'DIR')
      return c

    if (needDir && c === 'FILE')
      return false

    // otherwise we have to stat, because maybe c=true
    // if we know it exists, but not what it is.
  }

  var stat = this.statCache[abs]
  if (!stat) {
    var lstat
    try {
      lstat = fs.lstatSync(abs)
    } catch (er) {
      if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
        this.statCache[abs] = false
        return false
      }
    }

    if (lstat && lstat.isSymbolicLink()) {
      try {
        stat = fs.statSync(abs)
      } catch (er) {
        // Broken symlink: report the link itself.
        stat = lstat
      }
    } else {
      stat = lstat
    }
  }

  this.statCache[abs] = stat

  var c = true
  if (stat)
    c = stat.isDirectory() ? 'DIR' : 'FILE'

  this.cache[abs] = this.cache[abs] || c

  if (needDir && c === 'FILE')
    return false

  return c
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
GlobSync . prototype . _mark = function ( p ) {
return common . mark ( this , p )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
GlobSync . prototype . _makeAbs = function ( f ) {
return common . makeAbs ( this , f )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ } ) ,
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ 15 :
/***/ ( function ( module ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Parallel lookup tables mapping ranges between Unicode code points
// ("uChars") and a second numeric space ("gbChars"). NOTE(review):
// presumably iconv-lite's GBK/GB18030 range-mapping data — the code that
// consumes these arrays is not visible in this chunk; confirm before
// relying on the interpretation. Bytes below are kept exactly as-is.
module . exports = { "uChars" : [ 128 , 165 , 169 , 178 , 184 , 216 , 226 , 235 , 238 , 244 , 248 , 251 , 253 , 258 , 276 , 284 , 300 , 325 , 329 , 334 , 364 , 463 , 465 , 467 , 469 , 471 , 473 , 475 , 477 , 506 , 594 , 610 , 712 , 716 , 730 , 930 , 938 , 962 , 970 , 1026 , 1104 , 1106 , 8209 , 8215 , 8218 , 8222 , 8231 , 8241 , 8244 , 8246 , 8252 , 8365 , 8452 , 8454 , 8458 , 8471 , 8482 , 8556 , 8570 , 8596 , 8602 , 8713 , 8720 , 8722 , 8726 , 8731 , 8737 , 8740 , 8742 , 8748 , 8751 , 8760 , 8766 , 8777 , 8781 , 8787 , 8802 , 8808 , 8816 , 8854 , 8858 , 8870 , 8896 , 8979 , 9322 , 9372 , 9548 , 9588 , 9616 , 9622 , 9634 , 9652 , 9662 , 9672 , 9676 , 9680 , 9702 , 9735 , 9738 , 9793 , 9795 , 11906 , 11909 , 11913 , 11917 , 11928 , 11944 , 11947 , 11951 , 11956 , 11960 , 11964 , 11979 , 12284 , 12292 , 12312 , 12319 , 12330 , 12351 , 12436 , 12447 , 12535 , 12543 , 12586 , 12842 , 12850 , 12964 , 13200 , 13215 , 13218 , 13253 , 13263 , 13267 , 13270 , 13384 , 13428 , 13727 , 13839 , 13851 , 14617 , 14703 , 14801 , 14816 , 14964 , 15183 , 15471 , 15585 , 16471 , 16736 , 17208 , 17325 , 17330 , 17374 , 17623 , 17997 , 18018 , 18212 , 18218 , 18301 , 18318 , 18760 , 18811 , 18814 , 18820 , 18823 , 18844 , 18848 , 18872 , 19576 , 19620 , 19738 , 19887 , 40870 , 59244 , 59336 , 59367 , 59413 , 59417 , 59423 , 59431 , 59437 , 59443 , 59452 , 59460 , 59478 , 59493 , 63789 , 63866 , 63894 , 63976 , 63986 , 64016 , 64018 , 64021 , 64025 , 64034 , 64037 , 64042 , 65074 , 65093 , 65107 , 65112 , 65127 , 65132 , 65375 , 65510 , 65536 ] , "gbChars" : [ 0 , 36 , 38 , 45 , 50 , 81 , 89 , 95 , 96 , 100 , 103 , 104 , 105 , 109 , 126 , 133 , 148 , 172 , 175 , 179 , 208 , 306 , 307 , 308 , 309 , 310 , 311 , 312 , 313 , 341 , 428 , 443 , 544 , 545 , 558 , 741 , 742 , 749 , 750 , 805 , 819 , 820 , 7922 , 7924 , 7925 , 7927 , 7934 , 7943 , 7944 , 7945 , 7950 , 8062 , 8148 , 8149 , 8152 , 8164 , 8174 , 8236 , 8240 , 8262 , 8264 , 8374 , 8380 , 8381 , 8384 , 8388 , 8390 , 8392 , 8393 , 
8394 , 8396 , 8401 , 8406 , 8416 , 8419 , 8424 , 8437 , 8439 , 8445 , 8482 , 8485 , 8496 , 8521 , 8603 , 8936 , 8946 , 9046 , 9050 , 9063 , 9066 , 9076 , 9092 , 9100 , 9108 , 9111 , 9113 , 9131 , 9162 , 9164 , 9218 , 9219 , 11329 , 11331 , 11334 , 11336 , 11346 , 11361 , 11363 , 11366 , 11370 , 11372 , 11375 , 11389 , 11682 , 11686 , 11687 , 11692 , 11694 , 11714 , 11716 , 11723 , 11725 , 11730 , 11736 , 11982 , 11989 , 12102 , 12336 , 12348 , 12350 , 12384 , 12393 , 12395 , 12397 , 12510 , 12553 , 12851 , 12962 , 12973 , 13738 , 13823 , 13919 , 13933 , 14080 , 14298 , 14585 , 14698 , 15583 , 15847 , 16318 , 16434 , 16438 , 16481 , 16729 , 17102 , 17122 , 17315 , 17320 , 17402 , 17418 , 17859 , 17909 , 17911 , 17915 , 17916 , 17936 , 17939 , 17961 , 18664 , 18703 , 18814 , 18962 , 19043 , 33469 , 33470 , 33471 , 33484 , 33485 , 33490 , 33497 , 33501 , 33505 , 33513 , 33520 , 33536 , 33550 , 37845 , 37921 , 37948 , 38029 , 38038 , 38064 , 38065 , 38066 , 38069 , 38075 , 38076 , 38078 , 39108 , 39109 , 39113 , 39114 , 39115 , 39116 , 39265 , 39394 , 189000 ] } ;
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ } ) ,
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ 16 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
const parse = _ _webpack _require _ _ ( 925 )
const prerelease = ( version , options ) => {
const parsed = parse ( version , options )
return ( parsed && parsed . prerelease . length ) ? parsed . prerelease : null
}
module . exports = prerelease
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ } ) ,
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ 17 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compare = _ _webpack _require _ _ ( 309 )
const neq = ( a , b , loose ) => compare ( a , b , loose ) !== 0
module . exports = neq
2020-08-23 03:31:38 +02:00
2020-08-21 13:39:42 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 30 :
2020-08-16 00:36:41 +02:00
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
2020-08-21 13:39:42 +02:00
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
2020-09-02 10:07:11 +02:00
if ( mod != null ) for ( var k in mod ) if ( Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
2020-08-21 13:39:42 +02:00
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
2020-08-16 00:36:41 +02:00
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
2020-09-02 10:07:11 +02:00
exports . getOctokitOptions = exports . GitHub = exports . context = void 0 ;
const Context = _ _importStar ( _ _webpack _require _ _ ( 53 ) ) ;
const Utils = _ _importStar ( _ _webpack _require _ _ ( 914 ) ) ;
// octokit + plugins
const core _1 = _ _webpack _require _ _ ( 762 ) ;
const plugin _rest _endpoint _methods _1 = _ _webpack _require _ _ ( 44 ) ;
const plugin _paginate _rest _1 = _ _webpack _require _ _ ( 431 ) ;
exports . context = new Context . Context ( ) ;
const baseUrl = Utils . getApiBaseUrl ( ) ;
const defaults = {
baseUrl ,
request : {
agent : Utils . getProxyAgent ( baseUrl )
}
} ;
exports . GitHub = core _1 . Octokit . plugin ( plugin _rest _endpoint _methods _1 . restEndpointMethods , plugin _paginate _rest _1 . paginateRest ) . defaults ( defaults ) ;
/ * *
* Convience function to correctly format Octokit Options to pass into the constructor .
*
* @ param token the repo PAT or GITHUB _TOKEN
* @ param options other options to set
* /
function getOctokitOptions ( token , options ) {
const opts = Object . assign ( { } , options || { } ) ; // Shallow clone - don't mutate the object provided by the caller
// Auth
const auth = Utils . getAuthString ( token , opts ) ;
if ( auth ) {
opts . auth = auth ;
}
return opts ;
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
exports . getOctokitOptions = getOctokitOptions ;
//# sourceMappingURL=utils.js.map
2020-08-21 13:39:42 +02:00
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 38 :
/***/ ( function ( module ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
"use strict" ;
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/ * !
* is - plain - object < https : //github.com/jonschlinkert/is-plain-object>
*
* Copyright ( c ) 2014 - 2017 , Jon Schlinkert .
* Released under the MIT License .
* /
2020-08-21 13:39:42 +02:00
2020-09-02 10:07:11 +02:00
function isObject ( o ) {
return Object . prototype . toString . call ( o ) === '[object Object]' ;
}
2020-08-21 13:39:42 +02:00
2020-09-02 10:07:11 +02:00
function isPlainObject ( o ) {
var ctor , prot ;
2020-08-21 13:39:42 +02:00
2020-09-02 10:07:11 +02:00
if ( isObject ( o ) === false ) return false ;
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// If has modified constructor
ctor = o . constructor ;
if ( ctor === undefined ) return true ;
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// If has modified prototype
prot = ctor . prototype ;
if ( isObject ( prot ) === false ) return false ;
// If constructor does not have an Object-specific method
if ( prot . hasOwnProperty ( 'isPrototypeOf' ) === false ) {
return false ;
}
// Most likely a plain Object
return true ;
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
module . exports = isPlainObject ;
2020-08-23 03:31:38 +02:00
2020-08-21 13:39:42 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 44 :
/***/ ( function ( _ _unusedmodule , exports ) {
2020-08-21 13:39:42 +02:00
"use strict" ;
2020-09-02 10:07:11 +02:00
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
const Endpoints = {
actions : {
addSelectedRepoToOrgSecret : [ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" ] ,
cancelWorkflowRun : [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" ] ,
createOrUpdateOrgSecret : [ "PUT /orgs/{org}/actions/secrets/{secret_name}" ] ,
createOrUpdateRepoSecret : [ "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" ] ,
createRegistrationTokenForOrg : [ "POST /orgs/{org}/actions/runners/registration-token" ] ,
createRegistrationTokenForRepo : [ "POST /repos/{owner}/{repo}/actions/runners/registration-token" ] ,
createRemoveTokenForOrg : [ "POST /orgs/{org}/actions/runners/remove-token" ] ,
createRemoveTokenForRepo : [ "POST /repos/{owner}/{repo}/actions/runners/remove-token" ] ,
createWorkflowDispatch : [ "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" ] ,
deleteArtifact : [ "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" ] ,
deleteOrgSecret : [ "DELETE /orgs/{org}/actions/secrets/{secret_name}" ] ,
deleteRepoSecret : [ "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" ] ,
deleteSelfHostedRunnerFromOrg : [ "DELETE /orgs/{org}/actions/runners/{runner_id}" ] ,
deleteSelfHostedRunnerFromRepo : [ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" ] ,
deleteWorkflowRun : [ "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}" ] ,
deleteWorkflowRunLogs : [ "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" ] ,
downloadArtifact : [ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" ] ,
downloadJobLogsForWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" ] ,
downloadWorkflowRunLogs : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" ] ,
getArtifact : [ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" ] ,
getJobForWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}" ] ,
getOrgPublicKey : [ "GET /orgs/{org}/actions/secrets/public-key" ] ,
getOrgSecret : [ "GET /orgs/{org}/actions/secrets/{secret_name}" ] ,
getRepoPublicKey : [ "GET /repos/{owner}/{repo}/actions/secrets/public-key" ] ,
getRepoSecret : [ "GET /repos/{owner}/{repo}/actions/secrets/{secret_name}" ] ,
getSelfHostedRunnerForOrg : [ "GET /orgs/{org}/actions/runners/{runner_id}" ] ,
getSelfHostedRunnerForRepo : [ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" ] ,
getWorkflow : [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}" ] ,
getWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}" ] ,
getWorkflowRunUsage : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" ] ,
getWorkflowUsage : [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" ] ,
listArtifactsForRepo : [ "GET /repos/{owner}/{repo}/actions/artifacts" ] ,
listJobsForWorkflowRun : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" ] ,
listOrgSecrets : [ "GET /orgs/{org}/actions/secrets" ] ,
listRepoSecrets : [ "GET /repos/{owner}/{repo}/actions/secrets" ] ,
listRepoWorkflows : [ "GET /repos/{owner}/{repo}/actions/workflows" ] ,
listRunnerApplicationsForOrg : [ "GET /orgs/{org}/actions/runners/downloads" ] ,
listRunnerApplicationsForRepo : [ "GET /repos/{owner}/{repo}/actions/runners/downloads" ] ,
listSelectedReposForOrgSecret : [ "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" ] ,
listSelfHostedRunnersForOrg : [ "GET /orgs/{org}/actions/runners" ] ,
listSelfHostedRunnersForRepo : [ "GET /repos/{owner}/{repo}/actions/runners" ] ,
listWorkflowRunArtifacts : [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" ] ,
listWorkflowRuns : [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" ] ,
listWorkflowRunsForRepo : [ "GET /repos/{owner}/{repo}/actions/runs" ] ,
reRunWorkflow : [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun" ] ,
removeSelectedRepoFromOrgSecret : [ "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" ] ,
setSelectedReposForOrgSecret : [ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" ]
} ,
activity : {
checkRepoIsStarredByAuthenticatedUser : [ "GET /user/starred/{owner}/{repo}" ] ,
deleteRepoSubscription : [ "DELETE /repos/{owner}/{repo}/subscription" ] ,
deleteThreadSubscription : [ "DELETE /notifications/threads/{thread_id}/subscription" ] ,
getFeeds : [ "GET /feeds" ] ,
getRepoSubscription : [ "GET /repos/{owner}/{repo}/subscription" ] ,
getThread : [ "GET /notifications/threads/{thread_id}" ] ,
getThreadSubscriptionForAuthenticatedUser : [ "GET /notifications/threads/{thread_id}/subscription" ] ,
listEventsForAuthenticatedUser : [ "GET /users/{username}/events" ] ,
listNotificationsForAuthenticatedUser : [ "GET /notifications" ] ,
listOrgEventsForAuthenticatedUser : [ "GET /users/{username}/events/orgs/{org}" ] ,
listPublicEvents : [ "GET /events" ] ,
listPublicEventsForRepoNetwork : [ "GET /networks/{owner}/{repo}/events" ] ,
listPublicEventsForUser : [ "GET /users/{username}/events/public" ] ,
listPublicOrgEvents : [ "GET /orgs/{org}/events" ] ,
listReceivedEventsForUser : [ "GET /users/{username}/received_events" ] ,
listReceivedPublicEventsForUser : [ "GET /users/{username}/received_events/public" ] ,
listRepoEvents : [ "GET /repos/{owner}/{repo}/events" ] ,
listRepoNotificationsForAuthenticatedUser : [ "GET /repos/{owner}/{repo}/notifications" ] ,
listReposStarredByAuthenticatedUser : [ "GET /user/starred" ] ,
listReposStarredByUser : [ "GET /users/{username}/starred" ] ,
listReposWatchedByUser : [ "GET /users/{username}/subscriptions" ] ,
listStargazersForRepo : [ "GET /repos/{owner}/{repo}/stargazers" ] ,
listWatchedReposForAuthenticatedUser : [ "GET /user/subscriptions" ] ,
listWatchersForRepo : [ "GET /repos/{owner}/{repo}/subscribers" ] ,
markNotificationsAsRead : [ "PUT /notifications" ] ,
markRepoNotificationsAsRead : [ "PUT /repos/{owner}/{repo}/notifications" ] ,
markThreadAsRead : [ "PATCH /notifications/threads/{thread_id}" ] ,
setRepoSubscription : [ "PUT /repos/{owner}/{repo}/subscription" ] ,
setThreadSubscription : [ "PUT /notifications/threads/{thread_id}/subscription" ] ,
starRepoForAuthenticatedUser : [ "PUT /user/starred/{owner}/{repo}" ] ,
unstarRepoForAuthenticatedUser : [ "DELETE /user/starred/{owner}/{repo}" ]
} ,
apps : {
addRepoToInstallation : [ "PUT /user/installations/{installation_id}/repositories/{repository_id}" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
checkToken : [ "POST /applications/{client_id}/token" ] ,
createContentAttachment : [ "POST /content_references/{content_reference_id}/attachments" , {
mediaType : {
previews : [ "corsair" ]
}
} ] ,
createFromManifest : [ "POST /app-manifests/{code}/conversions" ] ,
createInstallationAccessToken : [ "POST /app/installations/{installation_id}/access_tokens" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
deleteAuthorization : [ "DELETE /applications/{client_id}/grant" ] ,
deleteInstallation : [ "DELETE /app/installations/{installation_id}" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
deleteToken : [ "DELETE /applications/{client_id}/token" ] ,
getAuthenticated : [ "GET /app" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
getBySlug : [ "GET /apps/{app_slug}" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
getInstallation : [ "GET /app/installations/{installation_id}" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
getOrgInstallation : [ "GET /orgs/{org}/installation" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
getRepoInstallation : [ "GET /repos/{owner}/{repo}/installation" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
getSubscriptionPlanForAccount : [ "GET /marketplace_listing/accounts/{account_id}" ] ,
getSubscriptionPlanForAccountStubbed : [ "GET /marketplace_listing/stubbed/accounts/{account_id}" ] ,
getUserInstallation : [ "GET /users/{username}/installation" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listAccountsForPlan : [ "GET /marketplace_listing/plans/{plan_id}/accounts" ] ,
listAccountsForPlanStubbed : [ "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" ] ,
listInstallationReposForAuthenticatedUser : [ "GET /user/installations/{installation_id}/repositories" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listInstallations : [ "GET /app/installations" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listInstallationsForAuthenticatedUser : [ "GET /user/installations" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listPlans : [ "GET /marketplace_listing/plans" ] ,
listPlansStubbed : [ "GET /marketplace_listing/stubbed/plans" ] ,
listReposAccessibleToInstallation : [ "GET /installation/repositories" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listSubscriptionsForAuthenticatedUser : [ "GET /user/marketplace_purchases" ] ,
listSubscriptionsForAuthenticatedUserStubbed : [ "GET /user/marketplace_purchases/stubbed" ] ,
removeRepoFromInstallation : [ "DELETE /user/installations/{installation_id}/repositories/{repository_id}" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
resetToken : [ "PATCH /applications/{client_id}/token" ] ,
revokeInstallationAccessToken : [ "DELETE /installation/token" ] ,
suspendInstallation : [ "PUT /app/installations/{installation_id}/suspended" ] ,
unsuspendInstallation : [ "DELETE /app/installations/{installation_id}/suspended" ]
} ,
billing : {
getGithubActionsBillingOrg : [ "GET /orgs/{org}/settings/billing/actions" ] ,
getGithubActionsBillingUser : [ "GET /users/{username}/settings/billing/actions" ] ,
getGithubPackagesBillingOrg : [ "GET /orgs/{org}/settings/billing/packages" ] ,
getGithubPackagesBillingUser : [ "GET /users/{username}/settings/billing/packages" ] ,
getSharedStorageBillingOrg : [ "GET /orgs/{org}/settings/billing/shared-storage" ] ,
getSharedStorageBillingUser : [ "GET /users/{username}/settings/billing/shared-storage" ]
} ,
checks : {
create : [ "POST /repos/{owner}/{repo}/check-runs" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
createSuite : [ "POST /repos/{owner}/{repo}/check-suites" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
get : [ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
getSuite : [ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
listAnnotations : [ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
listForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/check-runs" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
listForSuite : [ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
listSuitesForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/check-suites" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
rerequestSuite : [ "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
setSuitesPreferences : [ "PATCH /repos/{owner}/{repo}/check-suites/preferences" , {
mediaType : {
previews : [ "antiope" ]
}
} ] ,
update : [ "PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}" , {
mediaType : {
previews : [ "antiope" ]
}
} ]
} ,
codeScanning : {
getAlert : [ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}" ] ,
listAlertsForRepo : [ "GET /repos/{owner}/{repo}/code-scanning/alerts" ]
} ,
codesOfConduct : {
getAllCodesOfConduct : [ "GET /codes_of_conduct" , {
mediaType : {
previews : [ "scarlet-witch" ]
}
} ] ,
getConductCode : [ "GET /codes_of_conduct/{key}" , {
mediaType : {
previews : [ "scarlet-witch" ]
}
} ] ,
getForRepo : [ "GET /repos/{owner}/{repo}/community/code_of_conduct" , {
mediaType : {
previews : [ "scarlet-witch" ]
}
} ]
} ,
emojis : {
get : [ "GET /emojis" ]
} ,
gists : {
checkIsStarred : [ "GET /gists/{gist_id}/star" ] ,
create : [ "POST /gists" ] ,
createComment : [ "POST /gists/{gist_id}/comments" ] ,
delete : [ "DELETE /gists/{gist_id}" ] ,
deleteComment : [ "DELETE /gists/{gist_id}/comments/{comment_id}" ] ,
fork : [ "POST /gists/{gist_id}/forks" ] ,
get : [ "GET /gists/{gist_id}" ] ,
getComment : [ "GET /gists/{gist_id}/comments/{comment_id}" ] ,
getRevision : [ "GET /gists/{gist_id}/{sha}" ] ,
list : [ "GET /gists" ] ,
listComments : [ "GET /gists/{gist_id}/comments" ] ,
listCommits : [ "GET /gists/{gist_id}/commits" ] ,
listForUser : [ "GET /users/{username}/gists" ] ,
listForks : [ "GET /gists/{gist_id}/forks" ] ,
listPublic : [ "GET /gists/public" ] ,
listStarred : [ "GET /gists/starred" ] ,
star : [ "PUT /gists/{gist_id}/star" ] ,
unstar : [ "DELETE /gists/{gist_id}/star" ] ,
update : [ "PATCH /gists/{gist_id}" ] ,
updateComment : [ "PATCH /gists/{gist_id}/comments/{comment_id}" ]
} ,
git : {
createBlob : [ "POST /repos/{owner}/{repo}/git/blobs" ] ,
createCommit : [ "POST /repos/{owner}/{repo}/git/commits" ] ,
createRef : [ "POST /repos/{owner}/{repo}/git/refs" ] ,
createTag : [ "POST /repos/{owner}/{repo}/git/tags" ] ,
createTree : [ "POST /repos/{owner}/{repo}/git/trees" ] ,
deleteRef : [ "DELETE /repos/{owner}/{repo}/git/refs/{ref}" ] ,
getBlob : [ "GET /repos/{owner}/{repo}/git/blobs/{file_sha}" ] ,
getCommit : [ "GET /repos/{owner}/{repo}/git/commits/{commit_sha}" ] ,
getRef : [ "GET /repos/{owner}/{repo}/git/ref/{ref}" ] ,
getTag : [ "GET /repos/{owner}/{repo}/git/tags/{tag_sha}" ] ,
getTree : [ "GET /repos/{owner}/{repo}/git/trees/{tree_sha}" ] ,
listMatchingRefs : [ "GET /repos/{owner}/{repo}/git/matching-refs/{ref}" ] ,
updateRef : [ "PATCH /repos/{owner}/{repo}/git/refs/{ref}" ]
} ,
gitignore : {
getAllTemplates : [ "GET /gitignore/templates" ] ,
getTemplate : [ "GET /gitignore/templates/{name}" ]
} ,
interactions : {
getRestrictionsForOrg : [ "GET /orgs/{org}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
} ] ,
getRestrictionsForRepo : [ "GET /repos/{owner}/{repo}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
} ] ,
removeRestrictionsForOrg : [ "DELETE /orgs/{org}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
} ] ,
removeRestrictionsForRepo : [ "DELETE /repos/{owner}/{repo}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
} ] ,
setRestrictionsForOrg : [ "PUT /orgs/{org}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
} ] ,
setRestrictionsForRepo : [ "PUT /repos/{owner}/{repo}/interaction-limits" , {
mediaType : {
previews : [ "sombra" ]
}
} ]
} ,
issues : {
addAssignees : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" ] ,
addLabels : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
checkUserCanBeAssigned : [ "GET /repos/{owner}/{repo}/assignees/{assignee}" ] ,
create : [ "POST /repos/{owner}/{repo}/issues" ] ,
createComment : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" ] ,
createLabel : [ "POST /repos/{owner}/{repo}/labels" ] ,
createMilestone : [ "POST /repos/{owner}/{repo}/milestones" ] ,
deleteComment : [ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
deleteLabel : [ "DELETE /repos/{owner}/{repo}/labels/{name}" ] ,
deleteMilestone : [ "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" ] ,
get : [ "GET /repos/{owner}/{repo}/issues/{issue_number}" ] ,
getComment : [ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
getEvent : [ "GET /repos/{owner}/{repo}/issues/events/{event_id}" ] ,
getLabel : [ "GET /repos/{owner}/{repo}/labels/{name}" ] ,
getMilestone : [ "GET /repos/{owner}/{repo}/milestones/{milestone_number}" ] ,
list : [ "GET /issues" ] ,
listAssignees : [ "GET /repos/{owner}/{repo}/assignees" ] ,
listComments : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/comments" ] ,
listCommentsForRepo : [ "GET /repos/{owner}/{repo}/issues/comments" ] ,
listEvents : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/events" ] ,
listEventsForRepo : [ "GET /repos/{owner}/{repo}/issues/events" ] ,
listEventsForTimeline : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" , {
mediaType : {
previews : [ "mockingbird" ]
}
} ] ,
listForAuthenticatedUser : [ "GET /user/issues" ] ,
listForOrg : [ "GET /orgs/{org}/issues" ] ,
listForRepo : [ "GET /repos/{owner}/{repo}/issues" ] ,
listLabelsForMilestone : [ "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" ] ,
listLabelsForRepo : [ "GET /repos/{owner}/{repo}/labels" ] ,
listLabelsOnIssue : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
listMilestones : [ "GET /repos/{owner}/{repo}/milestones" ] ,
lock : [ "PUT /repos/{owner}/{repo}/issues/{issue_number}/lock" ] ,
removeAllLabels : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
removeAssignees : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" ] ,
removeLabel : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" ] ,
setLabels : [ "PUT /repos/{owner}/{repo}/issues/{issue_number}/labels" ] ,
unlock : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock" ] ,
update : [ "PATCH /repos/{owner}/{repo}/issues/{issue_number}" ] ,
updateComment : [ "PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}" ] ,
updateLabel : [ "PATCH /repos/{owner}/{repo}/labels/{name}" ] ,
updateMilestone : [ "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" ]
} ,
licenses : {
get : [ "GET /licenses/{license}" ] ,
getAllCommonlyUsed : [ "GET /licenses" ] ,
getForRepo : [ "GET /repos/{owner}/{repo}/license" ]
} ,
markdown : {
render : [ "POST /markdown" ] ,
renderRaw : [ "POST /markdown/raw" , {
headers : {
"content-type" : "text/plain; charset=utf-8"
}
} ]
} ,
meta : {
get : [ "GET /meta" ]
} ,
migrations : {
cancelImport : [ "DELETE /repos/{owner}/{repo}/import" ] ,
deleteArchiveForAuthenticatedUser : [ "DELETE /user/migrations/{migration_id}/archive" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
deleteArchiveForOrg : [ "DELETE /orgs/{org}/migrations/{migration_id}/archive" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
downloadArchiveForOrg : [ "GET /orgs/{org}/migrations/{migration_id}/archive" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
getArchiveForAuthenticatedUser : [ "GET /user/migrations/{migration_id}/archive" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
getCommitAuthors : [ "GET /repos/{owner}/{repo}/import/authors" ] ,
getImportStatus : [ "GET /repos/{owner}/{repo}/import" ] ,
getLargeFiles : [ "GET /repos/{owner}/{repo}/import/large_files" ] ,
getStatusForAuthenticatedUser : [ "GET /user/migrations/{migration_id}" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
getStatusForOrg : [ "GET /orgs/{org}/migrations/{migration_id}" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
listForAuthenticatedUser : [ "GET /user/migrations" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
listForOrg : [ "GET /orgs/{org}/migrations" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
listReposForOrg : [ "GET /orgs/{org}/migrations/{migration_id}/repositories" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
listReposForUser : [ "GET /user/migrations/{migration_id}/repositories" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
mapCommitAuthor : [ "PATCH /repos/{owner}/{repo}/import/authors/{author_id}" ] ,
setLfsPreference : [ "PATCH /repos/{owner}/{repo}/import/lfs" ] ,
startForAuthenticatedUser : [ "POST /user/migrations" ] ,
startForOrg : [ "POST /orgs/{org}/migrations" ] ,
startImport : [ "PUT /repos/{owner}/{repo}/import" ] ,
unlockRepoForAuthenticatedUser : [ "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
unlockRepoForOrg : [ "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" , {
mediaType : {
previews : [ "wyandotte" ]
}
} ] ,
updateImport : [ "PATCH /repos/{owner}/{repo}/import" ]
} ,
orgs : {
blockUser : [ "PUT /orgs/{org}/blocks/{username}" ] ,
checkBlockedUser : [ "GET /orgs/{org}/blocks/{username}" ] ,
checkMembershipForUser : [ "GET /orgs/{org}/members/{username}" ] ,
checkPublicMembershipForUser : [ "GET /orgs/{org}/public_members/{username}" ] ,
convertMemberToOutsideCollaborator : [ "PUT /orgs/{org}/outside_collaborators/{username}" ] ,
createInvitation : [ "POST /orgs/{org}/invitations" ] ,
createWebhook : [ "POST /orgs/{org}/hooks" ] ,
deleteWebhook : [ "DELETE /orgs/{org}/hooks/{hook_id}" ] ,
get : [ "GET /orgs/{org}" ] ,
getMembershipForAuthenticatedUser : [ "GET /user/memberships/orgs/{org}" ] ,
getMembershipForUser : [ "GET /orgs/{org}/memberships/{username}" ] ,
getWebhook : [ "GET /orgs/{org}/hooks/{hook_id}" ] ,
list : [ "GET /organizations" ] ,
listAppInstallations : [ "GET /orgs/{org}/installations" , {
mediaType : {
previews : [ "machine-man" ]
}
} ] ,
listBlockedUsers : [ "GET /orgs/{org}/blocks" ] ,
listForAuthenticatedUser : [ "GET /user/orgs" ] ,
listForUser : [ "GET /users/{username}/orgs" ] ,
listInvitationTeams : [ "GET /orgs/{org}/invitations/{invitation_id}/teams" ] ,
listMembers : [ "GET /orgs/{org}/members" ] ,
listMembershipsForAuthenticatedUser : [ "GET /user/memberships/orgs" ] ,
listOutsideCollaborators : [ "GET /orgs/{org}/outside_collaborators" ] ,
listPendingInvitations : [ "GET /orgs/{org}/invitations" ] ,
listPublicMembers : [ "GET /orgs/{org}/public_members" ] ,
listWebhooks : [ "GET /orgs/{org}/hooks" ] ,
pingWebhook : [ "POST /orgs/{org}/hooks/{hook_id}/pings" ] ,
removeMember : [ "DELETE /orgs/{org}/members/{username}" ] ,
removeMembershipForUser : [ "DELETE /orgs/{org}/memberships/{username}" ] ,
removeOutsideCollaborator : [ "DELETE /orgs/{org}/outside_collaborators/{username}" ] ,
removePublicMembershipForAuthenticatedUser : [ "DELETE /orgs/{org}/public_members/{username}" ] ,
setMembershipForUser : [ "PUT /orgs/{org}/memberships/{username}" ] ,
setPublicMembershipForAuthenticatedUser : [ "PUT /orgs/{org}/public_members/{username}" ] ,
unblockUser : [ "DELETE /orgs/{org}/blocks/{username}" ] ,
update : [ "PATCH /orgs/{org}" ] ,
updateMembershipForAuthenticatedUser : [ "PATCH /user/memberships/orgs/{org}" ] ,
updateWebhook : [ "PATCH /orgs/{org}/hooks/{hook_id}" ]
} ,
projects : {
addCollaborator : [ "PUT /projects/{project_id}/collaborators/{username}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createCard : [ "POST /projects/columns/{column_id}/cards" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createColumn : [ "POST /projects/{project_id}/columns" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createForAuthenticatedUser : [ "POST /user/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createForOrg : [ "POST /orgs/{org}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
createForRepo : [ "POST /repos/{owner}/{repo}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
delete : [ "DELETE /projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
deleteCard : [ "DELETE /projects/columns/cards/{card_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
deleteColumn : [ "DELETE /projects/columns/{column_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
get : [ "GET /projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
getCard : [ "GET /projects/columns/cards/{card_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
getColumn : [ "GET /projects/columns/{column_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
getPermissionForUser : [ "GET /projects/{project_id}/collaborators/{username}/permission" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listCards : [ "GET /projects/columns/{column_id}/cards" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listCollaborators : [ "GET /projects/{project_id}/collaborators" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listColumns : [ "GET /projects/{project_id}/columns" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listForOrg : [ "GET /orgs/{org}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listForRepo : [ "GET /repos/{owner}/{repo}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listForUser : [ "GET /users/{username}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
moveCard : [ "POST /projects/columns/cards/{card_id}/moves" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
moveColumn : [ "POST /projects/columns/{column_id}/moves" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
removeCollaborator : [ "DELETE /projects/{project_id}/collaborators/{username}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
update : [ "PATCH /projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
updateCard : [ "PATCH /projects/columns/cards/{card_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
updateColumn : [ "PATCH /projects/columns/{column_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ]
} ,
pulls : {
checkIfMerged : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/merge" ] ,
create : [ "POST /repos/{owner}/{repo}/pulls" ] ,
createReplyForReviewComment : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" ] ,
createReview : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews" ] ,
createReviewComment : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" ] ,
deletePendingReview : [ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ] ,
deleteReviewComment : [ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" ] ,
dismissReview : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" ] ,
get : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}" ] ,
getReview : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ] ,
getReviewComment : [ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}" ] ,
list : [ "GET /repos/{owner}/{repo}/pulls" ] ,
listCommentsForReview : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" ] ,
listCommits : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits" ] ,
listFiles : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/files" ] ,
listRequestedReviewers : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ] ,
listReviewComments : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" ] ,
listReviewCommentsForRepo : [ "GET /repos/{owner}/{repo}/pulls/comments" ] ,
listReviews : [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews" ] ,
merge : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge" ] ,
removeRequestedReviewers : [ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ] ,
requestReviewers : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ] ,
submitReview : [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" ] ,
update : [ "PATCH /repos/{owner}/{repo}/pulls/{pull_number}" ] ,
updateBranch : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" , {
mediaType : {
previews : [ "lydian" ]
}
} ] ,
updateReview : [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ] ,
updateReviewComment : [ "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" ]
} ,
rateLimit : {
get : [ "GET /rate_limit" ]
} ,
reactions : {
createForCommitComment : [ "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForIssue : [ "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForIssueComment : [ "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForPullRequestReviewComment : [ "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForTeamDiscussionCommentInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
createForTeamDiscussionInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForCommitComment : [ "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForIssue : [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForIssueComment : [ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForPullRequestComment : [ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForTeamDiscussion : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteForTeamDiscussionComment : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
deleteLegacy : [ "DELETE /reactions/{reaction_id}" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} , {
deprecated : "octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy"
} ] ,
listForCommitComment : [ "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForIssue : [ "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForIssueComment : [ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForPullRequestReviewComment : [ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForTeamDiscussionCommentInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ] ,
listForTeamDiscussionInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" , {
mediaType : {
previews : [ "squirrel-girl" ]
}
} ]
} ,
repos : {
acceptInvitation : [ "PATCH /user/repository_invitations/{invitation_id}" ] ,
addAppAccessRestrictions : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" , { } , {
mapToData : "apps"
} ] ,
addCollaborator : [ "PUT /repos/{owner}/{repo}/collaborators/{username}" ] ,
addStatusCheckContexts : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" , { } , {
mapToData : "contexts"
} ] ,
addTeamAccessRestrictions : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" , { } , {
mapToData : "teams"
} ] ,
addUserAccessRestrictions : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" , { } , {
mapToData : "users"
} ] ,
checkCollaborator : [ "GET /repos/{owner}/{repo}/collaborators/{username}" ] ,
checkVulnerabilityAlerts : [ "GET /repos/{owner}/{repo}/vulnerability-alerts" , {
mediaType : {
previews : [ "dorian" ]
}
} ] ,
compareCommits : [ "GET /repos/{owner}/{repo}/compare/{base}...{head}" ] ,
createCommitComment : [ "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" ] ,
createCommitSignatureProtection : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" , {
mediaType : {
previews : [ "zzzax" ]
}
} ] ,
createCommitStatus : [ "POST /repos/{owner}/{repo}/statuses/{sha}" ] ,
createDeployKey : [ "POST /repos/{owner}/{repo}/keys" ] ,
createDeployment : [ "POST /repos/{owner}/{repo}/deployments" ] ,
createDeploymentStatus : [ "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ] ,
createDispatchEvent : [ "POST /repos/{owner}/{repo}/dispatches" ] ,
createForAuthenticatedUser : [ "POST /user/repos" ] ,
createFork : [ "POST /repos/{owner}/{repo}/forks" ] ,
createInOrg : [ "POST /orgs/{org}/repos" ] ,
createOrUpdateFileContents : [ "PUT /repos/{owner}/{repo}/contents/{path}" ] ,
createPagesSite : [ "POST /repos/{owner}/{repo}/pages" , {
mediaType : {
previews : [ "switcheroo" ]
}
} ] ,
createRelease : [ "POST /repos/{owner}/{repo}/releases" ] ,
createUsingTemplate : [ "POST /repos/{template_owner}/{template_repo}/generate" , {
mediaType : {
previews : [ "baptiste" ]
}
} ] ,
createWebhook : [ "POST /repos/{owner}/{repo}/hooks" ] ,
declineInvitation : [ "DELETE /user/repository_invitations/{invitation_id}" ] ,
delete : [ "DELETE /repos/{owner}/{repo}" ] ,
deleteAccessRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" ] ,
deleteAdminBranchProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ] ,
deleteBranchProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" ] ,
deleteCommitComment : [ "DELETE /repos/{owner}/{repo}/comments/{comment_id}" ] ,
deleteCommitSignatureProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" , {
mediaType : {
previews : [ "zzzax" ]
}
} ] ,
deleteDeployKey : [ "DELETE /repos/{owner}/{repo}/keys/{key_id}" ] ,
deleteDeployment : [ "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" ] ,
deleteFile : [ "DELETE /repos/{owner}/{repo}/contents/{path}" ] ,
deleteInvitation : [ "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" ] ,
deletePagesSite : [ "DELETE /repos/{owner}/{repo}/pages" , {
mediaType : {
previews : [ "switcheroo" ]
}
} ] ,
deletePullRequestReviewProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ] ,
deleteRelease : [ "DELETE /repos/{owner}/{repo}/releases/{release_id}" ] ,
deleteReleaseAsset : [ "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" ] ,
deleteWebhook : [ "DELETE /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
disableAutomatedSecurityFixes : [ "DELETE /repos/{owner}/{repo}/automated-security-fixes" , {
mediaType : {
previews : [ "london" ]
}
} ] ,
disableVulnerabilityAlerts : [ "DELETE /repos/{owner}/{repo}/vulnerability-alerts" , {
mediaType : {
previews : [ "dorian" ]
}
} ] ,
downloadArchive : [ "GET /repos/{owner}/{repo}/{archive_format}/{ref}" ] ,
enableAutomatedSecurityFixes : [ "PUT /repos/{owner}/{repo}/automated-security-fixes" , {
mediaType : {
previews : [ "london" ]
}
} ] ,
enableVulnerabilityAlerts : [ "PUT /repos/{owner}/{repo}/vulnerability-alerts" , {
mediaType : {
previews : [ "dorian" ]
}
} ] ,
get : [ "GET /repos/{owner}/{repo}" ] ,
getAccessRestrictions : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" ] ,
getAdminBranchProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ] ,
getAllStatusCheckContexts : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" ] ,
getAllTopics : [ "GET /repos/{owner}/{repo}/topics" , {
mediaType : {
previews : [ "mercy" ]
}
} ] ,
getAppsWithAccessToProtectedBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" ] ,
getBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}" ] ,
getBranchProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection" ] ,
getClones : [ "GET /repos/{owner}/{repo}/traffic/clones" ] ,
getCodeFrequencyStats : [ "GET /repos/{owner}/{repo}/stats/code_frequency" ] ,
getCollaboratorPermissionLevel : [ "GET /repos/{owner}/{repo}/collaborators/{username}/permission" ] ,
getCombinedStatusForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/status" ] ,
getCommit : [ "GET /repos/{owner}/{repo}/commits/{ref}" ] ,
getCommitActivityStats : [ "GET /repos/{owner}/{repo}/stats/commit_activity" ] ,
getCommitComment : [ "GET /repos/{owner}/{repo}/comments/{comment_id}" ] ,
getCommitSignatureProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" , {
mediaType : {
previews : [ "zzzax" ]
}
} ] ,
getCommunityProfileMetrics : [ "GET /repos/{owner}/{repo}/community/profile" , {
mediaType : {
previews : [ "black-panther" ]
}
} ] ,
getContent : [ "GET /repos/{owner}/{repo}/contents/{path}" ] ,
getContributorsStats : [ "GET /repos/{owner}/{repo}/stats/contributors" ] ,
getDeployKey : [ "GET /repos/{owner}/{repo}/keys/{key_id}" ] ,
getDeployment : [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}" ] ,
getDeploymentStatus : [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" ] ,
getLatestPagesBuild : [ "GET /repos/{owner}/{repo}/pages/builds/latest" ] ,
getLatestRelease : [ "GET /repos/{owner}/{repo}/releases/latest" ] ,
getPages : [ "GET /repos/{owner}/{repo}/pages" ] ,
getPagesBuild : [ "GET /repos/{owner}/{repo}/pages/builds/{build_id}" ] ,
getParticipationStats : [ "GET /repos/{owner}/{repo}/stats/participation" ] ,
getPullRequestReviewProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ] ,
getPunchCardStats : [ "GET /repos/{owner}/{repo}/stats/punch_card" ] ,
getReadme : [ "GET /repos/{owner}/{repo}/readme" ] ,
getRelease : [ "GET /repos/{owner}/{repo}/releases/{release_id}" ] ,
getReleaseAsset : [ "GET /repos/{owner}/{repo}/releases/assets/{asset_id}" ] ,
getReleaseByTag : [ "GET /repos/{owner}/{repo}/releases/tags/{tag}" ] ,
getStatusChecksProtection : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ] ,
getTeamsWithAccessToProtectedBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" ] ,
getTopPaths : [ "GET /repos/{owner}/{repo}/traffic/popular/paths" ] ,
getTopReferrers : [ "GET /repos/{owner}/{repo}/traffic/popular/referrers" ] ,
getUsersWithAccessToProtectedBranch : [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" ] ,
getViews : [ "GET /repos/{owner}/{repo}/traffic/views" ] ,
getWebhook : [ "GET /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
listBranches : [ "GET /repos/{owner}/{repo}/branches" ] ,
listBranchesForHeadCommit : [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" , {
mediaType : {
previews : [ "groot" ]
}
} ] ,
listCollaborators : [ "GET /repos/{owner}/{repo}/collaborators" ] ,
listCommentsForCommit : [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" ] ,
listCommitCommentsForRepo : [ "GET /repos/{owner}/{repo}/comments" ] ,
listCommitStatusesForRef : [ "GET /repos/{owner}/{repo}/commits/{ref}/statuses" ] ,
listCommits : [ "GET /repos/{owner}/{repo}/commits" ] ,
listContributors : [ "GET /repos/{owner}/{repo}/contributors" ] ,
listDeployKeys : [ "GET /repos/{owner}/{repo}/keys" ] ,
listDeploymentStatuses : [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ] ,
listDeployments : [ "GET /repos/{owner}/{repo}/deployments" ] ,
listForAuthenticatedUser : [ "GET /user/repos" ] ,
listForOrg : [ "GET /orgs/{org}/repos" ] ,
listForUser : [ "GET /users/{username}/repos" ] ,
listForks : [ "GET /repos/{owner}/{repo}/forks" ] ,
listInvitations : [ "GET /repos/{owner}/{repo}/invitations" ] ,
listInvitationsForAuthenticatedUser : [ "GET /user/repository_invitations" ] ,
listLanguages : [ "GET /repos/{owner}/{repo}/languages" ] ,
listPagesBuilds : [ "GET /repos/{owner}/{repo}/pages/builds" ] ,
listPublic : [ "GET /repositories" ] ,
listPullRequestsAssociatedWithCommit : [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" , {
mediaType : {
previews : [ "groot" ]
}
} ] ,
listReleaseAssets : [ "GET /repos/{owner}/{repo}/releases/{release_id}/assets" ] ,
listReleases : [ "GET /repos/{owner}/{repo}/releases" ] ,
listTags : [ "GET /repos/{owner}/{repo}/tags" ] ,
listTeams : [ "GET /repos/{owner}/{repo}/teams" ] ,
listWebhooks : [ "GET /repos/{owner}/{repo}/hooks" ] ,
merge : [ "POST /repos/{owner}/{repo}/merges" ] ,
pingWebhook : [ "POST /repos/{owner}/{repo}/hooks/{hook_id}/pings" ] ,
removeAppAccessRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" , { } , {
mapToData : "apps"
} ] ,
removeCollaborator : [ "DELETE /repos/{owner}/{repo}/collaborators/{username}" ] ,
removeStatusCheckContexts : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" , { } , {
mapToData : "contexts"
} ] ,
removeStatusCheckProtection : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ] ,
removeTeamAccessRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" , { } , {
mapToData : "teams"
} ] ,
removeUserAccessRestrictions : [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" , { } , {
mapToData : "users"
} ] ,
replaceAllTopics : [ "PUT /repos/{owner}/{repo}/topics" , {
mediaType : {
previews : [ "mercy" ]
}
} ] ,
requestPagesBuild : [ "POST /repos/{owner}/{repo}/pages/builds" ] ,
setAdminBranchProtection : [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ] ,
setAppAccessRestrictions : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" , { } , {
mapToData : "apps"
} ] ,
setStatusCheckContexts : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" , { } , {
mapToData : "contexts"
} ] ,
setTeamAccessRestrictions : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" , { } , {
mapToData : "teams"
} ] ,
setUserAccessRestrictions : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" , { } , {
mapToData : "users"
} ] ,
testPushWebhook : [ "POST /repos/{owner}/{repo}/hooks/{hook_id}/tests" ] ,
transfer : [ "POST /repos/{owner}/{repo}/transfer" ] ,
update : [ "PATCH /repos/{owner}/{repo}" ] ,
updateBranchProtection : [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection" ] ,
updateCommitComment : [ "PATCH /repos/{owner}/{repo}/comments/{comment_id}" ] ,
updateInformationAboutPagesSite : [ "PUT /repos/{owner}/{repo}/pages" ] ,
updateInvitation : [ "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" ] ,
updatePullRequestReviewProtection : [ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ] ,
updateRelease : [ "PATCH /repos/{owner}/{repo}/releases/{release_id}" ] ,
updateReleaseAsset : [ "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" ] ,
updateStatusCheckPotection : [ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ] ,
updateWebhook : [ "PATCH /repos/{owner}/{repo}/hooks/{hook_id}" ] ,
uploadReleaseAsset : [ "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}" , {
baseUrl : "https://uploads.github.com"
} ]
} ,
search : {
code : [ "GET /search/code" ] ,
commits : [ "GET /search/commits" , {
mediaType : {
previews : [ "cloak" ]
}
} ] ,
issuesAndPullRequests : [ "GET /search/issues" ] ,
labels : [ "GET /search/labels" ] ,
repos : [ "GET /search/repositories" ] ,
topics : [ "GET /search/topics" , {
mediaType : {
previews : [ "mercy" ]
}
} ] ,
users : [ "GET /search/users" ]
} ,
teams : {
addOrUpdateMembershipForUserInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" ] ,
addOrUpdateProjectPermissionsInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
addOrUpdateRepoPermissionsInOrg : [ "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ] ,
checkPermissionsForProjectInOrg : [ "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
checkPermissionsForRepoInOrg : [ "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ] ,
create : [ "POST /orgs/{org}/teams" ] ,
createDiscussionCommentInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ] ,
createDiscussionInOrg : [ "POST /orgs/{org}/teams/{team_slug}/discussions" ] ,
deleteDiscussionCommentInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ] ,
deleteDiscussionInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ] ,
deleteInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}" ] ,
getByName : [ "GET /orgs/{org}/teams/{team_slug}" ] ,
getDiscussionCommentInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ] ,
getDiscussionInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ] ,
getMembershipForUserInOrg : [ "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" ] ,
list : [ "GET /orgs/{org}/teams" ] ,
listChildInOrg : [ "GET /orgs/{org}/teams/{team_slug}/teams" ] ,
listDiscussionCommentsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ] ,
listDiscussionsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/discussions" ] ,
listForAuthenticatedUser : [ "GET /user/teams" ] ,
listMembersInOrg : [ "GET /orgs/{org}/teams/{team_slug}/members" ] ,
listPendingInvitationsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/invitations" ] ,
listProjectsInOrg : [ "GET /orgs/{org}/teams/{team_slug}/projects" , {
mediaType : {
previews : [ "inertia" ]
}
} ] ,
listReposInOrg : [ "GET /orgs/{org}/teams/{team_slug}/repos" ] ,
removeMembershipForUserInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" ] ,
removeProjectInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" ] ,
removeRepoInOrg : [ "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ] ,
updateDiscussionCommentInOrg : [ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ] ,
updateDiscussionInOrg : [ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ] ,
updateInOrg : [ "PATCH /orgs/{org}/teams/{team_slug}" ]
} ,
users : {
addEmailForAuthenticated : [ "POST /user/emails" ] ,
block : [ "PUT /user/blocks/{username}" ] ,
checkBlocked : [ "GET /user/blocks/{username}" ] ,
checkFollowingForUser : [ "GET /users/{username}/following/{target_user}" ] ,
checkPersonIsFollowedByAuthenticated : [ "GET /user/following/{username}" ] ,
createGpgKeyForAuthenticated : [ "POST /user/gpg_keys" ] ,
createPublicSshKeyForAuthenticated : [ "POST /user/keys" ] ,
deleteEmailForAuthenticated : [ "DELETE /user/emails" ] ,
deleteGpgKeyForAuthenticated : [ "DELETE /user/gpg_keys/{gpg_key_id}" ] ,
deletePublicSshKeyForAuthenticated : [ "DELETE /user/keys/{key_id}" ] ,
follow : [ "PUT /user/following/{username}" ] ,
getAuthenticated : [ "GET /user" ] ,
getByUsername : [ "GET /users/{username}" ] ,
getContextForUser : [ "GET /users/{username}/hovercard" ] ,
getGpgKeyForAuthenticated : [ "GET /user/gpg_keys/{gpg_key_id}" ] ,
getPublicSshKeyForAuthenticated : [ "GET /user/keys/{key_id}" ] ,
list : [ "GET /users" ] ,
listBlockedByAuthenticated : [ "GET /user/blocks" ] ,
listEmailsForAuthenticated : [ "GET /user/emails" ] ,
listFollowedByAuthenticated : [ "GET /user/following" ] ,
listFollowersForAuthenticatedUser : [ "GET /user/followers" ] ,
listFollowersForUser : [ "GET /users/{username}/followers" ] ,
listFollowingForUser : [ "GET /users/{username}/following" ] ,
listGpgKeysForAuthenticated : [ "GET /user/gpg_keys" ] ,
listGpgKeysForUser : [ "GET /users/{username}/gpg_keys" ] ,
listPublicEmailsForAuthenticated : [ "GET /user/public_emails" ] ,
listPublicKeysForUser : [ "GET /users/{username}/keys" ] ,
listPublicSshKeysForAuthenticated : [ "GET /user/keys" ] ,
setPrimaryEmailVisibilityForAuthenticated : [ "PATCH /user/email/visibility" ] ,
unblock : [ "DELETE /user/blocks/{username}" ] ,
unfollow : [ "DELETE /user/following/{username}" ] ,
updateAuthenticated : [ "PATCH /user" ]
}
2020-08-21 13:39:42 +02:00
} ;
// Version of the bundled @octokit/plugin-rest-endpoint-methods plugin.
const VERSION = "4.1.3";
/**
 * Build the `octokit.<scope>.<method>()` tree from an endpoints map.
 *
 * @param {object} octokit - Octokit instance providing `request.defaults()` and `log`.
 * @param {object} endpointsMap - `{ scope: { methodName: [route, defaults?, decorations?] } }`.
 * @returns {object} nested map: scope -> methodName -> request function.
 */
function endpointsToMethods(octokit, endpointsMap) {
  const newMethods = {};
  for (const [scope, endpoints] of Object.entries(endpointsMap)) {
    for (const [methodName, endpoint] of Object.entries(endpoints)) {
      const [route, defaults, decorations] = endpoint;
      // route is e.g. "GET /repos/{owner}/{repo}"
      const [method, url] = route.split(/ /);
      const endpointDefaults = Object.assign({
        method,
        url
      }, defaults);
      if (!newMethods[scope]) {
        newMethods[scope] = {};
      }
      const scopeMethods = newMethods[scope];
      if (decorations) {
        // Decorated endpoints (mapToData / renamed / deprecated / renamedParameters)
        // need a wrapper around the plain request function.
        scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);
        continue;
      }
      scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);
    }
  }
  return newMethods;
}
/**
 * Wrap a request method so endpoint decorations are applied on each call:
 * - `mapToData`: move the named parameter into the request body as `data`;
 * - `renamed` / `deprecated`: emit a warning via `octokit.log.warn`;
 * - `renamedParameters`: alias deprecated parameter names, with a warning.
 *
 * @returns {Function} request function with the defaulted request's own
 *   properties (e.g. `endpoint`) copied onto it.
 */
function decorate(octokit, scope, methodName, defaults, decorations) {
  const requestWithDefaults = octokit.request.defaults(defaults);
  /* istanbul ignore next */
  function withDecorations(...args) {
    // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`
    if (decorations.mapToData) {
      // Move the mapped parameter into `data` and blank out the original key.
      options = Object.assign({}, options, {
        data: options[decorations.mapToData],
        [decorations.mapToData]: undefined
      });
      return requestWithDefaults(options);
    }
    if (decorations.renamed) {
      const [newScope, newMethodName] = decorations.renamed;
      octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
    }
    if (decorations.deprecated) {
      octokit.log.warn(decorations.deprecated);
    }
    if (decorations.renamedParameters) {
      // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
      const options = requestWithDefaults.endpoint.merge(...args);
      for (const [name, alias] of Object.entries(decorations.renamedParameters)) {
        if (name in options) {
          octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`);
          if (!(alias in options)) {
            options[alias] = options[name];
          }
          delete options[name];
        }
      }
      return requestWithDefaults(options);
    } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    return requestWithDefaults(...args);
  }
  return Object.assign(withDecorations, requestWithDefaults);
}
/**
 * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary
 * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is
 * done, we will remove the registerEndpoints methods and return the methods
 * directly as with the other plugins. At that point we will also remove the
 * legacy workarounds and deprecations.
 *
 * See the plan at
 * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1
 */
/**
 * Octokit plugin entry point: builds and returns the full REST endpoint
 * method tree for the given Octokit instance.
 */
function restEndpointMethods(octokit) {
  const api = endpointsToMethods(octokit, Endpoints);
  return api;
}
restEndpointMethods.VERSION = VERSION;
exports.restEndpointMethods = restEndpointMethods;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 53 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . Context = void 0 ;
const fs _1 = _ _webpack _require _ _ ( 747 ) ;
const os _1 = _ _webpack _require _ _ ( 87 ) ;
/**
 * GitHub Actions workflow context, hydrated from the runner environment
 * (GITHUB_* environment variables and the webhook event payload file).
 */
class Context {
  /**
   * Hydrate the context from the environment.
   */
  constructor() {
    // Webhook payload; stays an empty object when no event file is available.
    this.payload = {};
    if (process.env.GITHUB_EVENT_PATH) {
      if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
        this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
      }
      else {
        // Best effort: warn on stdout and continue with an empty payload
        // rather than failing the whole action.
        const path = process.env.GITHUB_EVENT_PATH;
        process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
      }
    }
    this.eventName = process.env.GITHUB_EVENT_NAME;
    this.sha = process.env.GITHUB_SHA;
    this.ref = process.env.GITHUB_REF;
    this.workflow = process.env.GITHUB_WORKFLOW;
    this.action = process.env.GITHUB_ACTION;
    this.actor = process.env.GITHUB_ACTOR;
    this.job = process.env.GITHUB_JOB;
    // Run counters are numeric strings in the environment.
    this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
    this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
  }
  /**
   * Issue/PR coordinates: `repo` merged with the number taken from the
   * payload's issue, pull request, or the payload itself.
   */
  get issue() {
    const payload = this.payload;
    return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
  }
  /**
   * Repository owner and name, from GITHUB_REPOSITORY ("owner/repo") or,
   * failing that, from the event payload.
   * @throws {Error} when neither source is available.
   */
  get repo() {
    if (process.env.GITHUB_REPOSITORY) {
      const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
      return { owner, repo };
    }
    if (this.payload.repository) {
      return {
        owner: this.payload.repository.owner.login,
        repo: this.payload.repository.name
      };
    }
    throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
  }
}
exports . Context = Context ;
//# sourceMappingURL=context.js.map
/***/ } ) ,
/***/ 55 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const Range = _ _webpack _require _ _ ( 828 )
// True when `version` satisfies `range`; an unparseable range never
// satisfies anything (parse errors are swallowed and yield false).
const satisfies = (version, range, options) => {
  let parsedRange;
  try {
    parsedRange = new Range(range, options);
  } catch (err) {
    return false;
  }
  return parsedRange.test(version);
};
module . exports = satisfies
/***/ } ) ,
/***/ 87 :
/***/ ( function ( module ) {
module . exports = require ( "os" ) ;
/***/ } ) ,
/***/ 88 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const debug = _ _webpack _require _ _ ( 427 )
const { MAX _LENGTH , MAX _SAFE _INTEGER } = _ _webpack _require _ _ ( 293 )
const { re , t } = _ _webpack _require _ _ ( 523 )
const { compareIdentifiers } = _ _webpack _require _ _ ( 463 )
/**
 * A parsed semantic version (node-semver's SemVer class).
 *
 * Parses `version` against the strict or loose grammar (per `options.loose`),
 * exposes numeric `major`/`minor`/`patch`, `prerelease` and `build` arrays,
 * and provides comparison (`compare`, `compareMain`, `comparePre`,
 * `compareBuild`) and increment (`inc`) operations.
 *
 * @throws {TypeError} on non-string/non-SemVer input, over-long input, or
 *   input that does not match the version grammar.
 */
class SemVer {
  constructor(version, options) {
    if (!options || typeof options !== 'object') {
      // Legacy calling convention: a truthy non-object `options` means loose.
      options = {
        loose: !!options,
        includePrerelease: false
      }
    }
    if (version instanceof SemVer) {
      if (version.loose === !!options.loose &&
          version.includePrerelease === !!options.includePrerelease) {
        // Same parse options: reuse the existing instance as-is.
        return version
      } else {
        version = version.version
      }
    } else if (typeof version !== 'string') {
      throw new TypeError(`Invalid Version: ${version}`)
    }
    if (version.length > MAX_LENGTH) {
      throw new TypeError(
        `version is longer than ${MAX_LENGTH} characters`
      )
    }
    debug('SemVer', version, options)
    this.options = options
    this.loose = !!options.loose
    // this isn't actually relevant for versions, but keep it so that we
    // don't run into trouble passing this.options around.
    this.includePrerelease = !!options.includePrerelease
    const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])
    if (!m) {
      throw new TypeError(`Invalid Version: ${version}`)
    }
    this.raw = version
    // these are actually numbers
    this.major = +m[1]
    this.minor = +m[2]
    this.patch = +m[3]
    if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
      throw new TypeError('Invalid major version')
    }
    if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
      throw new TypeError('Invalid minor version')
    }
    if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
      throw new TypeError('Invalid patch version')
    }
    // numberify any prerelease numeric ids
    if (!m[4]) {
      this.prerelease = []
    } else {
      this.prerelease = m[4].split('.').map((id) => {
        if (/^[0-9]+$/.test(id)) {
          const num = +id
          if (num >= 0 && num < MAX_SAFE_INTEGER) {
            return num
          }
        }
        return id
      })
    }
    this.build = m[5] ? m[5].split('.') : []
    this.format()
  }

  // Recompute and return the canonical `version` string (no build metadata).
  format() {
    this.version = `${this.major}.${this.minor}.${this.patch}`
    if (this.prerelease.length) {
      this.version += `-${this.prerelease.join('.')}`
    }
    return this.version
  }

  toString() {
    return this.version
  }

  // Full comparison: main version first, then prerelease. Returns -1/0/1.
  compare(other) {
    debug('SemVer.compare', this.version, this.options, other)
    if (!(other instanceof SemVer)) {
      if (typeof other === 'string' && other === this.version) {
        return 0
      }
      other = new SemVer(other, this.options)
    }
    if (other.version === this.version) {
      return 0
    }
    return this.compareMain(other) || this.comparePre(other)
  }

  // Compare only major.minor.patch.
  compareMain(other) {
    if (!(other instanceof SemVer)) {
      other = new SemVer(other, this.options)
    }
    return (
      compareIdentifiers(this.major, other.major) ||
      compareIdentifiers(this.minor, other.minor) ||
      compareIdentifiers(this.patch, other.patch)
    )
  }

  // Compare prerelease identifier lists.
  comparePre(other) {
    if (!(other instanceof SemVer)) {
      other = new SemVer(other, this.options)
    }
    // NOT having a prerelease is > having one
    if (this.prerelease.length && !other.prerelease.length) {
      return -1
    } else if (!this.prerelease.length && other.prerelease.length) {
      return 1
    } else if (!this.prerelease.length && !other.prerelease.length) {
      return 0
    }
    let i = 0
    do {
      const a = this.prerelease[i]
      const b = other.prerelease[i]
      debug('prerelease compare', i, a, b)
      if (a === undefined && b === undefined) {
        return 0
      } else if (b === undefined) {
        return 1
      } else if (a === undefined) {
        return -1
      } else if (a === b) {
        continue
      } else {
        return compareIdentifiers(a, b)
      }
    } while (++i)
  }

  // Compare build identifier lists (same walk as comparePre).
  compareBuild(other) {
    if (!(other instanceof SemVer)) {
      other = new SemVer(other, this.options)
    }
    let i = 0
    do {
      const a = this.build[i]
      const b = other.build[i]
      debug('prerelease compare', i, a, b)
      if (a === undefined && b === undefined) {
        return 0
      } else if (b === undefined) {
        return 1
      } else if (a === undefined) {
        return -1
      } else if (a === b) {
        continue
      } else {
        return compareIdentifiers(a, b)
      }
    } while (++i)
  }

  // preminor will bump the version up to the next minor release, and immediately
  // down to pre-release. premajor and prepatch work the same way.
  inc(release, identifier) {
    switch (release) {
      case 'premajor':
        this.prerelease.length = 0
        this.patch = 0
        this.minor = 0
        this.major++
        this.inc('pre', identifier)
        break
      case 'preminor':
        this.prerelease.length = 0
        this.patch = 0
        this.minor++
        this.inc('pre', identifier)
        break
      case 'prepatch':
        // If this is already a prerelease, it will bump to the next version
        // drop any prereleases that might already exist, since they are not
        // relevant at this point.
        this.prerelease.length = 0
        this.inc('patch', identifier)
        this.inc('pre', identifier)
        break
      // If the input is a non-prerelease version, this acts the same as
      // prepatch.
      case 'prerelease':
        if (this.prerelease.length === 0) {
          this.inc('patch', identifier)
        }
        this.inc('pre', identifier)
        break
      case 'major':
        // If this is a pre-major version, bump up to the same major version.
        // Otherwise increment major.
        // 1.0.0-5 bumps to 1.0.0
        // 1.1.0 bumps to 2.0.0
        if (
          this.minor !== 0 ||
          this.patch !== 0 ||
          this.prerelease.length === 0
        ) {
          this.major++
        }
        this.minor = 0
        this.patch = 0
        this.prerelease = []
        break
      case 'minor':
        // If this is a pre-minor version, bump up to the same minor version.
        // Otherwise increment minor.
        // 1.2.0-5 bumps to 1.2.0
        // 1.2.1 bumps to 1.3.0
        if (this.patch !== 0 || this.prerelease.length === 0) {
          this.minor++
        }
        this.patch = 0
        this.prerelease = []
        break
      case 'patch':
        // If this is not a pre-release version, it will increment the patch.
        // If it is a pre-release it will bump up to the same patch version.
        // 1.2.0-5 patches to 1.2.0
        // 1.2.0 patches to 1.2.1
        if (this.prerelease.length === 0) {
          this.patch++
        }
        this.prerelease = []
        break
      // This probably shouldn't be used publicly.
      // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction.
      case 'pre':
        if (this.prerelease.length === 0) {
          this.prerelease = [0]
        } else {
          let i = this.prerelease.length
          while (--i >= 0) {
            if (typeof this.prerelease[i] === 'number') {
              this.prerelease[i]++
              i = -2
            }
          }
          if (i === -1) {
            // didn't increment anything
            this.prerelease.push(0)
          }
        }
        if (identifier) {
          // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
          // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
          if (this.prerelease[0] === identifier) {
            if (isNaN(this.prerelease[1])) {
              this.prerelease = [identifier, 0]
            }
          } else {
            this.prerelease = [identifier, 0]
          }
        }
        break
      default:
        throw new Error(`invalid increment argument: ${release}`)
    }
    this.format()
    this.raw = this.version
    return this
  }
}
module . exports = SemVer
/***/ } ) ,
/***/ 98 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const eq = _ _webpack _require _ _ ( 898 )
const neq = _ _webpack _require _ _ ( 17 )
const gt = _ _webpack _require _ _ ( 123 )
const gte = _ _webpack _require _ _ ( 522 )
const lt = _ _webpack _require _ _ ( 194 )
const lte = _ _webpack _require _ _ ( 520 )
// Dispatch a semver comparison by operator string. `===`/`!==` compare raw
// version strings (unwrapping SemVer-like objects); the remaining operators
// delegate to the semantic comparison helpers. Throws on unknown operators.
const cmp = (a, op, b, loose) => {
  // Unwrap `{ version }` objects to their version string.
  const raw = (x) => (typeof x === 'object' ? x.version : x);
  switch (op) {
    case '===':
      return raw(a) === raw(b);
    case '!==':
      return raw(a) !== raw(b);
    case '':
    case '=':
    case '==':
      return eq(a, b, loose);
    case '!=':
      return neq(a, b, loose);
    case '>':
      return gt(a, b, loose);
    case '>=':
      return gte(a, b, loose);
    case '<':
      return lt(a, b, loose);
    case '<=':
      return lte(a, b, loose);
    default:
      throw new TypeError(`Invalid operator: ${op}`);
  }
};
module . exports = cmp
/***/ } ) ,
/***/ 109 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// Re-export a property from `source` onto `target` under `alias` (defaults
// to `key`). Uses a live getter when defineProperty is available, a plain
// copy otherwise. Reuses an existing binding helper on `this` if present.
var __createBinding = (this && this.__createBinding) || (Object.create
  ? function (target, source, key, alias) {
      const name = alias === undefined ? key : alias;
      Object.defineProperty(target, name, {
        enumerable: true,
        get: function () { return source[key]; }
      });
    }
  : function (target, source, key, alias) {
      const name = alias === undefined ? key : alias;
      target[name] = source[key];
    });
// Attach `value` as the namespace object's `default` export. Prefers an
// enumerable defineProperty; falls back to plain assignment.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create
  ? function (target, value) {
      Object.defineProperty(target, "default", { enumerable: true, value: value });
    }
  : function (target, value) {
      target["default"] = value;
    });
// Emulate `import * as ns from "mod"` over a CommonJS module: real ES
// modules pass through unchanged; otherwise copy own properties (except
// "default") onto a fresh namespace and attach the module itself as
// `default`.
var __importStar = (this && this.__importStar) || function (mod) {
  if (mod && mod.__esModule) {
    return mod;
  }
  var result = {};
  if (mod != null) {
    for (var key in mod) {
      if (key !== "default" && Object.hasOwnProperty.call(mod, key)) {
        __createBinding(result, mod, key);
      }
    }
  }
  __setModuleDefault(result, mod);
  return result;
};
// Drive a generator as if it were an async function: each yielded value is
// adopted into a promise of class `P` (defaulting to the native Promise) and
// fed back into the generator; the returned promise settles with the
// generator's return value or first thrown error.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
  if (!P) P = Promise;
  const adopt = (value) => (value instanceof P ? value : new P((resolve) => { resolve(value); }));
  return new P((resolve, reject) => {
    const iterator = generator.apply(thisArg, _arguments || []);
    const step = (result) => {
      if (result.done) {
        resolve(result.value);
      } else {
        adopt(result.value).then(onFulfilled, onRejected);
      }
    };
    const onFulfilled = (value) => {
      try { step(iterator.next(value)); } catch (e) { reject(e); }
    };
    const onRejected = (value) => {
      try { step(iterator["throw"](value)); } catch (e) { reject(e); }
    };
    onFulfilled(undefined);
  });
};
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const fs = _ _importStar ( _ _webpack _require _ _ ( 747 ) ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
const buildx = _ _importStar ( _ _webpack _require _ _ ( 295 ) ) ;
const context = _ _importStar ( _ _webpack _require _ _ ( 842 ) ) ;
const stateHelper = _ _importStar ( _ _webpack _require _ _ ( 647 ) ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 186 ) ) ;
const exec = _ _importStar ( _ _webpack _require _ _ ( 514 ) ) ;
/**
 * Entry point of the action: validates the environment, runs `docker <args>`
 * through buildx, and exposes the resulting image digest as the `digest`
 * output. Failures are reported via core.setFailed instead of throwing.
 * FIX: two stray VCS timestamp lines interleaved into the try block by the
 * corrupted extraction were removed; they are not code.
 */
function run() {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            // This action shells out to buildx, which is only supported on linux runners.
            if (os.platform() !== 'linux') {
                core.setFailed('Only supported on linux platform');
                return;
            }
            if (!(yield buildx.isAvailable())) {
                core.setFailed(`Buildx is required. See https://github.com/docker/setup-buildx-action to set up buildx.`);
                return;
            }
            // Remember the temp dir so the post step (cleanup) can remove it.
            stateHelper.setTmpDir(context.tmpDir);
            const buildxVersion = yield buildx.getVersion();
            core.info(`📣 Buildx version: ${buildxVersion}`);
            let inputs = yield context.getInputs();
            core.info(`🏃 Starting build...`);
            const args = yield context.getArgs(inputs, buildxVersion);
            yield exec.exec('docker', args);
            const imageID = yield buildx.getImageID();
            if (imageID) {
                core.info('🛒 Extracting digest...');
                core.info(`${imageID}`);
                core.setOutput('digest', imageID);
            }
        }
        catch (error) {
            core.setFailed(error.message);
        }
    });
}
/**
 * Post step: removes the temp folder recorded by the main step, if any.
 */
function cleanup() {
    return __awaiter(this, void 0, void 0, function* () {
        const tmpDir = stateHelper.tmpDir;
        if (tmpDir.length > 0) {
            core.info(`🚿 Removing temp folder ${tmpDir}`);
            fs.rmdirSync(tmpDir, { recursive: true });
        }
    });
}
// First invocation runs the build; the post invocation runs the cleanup.
if (stateHelper.IsPost) {
    cleanup();
}
else {
    run();
}
//# sourceMappingURL=main.js.map
/***/ } ) ,
/***/ 121 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict";
var Buffer = __webpack_require__(937).Buffer;
// Multibyte codec: one character is represented by 1 or more bytes.
// Supports UTF-16 surrogates, GB18030 extensions and unicode sequences.
// Table files are only read when requested, to save memory and load time.
exports._dbcs = DBCSCodec;
// Sentinel values used inside the decode/encode tables.
var UNASSIGNED = -1,
    GB18030_CODE = -2,
    SEQ_START = -10,
    NODE_START = -1000,
    UNASSIGNED_NODE = new Array(0x100),
    DEF_CHAR = -1;
// Template trie node: every slot starts out unassigned.
for (var i = 0; i < 0x100; i++)
    UNASSIGNED_NODE[i] = UNASSIGNED;
// Class DBCSCodec reads and initializes mapping tables.
/**
 * DBCSCodec reads the mapping tables and builds the decode trie and the
 * encode buckets for a double-byte character set.
 * @param {Object} codecOptions - { encodingName, table(), encodeSkipVals?, encodeAdd?, gb18030? }
 * @param {Object} iconv - iconv-lite instance providing the default chars.
 * @throws {Error} when codecOptions or codecOptions.table is missing.
 */
function DBCSCodec(codecOptions, iconv) {
    // BUGFIX: validate codecOptions BEFORE dereferencing it. The original read
    // codecOptions.encodingName first, so a missing options object produced a
    // TypeError instead of the intended descriptive error.
    if (!codecOptions)
        throw new Error("DBCS codec is called without the data.")
    this.encodingName = codecOptions.encodingName;
    if (!codecOptions.table)
        throw new Error("Encoding '" + this.encodingName + "' has no data.");
    // Load tables.
    var mappingTable = codecOptions.table();
    // Decode tables: MBCS -> Unicode.
    // decodeTables is a trie, encoded as an array of arrays of integers.
    // Internal arrays are trie nodes and all have len = 256. Root is decodeTables[0].
    // Values: >= 0            -> unicode character code, can be > 0xFFFF.
    //         == UNASSIGNED   -> unknown/unassigned sequence.
    //         == GB18030_CODE -> end of a GB18030 4-byte sequence.
    //         <= NODE_START   -> index of the next trie node for the next byte.
    //         <= SEQ_START    -> index of a character-code sequence in decodeTableSeq.
    this.decodeTables = [];
    this.decodeTables[0] = UNASSIGNED_NODE.slice(0); // Create root node.
    // Sometimes a MBCS char maps to a sequence of unicode chars; stored as integer arrays here.
    this.decodeTableSeq = [];
    // Actual mapping tables consist of chunks; use them to fill the decode tables.
    for (var i = 0; i < mappingTable.length; i++)
        this._addDecodeChunk(mappingTable[i]);
    this.defaultCharUnicode = iconv.defaultCharUnicode;
    // Encode tables: Unicode -> DBCS.
    // `encodeTable` maps unicode char -> encoded char (integers only, for performance).
    // It is sparse, so it is an array of 256-char buckets; a bucket can be null.
    // Values: >= 0          -> a normal char (<=256: 1 byte, <=65536: 2 bytes, ...).
    //         == UNASSIGNED -> no conversion found; output a default char.
    //         <= SEQ_START  -> index into encodeTableSeq; the char starts a sequence.
    this.encodeTable = [];
    // `encodeTableSeq` handles sequences of unicode chars encoded as one code: a tree
    // of objects keyed by the sequence chars, leafs holding the dbcs value. The
    // DEF_CHAR key marks end-of-sequence (needed when one sequence is a strict
    // subsequence of another). Kept separate from encodeTable for performance.
    this.encodeTableSeq = [];
    // Some chars can be decoded but need not be encoded.
    var skipEncodeChars = {};
    if (codecOptions.encodeSkipVals)
        for (var i = 0; i < codecOptions.encodeSkipVals.length; i++) {
            var val = codecOptions.encodeSkipVals[i];
            if (typeof val === 'number')
                skipEncodeChars[val] = true;
            else
                for (var j = val.from; j <= val.to; j++)
                    skipEncodeChars[j] = true;
        }
    // Use the decode trie to recursively fill out the encode tables.
    this._fillEncodeTable(0, 0, skipEncodeChars);
    // Add extra encoding pairs when requested.
    if (codecOptions.encodeAdd) {
        for (var uChar in codecOptions.encodeAdd)
            if (Object.prototype.hasOwnProperty.call(codecOptions.encodeAdd, uChar))
                this._setEncodeChar(uChar.charCodeAt(0), codecOptions.encodeAdd[uChar]);
    }
    // Default single-byte replacement char.
    this.defCharSB = this.encodeTable[0][iconv.defaultCharSingleByte.charCodeAt(0)];
    // NOTE(review): encodeTable[0]['?'] is a string index on an array and is always
    // undefined; preserved verbatim - confirm intent against upstream iconv-lite.
    if (this.defCharSB === UNASSIGNED) this.defCharSB = this.encodeTable[0]['?'];
    if (this.defCharSB === UNASSIGNED) this.defCharSB = "?".charCodeAt(0);
    // Load & create GB18030 tables when needed.
    if (typeof codecOptions.gb18030 === 'function') {
        this.gb18030 = codecOptions.gb18030(); // Load GB18030 ranges.
        // GB18030 decode tables: second byte 0x30-0x39 chains into a shared
        // third-byte node, which chains into a shared fourth-byte node.
        var thirdByteNodeIdx = this.decodeTables.length;
        var thirdByteNode = this.decodeTables[thirdByteNodeIdx] = UNASSIGNED_NODE.slice(0);
        var fourthByteNodeIdx = this.decodeTables.length;
        var fourthByteNode = this.decodeTables[fourthByteNodeIdx] = UNASSIGNED_NODE.slice(0);
        for (var i = 0x81; i <= 0xFE; i++) {
            var secondByteNodeIdx = NODE_START - this.decodeTables[0][i];
            var secondByteNode = this.decodeTables[secondByteNodeIdx];
            for (var j = 0x30; j <= 0x39; j++)
                secondByteNode[j] = NODE_START - thirdByteNodeIdx;
        }
        for (var i = 0x81; i <= 0xFE; i++)
            thirdByteNode[i] = NODE_START - fourthByteNodeIdx;
        for (var i = 0x30; i <= 0x39; i++)
            fourthByteNode[i] = GB18030_CODE
    }
}
// Wire the encoder/decoder classes used by iconv-lite for this codec.
DBCSCodec.prototype.encoder = DBCSEncoder;
DBCSCodec.prototype.decoder = DBCSDecoder;
// Decoder helper: return the trie node covering the given multi-byte address
// prefix, creating intermediate nodes on the way down as needed.
DBCSCodec.prototype._getDecodeTrieNode = function (addr) {
    // Split the address into bytes, least significant first.
    var bytes = [];
    for (; addr > 0; addr >>= 8)
        bytes.push(addr & 0xFF);
    if (bytes.length == 0)
        bytes.push(0);
    // Walk from the root, most significant byte first. The last byte is the
    // slot inside the returned node, so the walk stops at i > 0.
    var node = this.decodeTables[0];
    for (var i = bytes.length - 1; i > 0; i--) {
        var val = node[bytes[i]];
        if (val == UNASSIGNED) { // Allocate a new child node.
            node[bytes[i]] = NODE_START - this.decodeTables.length;
            this.decodeTables.push(node = UNASSIGNED_NODE.slice(0));
        }
        else if (val <= NODE_START) { // Follow the existing child node.
            node = this.decodeTables[NODE_START - val];
        }
        else
            throw new Error("Overwrite byte in " + this.encodingName + ", addr: " + addr.toString(16));
    }
    return node;
}
// Fill the decode trie from one chunk of the mapping table.
// FIX: four stray VCS timestamp lines interleaved into this function by the
// corrupted extraction were removed; they are not code.
// A chunk is [hexStartAddr, part, part, ...] where a string part writes its
// chars as-is and a number part writes an increasing run from the previous char.
DBCSCodec.prototype._addDecodeChunk = function (chunk) {
    // First element of the chunk is the hex mbcs code where we start.
    var curAddr = parseInt(chunk[0], 16);
    // Choose the decoding node where we'll write our chars.
    var writeTable = this._getDecodeTrieNode(curAddr);
    curAddr = curAddr & 0xFF;
    // Write all other elements of the chunk to the table.
    for (var k = 1; k < chunk.length; k++) {
        var part = chunk[k];
        if (typeof part === "string") { // String, write as-is.
            for (var l = 0; l < part.length;) {
                var code = part.charCodeAt(l++);
                if (0xD800 <= code && code < 0xDC00) { // Decode a surrogate pair.
                    var codeTrail = part.charCodeAt(l++);
                    if (0xDC00 <= codeTrail && codeTrail < 0xE000)
                        writeTable[curAddr++] = 0x10000 + (code - 0xD800) * 0x400 + (codeTrail - 0xDC00);
                    else
                        throw new Error("Incorrect surrogate pair in " + this.encodingName + " at chunk " + chunk[0]);
                }
                else if (0x0FF0 < code && code <= 0x0FFF) { // Character sequence (private in-table encoding).
                    var len = 0xFFF - code + 2;
                    var seq = [];
                    for (var m = 0; m < len; m++)
                        seq.push(part.charCodeAt(l++)); // Simple variation: no surrogates or subsequences in seq.
                    writeTable[curAddr++] = SEQ_START - this.decodeTableSeq.length;
                    this.decodeTableSeq.push(seq);
                }
                else
                    writeTable[curAddr++] = code; // Basic char.
            }
        }
        else if (typeof part === "number") { // Integer: increasing run starting after the previous char.
            var charCode = writeTable[curAddr - 1] + 1;
            for (var l = 0; l < part; l++)
                writeTable[curAddr++] = charCode++;
        }
        else
            throw new Error("Incorrect type '" + typeof part + "' given in " + this.encodingName + " at chunk " + chunk[0]);
    }
    if (curAddr > 0xFF)
        throw new Error("Incorrect chunk in " + this.encodingName + " at addr " + chunk[0] + ": too long" + curAddr);
}
// Encoder helpers
// Encoder helper: return (creating on demand) the 256-slot encode bucket
// that covers the given unicode code point.
DBCSCodec.prototype._getEncodeBucket = function (uCode) {
    var high = uCode >> 8; // Can exceed 0xFF because of astral characters.
    if (this.encodeTable[high] === undefined)
        this.encodeTable[high] = UNASSIGNED_NODE.slice(0); // Create bucket on demand.
    return this.encodeTable[high];
}
// Record a single unicode char -> dbcsCode mapping (first mapping wins).
DBCSCodec.prototype._setEncodeChar = function (uCode, dbcsCode) {
    var bucket = this._getEncodeBucket(uCode);
    var low = uCode & 0xFF;
    if (bucket[low] <= SEQ_START)
        this.encodeTableSeq[SEQ_START - bucket[low]][DEF_CHAR] = dbcsCode; // A sequence starts here: store as its single-char default.
    else if (bucket[low] == UNASSIGNED)
        bucket[low] = dbcsCode;
}
// Register a multi-char unicode sequence -> dbcsCode mapping in the encode
// sequence tree rooted at the bucket slot of the sequence's first character.
DBCSCodec.prototype._setEncodeSequence = function (seq, dbcsCode) {
    // Root of the character tree: bucket slot of the first char of the sequence.
    var uCode = seq[0];
    var bucket = this._getEncodeBucket(uCode);
    var low = uCode & 0xFF;
    var node;
    if (bucket[low] <= SEQ_START) {
        // A sequence already starts here - reuse its tree.
        node = this.encodeTableSeq[SEQ_START - bucket[low]];
    }
    else {
        // No sequence object yet - allocate a new one.
        node = {};
        if (bucket[low] !== UNASSIGNED) node[DEF_CHAR] = bucket[low]; // A previously set char becomes a single-char subsequence.
        bucket[low] = SEQ_START - this.encodeTableSeq.length;
        this.encodeTableSeq.push(node);
    }
    // Traverse the character tree, allocating new nodes as needed.
    // NOTE(review): this loop indexes node[uCode] with the FIRST char of the
    // sequence (uCode is never advanced to seq[j]). That looks suspicious but is
    // preserved verbatim - confirm against upstream iconv-lite before changing.
    for (var j = 1; j < seq.length - 1; j++) {
        var oldVal = node[uCode];
        if (typeof oldVal === 'object')
            node = oldVal;
        else {
            node = node[uCode] = {}
            if (oldVal !== undefined)
                node[DEF_CHAR] = oldVal
        }
    }
    // Set the leaf to the given dbcsCode.
    uCode = seq[seq.length - 1];
    node[uCode] = dbcsCode;
}
// Walk the decode trie recursively and mirror its mappings into the encode tables.
DBCSCodec.prototype._fillEncodeTable = function (nodeIdx, prefix, skipEncodeChars) {
    var node = this.decodeTables[nodeIdx];
    for (var i = 0; i < 0x100; i++) {
        var uCode = node[i];
        var mbCode = prefix + i;
        if (skipEncodeChars[mbCode])
            continue; // Decode-only char.
        if (uCode >= 0)
            this._setEncodeChar(uCode, mbCode); // Plain char mapping.
        else if (uCode <= NODE_START)
            this._fillEncodeTable(NODE_START - uCode, mbCode << 8, skipEncodeChars); // Descend into the child node.
        else if (uCode <= SEQ_START)
            this._setEncodeSequence(this.decodeTableSeq[SEQ_START - uCode], mbCode); // Sequence mapping.
    }
}
// == Encoder ==================================================================
// == Encoder ==================================================================
// Stateful encoder instance; copies the static tables from the codec.
function DBCSEncoder(options, codec) {
    // Encoder state carried across write() calls.
    this.leadSurrogate = -1;  // Pending lead surrogate from the previous chunk.
    this.seqObj = undefined;  // Pending sequence-tree node from the previous chunk.
    // Static data.
    this.encodeTable = codec.encodeTable;
    this.encodeTableSeq = codec.encodeTableSeq;
    this.defaultCharSingleByte = codec.defCharSB;
    this.gb18030 = codec.gb18030;
}
// Encode one JS string chunk into the DBCS byte stream. Partial state (pending
// lead surrogate / pending sequence object) is kept on the instance between calls.
// FIX: four stray VCS timestamp lines interleaved into this function by the
// corrupted extraction were removed; they are not code.
DBCSEncoder.prototype.write = function (str) {
    var newBuf = Buffer.alloc(str.length * (this.gb18030 ? 4 : 3)),
        leadSurrogate = this.leadSurrogate,
        seqObj = this.seqObj, nextChar = -1,
        i = 0, j = 0;
    while (true) {
        // 0. Get the next character.
        if (nextChar === -1) {
            if (i == str.length) break;
            var uCode = str.charCodeAt(i++);
        }
        else {
            var uCode = nextChar;
            nextChar = -1;
        }
        // 1. Handle surrogates.
        if (0xD800 <= uCode && uCode < 0xE000) { // Char is one of the surrogates.
            if (uCode < 0xDC00) { // We've got a lead surrogate.
                if (leadSurrogate === -1) {
                    leadSurrogate = uCode;
                    continue;
                } else {
                    leadSurrogate = uCode;
                    // Double lead surrogate found.
                    uCode = UNASSIGNED;
                }
            } else { // We've got a trail surrogate.
                if (leadSurrogate !== -1) {
                    uCode = 0x10000 + (leadSurrogate - 0xD800) * 0x400 + (uCode - 0xDC00);
                    leadSurrogate = -1;
                } else {
                    // Incomplete surrogate pair - only a trail surrogate found.
                    uCode = UNASSIGNED;
                }
            }
        }
        else if (leadSurrogate !== -1) {
            // Incomplete surrogate pair - only a lead surrogate found.
            nextChar = uCode; uCode = UNASSIGNED; // Write an error, then the current char.
            leadSurrogate = -1;
        }
        // 2. Convert the uCode character.
        var dbcsCode = UNASSIGNED;
        if (seqObj !== undefined && uCode != UNASSIGNED) { // We are in the middle of a sequence.
            var resCode = seqObj[uCode];
            if (typeof resCode === 'object') { // Sequence continues.
                seqObj = resCode;
                continue;
            } else if (typeof resCode == 'number') { // Sequence finished. Write it.
                dbcsCode = resCode;
            } else if (resCode == undefined) { // Current character is not part of the sequence.
                // Try the default character for this sequence.
                resCode = seqObj[DEF_CHAR];
                if (resCode !== undefined) {
                    dbcsCode = resCode; // Found. Write it.
                    nextChar = uCode;   // The current character is written too, next iteration.
                } else {
                    // TODO: What if we have no default? (resCode == undefined)
                    // We should write the first char of the sequence as-is and try
                    // the rest recursively; not done because no encoding hits this
                    // yet. For now the sequence is skipped and the char written.
                }
            }
            seqObj = undefined;
        }
        else if (uCode >= 0) { // Regular character.
            var subtable = this.encodeTable[uCode >> 8];
            if (subtable !== undefined)
                dbcsCode = subtable[uCode & 0xFF];
            if (dbcsCode <= SEQ_START) { // Sequence start.
                seqObj = this.encodeTableSeq[SEQ_START - dbcsCode];
                continue;
            }
            if (dbcsCode == UNASSIGNED && this.gb18030) {
                // Use the GB18030 range algorithm to find the bytes to write.
                var idx = findIdx(this.gb18030.uChars, uCode);
                if (idx != -1) {
                    var dbcsCode = this.gb18030.gbChars[idx] + (uCode - this.gb18030.uChars[idx]);
                    newBuf[j++] = 0x81 + Math.floor(dbcsCode / 12600); dbcsCode = dbcsCode % 12600;
                    newBuf[j++] = 0x30 + Math.floor(dbcsCode / 1260); dbcsCode = dbcsCode % 1260;
                    newBuf[j++] = 0x81 + Math.floor(dbcsCode / 10); dbcsCode = dbcsCode % 10;
                    newBuf[j++] = 0x30 + dbcsCode;
                    continue;
                }
            }
        }
        // 3. Write the dbcsCode character.
        if (dbcsCode === UNASSIGNED)
            dbcsCode = this.defaultCharSingleByte;
        if (dbcsCode < 0x100) {
            newBuf[j++] = dbcsCode;
        }
        else if (dbcsCode < 0x10000) {
            newBuf[j++] = dbcsCode >> 8;   // high byte
            newBuf[j++] = dbcsCode & 0xFF; // low byte
        }
        else {
            newBuf[j++] = dbcsCode >> 16;
            newBuf[j++] = (dbcsCode >> 8) & 0xFF;
            newBuf[j++] = dbcsCode & 0xFF;
        }
    }
    this.seqObj = seqObj;
    this.leadSurrogate = leadSurrogate;
    return newBuf.slice(0, j);
}
// Flush any pending encoder state at end of stream.
DBCSEncoder.prototype.end = function () {
    if (this.leadSurrogate === -1 && this.seqObj === undefined)
        return; // All clean - the most common case.
    var newBuf = Buffer.alloc(10), j = 0;
    if (this.seqObj) { // Mid-sequence: emit the sequence's default char, if any.
        var dbcsCode = this.seqObj[DEF_CHAR];
        if (dbcsCode !== undefined) { // Write the beginning of the sequence.
            if (dbcsCode < 0x100) {
                newBuf[j++] = dbcsCode;
            }
            else {
                newBuf[j++] = dbcsCode >> 8;   // high byte
                newBuf[j++] = dbcsCode & 0xFF; // low byte
            }
        } else {
            // See the TODO in write(): sequences with no default are dropped.
        }
        this.seqObj = undefined;
    }
    if (this.leadSurrogate !== -1) {
        // Incomplete surrogate pair - only a lead surrogate was found.
        newBuf[j++] = this.defaultCharSingleByte;
        this.leadSurrogate = -1;
    }
    return newBuf.slice(0, j);
}
// Exposed on the prototype for testing.
DBCSEncoder.prototype.findIdx = findIdx;
// == Decoder ==================================================================
// == Decoder ==================================================================
// Stateful decoder instance; copies the static tables from the codec.
function DBCSDecoder(options, codec) {
    // Decoder state carried across write() calls.
    this.nodeIdx = 0;                // Current position in the decode trie.
    this.prevBuf = Buffer.alloc(0);  // Unfinished byte sequence from the previous chunk.
    // Static data.
    this.decodeTables = codec.decodeTables;
    this.decodeTableSeq = codec.decodeTableSeq;
    this.defaultCharUnicode = codec.defaultCharUnicode;
    this.gb18030 = codec.gb18030;
}
// Decode one DBCS byte buffer into a string (assembled as UCS-2 bytes and
// converted at the end). Trie position and any unfinished byte sequence are
// kept on the instance between calls.
DBCSDecoder.prototype.write = function (buf) {
    var newBuf = Buffer.alloc(buf.length * 2),
        nodeIdx = this.nodeIdx,
        prevBuf = this.prevBuf, prevBufOffset = this.prevBuf.length,
        seqStart = -this.prevBuf.length, // Index of the start of the currently parsed sequence.
        uCode;
    if (prevBufOffset > 0) // Let prevBuf overlap the new chunk a little, to simplify slicing later.
        prevBuf = Buffer.concat([prevBuf, buf.slice(0, 10)]);
    for (var i = 0, j = 0; i < buf.length; i++) {
        var curByte = (i >= 0) ? buf[i] : prevBuf[i + prevBufOffset];
        // Look the byte up in the current trie node.
        var uCode = this.decodeTables[nodeIdx][curByte];
        if (uCode >= 0) {
            // Normal character - use it as-is.
        }
        else if (uCode === UNASSIGNED) { // Unknown char.
            // TODO: Callback with seq.
            i = seqStart; // Re-parse after skipping the sequence's first byte ('i' is incremented by the for loop).
            uCode = this.defaultCharUnicode.charCodeAt(0);
        }
        else if (uCode === GB18030_CODE) {
            // End of a GB18030 4-byte sequence: compute the linear pointer and map it.
            var curSeq = (seqStart >= 0) ? buf.slice(seqStart, i + 1) : prevBuf.slice(seqStart + prevBufOffset, i + 1 + prevBufOffset);
            var ptr = (curSeq[0] - 0x81) * 12600 + (curSeq[1] - 0x30) * 1260 + (curSeq[2] - 0x81) * 10 + (curSeq[3] - 0x30);
            var idx = findIdx(this.gb18030.gbChars, ptr);
            uCode = this.gb18030.uChars[idx] + ptr - this.gb18030.gbChars[idx];
        }
        else if (uCode <= NODE_START) { // Descend to the next trie node.
            nodeIdx = NODE_START - uCode;
            continue;
        }
        else if (uCode <= SEQ_START) { // Output a sequence of chars.
            var seq = this.decodeTableSeq[SEQ_START - uCode];
            for (var k = 0; k < seq.length - 1; k++) {
                uCode = seq[k];
                newBuf[j++] = uCode & 0xFF;
                newBuf[j++] = uCode >> 8;
            }
            uCode = seq[seq.length - 1];
        }
        else
            throw new Error("iconv-lite internal error: invalid decoding table value " + uCode + " at " + nodeIdx + "/" + curByte);
        // Write the character, using a surrogate pair for astral planes.
        if (uCode > 0xFFFF) {
            uCode -= 0x10000;
            var uCodeLead = 0xD800 + Math.floor(uCode / 0x400);
            newBuf[j++] = uCodeLead & 0xFF;
            newBuf[j++] = uCodeLead >> 8;
            uCode = 0xDC00 + uCode % 0x400;
        }
        newBuf[j++] = uCode & 0xFF;
        newBuf[j++] = uCode >> 8;
        // Reset the trie position; the next sequence starts at the following byte.
        nodeIdx = 0; seqStart = i + 1;
    }
    this.nodeIdx = nodeIdx;
    this.prevBuf = (seqStart >= 0) ? buf.slice(seqStart) : prevBuf.slice(seqStart + prevBufOffset);
    return newBuf.slice(0, j).toString('ucs2');
}
// Flush: every byte still pending is unparseable; emit a default char for the
// first pending byte and re-parse the remainder until the buffer is drained.
DBCSDecoder.prototype.end = function () {
    var ret = '';
    while (this.prevBuf.length > 0) {
        // Skip one byte of the unfinished sequence.
        ret += this.defaultCharUnicode;
        var buf = this.prevBuf.slice(1);
        // Parse the remainder as a fresh stream.
        this.prevBuf = Buffer.alloc(0);
        this.nodeIdx = 0;
        if (buf.length > 0)
            ret += this.write(buf);
    }
    this.nodeIdx = 0;
    return ret;
}
// Binary search for GB18030. Returns largest i such that table[i] <= val.
// Binary search for GB18030 range tables: returns the largest index i such
// that table[i] <= val, or -1 when even table[0] exceeds val.
function findIdx(table, val) {
    if (table[0] > val)
        return -1;
    var lo = 0;
    var hi = table.length;
    // Invariant: table[lo] <= val < table[hi] (hi may be one past the end).
    while (lo < hi - 1) {
        var mid = lo + Math.floor((hi - lo + 1) / 2);
        if (table[mid] <= val)
            lo = mid;
        else
            hi = mid;
    }
    return lo;
}
/***/ } ) ,
/***/ 123 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compare = __webpack_require__(309)
// Semver greater-than: true when a sorts strictly after b.
function gt(a, b, loose) {
  return compare(a, b, loose) > 0
}
module.exports = gt
/***/ } ) ,
/***/ 124 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// Prefer Node's util.inherits; fall back to the bundled browser shim
// (module 544) when it is unavailable.
try {
  var util = __webpack_require__(669);
  /* istanbul ignore next */
  if (typeof util.inherits !== 'function') throw '';
  module.exports = util.inherits;
} catch (e) {
  /* istanbul ignore next */
  module.exports = __webpack_require__(544);
}
/***/ } ) ,
/***/ 129 :
/***/ ( function ( module ) {
module . exports = require ( "child_process" ) ;
/***/ } ) ,
/***/ 133 :
/***/ ( function ( module ) {
module . exports = eval ( "require" ) ( "iconv" ) ;
/***/ } ) ,
/***/ 156 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const SemVer = __webpack_require__(88)
// Compare two semver versions, falling back to build metadata as a tie-breaker.
function compareBuild(a, b, loose) {
  const versionA = new SemVer(a, loose)
  const versionB = new SemVer(b, loose)
  return versionA.compare(versionB) || versionA.compareBuild(versionB)
}
module.exports = compareBuild
/***/ } ) ,
/***/ 159 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript downlevel helpers for this module (reconstructed from the
// whitespace-mangled bundle; behavior unchanged).
// Drive a generator through its yielded promises (downlevelled async/await).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function onFulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function onRejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(onFulfilled, onRejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// Namespace-import shim: copy all own keys, then attach the module as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
const events = _ _importStar ( _ _webpack _require _ _ ( 614 ) ) ;
const child = _ _importStar ( _ _webpack _require _ _ ( 129 ) ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
const io = _ _importStar ( _ _webpack _require _ _ ( 436 ) ) ;
const ioUtil = _ _importStar ( _ _webpack _require _ _ ( 962 ) ) ;
/* eslint-disable @typescript-eslint/unbound-method */
const IS _WINDOWS = process . platform === 'win32' ;
/ *
* Class for running command line tools . Handles quoting and arg parsing in a platform agnostic way .
* /
class ToolRunner extends events . EventEmitter {
constructor ( toolPath , args , options ) {
super ( ) ;
if ( ! toolPath ) {
throw new Error ( "Parameter 'toolPath' cannot be null or empty." ) ;
}
this . toolPath = toolPath ;
this . args = args || [ ] ;
this . options = options || { } ;
}
_debug ( message ) {
if ( this . options . listeners && this . options . listeners . debug ) {
this . options . listeners . debug ( message ) ;
}
}
_getCommandString ( options , noPrefix ) {
const toolPath = this . _getSpawnFileName ( ) ;
const args = this . _getSpawnArgs ( options ) ;
let cmd = noPrefix ? '' : '[command]' ; // omit prefix when piped to a second tool
if ( IS _WINDOWS ) {
// Windows + cmd file
if ( this . _isCmdFile ( ) ) {
cmd += toolPath ;
for ( const a of args ) {
cmd += ` ${ a } ` ;
}
}
// Windows + verbatim
else if ( options . windowsVerbatimArguments ) {
cmd += ` " ${ toolPath } " ` ;
for ( const a of args ) {
cmd += ` ${ a } ` ;
}
}
// Windows (regular)
else {
cmd += this . _windowsQuoteCmdArg ( toolPath ) ;
for ( const a of args ) {
cmd += ` ${ this . _windowsQuoteCmdArg ( a ) } ` ;
}
}
}
else {
// OSX/Linux - this can likely be improved with some form of quoting.
// creating processes on Unix is fundamentally different than Windows.
// on Unix, execvp() takes an arg array.
cmd += toolPath ;
for ( const a of args ) {
cmd += ` ${ a } ` ;
}
}
return cmd ;
}
_processLineBuffer ( data , strBuffer , onLine ) {
try {
let s = strBuffer + data . toString ( ) ;
let n = s . indexOf ( os . EOL ) ;
while ( n > - 1 ) {
const line = s . substring ( 0 , n ) ;
onLine ( line ) ;
// the rest of the string ...
s = s . substring ( n + os . EOL . length ) ;
n = s . indexOf ( os . EOL ) ;
}
strBuffer = s ;
}
catch ( err ) {
// streaming lines to console is best effort. Don't fail a build.
this . _debug ( ` error processing line. Failed with error ${ err } ` ) ;
}
}
_getSpawnFileName ( ) {
if ( IS _WINDOWS ) {
if ( this . _isCmdFile ( ) ) {
return process . env [ 'COMSPEC' ] || 'cmd.exe' ;
}
}
return this . toolPath ;
}
_getSpawnArgs ( options ) {
if ( IS _WINDOWS ) {
if ( this . _isCmdFile ( ) ) {
let argline = ` /D /S /C " ${ this . _windowsQuoteCmdArg ( this . toolPath ) } ` ;
for ( const a of this . args ) {
argline += ' ' ;
argline += options . windowsVerbatimArguments
? a
: this . _windowsQuoteCmdArg ( a ) ;
}
argline += '"' ;
return [ argline ] ;
}
}
return this . args ;
}
_endsWith ( str , end ) {
return str . endsWith ( end ) ;
}
_isCmdFile ( ) {
const upperToolPath = this . toolPath . toUpperCase ( ) ;
return ( this . _endsWith ( upperToolPath , '.CMD' ) ||
this . _endsWith ( upperToolPath , '.BAT' ) ) ;
}
_windowsQuoteCmdArg ( arg ) {
// for .exe, apply the normal quoting rules that libuv applies
if ( ! this . _isCmdFile ( ) ) {
return this . _uvQuoteCmdArg ( arg ) ;
}
// otherwise apply quoting rules specific to the cmd.exe command line parser.
// the libuv rules are generic and are not designed specifically for cmd.exe
// command line parser.
//
// for a detailed description of the cmd.exe command line parser, refer to
// http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912
// need quotes for empty arg
if ( ! arg ) {
return '""' ;
}
// determine whether the arg needs to be quoted
const cmdSpecialChars = [
' ' ,
'\t' ,
'&' ,
'(' ,
')' ,
'[' ,
']' ,
'{' ,
'}' ,
'^' ,
'=' ,
';' ,
'!' ,
"'" ,
'+' ,
',' ,
'`' ,
'~' ,
'|' ,
'<' ,
'>' ,
'"'
] ;
let needsQuotes = false ;
for ( const char of arg ) {
if ( cmdSpecialChars . some ( x => x === char ) ) {
needsQuotes = true ;
break ;
}
}
// short-circuit if quotes not needed
if ( ! needsQuotes ) {
return arg ;
}
// the following quoting rules are very similar to the rules that by libuv applies.
//
// 1) wrap the string in quotes
//
// 2) double-up quotes - i.e. " => ""
//
// this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately
// doesn't work well with a cmd.exe command line.
//
// note, replacing " with "" also works well if the arg is passed to a downstream .NET console app.
// for example, the command line:
// foo.exe "myarg:""my val"""
// is parsed by a .NET console app into an arg array:
// [ "myarg:\"my val\"" ]
// which is the same end result when applying libuv quoting rules. although the actual
// command line from libuv quoting rules would look like:
// foo.exe "myarg:\"my val\""
//
// 3) double-up slashes that precede a quote,
// e.g. hello \world => "hello \world"
// hello\"world => "hello\\""world"
// hello\\"world => "hello\\\\""world"
// hello world\ => "hello world\\"
//
// technically this is not required for a cmd.exe command line, or the batch argument parser.
// the reasons for including this as a .cmd quoting rule are:
//
// a) this is optimized for the scenario where the argument is passed from the .cmd file to an
// external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule.
//
// b) it's what we've been doing previously (by deferring to node default behavior) and we
// haven't heard any complaints about that aspect.
//
// note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be
// escaped when used on the command line directly - even though within a .cmd file % can be escaped
// by using %%.
//
// the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts
// the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing.
//
// one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would
// often work, since it is unlikely that var^ would exist, and the ^ character is removed when the
// variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args
// to an external program.
//
// an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file.
// % can be escaped within a .cmd file.
let reverse = '"' ;
let quoteHit = true ;
for ( let i = arg . length ; i > 0 ; i -- ) {
// walk the string in reverse
reverse += arg [ i - 1 ] ;
if ( quoteHit && arg [ i - 1 ] === '\\' ) {
reverse += '\\' ; // double the slash
}
else if ( arg [ i - 1 ] === '"' ) {
quoteHit = true ;
reverse += '"' ; // double the quote
}
else {
quoteHit = false ;
}
}
reverse += '"' ;
return reverse
. split ( '' )
. reverse ( )
. join ( '' ) ;
}
_uvQuoteCmdArg ( arg ) {
// Tool runner wraps child_process.spawn() and needs to apply the same quoting as
// Node in certain cases where the undocumented spawn option windowsVerbatimArguments
// is used.
//
// Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV,
// see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details),
// pasting copyright notice from Node within this function:
//
// Copyright Joyent, Inc. and other Node contributors. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
if ( ! arg ) {
// Need double quotation for empty argument
return '""' ;
}
if ( ! arg . includes ( ' ' ) && ! arg . includes ( '\t' ) && ! arg . includes ( '"' ) ) {
// No quotation needed
return arg ;
}
if ( ! arg . includes ( '"' ) && ! arg . includes ( '\\' ) ) {
// No embedded double quotes or backslashes, so I can just wrap
// quote marks around the whole thing.
return ` " ${ arg } " ` ;
}
// Expected input/output:
// input : hello"world
// output: "hello\"world"
// input : hello""world
// output: "hello\"\"world"
// input : hello\world
// output: hello\world
// input : hello\\world
// output: hello\\world
// input : hello\"world
// output: "hello\\\"world"
// input : hello\\"world
// output: "hello\\\\\"world"
// input : hello world\
// output: "hello world\\" - note the comment in libuv actually reads "hello world\"
// but it appears the comment is wrong, it should be "hello world\\"
let reverse = '"' ;
let quoteHit = true ;
for ( let i = arg . length ; i > 0 ; i -- ) {
// walk the string in reverse
reverse += arg [ i - 1 ] ;
if ( quoteHit && arg [ i - 1 ] === '\\' ) {
reverse += '\\' ;
}
else if ( arg [ i - 1 ] === '"' ) {
quoteHit = true ;
reverse += '\\' ;
}
else {
quoteHit = false ;
}
}
reverse += '"' ;
return reverse
. split ( '' )
. reverse ( )
. join ( '' ) ;
}
_cloneExecOptions ( options ) {
options = options || { } ;
const result = {
cwd : options . cwd || process . cwd ( ) ,
env : options . env || process . env ,
silent : options . silent || false ,
windowsVerbatimArguments : options . windowsVerbatimArguments || false ,
failOnStdErr : options . failOnStdErr || false ,
ignoreReturnCode : options . ignoreReturnCode || false ,
delay : options . delay || 10000
} ;
result . outStream = options . outStream || process . stdout ;
result . errStream = options . errStream || process . stderr ;
return result ;
}
_getSpawnOptions ( options , toolPath ) {
options = options || { } ;
const result = { } ;
result . cwd = options . cwd ;
result . env = options . env ;
result [ 'windowsVerbatimArguments' ] =
options . windowsVerbatimArguments || this . _isCmdFile ( ) ;
if ( options . windowsVerbatimArguments ) {
result . argv0 = ` " ${ toolPath } " ` ;
}
return result ;
}
/**
 * Exec a tool.
 * Output will be streamed to the live console.
 * Returns promise with return code
 *
 * @param     tool     path to tool to exec
 * @param     options  optional exec options.  See ExecOptions
 * @returns   number   promise resolving to the tool's exit code; rejects on
 *                     spawn failure, non-zero exit (unless ignoreReturnCode),
 *                     or stderr output when failOnStdErr is set
 */
exec() {
    return __awaiter(this, void 0, void 0, function* () {
        // root the tool path if it is unrooted and contains relative pathing
        if (!ioUtil.isRooted(this.toolPath) &&
            (this.toolPath.includes('/') ||
                (IS_WINDOWS && this.toolPath.includes('\\')))) {
            // prefer options.cwd if it is specified, however options.cwd may also need to be rooted
            this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath);
        }
        // if the tool is only a file name, then resolve it from the PATH
        // otherwise verify it exists (add extension on Windows if necessary)
        this.toolPath = yield io.which(this.toolPath, true);
        return new Promise((resolve, reject) => {
            this._debug(`exec tool: ${this.toolPath}`);
            this._debug('arguments:');
            for (const arg of this.args) {
                this._debug(`   ${arg}`);
            }
            const optionsNonNull = this._cloneExecOptions(this.options);
            // echo the command line being run, unless silenced
            if (!optionsNonNull.silent && optionsNonNull.outStream) {
                optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL);
            }
            // ExecState decides when the invocation is complete (exit + stdio
            // close, or timeout waiting for stdio) and emits 'done' exactly once
            const state = new ExecState(optionsNonNull, this.toolPath);
            state.on('debug', (message) => {
                this._debug(message);
            });
            const fileName = this._getSpawnFileName();
            const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName));
            // NOTE(review): stdbuffer and errbuffer are const strings. Strings are
            // immutable, so _processLineBuffer cannot update them through this
            // reference, and the `.length > 0` checks in the 'done' handler below
            // can therefore never be true. Presumably trailing partial lines are
            // handled inside _processLineBuffer itself -- confirm against
            // _processLineBuffer before relying on the 'done'-time flush.
            const stdbuffer = '';
            if (cp.stdout) {
                cp.stdout.on('data', (data) => {
                    // raw-chunk listener
                    if (this.options.listeners && this.options.listeners.stdout) {
                        this.options.listeners.stdout(data);
                    }
                    // mirror stdout to the configured output stream
                    if (!optionsNonNull.silent && optionsNonNull.outStream) {
                        optionsNonNull.outStream.write(data);
                    }
                    // line-oriented listener
                    this._processLineBuffer(data, stdbuffer, (line) => {
                        if (this.options.listeners && this.options.listeners.stdline) {
                            this.options.listeners.stdline(line);
                        }
                    });
                });
            }
            const errbuffer = '';
            if (cp.stderr) {
                cp.stderr.on('data', (data) => {
                    state.processStderr = true;
                    if (this.options.listeners && this.options.listeners.stderr) {
                        this.options.listeners.stderr(data);
                    }
                    // stderr is routed to errStream only when failOnStdErr is set;
                    // otherwise it is folded into the regular output stream
                    if (!optionsNonNull.silent &&
                        optionsNonNull.errStream &&
                        optionsNonNull.outStream) {
                        const s = optionsNonNull.failOnStdErr
                            ? optionsNonNull.errStream
                            : optionsNonNull.outStream;
                        s.write(data);
                    }
                    this._processLineBuffer(data, errbuffer, (line) => {
                        if (this.options.listeners && this.options.listeners.errline) {
                            this.options.listeners.errline(line);
                        }
                    });
                });
            }
            cp.on('error', (err) => {
                // spawn failure (e.g. tool not found): mark everything terminal
                state.processError = err.message;
                state.processExited = true;
                state.processClosed = true;
                state.CheckComplete();
            });
            cp.on('exit', (code) => {
                // process has exited, but its stdio streams may still be open
                state.processExitCode = code;
                state.processExited = true;
                this._debug(`Exit code ${code} received from tool '${this.toolPath}'`);
                state.CheckComplete();
            });
            cp.on('close', (code) => {
                // stdio fully closed -- safe to finish
                state.processExitCode = code;
                state.processExited = true;
                state.processClosed = true;
                this._debug(`STDIO streams have closed for tool '${this.toolPath}'`);
                state.CheckComplete();
            });
            state.on('done', (error, exitCode) => {
                // flush buffered partial lines (see NOTE(review) above)
                if (stdbuffer.length > 0) {
                    this.emit('stdline', stdbuffer);
                }
                if (errbuffer.length > 0) {
                    this.emit('errline', errbuffer);
                }
                cp.removeAllListeners();
                if (error) {
                    reject(error);
                }
                else {
                    resolve(exitCode);
                }
            });
            // pipe any provided input into the child's stdin
            if (this.options.input) {
                if (!cp.stdin) {
                    throw new Error('child process missing stdin');
                }
                cp.stdin.end(this.options.input);
            }
        });
    });
}
}
exports . ToolRunner = ToolRunner ;
/**
 * Convert an arg string to an array of args. Handles escaping
 *
 * @param    argString   string of arguments
 * @returns  string[]    array of arguments
 */
function argStringToArray(argString) {
    const result = [];
    let insideQuotes = false;
    let pendingEscape = false;
    let current = '';
    // Commit `ch` onto the current argument, resolving a pending backslash.
    // Only a double quote is actually escapable; for any other character the
    // backslash is kept (so \\ inside quotes passes through as two characters).
    const take = (ch) => {
        if (pendingEscape && ch !== '"') {
            current += '\\';
        }
        current += ch;
        pendingEscape = false;
    };
    for (let pos = 0; pos < argString.length; pos++) {
        const ch = argString.charAt(pos);
        if (ch === '"') {
            if (pendingEscape) {
                take(ch); // escaped quote becomes a literal quote
            }
            else {
                insideQuotes = !insideQuotes; // bare quote toggles quoting mode
            }
            continue;
        }
        if (ch === '\\' && pendingEscape) {
            take(ch);
            continue;
        }
        if (ch === '\\' && insideQuotes) {
            pendingEscape = true; // escapes only start inside quotes
            continue;
        }
        if (ch === ' ' && !insideQuotes) {
            // unquoted space terminates the current argument
            if (current.length > 0) {
                result.push(current);
                current = '';
            }
            continue;
        }
        take(ch);
    }
    if (current.length > 0) {
        // NOTE: only the final argument is trimmed (mirrors the original
        // behavior); space-terminated arguments above are pushed untrimmed.
        result.push(current.trim());
    }
    return result;
}
exports . argStringToArray = argStringToArray ;
class ExecState extends events.EventEmitter {
    /**
     * Tracks the lifecycle of a spawned process and decides when the overall
     * invocation is finished, emitting 'done' exactly once with (error, exitCode).
     *
     * @param options   cloned exec options (delay, ignoreReturnCode, failOnStdErr)
     * @param toolPath  path of the tool being executed (used in error messages)
     * @throws Error when toolPath is empty
     */
    constructor(options, toolPath) {
        super();
        this.processClosed = false; // tracks whether the process has exited and stdio is closed
        this.processError = '';
        this.processExitCode = 0;
        this.processExited = false; // tracks whether the process has exited
        this.processStderr = false; // tracks whether stderr was written to
        this.delay = 10000; // 10 seconds
        this.done = false;
        this.timeout = null;
        if (!toolPath) {
            throw new Error('toolPath must not be empty');
        }
        this.options = options;
        this.toolPath = toolPath;
        if (options.delay) {
            this.delay = options.delay;
        }
    }
    /** Re-evaluate completion after any process event. Safe to call repeatedly. */
    CheckComplete() {
        if (this.done) {
            return; // result already reported
        }
        if (this.processClosed) {
            // stdio has closed -> finish immediately
            this._setResult();
            return;
        }
        if (this.processExited) {
            // exited but stdio still open: allow `delay` ms for inherited streams to close
            this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this);
        }
    }
    _debug(message) {
        this.emit('debug', message);
    }
    /** Finalize: cancel any pending timeout, derive the error (if any), emit 'done'. */
    _setResult() {
        // clear the timeout
        if (this.timeout) {
            clearTimeout(this.timeout);
            this.timeout = null;
        }
        // determine whether there is an error
        let error;
        if (this.processExited) {
            if (this.processError) {
                error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`);
            }
            else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) {
                error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`);
            }
            else if (this.processStderr && this.options.failOnStdErr) {
                error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`);
            }
        }
        this.done = true;
        this.emit('done', error, this.processExitCode);
    }
    /** Timeout callback: stdio never closed after exit; report anyway. */
    static HandleTimeout(state) {
        if (state.done) {
            return;
        }
        if (!state.processClosed && state.processExited) {
            const message = `The STDIO streams did not close within ${state.delay /
                1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`;
            state._debug(message);
        }
        state._setResult();
    }
}
//# sourceMappingURL=toolrunner.js.map
/***/ } ) ,
/***/ 174 :
/***/ ( function ( module ) {
"use strict" ;
// Generated data for sbcs codec. Don't edit manually. Regenerate using generation/gen-sbcs.js script.
module . exports = {
"437" : "cp437" ,
"737" : "cp737" ,
"775" : "cp775" ,
"850" : "cp850" ,
"852" : "cp852" ,
"855" : "cp855" ,
"856" : "cp856" ,
"857" : "cp857" ,
"858" : "cp858" ,
"860" : "cp860" ,
"861" : "cp861" ,
"862" : "cp862" ,
"863" : "cp863" ,
"864" : "cp864" ,
"865" : "cp865" ,
"866" : "cp866" ,
"869" : "cp869" ,
"874" : "windows874" ,
"922" : "cp922" ,
"1046" : "cp1046" ,
"1124" : "cp1124" ,
"1125" : "cp1125" ,
"1129" : "cp1129" ,
"1133" : "cp1133" ,
"1161" : "cp1161" ,
"1162" : "cp1162" ,
"1163" : "cp1163" ,
"1250" : "windows1250" ,
"1251" : "windows1251" ,
"1252" : "windows1252" ,
"1253" : "windows1253" ,
"1254" : "windows1254" ,
"1255" : "windows1255" ,
"1256" : "windows1256" ,
"1257" : "windows1257" ,
"1258" : "windows1258" ,
"28591" : "iso88591" ,
"28592" : "iso88592" ,
"28593" : "iso88593" ,
"28594" : "iso88594" ,
"28595" : "iso88595" ,
"28596" : "iso88596" ,
"28597" : "iso88597" ,
"28598" : "iso88598" ,
"28599" : "iso88599" ,
"28600" : "iso885910" ,
"28601" : "iso885911" ,
"28603" : "iso885913" ,
"28604" : "iso885914" ,
"28605" : "iso885915" ,
"28606" : "iso885916" ,
"windows874" : {
"type" : "_sbcs" ,
"chars" : "€<> <E282AC> <EFBFBD> <EFBFBD> …<EFBFBD> <E280A6> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> ‘ ’ “”•– —<E28093> <E28094> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู<E0B8B9> <E0B8BA> <EFBFBD> <EFBFBD> ฿เแโใไๅๆ็่้๊๋์ํ๎๏๐ ๑๒๓๔๕๖๗๘๙๚๛<E0B99A> <E0B99B> <EFBFBD> <EFBFBD> "
} ,
"win874" : "windows874" ,
"cp874" : "windows874" ,
"windows1250" : {
"type" : "_sbcs" ,
"chars" : "€<> ‚ <EFBFBD> „…†‡<E280A0> ‰Š‹ ŚŤŽŹ<C5BD> ‘ ’ “”•– —<E28093> ™š› śťžź ˇ˘Ł¤Ą¦§¨©Ş«¬ ®Ż°±˛ ł´ µ¶·¸ ąş»Ľ˝ľżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ× ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙"
} ,
"win1250" : "windows1250" ,
"cp1250" : "windows1250" ,
"windows1251" : {
"type" : "_sbcs" ,
"chars" : "ЂЃ‚ ѓ„…†‡€‰Љ‹ ЊЌЋЏђ‘ ’ “”•– —<E28093> ™љ› њќћџ ЎўЈ ¤Ґ¦§Ё©Є«¬ ®Ї°±І і ґµ¶·ё№є»ј Ѕ ѕ їА БВ ГДЕ ЖЗ ИЙК ЛМ Н О ПР С Т У ФХ ЦЧШЩЪЫЬ ЭЮЯа б вг де жзийклмно пр с ту фх цчшщъыьэюя"
} ,
"win1251" : "windows1251" ,
"cp1251" : "windows1251" ,
"windows1252" : {
"type" : "_sbcs" ,
"chars" : "€<> ‚ ƒ„…†‡ˆ ‰Š‹ Œ<E280B9> Ž<EFBFBD> <C5BD> ‘ ’ “”•– —˜ ™š› œ<E280BA> žŸ ¡¢£¤¥¦§¨©ª«¬ ®¯°±²³´ µ¶·¸ ¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ× ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
} ,
"win1252" : "windows1252" ,
"cp1252" : "windows1252" ,
"windows1253" : {
"type" : "_sbcs" ,
"chars" : "€<> ‚ ƒ„…†‡<E280A0> ‰<EFBFBD> ‹ <EFBFBD> <E280B9> <EFBFBD> <EFBFBD> <EFBFBD> ‘ ’ “”•– —<E28093> ™<EFBFBD> › <EFBFBD> <E280BA> <EFBFBD> <EFBFBD> ΅Ά£¤¥¦§¨©<C2A8> «¬ ®―°±²³΄ µ¶·ΈΉΊ»Ό½ΎΏΐΑ Β ΓΔΕ Ζ Η ΘΙ Κ ΛΜ Ν ΞΟ ΠΡ <CEA0> ΣΤ Υ ΦΧ ΨΩΪΫάέήίΰα βγ δεζηθι κλμν ξο πρ ςσ τυ φχψωϊϋόύώ<CF8D> "
} ,
"win1253" : "windows1253" ,
"cp1253" : "windows1253" ,
"windows1254" : {
"type" : "_sbcs" ,
"chars" : "€<> ‚ ƒ„…†‡ˆ ‰Š‹ Œ<E280B9> <C592> <EFBFBD> <EFBFBD> ‘ ’ “”•– —˜ ™š› œ<E280BA> <C593> Ÿ ¡¢£¤¥¦§¨©ª«¬ ®¯°±²³´ µ¶·¸ ¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖ× ØÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüı şÿ"
} ,
"win1254" : "windows1254" ,
"cp1254" : "windows1254" ,
"windows1255" : {
"type" : "_sbcs" ,
"chars" : "€<> ‚ ƒ„…†‡ˆ ‰<CB86> ‹ <EFBFBD> <E280B9> <EFBFBD> <EFBFBD> <EFBFBD> ‘ ’ “”•– —˜ ™<CB9C> › <EFBFBD> <E280BA> <EFBFBD> <EFBFBD> ¡¢£₪¥¦§¨©× «¬ ®¯°±²³´ µ¶·¸ ¹÷»¼½¾¿ְֱֲֳִֵֶַָֹֺֻּֽ־ֿ׀ ׁׂ׃ װױײ׳ ״<D7B3> <D7B4> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> אבגדהו זחט י ךכלםמן נס עףפץצקרשת<D7A9> <D7AA> <E2808E> "
} ,
"win1255" : "windows1255" ,
"cp1255" : "windows1255" ,
"windows1256" : {
"type" : "_sbcs" ,
"chars" : "€پ‚ƒ„…†‡ˆ‰ٹ‹Œچژڈگ‘’“”•–—ک™ڑ›œں ،¢£¤¥¦§¨©ھ«¬®¯°±²³´µ¶·¸¹؛»¼½¾؟ہءآأؤإئابةتثجحخدذرزسشصض×طظعغـفقكàلâمنهوçèéêëىيîïًٌٍَôُِ÷ّùْûüے"
} ,
"win1256" : "windows1256" ,
"cp1256" : "windows1256" ,
"windows1257" : {
"type" : "_sbcs" ,
"chars" : "€<> ‚ <EFBFBD> „…†‡<E280A0> ‰<EFBFBD> ‹ <EFBFBD> ¨ˇ¸ <CB87> ‘ ’ “”•– —<E28093> ™<EFBFBD> › <EFBFBD> ¯˛ <C2AF> <EFBFBD> ¢£¤<C2A3> ¦§Ø©Ŗ«¬ ®Æ°±²³´ µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ× ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž˙"
} ,
"win1257" : "windows1257" ,
"cp1257" : "windows1257" ,
"windows1258" : {
"type" : "_sbcs" ,
"chars" : "€<> ‚ ƒ„…†‡ˆ ‰<CB86> ‹ Œ<E280B9> <C592> <EFBFBD> <EFBFBD> ‘ ’ “”•– —˜ ™<CB9C> › œ<E280BA> <C593> Ÿ ¡¢£¤¥¦§¨©ª«¬ ®¯°±²³´ µ¶·¸ ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖ× ØÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ"
} ,
"win1258" : "windows1258" ,
"cp1258" : "windows1258" ,
"iso88591" : {
"type" : "_sbcs" ,
"chars" : "
¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
} ,
"cp28591" : "iso88591" ,
"iso88592" : {
"type" : "_sbcs" ,
"chars" : "
Ą˘Ł¤ĽŚ§¨ŠŞŤŹŽŻ°ą˛ł´ľśˇ¸šşťź˝žżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙"
} ,
"cp28592" : "iso88592" ,
"iso88593" : {
"type" : "_sbcs" ,
"chars" : "
Ħ˘£¤<C2A3> Ĥ§¨İŞĞĴ <C4B4> ݰħ²³´ µĥ·¸ ı şğĵ½<C4B5> żÀÁÂ<C381> ÄĊĈÇÈÉÊËÌÍÎÏ<C38E> ÑÒÓÔĠÖ× ĜÙÚÛÜŬŜßàáâ<C3A1> äċĉçèéêëìíîï<C3AE> ñòóôġö÷ĝùúûüŭŝ˙"
} ,
"cp28593" : "iso88593" ,
"iso88594" : {
"type" : "_sbcs" ,
"chars" : "
ĄĸŖ¤Ĩϧ¨ŠĒĢŦޝ°ą˛ŗ´ĩšēģŧŊžŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎĪĐŅŌĶÔÕÖרŲÚÛÜŨŪßāáâãäåæįčéęëėíîīđņōķôõö÷øųúûüũū˙"
} ,
"cp28594" : "iso88594" ,
"iso88595" : {
"type" : "_sbcs" ,
"chars" : "
ЁЂЃЄЅІЇЈЉЊЋЌЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђѓєѕіїјљњћќ§ўџ"
} ,
"cp28595" : "iso88595" ,
"iso88596" : {
"type" : "_sbcs" ,
"chars" : "
<C29F> <C2A0> <EFBFBD> ¤<EFBFBD> <C2A4> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> ، <D88C> <C2AD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> ؛<EFBFBD> <D89B> <EFBFBD> ؟<EFBFBD> ءآأؤإئا بةتثجحخدذرزسشصضطظعغ<D8B9> <D8BA> <EFBFBD> <EFBFBD> <EFBFBD> ـفقكلمنه وىيًٌٍَُِّْ<D991> <D992> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> "
} ,
"cp28596" : "iso88596" ,
"iso88597" : {
"type" : "_sbcs" ,
"chars" : "
‘ ’ £€₯¦§¨©ͺ «¬ <C2AC> ―°±²³΄ ΅Ά·ΈΉΊ»Ό½ΎΏΐΑ Β ΓΔΕ Ζ Η ΘΙ Κ ΛΜ Ν ΞΟ ΠΡ <CEA0> ΣΤ Υ ΦΧ ΨΩΪΫάέήίΰα βγ δεζηθι κλμν ξο πρ ςσ τυ φχψωϊϋόύώ<CF8D> "
} ,
"cp28597" : "iso88597" ,
"iso88598" : {
"type" : "_sbcs" ,
"chars" : "
<C29F> ¢£¤¥¦§¨©× «¬ ®¯°±²³´ µ¶·¸ ¹÷»¼½¾<C2BD> <C2BE> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> ‗אבגדהו זחט י ךכלםמן נס עףפץצקרשת<D7A9> <D7AA> <E2808E> "
} ,
"cp28598" : "iso88598" ,
"iso88599" : {
"type" : "_sbcs" ,
"chars" : "
¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖרÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ"
} ,
"cp28599" : "iso88599" ,
"iso885910" : {
"type" : "_sbcs" ,
"chars" : "
ĄĒĢĪĨͧĻĐŠŦŽŪŊ°ąēģīĩķ·ļđšŧž―ūŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎÏÐŅŌÓÔÕÖŨØŲÚÛÜÝÞßāáâãäåæįčéęëėíîïðņōóôõöũøųúûüýþĸ"
} ,
"cp28600" : "iso885910" ,
"iso885911" : {
"type" : "_sbcs" ,
"chars" : "
กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู<E0B8B9> <E0B8BA> <EFBFBD> <EFBFBD> ฿เแโใไๅๆ็่้๊๋์ํ๎๏๐ ๑๒๓๔๕๖๗๘๙๚๛<E0B99A> <E0B99B> <EFBFBD> <EFBFBD> "
} ,
"cp28601" : "iso885911" ,
"iso885913" : {
"type" : "_sbcs" ,
"chars" : "
”¢£¤„¦§Ø©Ŗ«¬®Æ°±²³“µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž’"
} ,
"cp28603" : "iso885913" ,
"iso885914" : {
"type" : "_sbcs" ,
"chars" : "
Ḃḃ£ĊċḊ§Ẁ©ẂḋỲ®ŸḞḟĠġṀṁ¶ṖẁṗẃṠỳẄẅṡÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŴÑÒÓÔÕÖṪØÙÚÛÜÝŶßàáâãäåæçèéêëìíîïŵñòóôõöṫøùúûüýŷÿ"
} ,
"cp28604" : "iso885914" ,
"iso885915" : {
"type" : "_sbcs" ,
"chars" : "
¡¢£€¥Š§š©ª«¬®¯°±²³Žµ¶·ž¹º»ŒœŸ¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
} ,
"cp28605" : "iso885915" ,
"iso885916" : {
"type" : "_sbcs" ,
"chars" : "
ĄąŁ€„Чš©Ș«ŹźŻ°±ČłŽ”¶·žčș»ŒœŸżÀÁÂĂÄĆÆÇÈÉÊËÌÍÎÏĐŃÒÓÔŐÖŚŰÙÚÛÜĘȚßàáâăäćæçèéêëìíîïđńòóôőöśűùúûüęțÿ"
} ,
"cp28606" : "iso885916" ,
"cp437" : {
"type" : "_sbcs" ,
"chars" : "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
} ,
"ibm437" : "cp437" ,
"csibm437" : "cp437" ,
"cp737" : {
"type" : "_sbcs" ,
"chars" : "ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩαβγδεζηθικλμνξοπρσςτυφχψ░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀ωάέήϊίόύϋώΆΈΉΊΌΎΏ±≥≤ΪΫ÷≈°∙·√ⁿ²■ "
} ,
"ibm737" : "cp737" ,
"csibm737" : "cp737" ,
"cp775" : {
"type" : "_sbcs" ,
"chars" : "ĆüéāäģåćłēŖŗīŹÄÅÉæÆōöĢ¢ŚśÖÜø£Ø×¤ĀĪóŻżź”¦©®¬½¼Ł«»░▒▓│┤ĄČĘĖ╣║╗╝ĮŠ┐└┴┬├─┼ŲŪ╚╔╩╦╠═╬Žąčęėįšųūž┘┌█▄▌▐▀ÓßŌŃõÕµńĶķĻļņĒŅ’±“¾¶§÷„°∙·¹³²■ "
} ,
"ibm775" : "cp775" ,
"csibm775" : "cp775" ,
"cp850" : {
"type" : "_sbcs" ,
"chars" : "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø×ƒáíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈıÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´±‗¾¶§÷¸°¨·¹³²■ "
} ,
"ibm850" : "cp850" ,
"csibm850" : "cp850" ,
"cp852" : {
"type" : "_sbcs" ,
"chars" : "ÇüéâäůćçłëŐőîŹÄĆÉĹĺôöĽľŚśÖÜŤťŁ×čáíóúĄąŽžĘ꬟Ⱥ«»░▒▓│┤ÁÂĚŞ╣║╗╝Żż┐└┴┬├─┼Ăă╚╔╩╦╠═╬¤đĐĎËďŇÍÎě┘┌█▄ŢŮ▀ÓßÔŃńňŠšŔÚŕŰýÝţ´˝˛ˇ˘§÷¸°¨˙űŘř■ "
} ,
"ibm852" : "cp852" ,
"csibm852" : "cp852" ,
"cp855" : {
"type" : "_sbcs" ,
"chars" : "ђЂѓЃёЁєЄѕЅіІїЇјЈљЉњЊћЋќЌўЎџЏюЮъЪаАбБцЦдДеЕфФгГ«»░▒▓│┤хХиИ╣║╗╝йЙ┐└┴┬├─┼кК╚╔╩╦╠═╬¤лЛмМнНоОп┘┌█▄Пя▀ЯрРсСтТуУжЖвВьЬ№ыЫзЗшШэЭщЩчЧ§■ "
} ,
"ibm855" : "cp855" ,
"csibm855" : "cp855" ,
"cp856" : {
"type" : "_sbcs" ,
"chars" : "אבגדהו זחט י ךכלםמן נס עףפץצקרשת<D7A9> £<EFBFBD> × <EFBFBD> <C397> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> ®¬½¼<C2BD> «»░▒▓│┤<E29482> <E294A4> <EFBFBD> ©╣║╗╝¢¥┐└┴┬├─┼<E29480> <E294BC> ╚╔╩╦╠═╬¤<E295AC> <C2A4> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> ┘┌█▄¦<E29684> ▀<EFBFBD> <E29680> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> µ<EFBFBD> <C2B5> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> ¯´ ±‗¾¶§÷¸ °¨·¹³²■ "
} ,
"ibm856" : "cp856" ,
"csibm856" : "cp856" ,
"cp857" : {
"type" : "_sbcs" ,
"chars" : "Çüéâäàåçêëèïîı ÄÅÉæÆôöòûùİÖÜø£ØŞşáíóúñÑĞ𿮬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ºªÊËÈ<C38B> ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµ<C395> × ÚÛÙìÿ¯´ ±<C2AD> ¾¶§÷¸ °¨·¹³²■ "
} ,
"ibm857" : "cp857" ,
"csibm857" : "cp857" ,
"cp858" : {
"type" : "_sbcs" ,
"chars" : "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø×ƒáíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈ€ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´±‗¾¶§÷¸°¨·¹³²■ "
} ,
"ibm858" : "cp858" ,
"csibm858" : "cp858" ,
"cp860" : {
"type" : "_sbcs" ,
"chars" : "ÇüéâãàÁçêÊèÍÔìÃÂÉÀÈôõòÚùÌÕÜ¢£Ù₧ÓáíóúñѪº¿Ò¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
} ,
"ibm860" : "cp860" ,
"csibm860" : "cp860" ,
"cp861" : {
"type" : "_sbcs" ,
"chars" : "ÇüéâäàåçêëèÐðÞÄÅÉæÆôöþûÝýÖÜø£Ø₧ƒáíóúÁÍÓÚ¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
} ,
"ibm861" : "cp861" ,
"csibm861" : "cp861" ,
"cp862" : {
"type" : "_sbcs" ,
"chars" : "אבגדהוזחטיךכלםמןנסעףפץצקרשת¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
} ,
"ibm862" : "cp862" ,
"csibm862" : "cp862" ,
"cp863" : {
"type" : "_sbcs" ,
"chars" : "ÇüéâÂà¶çêëèïî‗À§ÉÈÊôËÏûù¤ÔÜ¢£ÙÛƒ¦´óú¨¸³¯Î⌐¬½¼¾«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
} ,
"ibm863" : "cp863" ,
"csibm863" : "cp863" ,
"cp864" : {
"type" : "_sbcs" ,
"chars" : "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$٪&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ °·∙√▒─│┼┤┬├┴┐┌└┘β∞φ±½¼≈«»ﻷﻸ<EFBBB7> <EFBBB8> ﻻﻼ<EFBBBB> ﺂ£¤ﺄ<C2A4> <EFBA84> ﺎ ﺏﺕﺙ،ﺝﺡﺥ٠ ١ ٢٣٤٥ ٦٧ ٨٩ﻑ؛ﺱﺵﺹ؟¢ﺀﺁﺃﺅﻊﺋﺍ ﺑﺓﺗﺛﺟﺣﺧﺩﺫﺭﺯﺳﺷﺻﺿﻁﻅﻋﻏ¦¬÷× ﻉـﻓﻗﻛﻟﻣﻧﻫ ﻭﻯﻳﺽﻌﻎﻍﻡﹽّﻥﻩ ﻬ ﻰﻲﻐﻕﻵﻶﻝﻙﻱ■<EFBBB1> "
} ,
"ibm864" : "cp864" ,
"csibm864" : "cp864" ,
"cp865" : {
"type" : "_sbcs" ,
"chars" : "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø₧ƒáíóúñѪº¿⌐¬½¼¡«¤░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
} ,
"ibm865" : "cp865" ,
"csibm865" : "cp865" ,
"cp866" : {
"type" : "_sbcs" ,
"chars" : "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№¤■ "
} ,
"ibm866" : "cp866" ,
"csibm866" : "cp866" ,
"cp869" : {
"type" : "_sbcs" ,
"chars" : "<22> <> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> Ά<EFBFBD> ·¬¦‘ ’ Έ―ΉΊΪΌ<CEAA> <CE8C> ΎΫ©Ώ²³ά£έήίϊΐόύΑ Β ΓΔΕ Ζ Η ½ΘΙ «»░▒▓│┤Κ ΛΜ Ν ╣║╗╝ΞΟ ┐└┴┬├─┼ΠΡ ╚╔╩╦╠═╬ΣΤ Υ ΦΧ ΨΩα βγ ┘┌█▄δε▀ζηθι κλμν ξο πρ σ ςτ΄ ±υ φχ§ψ΅°¨ωϋΰώ■ "
} ,
"ibm869" : "cp869" ,
"csibm869" : "cp869" ,
"cp922" : {
"type" : "_sbcs" ,
"chars" : "
¡¢£¤¥¦§¨©ª«¬®‾°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŠÑÒÓÔÕÖרÙÚÛÜÝŽßàáâãäåæçèéêëìíîïšñòóôõö÷øùúûüýžÿ"
} ,
"ibm922" : "cp922" ,
"csibm922" : "cp922" ,
"cp1046" : {
"type" : "_sbcs" ,
"chars" : "ﺈ× ÷ﹱ ■│─┐┌└┘ﹹﹻﹽﹿﹷﺊﻰﻳﻲﻎﻏﻐﻶﻸﻺﻼ ¤ﺋﺑﺗﺛﺟﺣ، ﺧﺳ٠ ١ ٢٣٤٥ ٦٧ ٨٩ﺷ؛ﺻﺿﻊ؟ﻋءآأؤإئا بةتثجحخدذرزسشصضطﻇعغﻌﺂﺄﺎ ﻓـفقكلمنه وىيًٌٍَُِّْﻗﻛﻟﻵﻷﻹﻻﻣﻧﻬ ﻩ <EFBBAC> "
} ,
"ibm1046" : "cp1046" ,
"csibm1046" : "cp1046" ,
"cp1124" : {
"type" : "_sbcs" ,
"chars" : "
ЁЂҐЄЅІЇЈЉЊЋЌЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђґєѕіїјљњћќ§ўџ"
} ,
"ibm1124" : "cp1124" ,
"csibm1124" : "cp1124" ,
"cp1125" : {
"type" : "_sbcs" ,
"chars" : "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёҐґЄєІіЇї·√№¤■ "
} ,
"ibm1125" : "cp1125" ,
"csibm1125" : "cp1125" ,
"cp1129" : {
"type" : "_sbcs" ,
"chars" : "
¡¢£¤¥¦§œ©ª«¬®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖרÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ"
} ,
"ibm1129" : "cp1129" ,
"csibm1129" : "cp1129" ,
"cp1133" : {
"type" : "_sbcs" ,
"chars" : "
ກຂຄງຈສຊຍດຕຖທນບປຜຝພຟມຢຣລວຫອຮ<E0BAAD> <E0BAAE> <EFBFBD> ຯະາຳິີຶືຸູຼັົຽ<E0BABB> <E0BABD> <EFBFBD> ເແໂໃໄ່້໊໋໌ໍໆ<E0BB8D> ໜໝ₭<E0BB9D> <E282AD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> ໐ ໑໒໓໔໕໖໗໘໙<E0BB98> <E0BB99> ¢¬¦<C2AC> "
} ,
"ibm1133" : "cp1133" ,
"csibm1133" : "cp1133" ,
"cp1161" : {
"type" : "_sbcs" ,
"chars" : "<22> <> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> ่กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู้๊๋€฿เแโใไๅๆ็่้๊๋์ํ๎๏๐ ๑๒๓๔๕๖๗๘๙๚๛¢¬¦ "
} ,
"ibm1161" : "cp1161" ,
"csibm1161" : "cp1161" ,
"cp1162" : {
"type" : "_sbcs" ,
"chars" : "€ … ‘ ’ “”•– — กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู<E0B8B9> <E0B8BA> <EFBFBD> <EFBFBD> ฿เแโใไๅๆ็่้๊๋์ํ๎๏๐ ๑๒๓๔๕๖๗๘๙๚๛<E0B99A> <E0B99B> <EFBFBD> <EFBFBD> "
} ,
"ibm1162" : "cp1162" ,
"csibm1162" : "cp1162" ,
"cp1163" : {
"type" : "_sbcs" ,
"chars" : "
¡¢£€¥¦§œ©ª«¬®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖרÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ"
} ,
"ibm1163" : "cp1163" ,
"csibm1163" : "cp1163" ,
"maccroatian" : {
"type" : "_sbcs" ,
"chars" : "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®Š™´ ¨≠ŽØ∞±≤≥∆µ∂∑∏š∫ªºΩžø¿¡¬√ƒ≈ƫȅ ÀÃÕŒœĐ—“”‘ ’ ÷◊<C3B7> ©⁄ ¤‹ › Æ»– ·‚ „‰ÂćÁčÈÍÎÏÌÓÔđÒÚÛÙı ˆ ˜ ¯πË˚¸ Êæˇ"
} ,
"maccyrillic" : {
"type" : "_sbcs" ,
"chars" : "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°¢£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµ∂ЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤"
} ,
"macgreek" : {
"type" : "_sbcs" ,
"chars" : "Ĺ²É³ÖÜ΅àâä΄ ¨çéèê룙î‰ôö¦ ùûü†ΓΔΘΛΞΠß®©ΣΪ§≠°·Α ±≤≥¥Β Ε Ζ Η Ι Κ Μ ΦΫΨΩάΝ ¬Ο Ρ ≈Τ «»… Υ Χ ΆΈœ– ―“”‘ ’ ÷ΉΊΌΎέήίόΏύα βψδεφγ ηι ξκλμν ο πώρ σ τθωςχυ ζϊϋΐΰ<CE90> "
} ,
"maciceland" : {
"type" : "_sbcs" ,
"chars" : "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûüݰ¢£§•¶ß®©™´ ¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ– —“”‘ ’ ÷◊ÿŸ⁄ ¤ÐðÞþý·‚ „‰ÂÊÁËÈÍÎÏÌÓÔ<C393> ÒÚÛÙı ˆ ˜ ¯˘˙˚¸ ˝˛ ˇ"
} ,
"macroman" : {
"type" : "_sbcs" ,
"chars" : "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´ ¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ– —“”‘ ’ ÷◊ÿŸ⁄ ¤‹ › fifl‡·‚ „‰ÂÊÁËÈÍÎÏÌÓÔ<C393> ÒÚÛÙı ˆ ˜ ¯˘˙˚¸ ˝˛ ˇ"
} ,
"macromania" : {
"type" : "_sbcs" ,
"chars" : "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´ ¨≠ĂŞ∞±≤≥¥µ∂∑∏π∫ªºΩăş¿¡¬√ƒ≈∆«»… ÀÃÕŒœ– —“”‘ ’ ÷◊ÿŸ⁄ ¤‹ › Ţţ‡·‚ „‰ÂÊÁËÈÍÎÏÌÓÔ<C393> ÒÚÛÙı ˆ ˜ ¯˘˙˚¸ ˝˛ ˇ"
} ,
"macthai" : {
"type" : "_sbcs" ,
"chars" : "«»…“”<E2809D> •‘ ’ <E28098> กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู – —฿เแโใไๅๆ็่้๊๋์ํ™๏๐ ๑๒๓๔๕๖๗๘๙®©<C2AE> <C2A9> <EFBFBD> <EFBFBD> "
} ,
"macturkish" : {
"type" : "_sbcs" ,
"chars" : "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´ ¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ– —“”‘ ’ ÷◊ÿŸĞğİı Şş‡·‚ „‰ÂÊÁËÈÍÎÏÌÓÔ<C393> ÒÚÛÙ<C39B> ˆ ˜ ¯˘˙˚¸ ˝˛ ˇ"
} ,
"macukraine" : {
"type" : "_sbcs" ,
"chars" : "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°Ґ£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµґЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤"
} ,
"koi8r" : {
"type" : "_sbcs" ,
"chars" : "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ё╓╔╕╖╗╘╙╚╛╜╝╞╟╠╡Ё╢╣╤╥╦╧╨╩╪╫╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
} ,
"koi8u" : {
"type" : "_sbcs" ,
"chars" : "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґ╝╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪Ґ╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
} ,
"koi8ru" : {
"type" : "_sbcs" ,
"chars" : "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґў╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪ҐЎ©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
} ,
"koi8t" : {
"type" : "_sbcs" ,
"chars" : "қғ‚ Ғ„…†‡<E280A0> ‰ҳ‹ ҲҷҶ<D2B7> Қ‘ ’ “”•– —<E28093> ™<EFBFBD> › <EFBFBD> <E280BA> <EFBFBD> <EFBFBD> <EFBFBD> ӯӮё¤ӣ¦§<C2A6> <C2A7> <EFBFBD> «¬ ®<C2AD> °±²Ё<C2B2> Ӣ¶·<C2B6> №<EFBFBD> »<EFBFBD> <C2BB> <EFBFBD> ©юа б цде фг х ийклмно пяр с ту жвьызшэщчъЮА БЦДЕ ФГХ ИЙК ЛМ Н О ПЯР С Т У ЖВ Ь ЫЗ ШЭЩЧЪ"
} ,
"armscii8" : {
"type" : "_sbcs" ,
"chars" : "
<C29F> և։ )(»«—.՝ ,-֊…՜՛՞Աա ԲբԳգ ԴդԵեԶզ ԷէԸըԹթԺժԻիԼլԽխԾծԿկՀհ ՁձՂղՃճՄմՅյՆնՇշՈո ՉչՊպՋջՌռ Ս ս ՎվՏ տՐրՑց ՒւՓփՔք Օ օ Ֆֆ՚ <D686> "
} ,
"rk1048" : {
"type" : "_sbcs" ,
"chars" : "ЂЃ‚ ѓ„…†‡€‰Љ‹ ЊҚҺЏђ‘ ’ “”•– —<E28093> ™љ› њқһ џ ҰұӘ¤Ө¦§Ё©Ғ«¬ ®Ү °±І і өµ¶·ё№ғ»әҢңү А БВ ГДЕ ЖЗ ИЙК ЛМ Н О ПР С Т У ФХ ЦЧШЩЪЫЬ ЭЮЯа б вг де жзийклмно пр с ту фх цчшщъыьэюя"
} ,
"tcvn" : {
"type" : "_sbcs" ,
"chars" : "\u0000ÚỤ\u0003ỪỬỮ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010ỨỰỲỶỸÝỴ\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ÀẢÃÁẠẶẬÈẺẼÉẸỆÌỈĨÍỊÒỎÕÓỌỘỜỞỠỚỢÙỦŨ ĂÂÊÔƠƯĐăâêôơưđẶ̀̀̉̃́àảãáạẲằẳẵắẴẮẦẨẪẤỀặầẩẫấậèỂẻẽéẹềểễếệìỉỄẾỒĩíịòỔỏõóọồổỗốộờởỡớợùỖủũúụừửữứựỳỷỹýỵỐ"
} ,
"georgianacademy" : {
"type" : "_sbcs" ,
"chars" : "‚ƒ„…†‡ˆ‰Š‹Œ‘’“”•–—˜™š›œŸ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზთიკლმნოპჟრსტუფქღყშჩცძწჭხჯჰჱჲჳჴჵჶçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
} ,
"georgianps" : {
"type" : "_sbcs" ,
"chars" : "‚ƒ„…†‡ˆ‰Š‹Œ‘’“”•–—˜™š›œŸ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზჱთიკლმნჲოპჟრსტჳუფქღყშჩცძწჭხჴჯჰჵæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
} ,
"pt154" : {
"type" : "_sbcs" ,
"chars" : "ҖҒӮғ„…ҶҮҲүҠӢҢҚҺҸҗ‘’“”•–—ҳҷҡӣңқһҹ ЎўЈӨҘҰ§Ё©Ә«¬ӯ®Ҝ°ұІіҙө¶·ё№ә»јҪҫҝАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя"
} ,
"viscii" : {
"type" : "_sbcs" ,
"chars" : "\u0000\u0001Ẳ\u0003\u0004ẴẪ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013Ỷ\u0015\u0016\u0017\u0018Ỹ\u001a\u001b\u001c\u001dỴ\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ẠẮẰẶẤẦẨẬẼẸẾỀỂỄỆỐỒỔỖỘỢỚỜỞỊỎỌỈỦŨỤỲÕắằặấầẩậẽẹếềểễệốồổỗỠƠộờởịỰỨỪỬơớƯÀÁÂÃẢĂẳẵÈÉÊẺÌÍĨỳĐứÒÓÔạỷừửÙÚỹỵÝỡưàáâãảăữẫèéêẻìíĩỉđựòóôõỏọụùúũủýợỮ"
} ,
"iso646cn" : {
"type" : "_sbcs" ,
"chars" : "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#¥%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}‾ <E280BE> <7F> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> "
} ,
"iso646jp" : {
"type" : "_sbcs" ,
"chars" : "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[¥]^_`abcdefghijklmnopqrstuvwxyz{|}‾ <E280BE> <7F> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> "
} ,
"hproman8" : {
"type" : "_sbcs" ,
"chars" : "
ÀÂÈÊËÎÏ´ ˋ ˆ ¨˜ ÙÛ₤¯Ýý°ÇçÑñ¡¿¤£¥§ƒ¢âêôûáéóúàèòùäëöüÅîØÆåíøæÄìÖÜÉïßÔÁÃãÐðÍÌÓÒÕõŠšÚŸÿÞþ·µ¶¾—¼½ªº«■»±<C2BB> "
} ,
"macintosh" : {
"type" : "_sbcs" ,
"chars" : "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´ ¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ– —“”‘ ’ ÷◊ÿŸ⁄ ¤‹ › fifl‡·‚ „‰ÂÊÁËÈÍÎÏÌÓÔ<C393> ÒÚÛÙı ˆ ˜ ¯˘˙˚¸ ˝˛ ˇ"
} ,
"ascii" : {
"type" : "_sbcs" ,
"chars" : "<22> <> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> "
} ,
"tis620" : {
"type" : "_sbcs" ,
"chars" : "<22> <> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> <EFBFBD> กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู<E0B8B9> <E0B8BA> <EFBFBD> <EFBFBD> ฿เแโใไๅๆ็่้๊๋์ํ๎๏๐ ๑๒๓๔๕๖๗๘๙๚๛<E0B99A> <E0B99B> <EFBFBD> <EFBFBD> "
}
}
/***/ } ) ,
/***/ 179 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const SemVer = _ _webpack _require _ _ ( 88 )
const Range = _ _webpack _require _ _ ( 828 )
const gt = _ _webpack _require _ _ ( 123 )
const minVersion = ( range , loose ) => {
range = new Range ( range , loose )
let minver = new SemVer ( '0.0.0' )
if ( range . test ( minver ) ) {
return minver
}
minver = new SemVer ( '0.0.0-0' )
if ( range . test ( minver ) ) {
return minver
}
minver = null
for ( let i = 0 ; i < range . set . length ; ++ i ) {
const comparators = range . set [ i ]
comparators . forEach ( ( comparator ) => {
// Clone to avoid manipulating the comparator's semver object.
const compver = new SemVer ( comparator . semver . version )
switch ( comparator . operator ) {
case '>' :
if ( compver . prerelease . length === 0 ) {
compver . patch ++
} else {
compver . prerelease . push ( 0 )
}
compver . raw = compver . format ( )
/* fallthrough */
case '' :
case '>=' :
if ( ! minver || gt ( minver , compver ) ) {
minver = compver
}
break
case '<' :
case '<=' :
/* Ignore maximum versions */
break
/* istanbul ignore next */
default :
throw new Error ( ` Unexpected operation: ${ comparator . operator } ` )
}
} )
}
if ( minver && range . test ( minver ) ) {
return minver
}
return null
}
module . exports = minVersion
/***/ } ) ,
/***/ 185 :
/***/ ( function ( module ) {
module . exports = [ [ "0" , "\u0000" , 127 , "€" ] , [ "8140" , "丂丄丅丆丏丒丗丟丠両丣並丩丮丯丱丳丵丷丼乀乁乂乄乆乊乑乕乗乚乛乢乣乤乥乧乨乪" , 5 , "乲乴" , 9 , "乿" , 6 , "亇亊" ] , [ "8180" , "亐亖亗亙亜亝亞亣亪亯亰亱亴亶亷亸亹亼亽亾仈仌仏仐仒仚仛仜仠仢仦仧仩仭仮仯仱仴仸仹仺仼仾伀伂" , 6 , "伋伌伒" , 4 , "伜伝伡伣伨伩伬伭伮伱伳伵伷伹伻伾" , 4 , "佄佅佇" , 5 , "佒佔佖佡佢佦佨佪佫佭佮佱佲併佷佸佹佺佽侀侁侂侅來侇侊侌侎侐侒侓侕侖侘侙侚侜侞侟価侢" ] , [ "8240" , "侤侫侭侰" , 4 , "侶" , 8 , "俀俁係俆俇俈俉俋俌俍俒" , 4 , "俙俛俠俢俤俥俧俫俬俰俲俴俵俶俷俹俻俼俽俿" , 11 ] , [ "8280" , "個倎倐們倓倕倖倗倛倝倞倠倢倣値倧倫倯" , 10 , "倻倽倿偀偁偂偄偅偆偉偊偋偍偐" , 4 , "偖偗偘偙偛偝" , 7 , "偦" , 5 , "偭" , 8 , "偸偹偺偼偽傁傂傃傄傆傇傉傊傋傌傎" , 20 , "傤傦傪傫傭" , 4 , "傳" , 6 , "傼" ] , [ "8340" , "傽" , 17 , "僐" , 5 , "僗僘僙僛" , 10 , "僨僩僪僫僯僰僱僲僴僶" , 4 , "僼" , 9 , "儈" ] , [ "8380" , "儉儊儌" , 5 , "儓" , 13 , "儢" , 28 , "兂兇兊兌兎兏児兒兓兗兘兙兛兝" , 4 , "兣兤兦內兩兪兯兲兺兾兿冃冄円冇冊冋冎冏冐冑冓冔冘冚冝冞冟冡冣冦" , 4 , "冭冮冴冸冹冺冾冿凁凂凃凅凈凊凍凎凐凒" , 5 ] , [ "8440" , "凘凙凚凜凞凟凢凣凥" , 5 , "凬凮凱凲凴凷凾刄刅刉刋刌刏刐刓刔刕刜刞刟刡刢刣別刦刧刪刬刯刱刲刴刵刼刾剄" , 5 , "剋剎剏剒剓剕剗剘" ] , [ "8480" , "剙剚剛剝剟剠剢剣剤剦剨剫剬剭剮剰剱剳" , 9 , "剾劀劃" , 4 , "劉" , 6 , "劑劒劔" , 6 , "劜劤劥劦劧劮劯劰労" , 9 , "勀勁勂勄勅勆勈勊勌勍勎勏勑勓勔動勗務" , 5 , "勠勡勢勣勥" , 10 , "勱" , 7 , "勻勼勽匁匂匃匄匇匉匊匋匌匎" ] , [ "8540" , "匑匒匓匔匘匛匜匞匟匢匤匥匧匨匩匫匬匭匯" , 9 , "匼匽區卂卄卆卋卌卍卐協単卙卛卝卥卨卪卬卭卲卶卹卻卼卽卾厀厁厃厇厈厊厎厏" ] , [ "8580" , "厐" , 4 , "厖厗厙厛厜厞厠厡厤厧厪厫厬厭厯" , 6 , "厷厸厹厺厼厽厾叀參" , 4 , "収叏叐叒叓叕叚叜叝叞叡叢叧叴叺叾叿吀吂吅吇吋吔吘吙吚吜吢吤吥吪吰吳吶吷吺吽吿呁呂呄呅呇呉呌呍呎呏呑呚呝" , 4 , "呣呥呧呩" , 7 , "呴呹呺呾呿咁咃咅咇咈咉咊咍咑咓咗咘咜咞咟咠咡" ] , [ "8640" , "咢咥咮咰咲咵咶咷咹咺咼咾哃哅哊哋哖哘哛哠" , 4 , "哫哬哯哰哱哴" , 5 , "哻哾唀唂唃唄唅唈唊" , 4 , "唒唓唕" , 5 , "唜唝唞唟唡唥唦" ] , [ "8680" , "唨唩唫唭唲唴唵唶唸唹唺唻唽啀啂啅啇啈啋" , 4 , "啑啒啓啔啗" , 4 , "啝啞啟啠啢啣啨啩啫啯" , 5 , "啹啺啽啿喅喆喌喍喎喐喒喓喕喖喗喚喛喞喠" , 6 , "喨" , 8 , "喲喴営喸喺喼喿" , 4 , "嗆嗇嗈嗊嗋嗎嗏嗐嗕嗗" , 4 , "嗞嗠嗢嗧嗩嗭嗮嗰嗱嗴嗶嗸" , 4 , "嗿嘂嘃嘄嘅" ] , [ "8740" , "嘆嘇嘊嘋嘍嘐" , 7 , "嘙嘚嘜嘝嘠嘡嘢嘥嘦嘨嘩嘪嘫嘮嘯嘰嘳嘵嘷嘸嘺嘼嘽嘾噀" , 11 , "噏" , 4 , "噕噖噚噛噝" , 4 ] , [ "8780" , "噣噥噦噧噭噮噯噰噲噳噴噵噷噸噹噺噽" , 7 , "嚇" , 6 , "嚐嚑嚒嚔" , 14 , "嚤" , 10 , "嚰" , 6 , "嚸嚹嚺嚻嚽" , 12 , "囋" , 8 , "囕囖囘囙囜団囥" , 5 , "囬囮囯囲図囶囷囸囻囼圀圁圂圅圇國" , 6 ] , [ "8840" , "園" , 9 , "圝圞圠圡圢圤圥圦圧圫圱圲圴" , 4 , "圼圽圿坁坃坄坅坆坈坉坋坒" , 4 , "坘坙坢坣坥坧坬坮坰坱坲坴坵坸坹坺坽坾坿垀" ] , [ "8880" , "垁垇垈垉垊垍" , 4 , "垔" , 6 , "垜垝垞垟垥垨垪垬垯垰垱垳垵垶垷垹" , 8 , "埄" , 6 , "埌埍埐埑埓埖埗埛埜埞埡埢埣埥" , 7 , 
"埮埰埱埲埳埵埶執埻埼埾埿堁堃堄堅堈堉堊堌堎堏堐堒堓堔堖堗堘堚堛堜堝堟堢堣堥" , 4 , "堫" , 4 , "報堲堳場堶" , 7 ] , [ "8940" , "堾" , 5 , "塅" , 6 , "塎塏塐塒塓塕塖塗塙" , 4 , "塟" , 5 , "塦" , 4 , "塭" , 16 , "塿墂墄墆墇墈墊墋墌" ] , [ "8980" , "墍" , 4 , "墔" , 4 , "墛墜墝墠" , 7 , "墪" , 17 , "墽墾墿壀壂壃壄壆" , 10 , "壒壓壔壖" , 13 , "壥" , 5 , "壭壯壱売壴壵壷壸壺" , 7 , "夃夅夆夈" , 4 , "夎夐夑夒夓夗夘夛夝夞夠夡夢夣夦夨夬夰夲夳夵夶夻" ] , [ "8a40" , "夽夾夿奀奃奅奆奊奌奍奐奒奓奙奛" , 4 , "奡奣奤奦" , 12 , "奵奷奺奻奼奾奿妀妅妉妋妌妎妏妐妑妔妕妘妚妛妜妝妟妠妡妢妦" ] , [ "8a80" , "妧妬妭妰妱妳" , 5 , "妺妼妽妿" , 6 , " <EFBFBD> <EFBFBD>
/***/ } ) ,
/***/ 186 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript-emitted helper: drives a generator-based async function and
// returns a Promise for its completion (down-levelled async/await).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-emitted helper: emulates `import * as ns` for CommonJS
// modules by copying own properties and exposing the module as `default`.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const command _1 = _ _webpack _require _ _ ( 351 ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
/ * *
* The code to exit an action
* /
// TypeScript enum emit: builds a bidirectional map so both
// ExitCode.Success === 0 and ExitCode[0] === "Success" hold.
var ExitCode;
(function (ExitCode) {
    /**
     * A code indicating that the action was successful
     */
    ExitCode[ExitCode["Success"] = 0] = "Success";
    /**
     * A code indicating that the action was a failure
     */
    ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/**
 * Sets env variable for this action and future actions in the job.
 * Prefers writing to the GITHUB_ENV environment file over the deprecated
 * `set-env` workflow command (advisory GHSA-mfwh-5m23-j46w /
 * CVE-2020-15228); falls back to the command on older runners.
 * @param name the name of the variable to set
 * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) {
    const convertedVal = command_1.toCommandValue(val);
    process.env[name] = convertedVal;
    const filePath = process.env['GITHUB_ENV'] || '';
    if (filePath) {
        // Heredoc-style delimiter keeps multi-line values intact in the file.
        const delimiter = '_GitHubActionsFileCommandDelimeter_';
        const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
        require('fs').appendFileSync(filePath, `${commandValue}${os.EOL}`, { encoding: 'utf8' });
    }
    else {
        // Legacy fallback when the runner does not provide GITHUB_ENV.
        command_1.issueCommand('set-env', { name }, convertedVal);
    }
}
exports.exportVariable = exportVariable;
/**
 * Registers a secret which will get masked from logs.
 * Emits the `add-mask` workflow command so the runner redacts the value
 * from subsequent log output.
 * @param secret value of the secret
 */
function setSecret(secret) {
    command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
/**
 * Prepends inputPath to the PATH (for this action and future actions).
 * Prefers writing to the GITHUB_PATH environment file over the deprecated
 * `add-path` workflow command (advisory GHSA-mfwh-5m23-j46w /
 * CVE-2020-15228); falls back to the command on older runners.
 * @param inputPath
 */
function addPath(inputPath) {
    const filePath = process.env['GITHUB_PATH'] || '';
    if (filePath) {
        require('fs').appendFileSync(filePath, `${inputPath}${os.EOL}`, { encoding: 'utf8' });
    }
    else {
        // Legacy fallback when the runner does not provide GITHUB_PATH.
        command_1.issueCommand('add-path', {}, inputPath);
    }
    // Also update PATH for the current process immediately.
    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
/**
 * Gets the value of an input. The value is also trimmed.
 *
 * Inputs are read from the `INPUT_<NAME>` environment variable, with
 * spaces in the name replaced by underscores.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string
 */
function getInput(name, options) {
    const key = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    const raw = process.env[key] || '';
    if (options && options.required && !raw) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    return raw.trim();
}
exports . getInput = getInput ;
/**
 * Sets the value of an output.
 * Emits the `set-output` workflow command.
 *
 * @param name name of the output to set
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
    command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
/**
 * Enables or disables the echoing of commands into stdout for the rest of the step.
 * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
 */
function setCommandEcho(enabled) {
    // The `echo` command takes the literal strings 'on' / 'off'.
    command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1.
 * Sets process.exitCode first, then emits an `error` issue with the message.
 * @param message add error issue message
 */
function setFailed(message) {
    process.exitCode = ExitCode.Failure;
    error(message);
}
exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/**
 * Gets whether Actions Step Debug is on or not.
 * True exactly when the RUNNER_DEBUG environment variable equals '1'.
 */
function isDebug() {
    const flag = process.env['RUNNER_DEBUG'];
    return flag === '1';
}
exports . isDebug = isDebug ;
/**
 * Writes debug message to user log.
 * Emitted as the `debug` workflow command.
 * @param message debug message
 */
function debug(message) {
    command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
/**
 * Adds an error issue (the `error` workflow command).
 * @param message error issue message. Errors will be converted to string via toString()
 */
function error(message) {
    command_1.issue('error', message instanceof Error ? message.toString() : message);
}
exports.error = error;
/**
 * Adds a warning issue (the `warning` workflow command).
 * @param message warning issue message. Errors will be converted to string via toString()
 */
function warning(message) {
    command_1.issue('warning', message instanceof Error ? message.toString() : message);
}
exports.warning = warning;
/**
 * Writes info to log.
 * Written directly to stdout (not via a workflow command), terminated with
 * the platform-specific line ending.
 * @param message info message
 */
function info(message) {
    process.stdout.write(message + os.EOL);
}
exports.info = info;
/**
 * Begin an output group (the `group` workflow command).
 *
 * Output until the next `groupEnd` will be foldable in this group.
 *
 * @param name The name of the output group
 */
function startGroup(name) {
    command_1.issue('group', name);
}
exports.startGroup = startGroup;
/**
 * End an output group (the `endgroup` workflow command).
 */
function endGroup() {
    command_1.issue('endgroup');
}
exports.endGroup = endGroup;
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
function group(name, fn) {
    return __awaiter(this, void 0, void 0, function* () {
        startGroup(name);
        let result;
        try {
            result = yield fn();
        }
        finally {
            // Always close the group, even when fn() rejects.
            endGroup();
        }
        return result;
    });
}
exports.group = group;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/**
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 * Emitted as the `save-state` workflow command.
 *
 * @param name name of the state to store
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
    command_1.issueCommand('save-state', { name }, value);
}
exports.saveState = saveState;
/**
 * Gets the value of a state set by this action's main execution.
 * Read back from the `STATE_<name>` environment variable.
 *
 * @param name name of the state to get
 * @returns string (empty string when the state is not set)
 */
function getState(name) {
    const stored = process.env[`STATE_${name}`];
    return stored || '';
}
exports . getState = getState ;
//# sourceMappingURL=core.js.map
/***/ } ) ,
/***/ 193 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var Buffer = _ _webpack _require _ _ ( 937 ) . Buffer ;
// Single-byte codec. Needs a 'chars' string parameter that contains 256 or 128 chars that
// correspond to encoded bytes (if 128 - then lower half is ASCII).
exports . _sbcs = SBCSCodec ;
function SBCSCodec ( codecOptions , iconv ) {
if ( ! codecOptions )
throw new Error ( "SBCS codec is called without the data." )
// Prepare char buffer for decoding.
if ( ! codecOptions . chars || ( codecOptions . chars . length !== 128 && codecOptions . chars . length !== 256 ) )
throw new Error ( "Encoding '" + codecOptions . type + "' has incorrect 'chars' (must be of len 128 or 256)" ) ;
if ( codecOptions . chars . length === 128 ) {
var asciiString = "" ;
for ( var i = 0 ; i < 128 ; i ++ )
asciiString += String . fromCharCode ( i ) ;
codecOptions . chars = asciiString + codecOptions . chars ;
}
this . decodeBuf = Buffer . from ( codecOptions . chars , 'ucs2' ) ;
// Encoding buffer.
var encodeBuf = Buffer . alloc ( 65536 , iconv . defaultCharSingleByte . charCodeAt ( 0 ) ) ;
for ( var i = 0 ; i < codecOptions . chars . length ; i ++ )
encodeBuf [ codecOptions . chars . charCodeAt ( i ) ] = i ;
this . encodeBuf = encodeBuf ;
}
SBCSCodec . prototype . encoder = SBCSEncoder ;
SBCSCodec . prototype . decoder = SBCSDecoder ;
// Stateless single-byte encoder: maps each UTF-16 code unit through the
// codec's 65536-entry lookup table to exactly one output byte.
function SBCSEncoder(options, codec) {
    this.encodeBuf = codec.encodeBuf;
}

// Encode a string to a Buffer, one byte per input character.
SBCSEncoder.prototype.write = function(str) {
    var out = Buffer.alloc(str.length);
    var table = this.encodeBuf;
    for (var pos = 0; pos < str.length; pos++) {
        out[pos] = table[str.charCodeAt(pos)];
    }
    return out;
}

// Single-byte encodings carry no cross-call state; nothing to flush.
SBCSEncoder.prototype.end = function() {
}
// Stateless single-byte decoder: each input byte selects one UTF-16 code
// unit (2 bytes) from the codec's UCS-2 decode table.
function SBCSDecoder(options, codec) {
    this.decodeBuf = codec.decodeBuf;
}

SBCSDecoder.prototype.write = function(buf) {
    // Strings are immutable in JS -> assemble into a ucs2 buffer first.
    var table = this.decodeBuf;
    var out = Buffer.alloc(buf.length * 2);
    for (var i = 0; i < buf.length; i++) {
        var src = buf[i] * 2;
        var dst = i * 2;
        out[dst] = table[src];
        out[dst + 1] = table[src + 1];
    }
    return out.toString('ucs2');
}

// No cross-call state; nothing to flush.
SBCSDecoder.prototype.end = function() {
}
/***/ } ) ,
/***/ 194 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compare = __webpack_require__(309)
// semver: true when version `a` is strictly lower than `b`.
const lt = (a, b, loose) => compare(a, b, loose) < 0
module.exports = lt
/***/ } ) ,
/***/ 211 :
/***/ ( function ( module ) {
module . exports = require ( "https" ) ;
/***/ } ) ,
/***/ 219 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var net = _ _webpack _require _ _ ( 631 ) ;
var tls = _ _webpack _require _ _ ( 818 ) ;
var http = _ _webpack _require _ _ ( 605 ) ;
var https = _ _webpack _require _ _ ( 211 ) ;
var events = _ _webpack _require _ _ ( 614 ) ;
var assert = _ _webpack _require _ _ ( 357 ) ;
var util = _ _webpack _require _ _ ( 669 ) ;
exports . httpOverHttp = httpOverHttp ;
exports . httpsOverHttp = httpsOverHttp ;
exports . httpOverHttps = httpOverHttps ;
exports . httpsOverHttps = httpsOverHttps ;
function httpOverHttp ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = http . request ;
return agent ;
}
function httpsOverHttp ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = http . request ;
agent . createSocket = createSecureSocket ;
agent . defaultPort = 443 ;
return agent ;
}
function httpOverHttps ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = https . request ;
return agent ;
}
function httpsOverHttps ( options ) {
var agent = new TunnelingAgent ( options ) ;
agent . request = https . request ;
agent . createSocket = createSecureSocket ;
agent . defaultPort = 443 ;
return agent ;
}
function TunnelingAgent ( options ) {
var self = this ;
self . options = options || { } ;
self . proxyOptions = self . options . proxy || { } ;
self . maxSockets = self . options . maxSockets || http . Agent . defaultMaxSockets ;
self . requests = [ ] ;
self . sockets = [ ] ;
self . on ( 'free' , function onFree ( socket , host , port , localAddress ) {
var options = toOptions ( host , port , localAddress ) ;
for ( var i = 0 , len = self . requests . length ; i < len ; ++ i ) {
var pending = self . requests [ i ] ;
if ( pending . host === options . host && pending . port === options . port ) {
// Detect the request to connect same origin server,
// reuse the connection.
self . requests . splice ( i , 1 ) ;
pending . request . onSocket ( socket ) ;
return ;
}
}
socket . destroy ( ) ;
self . removeSocket ( socket ) ;
} ) ;
}
util . inherits ( TunnelingAgent , events . EventEmitter ) ;
TunnelingAgent . prototype . addRequest = function addRequest ( req , host , port , localAddress ) {
var self = this ;
var options = mergeOptions ( { request : req } , self . options , toOptions ( host , port , localAddress ) ) ;
if ( self . sockets . length >= this . maxSockets ) {
// We are over limit so we'll add it to the queue.
self . requests . push ( options ) ;
return ;
}
// If we are under maxSockets create a new one.
self . createSocket ( options , function ( socket ) {
socket . on ( 'free' , onFree ) ;
socket . on ( 'close' , onCloseOrRemove ) ;
socket . on ( 'agentRemove' , onCloseOrRemove ) ;
req . onSocket ( socket ) ;
function onFree ( ) {
self . emit ( 'free' , socket , options ) ;
}
function onCloseOrRemove ( err ) {
self . removeSocket ( socket ) ;
socket . removeListener ( 'free' , onFree ) ;
socket . removeListener ( 'close' , onCloseOrRemove ) ;
socket . removeListener ( 'agentRemove' , onCloseOrRemove ) ;
}
} ) ;
} ;
/**
 * Open a CONNECT tunnel through the proxy, then hand the raw socket to cb.
 * A placeholder object occupies a pool slot while the tunnel is being set
 * up; on failure the placeholder is removed and the request gets an
 * ECONNRESET-coded error.
 */
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    // Buffer.from replaces the deprecated, unsafe `new Buffer(string)`.
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade);   // for v0.6
  connectReq.once('connect', onConnect);   // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
        res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
        'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    if (head.length > 0) {
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    // Swap the placeholder for the live socket in the pool.
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
          cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
                          'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
/**
 * Drop a socket (or pending placeholder) from the pool and, when requests
 * are queued, immediately open a replacement socket for the next one so
 * the pool stays saturated.
 */
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var pos = this.sockets.indexOf(socket);
  if (pos === -1) {
    return;
  }
  this.sockets.splice(pos, 1);

  var pending = this.requests.shift();
  if (!pending) {
    return;
  }
  // A freed slot plus a queued request: create the replacement and hand
  // it straight to the waiting request.
  this.createSocket(pending, function(newSocket) {
    pending.request.onSocket(newSocket);
  });
};
function createSecureSocket ( options , cb ) {
var self = this ;
TunnelingAgent . prototype . createSocket . call ( self , options , function ( socket ) {
var hostHeader = options . request . getHeader ( 'host' ) ;
var tlsOptions = mergeOptions ( { } , self . options , {
socket : socket ,
servername : hostHeader ? hostHeader . replace ( /:.*$/ , '' ) : options . host
} ) ;
// 0 is dummy port for v0.6
var secureSocket = tls . connect ( 0 , tlsOptions ) ;
self . sockets [ self . sockets . indexOf ( socket ) ] = secureSocket ;
cb ( secureSocket ) ;
} ) ;
}
/**
 * Normalize addRequest arguments into an options object.
 * Modern callers (v0.11+) already pass an options object through
 * unchanged; the legacy (v0.10) positional signature is converted.
 */
function toOptions(host, port, localAddress) {
  if (typeof host !== 'string') {
    return host; // for v0.11 or later
  }
  // since v0.10: positional (host, port, localAddress)
  return {
    host: host,
    port: port,
    localAddress: localAddress
  };
}
/**
 * Shallow-merge any number of source objects into `target`, left to right.
 * Non-object sources are skipped; `undefined` values are skipped so they
 * cannot clobber earlier settings. Returns the mutated target.
 */
function mergeOptions(target, ...sources) {
  for (const source of sources) {
    if (typeof source !== 'object') {
      continue;
    }
    for (const key of Object.keys(source)) {
      if (source[key] !== undefined) {
        target[key] = source[key];
      }
    }
  }
  return target;
}
// Debug logging to stderr, enabled via NODE_DEBUG=tunnel; otherwise a no-op.
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  }
} else {
  debug = function() {};
}
exports.debug = debug; // for test
/***/ } ) ,
/***/ 223 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var wrappy = _ _webpack _require _ _ ( 940 )
module . exports = wrappy ( once )
module . exports . strict = wrappy ( onceStrict )
once . proto = once ( function ( ) {
Object . defineProperty ( Function . prototype , 'once' , {
value : function ( ) {
return once ( this )
} ,
configurable : true
} )
Object . defineProperty ( Function . prototype , 'onceStrict' , {
value : function ( ) {
return onceStrict ( this )
} ,
configurable : true
} )
} )
/**
 * Wrap fn so it runs at most once; later calls return the first result.
 * The wrapper exposes `.called` and, after the first call, `.value`.
 */
function once (fn) {
  var wrapped = function () {
    if (wrapped.called) {
      return wrapped.value
    }
    wrapped.called = true
    wrapped.value = fn.apply(this, arguments)
    return wrapped.value
  }
  wrapped.called = false
  return wrapped
}
/**
 * Like once(), but a second call throws instead of silently returning the
 * cached value. The error message can be customized via `.onceError`.
 */
function onceStrict (fn) {
  var wrapped = function () {
    if (wrapped.called) {
      throw new Error(wrapped.onceError)
    }
    wrapped.called = true
    wrapped.value = fn.apply(this, arguments)
    return wrapped.value
  }
  var label = fn.name || 'Function wrapped with `once`'
  wrapped.onceError = label + " shouldn't be called more than once"
  wrapped.called = false
  return wrapped
}
/***/ } ) ,
/***/ 234 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
// Unwrap a transpiled ES-module namespace object to its default export;
// pass every other value (CommonJS exports, primitives) through unchanged.
function _interopDefault (ex) {
  if (ex && typeof ex === 'object' && 'default' in ex) {
    return ex['default'];
  }
  return ex;
}
var endpoint = _ _webpack _require _ _ ( 440 ) ;
var universalUserAgent = _ _webpack _require _ _ ( 429 ) ;
var isPlainObject = _interopDefault ( _ _webpack _require _ _ ( 886 ) ) ;
var nodeFetch = _interopDefault ( _ _webpack _require _ _ ( 467 ) ) ;
var requestError = _ _webpack _require _ _ ( 537 ) ;
const VERSION = "5.4.7" ;
// Read a fetch Response body as an ArrayBuffer (used for binary payloads).
const getBufferResponse = (response) => response.arrayBuffer();
function fetchWrapper ( requestOptions ) {
if ( isPlainObject ( requestOptions . body ) || Array . isArray ( requestOptions . body ) ) {
requestOptions . body = JSON . stringify ( requestOptions . body ) ;
}
let headers = { } ;
let status ;
let url ;
const fetch = requestOptions . request && requestOptions . request . fetch || nodeFetch ;
return fetch ( requestOptions . url , Object . assign ( {
method : requestOptions . method ,
body : requestOptions . body ,
headers : requestOptions . headers ,
redirect : requestOptions . redirect
} , requestOptions . request ) ) . then ( response => {
url = response . url ;
status = response . status ;
for ( const keyAndValue of response . headers ) {
headers [ keyAndValue [ 0 ] ] = keyAndValue [ 1 ] ;
}
if ( status === 204 || status === 205 ) {
return ;
} // GitHub API returns 200 for HEAD requests
if ( requestOptions . method === "HEAD" ) {
if ( status < 400 ) {
return ;
}
throw new requestError . RequestError ( response . statusText , status , {
headers ,
request : requestOptions
} ) ;
}
if ( status === 304 ) {
throw new requestError . RequestError ( "Not modified" , status , {
headers ,
request : requestOptions
} ) ;
}
if ( status >= 400 ) {
return response . text ( ) . then ( message => {
const error = new requestError . RequestError ( message , status , {
headers ,
request : requestOptions
} ) ;
try {
let responseBody = JSON . parse ( error . message ) ;
Object . assign ( error , responseBody ) ;
let errors = responseBody . errors ; // Assumption `errors` would always be in Array format
error . message = error . message + ": " + errors . map ( JSON . stringify ) . join ( ", " ) ;
} catch ( e ) { // ignore, see octokit/rest.js#684
}
throw error ;
} ) ;
}
const contentType = response . headers . get ( "content-type" ) ;
if ( /application\/json/ . test ( contentType ) ) {
return response . json ( ) ;
}
if ( ! contentType || /^text\/|charset=utf-8$/ . test ( contentType ) ) {
return response . text ( ) ;
}
return getBufferResponse ( response ) ;
} ) . then ( data => {
return {
status ,
url ,
headers ,
data
} ;
} ) . catch ( error => {
if ( error instanceof requestError . RequestError ) {
throw error ;
}
throw new requestError . RequestError ( error . message , 500 , {
headers ,
request : requestOptions
} ) ;
} ) ;
}
function withDefaults ( oldEndpoint , newDefaults ) {
const endpoint = oldEndpoint . defaults ( newDefaults ) ;
const newApi = function ( route , parameters ) {
const endpointOptions = endpoint . merge ( route , parameters ) ;
if ( ! endpointOptions . request || ! endpointOptions . request . hook ) {
return fetchWrapper ( endpoint . parse ( endpointOptions ) ) ;
}
const request = ( route , parameters ) => {
return fetchWrapper ( endpoint . parse ( endpoint . merge ( route , parameters ) ) ) ;
} ;
Object . assign ( request , {
endpoint ,
defaults : withDefaults . bind ( null , endpoint )
} ) ;
return endpointOptions . request . hook ( request , endpointOptions ) ;
} ;
return Object . assign ( newApi , {
endpoint ,
defaults : withDefaults . bind ( null , endpoint )
} ) ;
}
const request = withDefaults ( endpoint . endpoint , {
headers : {
"user-agent" : ` octokit-request.js/ ${ VERSION } ${ universalUserAgent . getUserAgent ( ) } `
}
} ) ;
exports . request = request ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 274 :
/***/ ( function ( module ) {
module . exports = [ [ "0" , "\u0000" , 127 ] , [ "8141" , "갂갃갅갆갋" , 4 , "갘갞갟갡갢갣갥" , 6 , "갮갲갳갴" ] , [ "8161" , "갵갶갷갺갻갽갾갿걁" , 9 , "걌걎" , 5 , "걕" ] , [ "8181" , "걖걗걙걚걛걝" , 18 , "걲걳걵걶걹걻" , 4 , "겂겇겈겍겎겏겑겒겓겕" , 6 , "겞겢" , 5 , "겫겭겮겱" , 6 , "겺겾겿곀곂곃곅곆곇곉곊곋곍" , 7 , "곖곘" , 7 , "곢곣곥곦곩곫곭곮곲곴곷" , 4 , "곾곿괁괂괃괅괇" , 4 , "괎괐괒괓" ] , [ "8241" , "괔괕괖괗괙괚괛괝괞괟괡" , 7 , "괪괫괮" , 5 ] , [ "8261" , "괶괷괹괺괻괽" , 6 , "굆굈굊" , 5 , "굑굒굓굕굖굗" ] , [ "8281" , "굙" , 7 , "굢굤" , 7 , "굮굯굱굲굷굸굹굺굾궀궃" , 4 , "궊궋궍궎궏궑" , 10 , "궞" , 5 , "궥" , 17 , "궸" , 7 , "귂귃귅귆귇귉" , 6 , "귒귔" , 7 , "귝귞귟귡귢귣귥" , 18 ] , [ "8341" , "귺귻귽귾긂" , 5 , "긊긌긎" , 5 , "긕" , 7 ] , [ "8361" , "긝" , 18 , "긲긳긵긶긹긻긼" ] , [ "8381" , "긽긾긿깂깄깇깈깉깋깏깑깒깓깕깗" , 4 , "깞깢깣깤깦깧깪깫깭깮깯깱" , 6 , "깺깾" , 5 , "꺆" , 5 , "꺍" , 46 , "꺿껁껂껃껅" , 6 , "껎껒" , 5 , "껚껛껝" , 8 ] , [ "8441" , "껦껧껩껪껬껮" , 5 , "껵껶껷껹껺껻껽" , 8 ] , [ "8461" , "꼆꼉꼊꼋꼌꼎꼏꼑" , 18 ] , [ "8481" , "꼤" , 7 , "꼮꼯꼱꼳꼵" , 6 , "꼾꽀꽄꽅꽆꽇꽊" , 5 , "꽑" , 10 , "꽞" , 5 , "꽦" , 18 , "꽺" , 5 , "꾁꾂꾃꾅꾆꾇꾉" , 6 , "꾒꾓꾔꾖" , 5 , "꾝" , 26 , "꾺꾻꾽꾾" ] , [ "8541" , "꾿꿁" , 5 , "꿊꿌꿏" , 4 , "꿕" , 6 , "꿝" , 4 ] , [ "8561" , "꿢" , 5 , "꿪" , 5 , "꿲꿳꿵꿶꿷꿹" , 6 , "뀂뀃" ] , [ "8581" , "뀅" , 6 , "뀍뀎뀏뀑뀒뀓뀕" , 6 , "뀞" , 9 , "뀩" , 26 , "끆끇끉끋끍끏끐끑끒끖끘끚끛끜끞" , 29 , "끾끿낁낂낃낅" , 6 , "낎낐낒" , 5 , "낛낝낞낣낤" ] , [ "8641" , "낥낦낧낪낰낲낶낷낹낺낻낽" , 6 , "냆냊" , 5 , "냒" ] , [ "8661" , "냓냕냖냗냙" , 6 , "냡냢냣냤냦" , 10 ] , [ "8681" , "냱" , 22 , "넊넍넎넏넑넔넕넖넗넚넞" , 4 , "넦넧넩넪넫넭" , 6 , "넶넺" , 5 , "녂녃녅녆녇녉" , 6 , "녒녓녖녗녙녚녛녝녞녟녡" , 22 , "녺녻녽녾녿놁놃" , 4 , "놊놌놎놏놐놑놕놖놗놙놚놛놝" ] , [ "8741" , "놞" , 9 , "놩" , 15 ] , [ "8761" , "놹" , 18 , "뇍뇎뇏뇑뇒뇓뇕" ] , [ "8781" , "뇖" , 5 , "뇞뇠" , 7 , "뇪뇫뇭뇮뇯뇱" , 7 , "뇺뇼뇾" , 5 , "눆눇눉눊눍" , 6 , "눖눘눚" , 5 , "눡" , 18 , "눵" , 6 , "눽" , 26 , "뉙뉚뉛뉝뉞뉟뉡" , 6 , "뉪" , 4 ] , [ "8841" , "뉯" , 4 , "뉶" , 5 , "뉽" , 6 , "늆늇늈늊" , 4 ] , [ "8861" , "늏늒늓늕늖늗늛" , 4 , "늢늤늧늨늩늫늭늮늯늱늲늳늵늶늷" ] , [ "8881" , "늸" , 15 , "닊닋닍닎닏닑닓" , 4 , "닚닜닞닟닠닡닣닧닩닪닰닱닲닶닼닽닾댂댃댅댆댇댉" , 6 , "댒댖" , 5 , "댝" , 54 , "덗덙덚덝덠덡덢덣" ] , [ "8941" , "덦덨덪덬덭덯덲덳덵덶덷덹" , 6 , "뎂뎆" , 5 , "뎍" ] , [ "8961" , "뎎뎏뎑뎒뎓뎕" , 10 , "뎢" , 5 , "뎩뎪뎫뎭" ] , [ "8981" , "뎮" , 21 , 
"돆돇돉돊돍돏돑돒돓돖돘돚돜돞돟돡돢돣돥돦돧돩" , 18 , "돽" , 18 , "됑" , 6 , "됙됚됛됝됞됟됡" , 6 , "됪됬" , 7 , "됵" , 15 ] , [ "8a41" , "둅" , 10 , "둒둓둕둖둗둙" , 6 , "둢둤둦" ] , [ "8a61" , "둧" , 4 , "둭" , 18 , "뒁뒂" ] , [ "8a81" , "뒃" , 4 , "뒉" , 19 , "뒞" , 5 , "뒥뒦뒧뒩뒪뒫뒭" , 7 , "뒶뒸뒺" , 5 , "듁듂듃듅듆듇듉" , 6 , "듑듒듓듔듖" , 5 , "듞듟듡듢듥듧" , 4 , "듮듰듲" , 5 , "듹" , 26 , "딖딗딙딚딝" ] , [ "8b41" , "딞" , 5 , "딦딫" , 4 , "딲딳딵딶딷딹" , 6 , "땂땆" ] , [ "8b61" , "땇땈땉땊땎땏땑땒땓땕" , 6 , "땞땢" , 8 ] , [ "8b81" , "땫" , 52 , "떢떣떥떦떧떩떬떭떮떯떲떶" , 4 , "떾떿뗁뗂뗃뗅" , 6 , "뗎뗒" , 5 , "뗙" , 18 , "뗭" , 18 ] , [ "8c41" , "똀" , 15 , "똒똓똕똖똗똙" , 4 ] , [ "8c61" , "똞" , 6 , "똦" , 5 , "똭" , 6 , "똵" , 5 ] , [ "8c81" , "똻" , 12 , "뙉" , 26 , "뙥뙦뙧뙩" , 50 , "뚞뚟뚡뚢뚣뚥" , 5 , "뚭뚮뚯뚰뚲" , 16 ] , [ "8d41" , "뛃" , 16 , "뛕" , 8 ] , [ "8d61" , "뛞" , 17 , "뛱뛲뛳뛵뛶뛷뛹뛺" ] , [ "8d81" , "뛻" , 4 , "뜂뜃뜄뜆" , 33 , "뜪뜫뜭뜮뜱" , 6 , "뜺뜼" , 7 , "띅띆띇띉띊띋띍" , 6 , "띖" , 9 , "띡띢띣띥띦띧띩" , 6 , "띲띴띶" , 5 , "띾띿랁랂랃랅" , 6 , "랎랓랔랕랚랛랝랞" ] , [ "8e41" , "랟랡" , 6 , "랪랮" , 5 , "랶랷랹" , 8 ] , [ "8e61" , "럂" , 4 , "럈럊" , 19 ] , [ "8e81" , "럞" , 13 , "럮럯럱럲럳럵" , 6 , "럾렂" , 4 , "렊렋렍렎렏렑" , 6 , "렚렜렞" , 5 , "렦렧렩렪렫렭" , 6 , "렶렺" , 5 , "롁롂롃롅" , 11 , "롒롔" , 7 , "롞롟롡롢롣롥" , 6 , "롮롰롲" , 5 , "롹롺롻롽" , 7 ] , [ "8f41" , "뢅" , 7 , "뢎" , 17 ] , [ "8f61" , "뢠" , 7 , "뢩" , 6 , "뢱뢲뢳뢵뢶뢷뢹" , 4 ] , [ "8f81" , "뢾뢿룂룄룆" , 5 , "룍룎룏룑룒룓룕" , 7 , "룞룠룢" , 5 , "룪룫룭룮룯룱" , 6 , "룺룼룾" , 5 , "뤅" , 18 , "뤙" , 6 , "뤡" , 26 , " 뤾뤿륁륂륃 <EFBFBD>
/***/ } ) ,
/***/ 276 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict" ;
var iconvLite = _ _webpack _require _ _ ( 989 ) ;
// Load Iconv from an external file to be able to disable Iconv for webpack
// Add /\/iconv-loader$/ to webpack.IgnorePlugin to ignore it
var Iconv = _ _webpack _require _ _ ( 508 ) ;
// Expose to the world
module . exports . convert = convert ;
/**
 * Convert encoding of an UTF-8 string or a buffer
 *
 * @param {String|Buffer} str String to be converted
 * @param {String} to Encoding to be converted to
 * @param {String} [from='UTF-8'] Encoding to be converted from
 * @param {Boolean} useLite If set to true, force to use iconvLite
 * @return {Buffer} Encoded string
 */
function convert(str, to, from, useLite) {
    from = checkEncoding(from || 'UTF-8');
    to = checkEncoding(to || 'UTF-8');
    str = str || '';

    var result;

    // Non-UTF-8 input arriving as a string is treated as raw bytes.
    // Buffer.from replaces the deprecated `new Buffer(...)` constructor.
    if (from !== 'UTF-8' && typeof str === 'string') {
        str = Buffer.from(str, 'binary');
    }

    if (from === to) {
        // No conversion needed; just normalize to a Buffer.
        if (typeof str === 'string') {
            result = Buffer.from(str);
        } else {
            result = str;
        }
    } else if (Iconv && !useLite) {
        // Prefer node-iconv when available, falling back to iconv-lite.
        try {
            result = convertIconv(str, to, from);
        } catch (E) {
            console.error(E);
            try {
                result = convertIconvLite(str, to, from);
            } catch (E) {
                console.error(E);
                // Best effort: return the input unconverted.
                result = str;
            }
        }
    } else {
        try {
            result = convertIconvLite(str, to, from);
        } catch (E) {
            console.error(E);
            result = str;
        }
    }

    // iconv-lite decoding yields a string; callers expect a Buffer.
    if (typeof result === 'string') {
        result = Buffer.from(result, 'utf-8');
    }

    return result;
}
/**
 * Convert encoding of a string with node-iconv (if available)
 *
 * @param {String|Buffer} str String to be converted
 * @param {String} to Encoding to be converted to
 * @param {String} [from='UTF-8'] Encoding to be converted from
 * @return {Buffer} Encoded string
 */
function convertIconv(str, to, from) {
    var response, iconv;
    // //TRANSLIT//IGNORE: approximate unmappable characters, drop invalid ones.
    iconv = new Iconv(from, to + '//TRANSLIT//IGNORE');
    response = iconv.convert(str);
    // NOTE(review): slice(0, response.length) looks like a no-op view over
    // the full result — presumably kept for historical reasons; confirm.
    return response.slice(0, response.length);
}
/ * *
* Convert encoding of astring with iconv - lite
*
* @ param { String | Buffer } str String to be converted
* @ param { String } to Encoding to be converted to
* @ param { String } [ from = 'UTF-8' ] Encoding to be converted from
* @ return { Buffer } Encoded string
* /
function convertIconvLite ( str , to , from ) {
if ( to === 'UTF-8' ) {
return iconvLite . decode ( str , from ) ;
} else if ( from === 'UTF-8' ) {
return iconvLite . encode ( str , to ) ;
} else {
return iconvLite . encode ( iconvLite . decode ( str , from ) , to ) ;
}
}
/**
 * Converts charset name if needed
 *
 * @param {String} name Character set
 * @return {String} Character set name
 */
function checkEncoding(name) {
    var normalized = (name || '').toString().trim();
    // Canonicalize common alias spellings before upper-casing.
    var rules = [
        [/^latin[\-_]?(\d+)$/i, 'ISO-8859-$1'],
        [/^win(?:dows)?[\-_]?(\d+)$/i, 'WINDOWS-$1'],
        [/^utf[\-_]?(\d+)$/i, 'UTF-$1'],
        [/^ks_c_5601\-1987$/i, 'CP949'],
        [/^us[\-_]?ascii$/i, 'ASCII']
    ];
    for (var i = 0; i < rules.length; i++) {
        normalized = normalized.replace(rules[i][0], rules[i][1]);
    }
    return normalized.toUpperCase();
}
/***/ } ) ,
/***/ 293 :
/***/ ( function ( module ) {
// Note: this is the semver.org version of the spec that it implements
// Not necessarily the package version of this code.
const SEMVER _SPEC _VERSION = '2.0.0'
const MAX _LENGTH = 256
const MAX _SAFE _INTEGER = Number . MAX _SAFE _INTEGER ||
/* istanbul ignore next */ 9007199254740991
// Max safe segment length for coercion.
const MAX _SAFE _COMPONENT _LENGTH = 16
module . exports = {
SEMVER _SPEC _VERSION ,
MAX _LENGTH ,
MAX _SAFE _INTEGER ,
MAX _SAFE _COMPONENT _LENGTH
}
/***/ } ) ,
/***/ 294 :
/***/ (function (module, __unusedexports, __webpack_require__) {

// Thin alias module: re-exports bundled module 219 under this module id.
module.exports = __webpack_require__(219);

/***/ }),
/***/ 295 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _ _importDefault = ( this && this . _ _importDefault ) || function ( mod ) {
return ( mod && mod . _ _esModule ) ? mod : { "default" : mod } ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
2020-09-03 11:49:39 +02:00
exports . parseVersion = exports . getVersion = exports . isAvailable = exports . getSecret = exports . getImageID = exports . getImageIDFile = void 0 ;
2020-09-02 10:07:11 +02:00
const fs _1 = _ _importDefault ( _ _webpack _require _ _ ( 747 ) ) ;
const path _1 = _ _importDefault ( _ _webpack _require _ _ ( 622 ) ) ;
const tmp _1 = _ _importDefault ( _ _webpack _require _ _ ( 517 ) ) ;
const semver = _ _importStar ( _ _webpack _require _ _ ( 383 ) ) ;
const context = _ _importStar ( _ _webpack _require _ _ ( 842 ) ) ;
const exec = _ _importStar ( _ _webpack _require _ _ ( 757 ) ) ;
function getImageIDFile ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return path _1 . default . join ( context . tmpDir , 'iidfile' ) ;
} ) ;
}
exports . getImageIDFile = getImageIDFile ;
function getImageID ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const iidFile = yield getImageIDFile ( ) ;
if ( ! fs _1 . default . existsSync ( iidFile ) ) {
return undefined ;
}
return fs _1 . default . readFileSync ( iidFile , { encoding : 'utf-8' } ) ;
} ) ;
}
exports . getImageID = getImageID ;
function getSecret ( kvp ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const [ key , value ] = kvp . split ( '=' ) ;
const secretFile = tmp _1 . default . tmpNameSync ( {
tmpdir : context . tmpDir
} ) ;
yield fs _1 . default . writeFileSync ( secretFile , value ) ;
return ` id= ${ key } ,src= ${ secretFile } ` ;
} ) ;
}
exports . getSecret = getSecret ;
function isAvailable ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return yield exec . exec ( ` docker ` , [ 'buildx' ] , true ) . then ( res => {
if ( res . stderr != '' && ! res . success ) {
return false ;
}
return res . success ;
} ) ;
} ) ;
}
exports . isAvailable = isAvailable ;
function getVersion ( ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
return yield exec . exec ( ` docker ` , [ 'buildx' , 'version' ] , true ) . then ( res => {
if ( res . stderr != '' && ! res . success ) {
throw new Error ( res . stderr ) ;
}
return parseVersion ( res . stdout ) ;
} ) ;
} ) ;
}
exports . getVersion = getVersion ;
function parseVersion ( stdout ) {
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
const matches = /\sv?([0-9.]+)/ . exec ( stdout ) ;
if ( ! matches ) {
throw new Error ( ` Cannot parse Buildx version ` ) ;
}
return semver . clean ( matches [ 1 ] ) ;
} ) ;
}
exports . parseVersion = parseVersion ;
//# sourceMappingURL=buildx.js.map
/***/ } ) ,
/***/ 297 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const parse = _ _webpack _require _ _ ( 925 )
const eq = _ _webpack _require _ _ ( 898 )
const diff = ( version1 , version2 ) => {
if ( eq ( version1 , version2 ) ) {
return null
} else {
const v1 = parse ( version1 )
const v2 = parse ( version2 )
const hasPre = v1 . prerelease . length || v2 . prerelease . length
const prefix = hasPre ? 'pre' : ''
const defaultResult = hasPre ? 'prerelease' : ''
for ( const key in v1 ) {
if ( key === 'major' || key === 'minor' || key === 'patch' ) {
if ( v1 [ key ] !== v2 [ key ] ) {
return prefix + key
}
}
}
return defaultResult // may be undefined
}
}
module . exports = diff
/***/ } ) ,
/***/ 299 :
/***/ ( function ( module ) {
module . exports = [ [ "0" , "\u0000" , 128 ] , [ "a1" , "。" , 62 ] , [ "8140" , " 、。,.・:;?!゛゜´`¨^ ̄_ヽヾゝゞ〃仝々〆〇ー―‐/\~∥|…‥‘’“”()〔〕[]{}〈" , 9 , "+-±×" ] , [ "8180" , "÷=≠<>≦≧∞∴♂♀°′″℃¥$¢£%#&*@§☆★○●◎◇◆□■△▲▽▼※〒→←↑↓〓" ] , [ "81b8" , "∈∋⊆⊇⊂⊃∪∩" ] , [ "81c8" , "∧∨¬⇒⇔∀∃" ] , [ "81da" , "∠⊥⌒∂∇≡≒≪≫√∽∝∵∫∬" ] , [ "81f0" , "ʼn♯♭♪†‡¶" ] , [ "81fc" , "◯" ] , [ "824f" , "0 " , 9 ] , [ "8260" , "A " , 25 ] , [ "8281" , "a " , 25 ] , [ "829f" , "ぁ" , 82 ] , [ "8340" , "ァ" , 62 ] , [ "8380" , "ム" , 22 ] , [ "839f" , "Α " , 16 , "Σ" , 6 ] , [ "83bf" , "α " , 16 , "σ " , 6 ] , [ "8440" , "А " , 5 , "ЁЖ" , 25 ] , [ "8470" , "а " , 5 , "ёж" , 7 ] , [ "8480" , "о " , 17 ] , [ "849f" , "─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂" ] , [ "8740" , "①" , 19 , "Ⅰ " , 9 ] , [ "875f" , "㍉㌔㌢㍍㌘㌧㌃㌶㍑㍗㌍㌦㌣㌫㍊㌻㎜㎝㎞㎎㎏㏄㎡" ] , [ "877e" , "㍻" ] , [ "8780" , "〝〟№㏍℡㊤" , 4 , "㈱㈲㈹㍾㍽㍼≒≡∫∮∑√⊥∠∟⊿∵∩∪" ] , [ "889f" , "亜唖娃阿哀愛挨姶逢葵茜穐悪握渥旭葦芦鯵梓圧斡扱宛姐虻飴絢綾鮎或粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃萎衣謂違遺医井亥域育郁磯一壱溢逸稲茨芋鰯允印咽員因姻引飲淫胤蔭" ] , [ "8940" , "院陰隠韻吋右宇烏羽迂雨卯鵜窺丑碓臼渦嘘唄欝蔚鰻姥厩浦瓜閏噂云運雲荏餌叡営嬰影映曳栄永泳洩瑛盈穎頴英衛詠鋭液疫益駅悦謁越閲榎厭円" ] , [ "8980" , "園堰奄宴延怨掩援沿演炎焔煙燕猿縁艶苑薗遠鉛鴛塩於汚甥凹央奥往応押旺横欧殴王翁襖鴬鴎黄岡沖荻億屋憶臆桶牡乙俺卸恩温穏音下化仮何伽価佳加可嘉夏嫁家寡科暇果架歌河火珂禍禾稼箇花苛茄荷華菓蝦課嘩貨迦過霞蚊俄峨我牙画臥芽蛾賀雅餓駕介会解回塊壊廻快怪悔恢懐戒拐改" ] , [ "8a40" , "魁晦械海灰界皆絵芥蟹開階貝凱劾外咳害崖慨概涯碍蓋街該鎧骸浬馨蛙垣柿蛎鈎劃嚇各廓拡撹格核殻獲確穫覚角赫較郭閣隔革学岳楽額顎掛笠樫" ] , [ "8a80" , "橿梶鰍潟割喝恰括活渇滑葛褐轄且鰹叶椛樺鞄株兜竃蒲釜鎌噛鴨栢茅萱粥刈苅瓦乾侃冠寒刊勘勧巻喚堪姦完官寛干幹患感慣憾換敢柑桓棺款歓汗漢澗潅環甘監看竿管簡緩缶翰肝艦莞観諌貫還鑑間閑関陥韓館舘丸含岸巌玩癌眼岩翫贋雁頑顔願企伎危喜器基奇嬉寄岐希幾忌揮机旗既期棋棄" ] , [ "8b40" , "機帰毅気汽畿祈季稀紀徽規記貴起軌輝飢騎鬼亀偽儀妓宜戯技擬欺犠疑祇義蟻誼議掬菊鞠吉吃喫桔橘詰砧杵黍却客脚虐逆丘久仇休及吸宮弓急救" ] , [ "8b80" , "朽求汲泣灸球究窮笈級糾給旧牛去居巨拒拠挙渠虚許距鋸漁禦魚亨享京供侠僑兇競共凶協匡卿叫喬境峡強彊怯恐恭挟教橋況狂狭矯胸脅興蕎郷鏡響饗驚仰凝尭暁業局曲極玉桐粁僅勤均巾錦斤欣欽琴禁禽筋緊芹菌衿襟謹近金吟銀九倶句区狗玖矩苦躯駆駈駒具愚虞喰空偶寓遇隅串櫛釧屑屈" ] , [ "8c40" , "掘窟沓靴轡窪熊隈粂栗繰桑鍬勲君薫訓群軍郡卦袈祁係傾刑兄啓圭珪型契形径恵慶慧憩掲携敬景桂渓畦稽系経継繋罫茎荊蛍計詣警軽頚鶏芸迎鯨" ] , [ "8c80" , "劇戟撃激隙桁傑欠決潔穴結血訣月件倹倦健兼券剣喧圏堅嫌建憲懸拳捲検権牽犬献研硯絹県肩見謙賢軒遣鍵険顕験鹸元原厳幻弦減源玄現絃舷言諺限乎個古呼固姑孤己庫弧戸故枯湖狐糊袴股胡菰虎誇跨鈷雇顧鼓五互伍午呉吾娯後御悟梧檎瑚碁語誤護醐乞鯉交佼侯候倖光公功効勾厚口向" ] , [ "8d40" , "后喉坑垢好孔孝宏工巧巷幸広庚康弘恒慌抗拘控攻昂晃更杭校梗構江洪浩港溝甲皇硬稿糠紅紘絞綱耕考肯肱腔膏航荒行衡講貢購郊酵鉱砿鋼閤降" ] , [ "8d80" , " 
項香高鴻剛劫号合壕拷濠豪轟麹克刻告国穀酷鵠黒獄漉腰甑忽惚骨狛込此頃今困坤墾婚恨懇昏昆根梱混痕紺艮 <EFBFBD> <EFBFBD>
/***/ } ) ,
/***/ 300 :
/***/ (function (__unusedmodule, exports, __webpack_require__) {

"use strict";
var Buffer = __webpack_require__(937).Buffer;

// Note: UTF16-LE (or UCS2) codec is Node.js native. See encodings/internal.js

// == UTF16-BE codec. ==========================================================

exports.utf16be = Utf16BECodec;
function Utf16BECodec() {
}

Utf16BECodec.prototype.encoder = Utf16BEEncoder;
Utf16BECodec.prototype.decoder = Utf16BEDecoder;
Utf16BECodec.prototype.bomAware = true;

// -- Encoding

function Utf16BEEncoder() {
}

Utf16BEEncoder.prototype.write = function(str) {
    // Encode with the native little-endian 'ucs2' codec, then swap each byte
    // pair in place to produce big-endian output.
    var buf = Buffer.from(str, 'ucs2');
    for (var i = 0; i < buf.length; i += 2) {
        var tmp = buf[i]; buf[i] = buf[i+1]; buf[i+1] = tmp;
    }
    return buf;
}

Utf16BEEncoder.prototype.end = function() {
}

// -- Decoding

function Utf16BEDecoder() {
    // Dangling byte left over when a chunk ends mid-pair; -1 means "none".
    this.overflowByte = -1;
}

Utf16BEDecoder.prototype.write = function(buf) {
    if (buf.length == 0)
        return '';

    var buf2 = Buffer.alloc(buf.length + 1),
        i = 0, j = 0;

    // Complete the pair carried over from the previous chunk, already swapped
    // into little-endian order for the native decoder.
    if (this.overflowByte !== -1) {
        buf2[0] = buf[0];
        buf2[1] = this.overflowByte;
        i = 1; j = 2;
    }

    // Swap the remaining big-endian pairs into little-endian order.
    for (; i < buf.length-1; i += 2, j += 2) {
        buf2[j] = buf[i+1];
        buf2[j+1] = buf[i];
    }

    // Remember an odd trailing byte for the next write().
    this.overflowByte = (i == buf.length-1) ? buf[buf.length-1] : -1;

    return buf2.slice(0, j).toString('ucs2');
}

Utf16BEDecoder.prototype.end = function() {
}

// == UTF-16 codec =============================================================
// Decoder chooses automatically from UTF-16LE and UTF-16BE using BOM and space-based heuristic.
// Defaults to UTF-16LE, as it's prevalent and default in Node.
// http://en.wikipedia.org/wiki/UTF-16 and http://encoding.spec.whatwg.org/#utf-16le
// Decoder default can be changed: iconv.decode(buf, 'utf16', {defaultEncoding: 'utf-16be'});
// Encoder uses UTF-16LE and prepends BOM (which can be overridden with addBOM: false).

exports.utf16 = Utf16Codec;
function Utf16Codec(codecOptions, iconv) {
    this.iconv = iconv;
}

Utf16Codec.prototype.encoder = Utf16Encoder;
Utf16Codec.prototype.decoder = Utf16Decoder;

// -- Encoding (pass-through to the UTF-16LE encoder, BOM added by default)

function Utf16Encoder(options, codec) {
    options = options || {};
    if (options.addBOM === undefined)
        options.addBOM = true;
    this.encoder = codec.iconv.getEncoder('utf-16le', options);
}

Utf16Encoder.prototype.write = function(str) {
    return this.encoder.write(str);
}

Utf16Encoder.prototype.end = function() {
    return this.encoder.end();
}

// -- Decoding

function Utf16Decoder(options, codec) {
    // Real decoder is chosen lazily once enough bytes arrived to detect endianness.
    this.decoder = null;
    this.initialBytes = [];
    this.initialBytesLen = 0;

    this.options = options || {};
    this.iconv = codec.iconv;
}

Utf16Decoder.prototype.write = function(buf) {
    if (!this.decoder) {
        // Codec is not chosen yet. Accumulate initial bytes.
        this.initialBytes.push(buf);
        this.initialBytesLen += buf.length;

        if (this.initialBytesLen < 16) // We need more bytes to use space heuristic (see below)
            return '';

        // We have enough bytes -> detect endianness.
        // (Note: `buf` is deliberately re-declared here to the concatenation.)
        var buf = Buffer.concat(this.initialBytes),
            encoding = detectEncoding(buf, this.options.defaultEncoding);
        this.decoder = this.iconv.getDecoder(encoding, this.options);
        this.initialBytes.length = this.initialBytesLen = 0;
    }

    return this.decoder.write(buf);
}

Utf16Decoder.prototype.end = function() {
    if (!this.decoder) {
        // Stream ended before 16 bytes arrived: decide endianness from what we have.
        var buf = Buffer.concat(this.initialBytes),
            encoding = detectEncoding(buf, this.options.defaultEncoding);
        this.decoder = this.iconv.getDecoder(encoding, this.options);

        var res = this.decoder.write(buf),
            trail = this.decoder.end();

        return trail ? (res + trail) : res;
    }
    return this.decoder.end();
}
// Choose between 'utf-16le' and 'utf-16be' for a buffer: an explicit BOM wins;
// otherwise count which byte of each pair tends to be zero (ASCII heuristic);
// otherwise fall back to defaultEncoding or UTF-16LE.
function detectEncoding(buf, defaultEncoding) {
    if (buf.length >= 2) {
        // BOM is authoritative.
        if (buf[0] == 0xFE && buf[1] == 0xFF)
            return 'utf-16be';
        if (buf[0] == 0xFF && buf[1] == 0xFE)
            return 'utf-16le';

        // No BOM. ASCII chars appear as 00 XX in BE and XX 00 in LE; sample up
        // to the first 64 bytes (even count only) and let the majority decide.
        var votesForBE = 0, votesForLE = 0;
        var sampleLen = Math.min(buf.length - (buf.length % 2), 64);
        for (var k = 0; k < sampleLen; k += 2) {
            if (buf[k] === 0 && buf[k+1] !== 0) votesForBE++;
            if (buf[k] !== 0 && buf[k+1] === 0) votesForLE++;
        }
        if (votesForBE > votesForLE)
            return 'utf-16be';
        if (votesForBE < votesForLE)
            return 'utf-16le';
    }
    return defaultEncoding || 'utf-16le';
}
/***/ } ) ,
/***/ 304 :
/***/ (function (module) {

// Re-export Node's built-in "string_decoder" module under this bundle id.
module.exports = require("string_decoder");

/***/ }),
/***/ 309 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const SemVer = _ _webpack _require _ _ ( 88 )
const compare = ( a , b , loose ) =>
new SemVer ( a , loose ) . compare ( new SemVer ( b , loose ) )
module . exports = compare
/***/ } ) ,
/***/ 320 :
/***/ (function (module, __unusedexports, __webpack_require__) {

"use strict";

// Description of supported double byte encodings and aliases.
// Tables are not require()-d until they are needed to speed up library load.
// require()-s are direct to support Browserify.
// Entry format: a string value is an alias to another key; an object value is
// a codec descriptor whose `table` thunk lazily loads the mapping data.

module.exports = {

    // == Japanese/ShiftJIS ====================================================
    // All japanese encodings are based on JIS X set of standards:
    // JIS X 0201 - Single-byte encoding of ASCII + ¥ + Kana chars at 0xA1-0xDF.
    // JIS X 0208 - Main set of 6879 characters, placed in 94x94 plane, to be encoded by 2 bytes.
    //              Has several variations in 1978, 1983, 1990 and 1997.
    // JIS X 0212 - Supplementary plane of 6067 chars in 94x94 plane. 1990. Effectively dead.
    // JIS X 0213 - Extension and modern replacement of 0208 and 0212. Total chars: 11233.
    //              2 planes, first is superset of 0208, second - revised 0212.
    //              Introduced in 2000, revised 2004. Some characters are in Unicode Plane 2 (0x2xxxx)

    // Byte encodings are:
    //  * Shift_JIS: Compatible with 0201, uses not defined chars in top half as lead bytes for double-byte
    //               encoding of 0208. Lead byte ranges: 0x81-0x9F, 0xE0-0xEF; Trail byte ranges: 0x40-0x7E, 0x80-0x9E, 0x9F-0xFC.
    //               Windows CP932 is a superset of Shift_JIS. Some companies added more chars, notably KDDI.
    //  * EUC-JP:    Up to 3 bytes per character. Used mostly on *nixes.
    //               0x00-0x7F       - lower part of 0201
    //               0x8E, 0xA1-0xDF - upper part of 0201
    //               (0xA1-0xFE)x2   - 0208 plane (94x94).
    //               0x8F, (0xA1-0xFE)x2 - 0212 plane (94x94).
    //  * JIS X 208: 7-bit, direct encoding of 0208. Byte ranges: 0x21-0x7E (94 values). Uncommon.
    //               Used as-is in ISO2022 family.
    //  * ISO2022-JP: Stateful encoding, with escape sequences to switch between ASCII,
    //                0201-1976 Roman, 0208-1978, 0208-1983.
    //  * ISO2022-JP-1: Adds esc seq for 0212-1990.
    //  * ISO2022-JP-2: Adds esc seq for GB2313-1980, KSX1001-1992, ISO8859-1, ISO8859-7.
    //  * ISO2022-JP-3: Adds esc seq for 0201-1976 Kana set, 0213-2000 Planes 1, 2.
    //  * ISO2022-JP-2004: Adds 0213-2004 Plane 1.
    //
    // After JIS X 0213 appeared, Shift_JIS-2004, EUC-JISX0213 and ISO2022-JP-2004 followed, with just changing the planes.
    //
    // Overall, it seems that it's a mess :( http://www8.plala.or.jp/tkubota1/unicode-symbols-map2.html

    'shiftjis': {
        type: '_dbcs',
        table: function() { return __webpack_require__(299) },
        encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E},
        encodeSkipVals: [{from: 0xED40, to: 0xF940}],
    },
    'csshiftjis': 'shiftjis',
    'mskanji': 'shiftjis',
    'sjis': 'shiftjis',
    'windows31j': 'shiftjis',
    'ms31j': 'shiftjis',
    'xsjis': 'shiftjis',
    'windows932': 'shiftjis',
    'ms932': 'shiftjis',
    '932': 'shiftjis',
    'cp932': 'shiftjis',

    'eucjp': {
        type: '_dbcs',
        table: function() { return __webpack_require__(655) },
        encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E},
    },

    // TODO: KDDI extension to Shift_JIS
    // TODO: IBM CCSID 942 = CP932, but F0-F9 custom chars and other char changes.
    // TODO: IBM CCSID 943 = Shift_JIS = CP932 with original Shift_JIS lower 128 chars.

    // == Chinese/GBK ==========================================================
    // http://en.wikipedia.org/wiki/GBK
    // We mostly implement W3C recommendation: https://www.w3.org/TR/encoding/#gbk-encoder

    // Oldest GB2312 (1981, ~7600 chars) is a subset of CP936
    'gb2312': 'cp936',
    'gb231280': 'cp936',
    'gb23121980': 'cp936',
    'csgb2312': 'cp936',
    'csiso58gb231280': 'cp936',
    'euccn': 'cp936',

    // Microsoft's CP936 is a subset and approximation of GBK.
    'windows936': 'cp936',
    'ms936': 'cp936',
    '936': 'cp936',
    'cp936': {
        type: '_dbcs',
        table: function() { return __webpack_require__(185) },
    },

    // GBK (~22000 chars) is an extension of CP936 that added user-mapped chars and some other.
    'gbk': {
        type: '_dbcs',
        table: function() { return __webpack_require__(185).concat(__webpack_require__(489)) },
    },
    'xgbk': 'gbk',
    'isoir58': 'gbk',

    // GB18030 is an algorithmic extension of GBK.
    // Main source: https://www.w3.org/TR/encoding/#gbk-encoder
    // http://icu-project.org/docs/papers/gb18030.html
    // http://source.icu-project.org/repos/icu/data/trunk/charset/data/xml/gb-18030-2000.xml
    // http://www.khngai.com/chinese/charmap/tblgbk.php?page=0
    'gb18030': {
        type: '_dbcs',
        table: function() { return __webpack_require__(185).concat(__webpack_require__(489)) },
        gb18030: function() { return __webpack_require__(15) },
        encodeSkipVals: [0x80],
        encodeAdd: {'€': 0xA2E3},
    },

    'chinese': 'gb18030',

    // == Korean ===============================================================
    // EUC-KR, KS_C_5601 and KS X 1001 are exactly the same.
    'windows949': 'cp949',
    'ms949': 'cp949',
    '949': 'cp949',
    'cp949': {
        type: '_dbcs',
        table: function() { return __webpack_require__(274) },
    },

    'cseuckr': 'cp949',
    'csksc56011987': 'cp949',
    'euckr': 'cp949',
    'isoir149': 'cp949',
    'korean': 'cp949',
    'ksc56011987': 'cp949',
    'ksc56011989': 'cp949',
    'ksc5601': 'cp949',

    // == Big5/Taiwan/Hong Kong ================================================
    // There are lots of tables for Big5 and cp950. Please see the following links for history:
    // http://moztw.org/docs/big5/  http://www.haible.de/bruno/charsets/conversion-tables/Big5.html
    // Variations, in roughly number of defined chars:
    //  * Windows CP 950: Microsoft variant of Big5. Canonical: http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP950.TXT
    //  * Windows CP 951: Microsoft variant of Big5-HKSCS-2001. Seems to be never public. http://me.abelcheung.org/articles/research/what-is-cp951/
    //  * Big5-2003 (Taiwan standard) almost superset of cp950.
    //  * Unicode-at-on (UAO) / Mozilla 1.8. Falling out of use on the Web. Not supported by other browsers.
    //  * Big5-HKSCS (-2001, -2004, -2008). Hong Kong standard.
    //    many unicode code points moved from PUA to Supplementary plane (U+2XXXX) over the years.
    //    Plus, it has 4 combining sequences.
    //    Seems that Mozilla refused to support it for 10 yrs. https://bugzilla.mozilla.org/show_bug.cgi?id=162431 https://bugzilla.mozilla.org/show_bug.cgi?id=310299
    //    because big5-hkscs is the only encoding to include astral characters in non-algorithmic way.
    //    Implementations are not consistent within browsers; sometimes labeled as just big5.
    //    MS Internet Explorer switches from big5 to big5-hkscs when a patch applied.
    //    Great discussion & recap of what's going on https://bugzilla.mozilla.org/show_bug.cgi?id=912470#c31
    //    In the encoder, it might make sense to support encoding old PUA mappings to Big5 bytes seq-s.
    //    Official spec: http://www.ogcio.gov.hk/en/business/tech_promotion/ccli/terms/doc/2003cmp_2008.txt
    //                   http://www.ogcio.gov.hk/tc/business/tech_promotion/ccli/terms/doc/hkscs-2008-big5-iso.txt
    //
    // Current understanding of how to deal with Big5(-HKSCS) is in the Encoding Standard, http://encoding.spec.whatwg.org/#big5-encoder
    // Unicode mapping (http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/OTHER/BIG5.TXT) is said to be wrong.

    'windows950': 'cp950',
    'ms950': 'cp950',
    '950': 'cp950',
    'cp950': {
        type: '_dbcs',
        table: function() { return __webpack_require__(627) },
    },

    // Big5 has many variations and is an extension of cp950. We use Encoding Standard's as a consensus.
    'big5': 'big5hkscs',
    'big5hkscs': {
        type: '_dbcs',
        table: function() { return __webpack_require__(627).concat(__webpack_require__(495)) },
        encodeSkipVals: [0xa2cc],
    },

    'cnbig5': 'big5hkscs',
    'csbig5': 'big5hkscs',
    'xxbig5': 'big5hkscs',
};

/***/ }),
/***/ 323 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const outside = _ _webpack _require _ _ ( 420 )
// Determine if version is less than all the versions possible in the range
const ltr = ( version , range , options ) => outside ( version , range , '<' , options )
module . exports = ltr
/***/ } ) ,
/***/ 334 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
// Classify the token (3 dot-separated parts = JWT/"app", "v<digits>." prefix =
// "installation", anything else = "oauth") and wrap it in an auth result.
async function auth(token) {
    let tokenType;
    if (token.split(/\./).length === 3) {
        tokenType = "app";
    } else if (/^v\d+\./.test(token)) {
        tokenType = "installation";
    } else {
        tokenType = "oauth";
    }
    return {
        type: "token",
        token,
        tokenType
    };
}
/**
 * Prefix token for usage in the Authorization header
 *
 * @param token OAuth token or JSON Web Token
 */
function withAuthorizationPrefix(token) {
    // JWTs (three dot-separated segments) use the "bearer" scheme.
    const isJWT = token.split(/\./).length === 3;
    return `${isJWT ? "bearer" : "token"} ${token}`;
}
// Merge route/parameters into request options, inject the Authorization
// header for this token, and dispatch the request.
async function hook(token, request, route, parameters) {
    const opts = request.endpoint.merge(route, parameters);
    opts.headers.authorization = withAuthorizationPrefix(token);
    return request(opts);
}
// Build the token-based auth strategy: the returned function authenticates,
// and its `hook` property wires the token into outgoing requests.
const createTokenAuth = function createTokenAuth(token) {
    if (!token) {
        throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
    }
    if (typeof token !== "string") {
        throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
    }
    // Accept tokens pasted with their scheme prefix ("token x" / "bearer y").
    const bare = token.replace(/^(token|bearer) +/i, "");
    const authenticate = auth.bind(null, bare);
    authenticate.hook = hook.bind(null, bare);
    return authenticate;
};
exports.createTokenAuth = createTokenAuth;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 351 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var __importStar = (this && this.__importStar) || function (mod) {
    // ES modules pass through untouched.
    if (mod && mod.__esModule) {
        return mod;
    }
    // Wrap CommonJS exports: copy own keys, expose the original as `default`.
    var result = {};
    if (mod != null) {
        for (var k in mod) {
            if (Object.hasOwnProperty.call(mod, k)) {
                result[k] = mod[k];
            }
        }
    }
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const os = __importStar(__webpack_require__(87));
/**
 * Commands
 *
 * Command Format:
 *   ::name key=value,key=value::message
 *
 * Examples:
 *   ::warning::This is the message
 *   ::set-env name=MY_VAR::some value
 */
function issueCommand(command, properties, message) {
    // Render the workflow command and emit it on stdout, where the Actions
    // runner parses it.
    const cmd = new Command(command, properties, message);
    process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
// Shorthand for a command with no properties.
function issue(name, message = '') {
    issueCommand(name, {}, message);
}
exports.issue = issue;
// Marker that opens and closes a workflow command line.
const CMD_STRING = '::';
/**
 * One "::name key=value,...::message" workflow command.
 */
class Command {
    constructor(command, properties, message) {
        // The runner needs a non-empty name; substitute a visible placeholder.
        this.command = command || 'missing.command';
        this.properties = properties;
        this.message = message;
    }
    toString() {
        let rendered = CMD_STRING + this.command;
        const props = this.properties;
        if (props && Object.keys(props).length > 0) {
            rendered += ' ';
            const parts = [];
            for (const key of Object.keys(props)) {
                const val = props[key];
                // Falsy values are omitted entirely.
                if (val) {
                    parts.push(`${key}=${escapeProperty(val)}`);
                }
            }
            rendered += parts.join(',');
        }
        rendered += `${CMD_STRING}${escapeData(this.message)}`;
        return rendered;
    }
}
/**
 * Sanitizes an input into a string so it can be passed into issueCommand safely
 * @param input input to sanitize into a string
 */
function toCommandValue(input) {
    // null and undefined both become the empty string.
    if (input == null) {
        return '';
    }
    // Strings (including String objects) pass through; everything else is JSON.
    const isString = typeof input === 'string' || input instanceof String;
    return isString ? input : JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
// Escape '%', CR and LF so the message survives the one-line command protocol.
function escapeData(s) {
    const replacements = [[/%/g, '%25'], [/\r/g, '%0D'], [/\n/g, '%0A']];
    let out = toCommandValue(s);
    for (const [pattern, code] of replacements) {
        out = out.replace(pattern, code);
    }
    return out;
}
// Escape '%', CR, LF, ':' and ',' — the characters with special meaning
// inside the key=value property segment of a command line.
function escapeProperty(s) {
    let out = toCommandValue(s);
    out = out.replace(/%/g, '%25');
    out = out.replace(/\r/g, '%0D');
    out = out.replace(/\n/g, '%0A');
    out = out.replace(/:/g, '%3A');
    out = out.replace(/,/g, '%2C');
    return out;
}
//# sourceMappingURL=command.js.map
/***/ } ) ,
/***/ 357 :
/***/ (function (module) {

// Re-export Node's built-in "assert" module under this bundle id.
module.exports = require("assert");

/***/ }),
/***/ 380 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// Determine if version is greater than all the versions possible in the range.
const outside = _ _webpack _require _ _ ( 420 )
const gtr = ( version , range , options ) => outside ( version , range , '>' , options )
module . exports = gtr
/***/ } ) ,
/***/ 383 :
/***/ (function (module, __unusedexports, __webpack_require__) {

// just pre-load all the stuff that index.js lazily exports
// (semver's full public API surface, re-exported eagerly for the bundle).
const internalRe = __webpack_require__(523)
module.exports = {
  re: internalRe.re,
  src: internalRe.src,
  tokens: internalRe.t,
  SEMVER_SPEC_VERSION: __webpack_require__(293).SEMVER_SPEC_VERSION,
  SemVer: __webpack_require__(88),
  compareIdentifiers: __webpack_require__(463).compareIdentifiers,
  rcompareIdentifiers: __webpack_require__(463).rcompareIdentifiers,
  parse: __webpack_require__(925),
  valid: __webpack_require__(601),
  clean: __webpack_require__(848),
  inc: __webpack_require__(900),
  diff: __webpack_require__(297),
  major: __webpack_require__(688),
  minor: __webpack_require__(447),
  patch: __webpack_require__(866),
  prerelease: __webpack_require__(16),
  compare: __webpack_require__(309),
  rcompare: __webpack_require__(499),
  compareLoose: __webpack_require__(804),
  compareBuild: __webpack_require__(156),
  sort: __webpack_require__(426),
  rsort: __webpack_require__(701),
  gt: __webpack_require__(123),
  lt: __webpack_require__(194),
  eq: __webpack_require__(898),
  neq: __webpack_require__(17),
  gte: __webpack_require__(522),
  lte: __webpack_require__(520),
  cmp: __webpack_require__(98),
  coerce: __webpack_require__(466),
  Comparator: __webpack_require__(532),
  Range: __webpack_require__(828),
  satisfies: __webpack_require__(55),
  toComparators: __webpack_require__(706),
  maxSatisfying: __webpack_require__(579),
  minSatisfying: __webpack_require__(832),
  minVersion: __webpack_require__(179),
  validRange: __webpack_require__(741),
  outside: __webpack_require__(420),
  gtr: __webpack_require__(380),
  ltr: __webpack_require__(323),
  intersects: __webpack_require__(8),
  simplifyRange: __webpack_require__(561),
  subset: __webpack_require__(807),
}

/***/ }),
/***/ 391 :
/***/ ( function ( module ) {
"use strict" ;
// Manually added data to be used by sbcs codec in addition to generated one.
module . exports = {
// Not supported by iconv, not sure why.
"10029" : "maccenteuro" ,
"maccenteuro" : {
"type" : "_sbcs" ,
"chars" : "ÄĀāÉĄÖÜáąČäčĆć鏟ĎíďĒēĖóėôöõúĚěü†°Ę£§•¶ß®©™ę¨≠ģĮįĪ≤≥īĶ∂∑łĻļĽľĹĺŅņѬ√ńŇ∆«»… ňŐÕőŌ–—“”‘’÷◊ōŔŕŘ‹›řŖŗŠ‚„šŚśÁŤťÍŽžŪÓÔūŮÚůŰűŲųÝýķŻŁżĢˇ"
} ,
"808" : "cp808" ,
"ibm808" : "cp808" ,
"cp808" : {
"type" : "_sbcs" ,
"chars" : "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№€■ "
} ,
"mik" : {
"type" : "_sbcs" ,
"chars" : "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя└┴┬├─┼╣║╚╔╩╦╠═╬┐░▒▓│┤№§╗╝┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
} ,
// Aliases of generated encodings.
"ascii8bit" : "ascii" ,
"usascii" : "ascii" ,
"ansix34" : "ascii" ,
"ansix341968" : "ascii" ,
"ansix341986" : "ascii" ,
"csascii" : "ascii" ,
"cp367" : "ascii" ,
"ibm367" : "ascii" ,
"isoir6" : "ascii" ,
"iso646us" : "ascii" ,
"iso646irv" : "ascii" ,
"us" : "ascii" ,
"latin1" : "iso88591" ,
"latin2" : "iso88592" ,
"latin3" : "iso88593" ,
"latin4" : "iso88594" ,
"latin5" : "iso88599" ,
"latin6" : "iso885910" ,
"latin7" : "iso885913" ,
"latin8" : "iso885914" ,
"latin9" : "iso885915" ,
"latin10" : "iso885916" ,
"csisolatin1" : "iso88591" ,
"csisolatin2" : "iso88592" ,
"csisolatin3" : "iso88593" ,
"csisolatin4" : "iso88594" ,
"csisolatincyrillic" : "iso88595" ,
"csisolatinarabic" : "iso88596" ,
"csisolatingreek" : "iso88597" ,
"csisolatinhebrew" : "iso88598" ,
"csisolatin5" : "iso88599" ,
"csisolatin6" : "iso885910" ,
"l1" : "iso88591" ,
"l2" : "iso88592" ,
"l3" : "iso88593" ,
"l4" : "iso88594" ,
"l5" : "iso88599" ,
"l6" : "iso885910" ,
"l7" : "iso885913" ,
"l8" : "iso885914" ,
"l9" : "iso885915" ,
"l10" : "iso885916" ,
"isoir14" : "iso646jp" ,
"isoir57" : "iso646cn" ,
"isoir100" : "iso88591" ,
"isoir101" : "iso88592" ,
"isoir109" : "iso88593" ,
"isoir110" : "iso88594" ,
"isoir144" : "iso88595" ,
"isoir127" : "iso88596" ,
"isoir126" : "iso88597" ,
"isoir138" : "iso88598" ,
"isoir148" : "iso88599" ,
"isoir157" : "iso885910" ,
"isoir166" : "tis620" ,
"isoir179" : "iso885913" ,
"isoir199" : "iso885914" ,
"isoir203" : "iso885915" ,
"isoir226" : "iso885916" ,
"cp819" : "iso88591" ,
"ibm819" : "iso88591" ,
"cyrillic" : "iso88595" ,
"arabic" : "iso88596" ,
"arabic8" : "iso88596" ,
"ecma114" : "iso88596" ,
"asmo708" : "iso88596" ,
"greek" : "iso88597" ,
"greek8" : "iso88597" ,
"ecma118" : "iso88597" ,
"elot928" : "iso88597" ,
"hebrew" : "iso88598" ,
"hebrew8" : "iso88598" ,
"turkish" : "iso88599" ,
"turkish8" : "iso88599" ,
"thai" : "iso885911" ,
"thai8" : "iso885911" ,
"celtic" : "iso885914" ,
"celtic8" : "iso885914" ,
"isoceltic" : "iso885914" ,
"tis6200" : "tis620" ,
"tis62025291" : "tis620" ,
"tis62025330" : "tis620" ,
"10000" : "macroman" ,
"10006" : "macgreek" ,
"10007" : "maccyrillic" ,
"10079" : "maciceland" ,
"10081" : "macturkish" ,
"cspc8codepage437" : "cp437" ,
"cspc775baltic" : "cp775" ,
"cspc850multilingual" : "cp850" ,
"cspcp852" : "cp852" ,
"cspc862latinhebrew" : "cp862" ,
"cpgr" : "cp869" ,
"msee" : "cp1250" ,
"mscyrl" : "cp1251" ,
"msansi" : "cp1252" ,
"msgreek" : "cp1253" ,
"msturk" : "cp1254" ,
"mshebr" : "cp1255" ,
"msarab" : "cp1256" ,
"winbaltrim" : "cp1257" ,
"cp20866" : "koi8r" ,
"20866" : "koi8r" ,
"ibm878" : "koi8r" ,
"cskoi8r" : "koi8r" ,
"cp21866" : "koi8u" ,
"21866" : "koi8u" ,
"ibm1168" : "koi8u" ,
"strk10482002" : "rk1048" ,
"tcvn5712" : "tcvn" ,
"tcvn57121" : "tcvn" ,
"gb198880" : "iso646cn" ,
"cn" : "iso646cn" ,
"csiso14jisc6220ro" : "iso646jp" ,
"jisc62201969ro" : "iso646jp" ,
"jp" : "iso646jp" ,
"cshproman8" : "hproman8" ,
"r8" : "hproman8" ,
"roman8" : "hproman8" ,
"xroman8" : "hproman8" ,
"ibm1051" : "hproman8" ,
"mac" : "macintosh" ,
"csmacintosh" : "macintosh" ,
} ;
/***/ } ) ,
/***/ 407 :
/***/ (function (module) {

// Re-export Node's built-in "buffer" module under this bundle id.
module.exports = require("buffer");

/***/ }),
/***/ 413 :
/***/ (function (module) {

// Webpack external: re-export Node's built-in "stream" module unchanged.
module.exports = require("stream");

/***/ }),
/***/ 417 :
/***/ (function (module) {

// Webpack external: re-export Node's built-in "crypto" module unchanged.
module.exports = require("crypto");

/***/ }),
/***/ 420 :
/***/ (function (module, __unusedexports, __webpack_require__) {

// semver internals: report whether `version` lies entirely outside `range`
// on the high side (hilo === '>') or the low side (hilo === '<').
const SemVer = __webpack_require__(88)
const Comparator = __webpack_require__(532)
const { ANY } = Comparator
const Range = __webpack_require__(828)
const satisfies = __webpack_require__(55)
const gt = __webpack_require__(123)
const lt = __webpack_require__(194)
const lte = __webpack_require__(520)
const gte = __webpack_require__(522)

const outside = (version, range, hilo, options) => {
  version = new SemVer(version, options)
  range = new Range(range, options)

  let gtfn, ltefn, ltfn, comp, ecomp
  // Select comparison helpers so the body below reads in "greater than
  // range" terms; for '<' every comparator is mirrored.
  switch (hilo) {
    case '>':
      gtfn = gt
      ltefn = lte
      ltfn = lt
      comp = '>'
      ecomp = '>='
      break
    case '<':
      gtfn = lt
      ltefn = gte
      ltfn = gt
      comp = '<'
      ecomp = '<='
      break
    default:
      throw new TypeError('Must provide a hilo val of "<" or ">"')
  }

  // If it satisifes the range it is not outside
  if (satisfies(version, range, options)) {
    return false
  }

  // From now on, variable terms are as if we're in "gtr" mode.
  // but note that everything is flipped for the "ltr" function.
  for (let i = 0; i < range.set.length; ++i) {
    const comparators = range.set[i]

    // Track the extreme comparators of this comparator set.
    let high = null
    let low = null

    comparators.forEach((comparator) => {
      if (comparator.semver === ANY) {
        // A bare "*" comparator behaves like ">=0.0.0" for this purpose.
        comparator = new Comparator('>=0.0.0')
      }
      high = high || comparator
      low = low || comparator
      if (gtfn(comparator.semver, high.semver, options)) {
        high = comparator
      } else if (ltfn(comparator.semver, low.semver, options)) {
        low = comparator
      }
    })

    // If the edge version comparator has a operator then our version
    // isn't outside it
    if (high.operator === comp || high.operator === ecomp) {
      return false
    }

    // If the lowest version comparator has an operator and our version
    // is less than it then it isn't higher than the range
    if ((!low.operator || low.operator === comp) &&
        ltefn(version, low.semver)) {
      return false
    } else if (low.operator === ecomp && ltfn(version, low.semver)) {
      return false
    }
  }
  return true
}

module.exports = outside

/***/ }),
/***/ 426 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compareBuild = _ _webpack _require _ _ ( 156 )
const sort = ( list , loose ) => list . sort ( ( a , b ) => compareBuild ( a , b , loose ) )
module . exports = sort
/***/ } ) ,
/***/ 427 :
/***/ ( function ( module ) {
const debug = (
typeof process === 'object' &&
process . env &&
process . env . NODE _DEBUG &&
/\bsemver\b/i . test ( process . env . NODE _DEBUG )
) ? ( ... args ) => console . error ( 'SEMVER' , ... args )
: ( ) => { }
module . exports = debug
/***/ } ) ,
/***/ 429 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
function getUserAgent ( ) {
if ( typeof navigator === "object" && "userAgent" in navigator ) {
return navigator . userAgent ;
}
if ( typeof process === "object" && "version" in process ) {
return ` Node.js/ ${ process . version . substr ( 1 ) } ( ${ process . platform } ; ${ process . arch } ) ` ;
}
return "<environment undetectable>" ;
}
exports . getUserAgent = getUserAgent ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 431 :
/***/ (function (__unusedmodule, exports) {

"use strict";

Object.defineProperty(exports, '__esModule', { value: true });

const VERSION = "2.3.1";

/**
 * Some “list” response that can be paginated have a different response structure
 *
 * They have a `total_count` key in the response (search also has `incomplete_results`,
 * /installation/repositories also has `repository_selection`), as well as a key with
 * the list of the items which name varies from endpoint to endpoint.
 *
 * Octokit normalizes these responses so that paginated results are always returned following
 * the same structure. One challenge is that if the list response has only one page, no Link
 * header is provided, so this header alone is not sufficient to check wether a response is
 * paginated or not.
 *
 * We check if a "total_count" key is present in the response data, but also make sure that
 * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
 * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
 */
function normalizePaginatedListResponse(response) {
  const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
  if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way
  // to retrieve the same information.

  const incompleteResults = response.data.incomplete_results;
  const repositorySelection = response.data.repository_selection;
  const totalCount = response.data.total_count;
  delete response.data.incomplete_results;
  delete response.data.repository_selection;
  delete response.data.total_count;
  // After removing the bookkeeping keys, the one remaining key holds the item
  // list; hoist it so response.data is always the array itself.
  const namespaceKey = Object.keys(response.data)[0];
  const data = response.data[namespaceKey];
  response.data = data;

  if (typeof incompleteResults !== "undefined") {
    response.data.incomplete_results = incompleteResults;
  }

  if (typeof repositorySelection !== "undefined") {
    response.data.repository_selection = repositorySelection;
  }

  response.data.total_count = totalCount;
  return response;
}

/**
 * Async-iterate a paginated endpoint: each next() fetches one page and then
 * follows the `rel="next"` Link header until none remains.
 */
function iterator(octokit, route, parameters) {
  const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
  const requestMethod = typeof route === "function" ? route : octokit.request;
  const method = options.method;
  const headers = options.headers;
  let url = options.url;
  return {
    [Symbol.asyncIterator]: () => ({
      next() {
        if (!url) {
          return Promise.resolve({
            done: true
          });
        }

        return requestMethod({
          method,
          url,
          headers
        }).then(normalizePaginatedListResponse).then(response => {
          // `response.headers.link` format:
          // '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=2>; rel="last"'
          // sets `url` to undefined if "next" URL is not present or `link` header is not set
          url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
          return {
            value: response
          };
        });
      }

    })
  };
}

/**
 * Collect every page of a paginated endpoint into a single array, optionally
 * mapping each page with `mapFn` (which may invoke its `done` argument to
 * stop fetching further pages).
 */
function paginate(octokit, route, parameters, mapFn) {
  if (typeof parameters === "function") {
    // paginate(octokit, route, mapFn) overload: shift arguments.
    mapFn = parameters;
    parameters = undefined;
  }

  return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);
}

// Recursively drain `iterator`, concatenating each page's items (or the
// mapFn-transformed page) onto `results`.
function gather(octokit, results, iterator, mapFn) {
  return iterator.next().then(result => {
    if (result.done) {
      return results;
    }

    let earlyExit = false;

    function done() {
      earlyExit = true;
    }

    results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);

    if (earlyExit) {
      return results;
    }

    return gather(octokit, results, iterator, mapFn);
  });
}

/**
 * @param octokit Octokit instance
 * @param options Options passed to Octokit constructor
 */
function paginateRest(octokit) {
  return {
    paginate: Object.assign(paginate.bind(null, octokit), {
      iterator: iterator.bind(null, octokit)
    })
  };
}
paginateRest.VERSION = VERSION;

exports.paginateRest = paginateRest;
//# sourceMappingURL=index.js.map

/***/ }),
/***/ 436 :
/***/ (function (__unusedmodule, exports, __webpack_require__) {

"use strict";

// TypeScript-emitted async/await helper: drives a generator function,
// resolving the returned Promise with the generator's final value.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
const childProcess = __webpack_require__(129);
const path = __webpack_require__(622);
const util_1 = __webpack_require__(669);
const ioUtil = __webpack_require__(962);
// Promisified child_process.exec; used by rmRF to shell out deletions.
const exec = util_1.promisify(childProcess.exec);
/**
 * Copies a file or folder.
 * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js
 *
 * @param     source    source path
 * @param     dest      destination path
 * @param     options   optional. See CopyOptions.
 * @returns   Promise<void>
 */
function cp(source, dest, options = {}) {
    return __awaiter(this, void 0, void 0, function* () {
        const { force, recursive } = readCopyOptions(options);
        const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null;
        // Dest is an existing file, but not forcing
        if (destStat && destStat.isFile() && !force) {
            return;
        }
        // If dest is an existing directory, should copy inside.
        const newDest = destStat && destStat.isDirectory()
            ? path.join(dest, path.basename(source))
            : dest;
        if (!(yield ioUtil.exists(source))) {
            throw new Error(`no such file or directory: ${source}`);
        }
        const sourceStat = yield ioUtil.stat(source);
        if (sourceStat.isDirectory()) {
            if (!recursive) {
                throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`);
            }
            else {
                yield cpDirRecursive(source, newDest, 0, force);
            }
        }
        else {
            if (path.relative(source, newDest) === '') {
                // a file cannot be copied to itself
                throw new Error(`'${newDest}' and '${source}' are the same file`);
            }
            yield copyFile(source, newDest, force);
        }
    });
}
exports.cp = cp;
/**
 * Moves a path.
 *
 * @param     source    source path
 * @param     dest      destination path
 * @param     options   optional. See MoveOptions.
 * @returns   Promise<void>
 */
function mv(source, dest, options = {}) {
    return __awaiter(this, void 0, void 0, function* () {
        if (yield ioUtil.exists(dest)) {
            let destExists = true;
            if (yield ioUtil.isDirectory(dest)) {
                // If dest is directory copy src into dest
                dest = path.join(dest, path.basename(source));
                destExists = yield ioUtil.exists(dest);
            }
            if (destExists) {
                // force defaults to true: an existing destination is removed first.
                if (options.force == null || options.force) {
                    yield rmRF(dest);
                }
                else {
                    throw new Error('Destination already exists');
                }
            }
        }
        yield mkdirP(path.dirname(dest));
        yield ioUtil.rename(source, dest);
    });
}
exports.mv = mv;
/**
 * Remove a path recursively with force
 *
 * @param   inputPath  path to remove
 * @returns Promise<void>
 */
function rmRF(inputPath) {
    return __awaiter(this, void 0, void 0, function* () {
        if (ioUtil.IS_WINDOWS) {
            // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another
            // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del.
            // NOTE(review): inputPath is interpolated into a shell command line; a
            // path containing a double quote could break the quoting. Presumably
            // callers only pass trusted paths — confirm before exposing to user input.
            try {
                if (yield ioUtil.isDirectory(inputPath, true)) {
                    yield exec(`rd /s /q "${inputPath}"`);
                }
                else {
                    yield exec(`del /f /a "${inputPath}"`);
                }
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
            }
            // Shelling out fails to remove a symlink folder with missing source, this unlink catches that
            try {
                yield ioUtil.unlink(inputPath);
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
            }
        }
        else {
            let isDir = false;
            try {
                isDir = yield ioUtil.isDirectory(inputPath);
            }
            catch (err) {
                // if you try to delete a file that doesn't exist, desired result is achieved
                // other errors are valid
                if (err.code !== 'ENOENT')
                    throw err;
                return;
            }
            if (isDir) {
                yield exec(`rm -rf "${inputPath}"`);
            }
            else {
                yield ioUtil.unlink(inputPath);
            }
        }
    });
}
exports.rmRF = rmRF;
/**
 * Make a directory.  Creates the full path with folders in between
 * Will throw if it fails
 *
 * @param   fsPath   path to create
 * @returns Promise<void>
 */
function mkdirP(fsPath) {
    return __awaiter(this, void 0, void 0, function* () {
        // Delegates directly to the io-util helper (mkdir -p semantics).
        yield ioUtil.mkdirP(fsPath);
    });
}
exports.mkdirP = mkdirP;
/**
 * Returns path of a tool had the tool actually been invoked.  Resolves via paths.
 * If you check and the tool does not exist, it will throw.
 *
 * @param     tool              name of the tool
 * @param     check             whether to check if tool exists
 * @returns   Promise<string>   path to tool, or '' when not found and check is false
 */
function which(tool, check) {
    return __awaiter(this, void 0, void 0, function* () {
        if (!tool) {
            throw new Error("parameter 'tool' is required");
        }
        // recursive when check=true
        if (check) {
            const result = yield which(tool, false);
            if (!result) {
                if (ioUtil.IS_WINDOWS) {
                    throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`);
                }
                else {
                    throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`);
                }
            }
        }
        try {
            // build the list of extensions to try
            const extensions = [];
            if (ioUtil.IS_WINDOWS && process.env.PATHEXT) {
                for (const extension of process.env.PATHEXT.split(path.delimiter)) {
                    if (extension) {
                        extensions.push(extension);
                    }
                }
            }
            // if it's rooted, return it if exists. otherwise return empty.
            if (ioUtil.isRooted(tool)) {
                const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions);
                if (filePath) {
                    return filePath;
                }
                return '';
            }
            // if any path separators, return empty
            if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) {
                return '';
            }
            // build the list of directories
            //
            // Note, technically "where" checks the current directory on Windows. From a toolkit perspective,
            // it feels like we should not do this. Checking the current directory seems like more of a use
            // case of a shell, and the which() function exposed by the toolkit should strive for consistency
            // across platforms.
            const directories = [];
            if (process.env.PATH) {
                for (const p of process.env.PATH.split(path.delimiter)) {
                    if (p) {
                        directories.push(p);
                    }
                }
            }
            // return the first match
            for (const directory of directories) {
                const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions);
                if (filePath) {
                    return filePath;
                }
            }
            return '';
        }
        catch (err) {
            throw new Error(`which failed with message ${err.message}`);
        }
    });
}
exports.which = which;
// Normalize CopyOptions: `force` defaults to true (only null/undefined fall
// back), `recursive` is coerced to a strict boolean.
function readCopyOptions (options) {
    return {
        force: options.force == null ? true : options.force,
        recursive: Boolean(options.recursive)
    };
}
// Copy the contents of sourceDir into destDir (created if needed), recursing
// into subdirectories. Depth is capped at 255 to guard against runaway
// recursion.
function cpDirRecursive(sourceDir, destDir, currentDepth, force) {
    return __awaiter(this, void 0, void 0, function* () {
        // Ensure there is not a run away recursive copy
        if (currentDepth >= 255)
            return;
        currentDepth++;
        yield mkdirP(destDir);
        const files = yield ioUtil.readdir(sourceDir);
        for (const fileName of files) {
            const srcFile = `${sourceDir}/${fileName}`;
            const destFile = `${destDir}/${fileName}`;
            const srcFileStat = yield ioUtil.lstat(srcFile);
            if (srcFileStat.isDirectory()) {
                // Recurse
                yield cpDirRecursive(srcFile, destFile, currentDepth, force);
            }
            else {
                yield copyFile(srcFile, destFile, force);
            }
        }
        // Change the mode for the newly created directory
        yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode);
    });
}
// Buffered file copy
// Symbolic links are re-created at the destination (replacing any existing
// link, as a 'junction' on Windows); regular files are copied unless the
// destination exists and force is false.
function copyFile(srcFile, destFile, force) {
    return __awaiter(this, void 0, void 0, function* () {
        if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) {
            // unlink/re-link it
            try {
                yield ioUtil.lstat(destFile);
                yield ioUtil.unlink(destFile);
            }
            catch (e) {
                // Try to override file permission
                if (e.code === 'EPERM') {
                    yield ioUtil.chmod(destFile, '0666');
                    yield ioUtil.unlink(destFile);
                }
                // other errors = it doesn't exist, no work to do
            }
            // Copy over symlink
            const symlinkFull = yield ioUtil.readlink(srcFile);
            yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null);
        }
        else if (!(yield ioUtil.exists(destFile)) || force) {
            yield ioUtil.copyFile(srcFile, destFile);
        }
    });
}
//# sourceMappingURL=io.js.map
/***/ } ) ,
/***/ 438 :
/***/ (function (__unusedmodule, exports, __webpack_require__) {

"use strict";

// TypeScript-emitted ES-module interop helpers (importStar and friends).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function () { return m[k]; } });
}) : (function (o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function (o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function (o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getOctokit = exports.context = void 0;
const Context = __importStar(__webpack_require__(53));
const utils_1 = __webpack_require__(30);
// Shared Context object describing the current GitHub Actions run.
exports.context = new Context.Context();
/**
 * Returns a hydrated octokit ready to use for GitHub Actions
 *
 * @param     token    the repo PAT or GITHUB_TOKEN
 * @param     options  other options to set
 */
function getOctokit(token, options) {
    return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));
}
exports.getOctokit = getOctokit;
//# sourceMappingURL=github.js.map

/***/ }),
/***/ 440 :
/***/ (function (__unusedmodule, exports, __webpack_require__) {

"use strict";

Object.defineProperty(exports, '__esModule', { value: true });

// Unwrap a transpiled ES-module default export (bundler interop shim).
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }

var isPlainObject = _interopDefault(__webpack_require__(38));
var universalUserAgent = __webpack_require__(429);
function lowercaseKeys ( object ) {
if ( ! object ) {
return { } ;
}
return Object . keys ( object ) . reduce ( ( newObj , key ) => {
newObj [ key . toLowerCase ( ) ] = object [ key ] ;
return newObj ;
} , { } ) ;
}
function mergeDeep ( defaults , options ) {
const result = Object . assign ( { } , defaults ) ;
Object . keys ( options ) . forEach ( key => {
if ( isPlainObject ( options [ key ] ) ) {
if ( ! ( key in defaults ) ) Object . assign ( result , {
[ key ] : options [ key ]
} ) ; else result [ key ] = mergeDeep ( defaults [ key ] , options [ key ] ) ;
} else {
Object . assign ( result , {
[ key ] : options [ key ]
} ) ;
}
} ) ;
return result ;
}
function merge ( defaults , route , options ) {
if ( typeof route === "string" ) {
let [ method , url ] = route . split ( " " ) ;
options = Object . assign ( url ? {
method ,
url
} : {
url : method
} , options ) ;
} else {
options = Object . assign ( { } , route ) ;
} // lowercase header names before merging with defaults to avoid duplicates
options . headers = lowercaseKeys ( options . headers ) ;
const mergedOptions = mergeDeep ( defaults || { } , options ) ; // mediaType.previews arrays are merged, instead of overwritten
if ( defaults && defaults . mediaType . previews . length ) {
mergedOptions . mediaType . previews = defaults . mediaType . previews . filter ( preview => ! mergedOptions . mediaType . previews . includes ( preview ) ) . concat ( mergedOptions . mediaType . previews ) ;
}
mergedOptions . mediaType . previews = mergedOptions . mediaType . previews . map ( preview => preview . replace ( /-preview/ , "" ) ) ;
return mergedOptions ;
}
// Append `parameters` to `url` as a query string, honoring an existing "?".
// The search parameter "q" is special-cased: its "+"-separated terms are
// encoded individually so the "+" separators survive encoding.
function addQueryParameters(url, parameters) {
  const names = Object.keys(parameters);

  if (names.length === 0) {
    return url;
  }

  const encodedPairs = [];

  for (const name of names) {
    if (name === "q") {
      encodedPairs.push("q=" + parameters.q.split("+").map(encodeURIComponent).join("+"));
    } else {
      encodedPairs.push(`${name}=${encodeURIComponent(parameters[name])}`);
    }
  }

  const separator = /\?/.test(url) ? "&" : "?";
  return url + separator + encodedPairs.join("&");
}
// Matches RFC 6570 expressions such as "{owner}" or "{?page,per_page}".
const urlVariableRegex = /\{[^}]+\}/g;

// Strip operator/brace characters from an expression and split the
// comma-separated variable list: "{owner,repo}" -> ["owner", "repo"].
function removeNonChars(variableName) {
  return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
}

// All template variable names appearing in `url`, flattened into one array.
function extractUrlVariableNames(url) {
  const matches = url.match(urlVariableRegex);

  if (!matches) {
    return [];
  }

  const names = [];
  for (const match of matches) {
    names.push(...removeNonChars(match));
  }
  return names;
}

// Shallow copy of `object` without the keys listed in `keysToOmit`.
function omit(object, keysToOmit) {
  const result = {};
  for (const key of Object.keys(object)) {
    if (!keysToOmit.includes(key)) {
      result[key] = object[key];
    }
  }
  return result;
}
// Based on https://github.com/bramstein/url-template, licensed under BSD
// TODO: create separate package.
//
// Copyright (c) 2012-2014, Bram Stein
// All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// 3. The name of the author may not be used to endorse or promote products
// derived from this software without specific prior written permission.
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/* istanbul ignore file */
// Percent-encode `str` while leaving RFC 3986 reserved characters, square
// brackets, and pre-existing %XX escape sequences untouched (used by the
// "+" and "#" operator expansions).
function encodeReserved(str) {
  const parts = str.split(/(%[0-9A-Fa-f]{2})/g);

  return parts
    .map(function (part) {
      if (/%[0-9A-Fa-f]/.test(part)) {
        return part; // already an escape sequence — keep as-is
      }
      return encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
    })
    .join("");
}

// Strict RFC 3986 percent-encoding: also escapes !, ', (, ), and *.
function encodeUnreserved(str) {
  return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {
    return "%" + c.charCodeAt(0).toString(16).toUpperCase();
  });
}

// Encode one value — reserved-friendly for the "+"/"#" operators — and
// prefix it with "key=" when a key is supplied.
function encodeValue(operator, value, key) {
  const encoded = operator === "+" || operator === "#"
    ? encodeReserved(value)
    : encodeUnreserved(value);

  if (key) {
    return encodeUnreserved(key) + "=" + encoded;
  }
  return encoded;
}

// A value counts as defined unless it is null or undefined.
function isDefined(value) {
  return value !== undefined && value !== null;
}

// Operators whose expansions render as key=value pairs.
function isKeyOperator(operator) {
  return operator === ";" || operator === "&" || operator === "?";
}
function getValues ( context , operator , key , modifier ) {
var value = context [ key ] ,
result = [ ] ;
if ( isDefined ( value ) && value !== "" ) {
if ( typeof value === "string" || typeof value === "number" || typeof value === "boolean" ) {
value = value . toString ( ) ;
if ( modifier && modifier !== "*" ) {
value = value . substring ( 0 , parseInt ( modifier , 10 ) ) ;
}
result . push ( encodeValue ( operator , value , isKeyOperator ( operator ) ? key : "" ) ) ;
} else {
if ( modifier === "*" ) {
if ( Array . isArray ( value ) ) {
value . filter ( isDefined ) . forEach ( function ( value ) {
result . push ( encodeValue ( operator , value , isKeyOperator ( operator ) ? key : "" ) ) ;
} ) ;
} else {
Object . keys ( value ) . forEach ( function ( k ) {
if ( isDefined ( value [ k ] ) ) {
result . push ( encodeValue ( operator , value [ k ] , k ) ) ;
}
} ) ;
}
} else {
const tmp = [ ] ;
if ( Array . isArray ( value ) ) {
value . filter ( isDefined ) . forEach ( function ( value ) {
tmp . push ( encodeValue ( operator , value ) ) ;
} ) ;
} else {
Object . keys ( value ) . forEach ( function ( k ) {
if ( isDefined ( value [ k ] ) ) {
tmp . push ( encodeUnreserved ( k ) ) ;
tmp . push ( encodeValue ( operator , value [ k ] . toString ( ) ) ) ;
}
} ) ;
}
if ( isKeyOperator ( operator ) ) {
result . push ( encodeUnreserved ( key ) + "=" + tmp . join ( "," ) ) ;
} else if ( tmp . length !== 0 ) {
result . push ( tmp . join ( "," ) ) ;
}
}
}
} else {
if ( operator === ";" ) {
if ( isDefined ( value ) ) {
result . push ( encodeUnreserved ( key ) ) ;
}
} else if ( value === "" && ( operator === "&" || operator === "?" ) ) {
result . push ( encodeUnreserved ( key ) + "=" ) ;
} else if ( value === "" ) {
result . push ( "" ) ;
}
}
return result ;
}
function parseUrl ( template ) {
return {
expand : expand . bind ( null , template )
} ;
}
function expand ( template , context ) {
var operators = [ "+" , "#" , "." , "/" , ";" , "?" , "&" ] ;
return template . replace ( /\{([^\{\}]+)\}|([^\{\}]+)/g , function ( _ , expression , literal ) {
if ( expression ) {
let operator = "" ;
const values = [ ] ;
if ( operators . indexOf ( expression . charAt ( 0 ) ) !== - 1 ) {
operator = expression . charAt ( 0 ) ;
expression = expression . substr ( 1 ) ;
}
expression . split ( /,/g ) . forEach ( function ( variable ) {
var tmp = /([^:\*]*)(?::(\d+)|(\*))?/ . exec ( variable ) ;
values . push ( getValues ( context , operator , tmp [ 1 ] , tmp [ 2 ] || tmp [ 3 ] ) ) ;
} ) ;
if ( operator && operator !== "+" ) {
var separator = "," ;
if ( operator === "?" ) {
separator = "&" ;
} else if ( operator !== "#" ) {
separator = operator ;
}
return ( values . length !== 0 ? operator : "" ) + values . join ( separator ) ;
} else {
return values . join ( "," ) ;
}
} else {
return encodeReserved ( literal ) ;
}
} ) ;
}
function parse ( options ) {
// https://fetch.spec.whatwg.org/#methods
let method = options . method . toUpperCase ( ) ; // replace :varname with {varname} to make it RFC 6570 compatible
let url = ( options . url || "/" ) . replace ( /:([a-z]\w+)/g , "{+$1}" ) ;
let headers = Object . assign ( { } , options . headers ) ;
let body ;
let parameters = omit ( options , [ "method" , "baseUrl" , "url" , "headers" , "request" , "mediaType" ] ) ; // extract variable names from URL to calculate remaining variables later
const urlVariableNames = extractUrlVariableNames ( url ) ;
url = parseUrl ( url ) . expand ( parameters ) ;
if ( ! /^http/ . test ( url ) ) {
url = options . baseUrl + url ;
}
const omittedParameters = Object . keys ( options ) . filter ( option => urlVariableNames . includes ( option ) ) . concat ( "baseUrl" ) ;
const remainingParameters = omit ( parameters , omittedParameters ) ;
const isBinaryRequset = /application\/octet-stream/i . test ( headers . accept ) ;
if ( ! isBinaryRequset ) {
if ( options . mediaType . format ) {
// e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
headers . accept = headers . accept . split ( /,/ ) . map ( preview => preview . replace ( /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/ , ` application/vnd $ 1 $ 2. ${ options . mediaType . format } ` ) ) . join ( "," ) ;
}
if ( options . mediaType . previews . length ) {
const previewsFromAcceptHeader = headers . accept . match ( /[\w-]+(?=-preview)/g ) || [ ] ;
headers . accept = previewsFromAcceptHeader . concat ( options . mediaType . previews ) . map ( preview => {
const format = options . mediaType . format ? ` . ${ options . mediaType . format } ` : "+json" ;
return ` application/vnd.github. ${ preview } -preview ${ format } ` ;
} ) . join ( "," ) ;
}
} // for GET/HEAD requests, set URL query parameters from remaining parameters
// for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters
if ( [ "GET" , "HEAD" ] . includes ( method ) ) {
url = addQueryParameters ( url , remainingParameters ) ;
} else {
if ( "data" in remainingParameters ) {
body = remainingParameters . data ;
} else {
if ( Object . keys ( remainingParameters ) . length ) {
body = remainingParameters ;
} else {
headers [ "content-length" ] = 0 ;
}
}
} // default content-type for JSON if body is set
if ( ! headers [ "content-type" ] && typeof body !== "undefined" ) {
headers [ "content-type" ] = "application/json; charset=utf-8" ;
} // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
// fetch does not allow to set `content-length` header, but we can set body to an empty string
if ( [ "PATCH" , "PUT" ] . includes ( method ) && typeof body === "undefined" ) {
body = "" ;
} // Only return body/request keys if present
return Object . assign ( {
method ,
url ,
headers
} , typeof body !== "undefined" ? {
body
} : null , options . request ? {
request : options . request
} : null ) ;
}
// Merge a route with `defaults` and resolve it into a concrete request.
function endpointWithDefaults(defaults, route, options) {
  const merged = merge(defaults, route, options);
  return parse(merged);
}

// Build an endpoint function pre-seeded with DEFAULTS, exposing the standard
// octokit endpoint API surface (DEFAULTS, defaults, merge, parse).
function withDefaults(oldDefaults, newDefaults) {
  const DEFAULTS = merge(oldDefaults, newDefaults);
  const endpoint = endpointWithDefaults.bind(null, DEFAULTS);

  endpoint.DEFAULTS = DEFAULTS;
  endpoint.defaults = withDefaults.bind(null, DEFAULTS);
  endpoint.merge = merge.bind(null, DEFAULTS);
  endpoint.parse = parse;

  return endpoint;
}
const VERSION = "6.0.5" ;
const userAgent = ` octokit-endpoint.js/ ${ VERSION } ${ universalUserAgent . getUserAgent ( ) } ` ; // DEFAULTS has all properties set that EndpointOptions has, except url.
// So we use RequestParameters and add method as additional required property.
const DEFAULTS = {
method : "GET" ,
baseUrl : "https://api.github.com" ,
headers : {
accept : "application/vnd.github.v3+json" ,
"user-agent" : userAgent
} ,
mediaType : {
format : "" ,
previews : [ ]
}
} ;
const endpoint = withDefaults ( null , DEFAULTS ) ;
exports . endpoint = endpoint ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 443 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const url = _ _webpack _require _ _ ( 835 ) ;
/**
 * Resolve the proxy URL (if any) to use for `reqUrl`.
 * Honors https_proxy/HTTPS_PROXY for https requests and
 * http_proxy/HTTP_PROXY otherwise. Returns undefined when the host is
 * covered by no_proxy or no proxy variable is set.
 */
function getProxyUrl(reqUrl) {
    let proxyUrl;
    if (checkBypass(reqUrl)) {
        return proxyUrl;
    }
    const usingSsl = reqUrl.protocol === 'https:';
    const proxyVar = usingSsl
        ? process.env['https_proxy'] || process.env['HTTPS_PROXY']
        : process.env['http_proxy'] || process.env['HTTP_PROXY'];
    if (proxyVar) {
        // Legacy url.parse keeps the historical return shape callers expect.
        proxyUrl = url.parse(proxyVar);
    }
    return proxyUrl;
}
exports.getProxyUrl = getProxyUrl;
/**
 * Decide whether `reqUrl` should bypass the proxy, based on the
 * no_proxy/NO_PROXY environment variable: a comma-separated host list,
 * matched case-insensitively, optionally with an explicit ":port".
 */
function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Determine the effective request port (explicit, else protocol default).
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Candidate forms to match: "HOST" and, when a port is known, "HOST:PORT".
    const upperReqHosts = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Any non-empty no_proxy entry that equals one of the candidates wins.
    const entries = noProxy
        .split(',')
        .map(entry => entry.trim().toUpperCase())
        .filter(entry => entry);
    return entries.some(entry => upperReqHosts.includes(entry));
}
exports . checkBypass = checkBypass ;
/***/ } ) ,
/***/ 447 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const SemVer = __webpack_require__(88)

/**
 * Return the minor version component of `a` (a semver string or SemVer
 * instance); `loose` is forwarded to the SemVer parser.
 */
const minor = (a, loose) => {
  return new SemVer(a, loose).minor
}

module.exports = minor
/***/ } ) ,
/***/ 463 :
/***/ ( function ( module ) {
// Matches identifiers that are purely numeric (compared numerically per semver).
const numeric = /^[0-9]+$/

/**
 * Compare two semver prerelease identifiers.
 * Numeric identifiers compare numerically and always sort before
 * alphanumeric ones; otherwise comparison is lexical.
 * Returns -1, 0 or 1.
 */
const compareIdentifiers = (a, b) => {
  const aIsNum = numeric.test(a)
  const bIsNum = numeric.test(b)
  let left = a
  let right = b
  if (aIsNum && bIsNum) {
    left = +a
    right = +b
  }
  if (left === right) {
    return 0
  }
  if (aIsNum && !bIsNum) {
    return -1
  }
  if (bIsNum && !aIsNum) {
    return 1
  }
  return left < right ? -1 : 1
}

// Reverse comparator (descending sort order).
const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a)
module . exports = {
compareIdentifiers ,
rcompareIdentifiers
}
/***/ } ) ,
/***/ 464 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict";
var Buffer = __webpack_require__(937).Buffer;

// Export Node.js internal encodings. These delegate to Node's own
// Buffer/StringDecoder machinery rather than to custom codec tables.
module.exports = {
    // Encodings handled natively; bomAware ones participate in BOM handling.
    utf8:   { type: "_internal", bomAware: true},
    cesu8:  { type: "_internal", bomAware: true},
    unicode11utf8: "utf8",              // alias of utf8
    ucs2:   { type: "_internal", bomAware: true},
    utf16le: "ucs2",                    // alias of ucs2
    binary: { type: "_internal" },
    base64: { type: "_internal" },
    hex:    { type: "_internal" },

    // Codec constructor shared by all of the entries above.
    _internal: InternalCodec,
};
//------------------------------------------------------------------------------
/**
 * Codec wrapper around Node's built-in encodings. Most encodings use the
 * default (trivial) encoder/decoder from the prototype; base64 and cesu8
 * get specialized handlers installed here.
 */
function InternalCodec(codecOptions, iconv) {
    this.enc = codecOptions.encodingName;
    this.bomAware = codecOptions.bomAware;

    switch (this.enc) {
        case "base64":
            // base64 needs a stateful encoder to buffer incomplete quads.
            this.encoder = InternalEncoderBase64;
            break;
        case "cesu8":
            this.enc = "utf8"; // Use utf8 for decoding.
            this.encoder = InternalEncoderCesu8;
            // If this Node build cannot itself decode CESU-8 surrogate
            // sequences, install our own streaming decoder.
            if (Buffer.from('eda0bdedb2a9', 'hex').toString() !== '💩') {
                this.decoder = InternalDecoderCesu8;
                this.defaultCharUnicode = iconv.defaultCharUnicode;
            }
            break;
    }
}
// Default encoder/decoder for all internal codecs; the InternalCodec
// constructor overrides these per-instance for base64 and cesu8.
InternalCodec.prototype.encoder = InternalEncoder;
InternalCodec.prototype.decoder = InternalDecoder;

//------------------------------------------------------------------------------
// We use node.js internal decoder. Its signature is the same as ours.
var StringDecoder = __webpack_require__(304).StringDecoder;

if (!StringDecoder.prototype.end) // Node v0.8 doesn't have this method.
    StringDecoder.prototype.end = function() {};


// Decoder that delegates entirely to Node's StringDecoder for the codec's
// encoding; construction just runs StringDecoder's constructor on `this`.
function InternalDecoder(options, codec) {
    StringDecoder.call(this, codec.enc);
}

// Deliberately SHARE StringDecoder's prototype (not a copy) so write()/end()
// come straight from Node's implementation.
InternalDecoder.prototype = StringDecoder.prototype;
//------------------------------------------------------------------------------
// Encoder is mostly trivial
/**
 * Stateless encoder: each write() converts its chunk straight to a Buffer
 * in the codec's target encoding; end() has nothing to flush.
 */
function InternalEncoder(options, codec) {
    this.enc = codec.enc;
}

Object.assign(InternalEncoder.prototype, {
    /** Convert a string chunk to a Buffer in the target encoding. */
    write(chunk) {
        return Buffer.from(chunk, this.enc);
    },
    /** No buffered state, so end() yields nothing. */
    end() {
    },
});
//------------------------------------------------------------------------------
// Except base64 encoder, which must keep its state.
/**
 * Stateful base64 encoder: input may arrive in arbitrary chunks, but
 * base64 decodes in groups of 4 characters, so any incomplete quad is
 * held in `prevStr` until more input (or end()) arrives.
 */
function InternalEncoderBase64(options, codec) {
    this.prevStr = '';
}

InternalEncoderBase64.prototype.write = function(str) {
    const combined = this.prevStr + str;
    const completeQuads = combined.length - (combined.length % 4);
    // Stash the trailing partial quad; emit only whole quads.
    this.prevStr = combined.slice(completeQuads);
    return Buffer.from(combined.slice(0, completeQuads), "base64");
}

InternalEncoderBase64.prototype.end = function() {
    // Flush whatever partial quad remains.
    return Buffer.from(this.prevStr, "base64");
}
//------------------------------------------------------------------------------
// CESU-8 encoder is also special.
/**
 * CESU-8 encoder. CESU-8 is UTF-8-like but encodes each UTF-16 code unit
 * (including surrogate halves) independently, which makes it a direct
 * per-charCode mapping from JS strings — no surrogate pairing needed.
 */
function InternalEncoderCesu8(options, codec) {
}

InternalEncoderCesu8.prototype.write = function(str) {
    // Worst case is 3 bytes per UTF-16 code unit.
    const out = Buffer.alloc(str.length * 3);
    let pos = 0;
    for (let i = 0; i < str.length; i++) {
        const code = str.charCodeAt(i);
        if (code < 0x80) {
            // 1-byte (ASCII).
            out[pos++] = code;
        }
        else if (code < 0x800) {
            // 2-byte sequence.
            out[pos++] = 0xC0 + (code >>> 6);
            out[pos++] = 0x80 + (code & 0x3f);
        }
        else {
            // 3-byte sequence; charCodeAt never exceeds 0xFFFF.
            out[pos++] = 0xE0 + (code >>> 12);
            out[pos++] = 0x80 + ((code >>> 6) & 0x3f);
            out[pos++] = 0x80 + (code & 0x3f);
        }
    }
    return out.slice(0, pos);
}

InternalEncoderCesu8.prototype.end = function() {
    // Stateless: nothing to flush.
}
//------------------------------------------------------------------------------
// CESU-8 decoder is not implemented in Node v4.0+
/**
 * Streaming CESU-8 decoder, used on Node versions whose built-in utf8
 * decoder rejects surrogate-half sequences. Partial multi-byte state is
 * carried across write() calls.
 *
 * State:
 *   acc       - bits accumulated so far for the current code point
 *   contBytes - continuation bytes still expected
 *   accBytes  - bytes consumed for the current sequence (overlong check)
 */
function InternalDecoderCesu8(options, codec) {
    this.acc = 0;
    this.contBytes = 0;
    this.accBytes = 0;
    this.defaultCharUnicode = codec.defaultCharUnicode;
}

InternalDecoderCesu8.prototype.write = function(buf) {
    var acc = this.acc, contBytes = this.contBytes, accBytes = this.accBytes, 
        res = '';
    for (var i = 0; i < buf.length; i++) {
        var curByte = buf[i];
        if ((curByte & 0xC0) !== 0x80) { // Leading byte
            if (contBytes > 0) { // Previous sequence ended early: emit replacement.
                res += this.defaultCharUnicode;
                contBytes = 0;
            }

            if (curByte < 0x80) { // Single-byte code
                res += String.fromCharCode(curByte);
            } else if (curByte < 0xE0) { // Two-byte code
                acc = curByte & 0x1F;
                contBytes = 1; accBytes = 1;
            } else if (curByte < 0xF0) { // Three-byte code
                acc = curByte & 0x0F;
                contBytes = 2; accBytes = 1;
            } else { // Four or more bytes are not supported for CESU-8.
                res += this.defaultCharUnicode;
            }
        } else { // Continuation byte
            if (contBytes > 0) { // We're waiting for it.
                acc = (acc << 6) | (curByte & 0x3f);
                contBytes--; accBytes++;
                if (contBytes === 0) {
                    // Check for overlong encoding, but support Modified UTF-8 (encoding NULL as C0 80)
                    if (accBytes === 2 && acc < 0x80 && acc > 0)
                        res += this.defaultCharUnicode;
                    else if (accBytes === 3 && acc < 0x800)
                        res += this.defaultCharUnicode;
                    else
                        // Sequence complete: emit the decoded code unit.
                        res += String.fromCharCode(acc);
                }
            } else { // Unexpected continuation byte
                res += this.defaultCharUnicode;
            }
        }
    }
    this.acc = acc; this.contBytes = contBytes; this.accBytes = accBytes;
    return res;
}

InternalDecoderCesu8.prototype.end = function() {
    // BUGFIX: `res` must start as an empty STRING. It previously started as
    // the number 0, so end() returned 0 (a number) when nothing was pending
    // and the string "0<replacement char>" when a truncated sequence was
    // pending — injecting a stray "0" into decoded output.
    var res = '';
    if (this.contBytes > 0)
        res += this.defaultCharUnicode;
    return res;
}
/***/ } ) ,
/***/ 466 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const SemVer = _ _webpack _require _ _ ( 88 )
const parse = _ _webpack _require _ _ ( 925 )
const { re , t } = _ _webpack _require _ _ ( 523 )
const coerce = ( version , options ) => {
if ( version instanceof SemVer ) {
return version
}
if ( typeof version === 'number' ) {
version = String ( version )
}
if ( typeof version !== 'string' ) {
return null
}
options = options || { }
let match = null
if ( ! options . rtl ) {
match = version . match ( re [ t . COERCE ] )
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
// Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4'
//
// Walk through the string checking with a /g regexp
// Manually set the index so as to pick up overlapping matches.
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
let next
while ( ( next = re [ t . COERCERTL ] . exec ( version ) ) &&
( ! match || match . index + match [ 0 ] . length !== version . length )
) {
if ( ! match ||
next . index + next [ 0 ] . length !== match . index + match [ 0 ] . length ) {
match = next
}
re [ t . COERCERTL ] . lastIndex = next . index + next [ 1 ] . length + next [ 2 ] . length
}
// leave it in a clean state
re [ t . COERCERTL ] . lastIndex = - 1
}
if ( match === null )
return null
return parse ( ` ${ match [ 2 ] } . ${ match [ 3 ] || '0' } . ${ match [ 4 ] || '0' } ` , options )
}
module . exports = coerce
/***/ } ) ,
/***/ 467 :
/***/ ( function ( module , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
/**
 * Interop helper for transpiled ES modules: if `ex` looks like a module
 * namespace object carrying a `default` export, unwrap it; otherwise
 * return `ex` unchanged.
 */
function _interopDefault (ex) {
    if (ex && typeof ex === 'object' && 'default' in ex) {
        return ex['default'];
    }
    return ex;
}
var Stream = _interopDefault ( _ _webpack _require _ _ ( 413 ) ) ;
var http = _interopDefault ( _ _webpack _require _ _ ( 605 ) ) ;
var Url = _interopDefault ( _ _webpack _require _ _ ( 835 ) ) ;
var https = _interopDefault ( _ _webpack _require _ _ ( 211 ) ) ;
var zlib = _interopDefault ( _ _webpack _require _ _ ( 761 ) ) ;
// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
// fix for "Readable" isn't a named export issue
const Readable = Stream . Readable ;
const BUFFER = Symbol('buffer');
const TYPE = Symbol('type');

/**
 * Minimal W3C Blob implementation backed by a single Node Buffer, as used
 * by node-fetch. Constructor accepts (blobParts, options) like the spec.
 */
class Blob {
	constructor() {
		this[TYPE] = '';

		const blobParts = arguments[0];
		const options = arguments[1];

		const buffers = [];

		if (blobParts) {
			const parts = blobParts;
			const length = Number(parts.length);
			for (let i = 0; i < length; i++) {
				const element = parts[i];
				// Normalize every accepted part type to a Buffer.
				let buffer;
				if (element instanceof Buffer) {
					buffer = element;
				} else if (ArrayBuffer.isView(element)) {
					buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
				} else if (element instanceof ArrayBuffer) {
					buffer = Buffer.from(element);
				} else if (element instanceof Blob) {
					buffer = element[BUFFER];
				} else {
					buffer = Buffer.from(typeof element === 'string' ? element : String(element));
				}
				buffers.push(buffer);
			}
		}

		this[BUFFER] = Buffer.concat(buffers);

		// Per spec the type is lowercased and must be printable ASCII only.
		let type = options && options.type !== undefined && String(options.type).toLowerCase();
		if (type && !/[^\u0020-\u007E]/.test(type)) {
			this[TYPE] = type;
		}
	}
	/** Total byte length of the blob. */
	get size() {
		return this[BUFFER].length;
	}
	/** The MIME type supplied at construction ('' if invalid/absent). */
	get type() {
		return this[TYPE];
	}
	/** Resolve with the blob contents decoded as a utf-8 string. */
	text() {
		return Promise.resolve(this[BUFFER].toString());
	}
	/** Resolve with a copy of the contents as an ArrayBuffer. */
	arrayBuffer() {
		const buf = this[BUFFER];
		const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
		return Promise.resolve(ab);
	}
	/** Wrap the contents in a one-shot Readable stream. */
	stream() {
		const readable = new Readable();
		readable._read = function () {};
		readable.push(this[BUFFER]);
		readable.push(null);
		return readable;
	}
	toString() {
		return '[object Blob]';
	}
	/** Spec-style slice(start, end, contentType) with negative-index clamping. */
	slice() {
		const size = this.size;
		const start = arguments[0];
		const end = arguments[1];
		// Clamp an offset per the Blob.slice algorithm.
		const clamp = (value, fallback) => {
			if (value === undefined) {
				return fallback;
			}
			return value < 0 ? Math.max(size + value, 0) : Math.min(value, size);
		};
		const relativeStart = clamp(start, 0);
		const relativeEnd = clamp(end, size);
		const span = Math.max(relativeEnd - relativeStart, 0);
		const slicedBuffer = this[BUFFER].slice(relativeStart, relativeStart + span);
		const blob = new Blob([], { type: arguments[2] });
		blob[BUFFER] = slicedBuffer;
		return blob;
	}
}

Object.defineProperties(Blob.prototype, {
	size: { enumerable: true },
	type: { enumerable: true },
	slice: { enumerable: true }
});

Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
	value: 'Blob',
	writable: false,
	enumerable: false,
	configurable: true
});
/ * *
* fetch - error . js
*
* FetchError interface for operational errors
* /
/ * *
* Create FetchError instance
*
* @ param String message Error message for human
* @ param String type Error type for machine
* @ param String systemError For Node . js system error
* @ return FetchError
* /
/**
 * Operational error type for fetch failures.
 *
 * @param String message      human-readable description
 * @param String type         machine-readable category (e.g. 'system')
 * @param Object systemError  underlying Node.js system error, if any
 * @return FetchError
 */
function FetchError(message, type, systemError) {
  Error.call(this, message);

  this.message = message;
  this.type = type;

  // When err.type is 'system', err.code/err.errno carry the system error code.
  if (systemError) {
    this.code = this.errno = systemError.code;
  }

  // Hide custom error implementation details from end-users.
  Error.captureStackTrace(this, this.constructor);
}

// Classic prototype chain: FetchError -> Error.
FetchError.prototype = Object.create(Error.prototype);
FetchError.prototype.constructor = FetchError;
FetchError.prototype.name = 'FetchError';
// Optional dependency: charset conversion used by textConverted(). If the
// `encoding` package is not installed, `convert` stays undefined and
// textConverted() throws when called.
let convert;
try {
	convert = __webpack_require__(276).convert;
} catch (e) {}

// Private per-instance state slot for Body (body, disturbed flag, error).
const INTERNALS = Symbol('Body internals');

// fix an issue where "PassThrough" isn't a named export for node <10
const PassThrough = Stream.PassThrough;
/ * *
* Body mixin
*
* Ref : https : //fetch.spec.whatwg.org/#body
*
* @ param Stream body Readable stream
* @ param Object opts Response options
* @ return Void
* /
/**
 * Body mixin (https://fetch.spec.whatwg.org/#body).
 * Normalizes the many accepted body inputs down to null, a Buffer, a Blob
 * or a Stream, and records per-instance state under INTERNALS.
 *
 * @param Stream body   Readable stream / Buffer / string / URLSearchParams / Blob / ...
 * @param Object opts   Response options ({ size, timeout })
 * @return Void
 */
function Body(body) {
	var _this = this;

	var opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
	let size = opts.size === undefined ? 0 : opts.size;
	let timeout = opts.timeout === undefined ? 0 : opts.timeout;

	if (body == null) {
		// body is undefined or null
		body = null;
	} else if (isURLSearchParams(body)) {
		// serialize URLSearchParams into a Buffer
		body = Buffer.from(body.toString());
	} else if (isBlob(body)) {
		// keep Blob as-is
	} else if (Buffer.isBuffer(body)) {
		// keep Buffer as-is
	} else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
		// body is ArrayBuffer
		body = Buffer.from(body);
	} else if (ArrayBuffer.isView(body)) {
		// body is ArrayBufferView
		body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
	} else if (body instanceof Stream) {
		// keep Stream as-is; it is consumed lazily
	} else {
		// none of the above: coerce to string then buffer
		body = Buffer.from(String(body));
	}

	this[INTERNALS] = {
		body,
		disturbed: false,
		error: null
	};
	this.size = size;
	this.timeout = timeout;

	if (body instanceof Stream) {
		// Record stream errors so consumeBody() can reject with them later.
		body.on('error', function (err) {
			const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
			_this[INTERNALS].error = error;
		});
	}
}
Body.prototype = {
	/** The normalized body: null, Buffer, Blob or Stream. */
	get body() {
		return this[INTERNALS].body;
	},

	/** Whether the body has already been consumed. */
	get bodyUsed() {
		return this[INTERNALS].disturbed;
	},

	/**
	 * Decode response as ArrayBuffer
	 *
	 * @return Promise
	 */
	arrayBuffer() {
		return consumeBody.call(this).then(function (buf) {
			return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
		});
	},

	/**
	 * Return raw response as Blob
	 *
	 * @return Promise
	 */
	blob() {
		const ct = this.headers && this.headers.get('content-type') || '';
		return consumeBody.call(this).then(function (buf) {
			// Build an empty Blob and hand it the buffer directly to avoid a copy.
			const blob = new Blob([], {
				type: ct.toLowerCase()
			});
			blob[BUFFER] = buf;
			return blob;
		});
	},

	/**
	 * Decode response as json
	 *
	 * @return Promise
	 */
	json() {
		const self = this;
		return consumeBody.call(this).then(function (buffer) {
			try {
				return JSON.parse(buffer.toString());
			} catch (err) {
				return Body.Promise.reject(new FetchError(`invalid json response body at ${self.url} reason: ${err.message}`, 'invalid-json'));
			}
		});
	},

	/**
	 * Decode response as text
	 *
	 * @return Promise
	 */
	text() {
		return consumeBody.call(this).then(function (buffer) {
			return buffer.toString();
		});
	},

	/**
	 * Decode response as buffer (non-spec api)
	 *
	 * @return Promise
	 */
	buffer() {
		return consumeBody.call(this);
	},

	/**
	 * Decode response as text, while automatically detecting the encoding and
	 * trying to decode to UTF-8 (non-spec api)
	 *
	 * @return Promise
	 */
	textConverted() {
		const self = this;
		return consumeBody.call(this).then(function (buffer) {
			return convertBody(buffer, self.headers);
		});
	}
};
// In browsers, all properties are enumerable.
Object.defineProperties(Body.prototype, {
	body: { enumerable: true },
	bodyUsed: { enumerable: true },
	arrayBuffer: { enumerable: true },
	blob: { enumerable: true },
	json: { enumerable: true },
	text: { enumerable: true }
});

/**
 * Copy every Body.prototype member that `proto` does not already define
 * onto `proto`, preserving getters/setters via property descriptors.
 */
Body.mixIn = function (proto) {
	Object.getOwnPropertyNames(Body.prototype).forEach(function (name) {
		// istanbul ignore else: future proof
		if (!(name in proto)) {
			Object.defineProperty(proto, name, Object.getOwnPropertyDescriptor(Body.prototype, name));
		}
	});
};
/ * *
* Consume and convert an entire Body to a Buffer .
*
* Ref : https : //fetch.spec.whatwg.org/#concept-body-consume-body
*
* @ return Promise
* /
function consumeBody ( ) {
var _this4 = this ;
if ( this [ INTERNALS ] . disturbed ) {
return Body . Promise . reject ( new TypeError ( ` body used already for: ${ this . url } ` ) ) ;
}
this [ INTERNALS ] . disturbed = true ;
if ( this [ INTERNALS ] . error ) {
return Body . Promise . reject ( this [ INTERNALS ] . error ) ;
}
let body = this . body ;
// body is null
if ( body === null ) {
return Body . Promise . resolve ( Buffer . alloc ( 0 ) ) ;
}
// body is blob
if ( isBlob ( body ) ) {
body = body . stream ( ) ;
}
// body is buffer
if ( Buffer . isBuffer ( body ) ) {
return Body . Promise . resolve ( body ) ;
}
// istanbul ignore if: should never happen
if ( ! ( body instanceof Stream ) ) {
return Body . Promise . resolve ( Buffer . alloc ( 0 ) ) ;
}
// body is stream
// get ready to actually consume the body
let accum = [ ] ;
let accumBytes = 0 ;
let abort = false ;
return new Body . Promise ( function ( resolve , reject ) {
let resTimeout ;
// allow timeout on slow response body
if ( _this4 . timeout ) {
resTimeout = setTimeout ( function ( ) {
abort = true ;
reject ( new FetchError ( ` Response timeout while trying to fetch ${ _this4 . url } (over ${ _this4 . timeout } ms) ` , 'body-timeout' ) ) ;
} , _this4 . timeout ) ;
}
// handle stream errors
body . on ( 'error' , function ( err ) {
if ( err . name === 'AbortError' ) {
// if the request was aborted, reject with this Error
abort = true ;
reject ( err ) ;
} else {
// other errors, such as incorrect content-encoding
reject ( new FetchError ( ` Invalid response body while trying to fetch ${ _this4 . url } : ${ err . message } ` , 'system' , err ) ) ;
}
} ) ;
body . on ( 'data' , function ( chunk ) {
if ( abort || chunk === null ) {
return ;
}
if ( _this4 . size && accumBytes + chunk . length > _this4 . size ) {
abort = true ;
reject ( new FetchError ( ` content size at ${ _this4 . url } over limit: ${ _this4 . size } ` , 'max-size' ) ) ;
return ;
}
accumBytes += chunk . length ;
accum . push ( chunk ) ;
} ) ;
body . on ( 'end' , function ( ) {
if ( abort ) {
return ;
}
clearTimeout ( resTimeout ) ;
try {
resolve ( Buffer . concat ( accum , accumBytes ) ) ;
} catch ( err ) {
// handle streams that have accumulated too much data (issue #414)
reject ( new FetchError ( ` Could not create Buffer from response body for ${ _this4 . url } : ${ err . message } ` , 'system' , err ) ) ;
}
} ) ;
} ) ;
}
/ * *
* Detect buffer encoding and convert to target encoding
* ref : http : //www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
*
* @ param Buffer buffer Incoming buffer
* @ param String encoding Target encoding
* @ return String
* /
/**
 * Detect the character encoding of a response body and convert it to a
 * UTF-8 string. Detection order: content-type header charset, html5
 * <meta charset>, html4 <meta http-equiv>, xml declaration; falls back
 * to utf-8. Requires the optional `encoding` package.
 *
 * @param Buffer buffer   raw response bytes
 * @param Headers headers response headers
 * @return String
 */
function convertBody(buffer, headers) {
	if (typeof convert !== 'function') {
		throw new Error('The package `encoding` must be installed to use the textConverted() function');
	}

	const ct = headers.get('content-type');
	let charset = 'utf-8';
	let res, str;

	// 1. charset parameter on the content-type header
	if (ct) {
		res = /charset=([^;]*)/i.exec(ct);
	}

	// Peek at no more than the first 1024 bytes of the body.
	str = buffer.slice(0, 1024).toString();

	// 2. html5: <meta charset="...">
	if (!res && str) {
		res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
	}

	// 3. html4: <meta http-equiv="content-type" content="...">, in either
	// attribute order
	if (!res && str) {
		res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);
		if (!res) {
			res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str);
			if (res) {
				res.pop(); // drop last quote
			}
		}

		if (res) {
			// extract the charset parameter from the content attribute
			res = /charset=(.*)/i.exec(res.pop());
		}
	}

	// 4. xml declaration: <?xml ... encoding="...">
	if (!res && str) {
		res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
	}

	// found charset
	if (res) {
		charset = res.pop();

		// prevent decode issues when sites use incorrect encoding
		// ref: https://hsivonen.fi/encoding-menu/
		if (charset === 'gb2312' || charset === 'gbk') {
			charset = 'gb18030';
		}
	}

	// turn raw buffers into a single utf-8 buffer
	return convert(buffer, 'UTF-8', charset).toString();
}
/ * *
* Detect a URLSearchParams object
* ref : https : //github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
*
* @ param Object obj Object to detect by type or brand
* @ return String
* /
/**
 * Detect a URLSearchParams object
 * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
 *
 * @param  Object  obj     Object to detect by type or brand
 * @return Boolean
 */
function isURLSearchParams(obj) {
	// BUGFIX: guard null/undefined first. `typeof null === 'object'`, so the
	// duck-typing below used to throw a TypeError when passed null.
	if (obj == null || typeof obj !== 'object') {
		return false;
	}

	// Duck-typing as a necessary condition.
	if (typeof obj.append !== 'function' || typeof obj.delete !== 'function' || typeof obj.get !== 'function' || typeof obj.getAll !== 'function' || typeof obj.has !== 'function' || typeof obj.set !== 'function') {
		return false;
	}

	// Brand-checking and more duck-typing as optional condition.
	return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function';
}
/ * *
* Check if ` obj ` is a W3C ` Blob ` object ( which ` File ` inherits from )
* @ param { * } obj
* @ return { boolean }
* /
/**
 * Check if `obj` is a W3C `Blob` object (which `File` inherits from)
 * @param  {*} obj
 * @return {boolean}
 */
function isBlob(obj) {
	// BUGFIX: `typeof null === 'object'`, so without this guard the property
	// accesses below threw a TypeError when passed null.
	if (obj == null) {
		return false;
	}
	return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]);
}
/ * *
* Clone body given Res / Req instance
*
* @ param Mixed instance Response or Request instance
* @ return Mixed
* /
/**
 * Clone the body of a Request/Response instance. Buffers/nulls are shared
 * as-is; streams are teed into two PassThroughs — one replaces the
 * instance's body, the other is returned.
 *
 * @param  Mixed  instance  Response or Request instance
 * @return Mixed
 */
function clone(instance) {
	let body = instance.body;

	// don't allow cloning a used body
	if (instance.bodyUsed) {
		throw new Error('cannot clone body after it is used');
	}

	// Only real streams need teeing; form-data objects (detected via
	// getBoundary) can't be teed without depending on the form-data module.
	if (body instanceof Stream && typeof body.getBoundary !== 'function') {
		const keptTee = new PassThrough();
		const returnedTee = new PassThrough();
		body.pipe(keptTee);
		body.pipe(returnedTee);
		// the instance keeps one tee; the caller gets the other
		instance[INTERNALS].body = keptTee;
		body = returnedTee;
	}

	return body;
}
/ * *
* Performs the operation "extract a `Content-Type` value from |object|" as
* specified in the specification :
* https : //fetch.spec.whatwg.org/#concept-bodyinit-extract
*
* This function assumes that instance . body is present .
*
* @ param Mixed instance Any options . body input
* /
/**
 * Performs the operation "extract a `Content-Type` value from |object|" as
 * specified in https://fetch.spec.whatwg.org/#concept-bodyinit-extract
 *
 * Branch ORDER matters: more specific body kinds must be tested first.
 * This function assumes that instance.body is present.
 *
 * @param Mixed body Any options.body input
 */
function extractContentType(body) {
	if (body === null) {
		// no body, no type
		return null;
	}
	if (typeof body === 'string') {
		return 'text/plain;charset=UTF-8';
	}
	if (isURLSearchParams(body)) {
		return 'application/x-www-form-urlencoded;charset=UTF-8';
	}
	if (isBlob(body)) {
		// use the blob's own type, if any
		return body.type || null;
	}
	if (Buffer.isBuffer(body)) {
		return null;
	}
	if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
		return null;
	}
	if (ArrayBuffer.isView(body)) {
		return null;
	}
	if (typeof body.getBoundary === 'function') {
		// form-data module instance
		return `multipart/form-data;boundary=${body.getBoundary()}`;
	}
	if (body instanceof Stream) {
		// opaque stream: its type cannot be known
		return null;
	}
	// the Body constructor stringifies anything else
	return 'text/plain;charset=UTF-8';
}
/ * *
* The Fetch Standard treats this as if "total bytes" is a property on the body .
* For us , we have to explicitly get it with a function .
*
* ref : https : //fetch.spec.whatwg.org/#concept-body-total-bytes
*
* @ param Body instance Instance of Body
* @ return Number ? Number of bytes , or null if not possible
* /
/**
 * The Fetch Standard treats this as if "total bytes" is a property on the
 * body. For us, we have to explicitly get it with a function.
 *
 * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
 *
 * @param Body instance Instance of Body
 * @return Number?      Number of bytes, or null if not possible
 */
function getTotalBytes(instance) {
	const body = instance.body;

	if (body === null) {
		// no body, zero bytes
		return 0;
	}
	if (isBlob(body)) {
		return body.size;
	}
	if (Buffer.isBuffer(body)) {
		return body.length;
	}
	if (body && typeof body.getLengthSync === 'function') {
		// form-data module: 1.x exposes _lengthRetrievers, 2.x hasKnownLength()
		const knowable =
			(body._lengthRetrievers && body._lengthRetrievers.length === 0) ||
			(body.hasKnownLength && body.hasKnownLength());
		return knowable ? body.getLengthSync() : null;
	}
	// plain stream: length unknowable
	return null;
}
/ * *
* Write a Body to a Node . js WritableStream ( e . g . http . Request ) object .
*
* @ param Body instance Instance of Body
* @ return Void
* /
/**
 * Write a Body to a Node.js WritableStream (e.g. http.Request) object.
 * Buffers/nulls end the destination explicitly; streams end it via pipe().
 *
 * @param Body instance Instance of Body
 * @return Void
 */
function writeToStream(dest, instance) {
	const body = instance.body;

	if (body === null) {
		// nothing to send
		dest.end();
		return;
	}
	if (isBlob(body)) {
		body.stream().pipe(dest);
		return;
	}
	if (Buffer.isBuffer(body)) {
		dest.write(body);
		dest.end();
		return;
	}
	// body is a stream
	body.pipe(dest);
}
// expose Promise
Body . Promise = global . Promise ;
/ * *
* headers . js
*
* Headers class offers convenient helpers
* /
// Characters NOT allowed in an HTTP header field name (RFC 7230 token).
const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
// Characters NOT allowed in an HTTP header field value.
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;

/**
 * Throw if `name` is not a legal HTTP header name.
 * The argument is string-coerced before checking.
 */
function validateName(name) {
	name = `${name}`;
	const illegal = invalidTokenRegex.test(name) || name === '';
	if (illegal) {
		throw new TypeError(`${name} is not a legal HTTP header name`);
	}
}

/**
 * Throw if `value` is not a legal HTTP header value.
 * The argument is string-coerced before checking.
 */
function validateValue(value) {
	value = `${value}`;
	const illegal = invalidHeaderCharRegex.test(value);
	if (illegal) {
		throw new TypeError(`${value} is not a legal HTTP header value`);
	}
}
/ * *
* Find the key in the map object given a header name .
*
* Returns undefined if not found .
*
* @ param String name Header name
* @ return String | Undefined
* /
/**
 * Find the stored key in `map` that matches `name` case-insensitively.
 *
 * Returns undefined if not found.
 *
 * @param String map  Raw header map
 * @param String name Header name
 * @return String|Undefined
 */
function find(map, name) {
	const lowered = name.toLowerCase();
	for (const key in map) {
		if (key.toLowerCase() === lowered) {
			return key;
		}
	}
	return undefined;
}
const MAP = Symbol ( 'map' ) ;
class Headers {
/ * *
* Headers class
*
* @ param Object headers Response headers
* @ return Void
* /
constructor ( ) {
let init = arguments . length > 0 && arguments [ 0 ] !== undefined ? arguments [ 0 ] : undefined ;
this [ MAP ] = Object . create ( null ) ;
if ( init instanceof Headers ) {
const rawHeaders = init . raw ( ) ;
const headerNames = Object . keys ( rawHeaders ) ;
for ( const headerName of headerNames ) {
for ( const value of rawHeaders [ headerName ] ) {
this . append ( headerName , value ) ;
}
}
return ;
}
// We don't worry about converting prop to ByteString here as append()
// will handle it.
if ( init == null ) ; else if ( typeof init === 'object' ) {
const method = init [ Symbol . iterator ] ;
if ( method != null ) {
if ( typeof method !== 'function' ) {
throw new TypeError ( 'Header pairs must be iterable' ) ;
}
// sequence<sequence<ByteString>>
// Note: per spec we have to first exhaust the lists then process them
const pairs = [ ] ;
for ( const pair of init ) {
if ( typeof pair !== 'object' || typeof pair [ Symbol . iterator ] !== 'function' ) {
throw new TypeError ( 'Each header pair must be iterable' ) ;
}
pairs . push ( Array . from ( pair ) ) ;
}
for ( const pair of pairs ) {
if ( pair . length !== 2 ) {
throw new TypeError ( 'Each header pair must be a name/value tuple' ) ;
}
this . append ( pair [ 0 ] , pair [ 1 ] ) ;
}
} else {
// record<ByteString, ByteString>
for ( const key of Object . keys ( init ) ) {
const value = init [ key ] ;
this . append ( key , value ) ;
}
}
} else {
throw new TypeError ( 'Provided initializer must be an object' ) ;
}
}
/ * *
* Return combined header value given name
*
* @ param String name Header name
* @ return Mixed
* /
get ( name ) {
name = ` ${ name } ` ;
validateName ( name ) ;
const key = find ( this [ MAP ] , name ) ;
if ( key === undefined ) {
return null ;
}
return this [ MAP ] [ key ] . join ( ', ' ) ;
}
/ * *
* Iterate over all headers
*
* @ param Function callback Executed for each item with parameters ( value , name , thisArg )
* @ param Boolean thisArg ` this ` context for callback function
* @ return Void
* /
forEach ( callback ) {
let thisArg = arguments . length > 1 && arguments [ 1 ] !== undefined ? arguments [ 1 ] : undefined ;
let pairs = getHeaders ( this ) ;
let i = 0 ;
while ( i < pairs . length ) {
var _pairs$i = pairs [ i ] ;
const name = _pairs$i [ 0 ] ,
value = _pairs$i [ 1 ] ;
callback . call ( thisArg , value , name , this ) ;
pairs = getHeaders ( this ) ;
i ++ ;
}
}
/ * *
* Overwrite header values given name
*
* @ param String name Header name
* @ param String value Header value
* @ return Void
* /
set ( name , value ) {
name = ` ${ name } ` ;
value = ` ${ value } ` ;
validateName ( name ) ;
validateValue ( value ) ;
const key = find ( this [ MAP ] , name ) ;
this [ MAP ] [ key !== undefined ? key : name ] = [ value ] ;
}
/ * *
* Append a value onto existing header
*
* @ param String name Header name
* @ param String value Header value
* @ return Void
* /
append ( name , value ) {
name = ` ${ name } ` ;
value = ` ${ value } ` ;
validateName ( name ) ;
validateValue ( value ) ;
const key = find ( this [ MAP ] , name ) ;
if ( key !== undefined ) {
this [ MAP ] [ key ] . push ( value ) ;
} else {
this [ MAP ] [ name ] = [ value ] ;
}
}
/ * *
* Check for header name existence
*
* @ param String name Header name
* @ return Boolean
* /
has ( name ) {
name = ` ${ name } ` ;
validateName ( name ) ;
return find ( this [ MAP ] , name ) !== undefined ;
}
/ * *
* Delete all header values given name
*
* @ param String name Header name
* @ return Void
* /
delete ( name ) {
name = ` ${ name } ` ;
validateName ( name ) ;
const key = find ( this [ MAP ] , name ) ;
if ( key !== undefined ) {
delete this [ MAP ] [ key ] ;
}
}
/ * *
* Return raw headers ( non - spec api )
*
* @ return Object
* /
	raw() {
		// Non-spec convenience: exposes the live backing map of
		// original-cased header name -> array of values (not a copy).
		return this[MAP];
	}
/ * *
* Get an iterator on keys .
*
* @ return Iterator
* /
	keys() {
		// Iterates lowercased header names in sorted order.
		return createHeadersIterator(this, 'key');
	}
/ * *
* Get an iterator on values .
*
* @ return Iterator
* /
	values() {
		// Iterates comma-joined value strings in sorted-key order.
		return createHeadersIterator(this, 'value');
	}
/ * *
* Get an iterator on entries .
*
* This is the default iterator of the Headers object .
*
* @ return Iterator
* /
	[Symbol.iterator]() {
		// Default iteration yields [name, value] pairs.
		return createHeadersIterator(this, 'key+value');
	}
}
Headers . prototype . entries = Headers . prototype [ Symbol . iterator ] ;
Object . defineProperty ( Headers . prototype , Symbol . toStringTag , {
value : 'Headers' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
Object . defineProperties ( Headers . prototype , {
get : { enumerable : true } ,
forEach : { enumerable : true } ,
set : { enumerable : true } ,
append : { enumerable : true } ,
has : { enumerable : true } ,
delete : { enumerable : true } ,
keys : { enumerable : true } ,
values : { enumerable : true } ,
entries : { enumerable : true }
} ) ;
/**
 * Snapshot the headers of a Headers instance as a sorted array.
 *
 * @param  Headers headers
 * @param  String  kind  'key' | 'value' | 'key+value' (default)
 * @return Array   lowercased names, joined values, or [name, value] pairs
 */
function getHeaders(headers, kind = 'key+value') {
	const sortedKeys = Object.keys(headers[MAP]).sort();

	if (kind === 'key') {
		return sortedKeys.map(function (k) {
			return k.toLowerCase();
		});
	}
	if (kind === 'value') {
		return sortedKeys.map(function (k) {
			return headers[MAP][k].join(', ');
		});
	}
	return sortedKeys.map(function (k) {
		return [k.toLowerCase(), headers[MAP][k].join(', ')];
	});
}
// Private symbol hiding iterator state from consumers.
const INTERNAL = Symbol('internal');

/**
 * Build a HeadersIterator over `target` producing the given `kind`.
 *
 * @param  Headers target
 * @param  String  kind  'key' | 'value' | 'key+value'
 * @return Iterator
 */
function createHeadersIterator(target, kind) {
	const iterator = Object.create(HeadersIteratorPrototype);
	iterator[INTERNAL] = { target, kind, index: 0 };
	return iterator;
}
const HeadersIteratorPrototype = Object . setPrototypeOf ( {
next ( ) {
// istanbul ignore if
if ( ! this || Object . getPrototypeOf ( this ) !== HeadersIteratorPrototype ) {
throw new TypeError ( 'Value of `this` is not a HeadersIterator' ) ;
}
var _INTERNAL = this [ INTERNAL ] ;
const target = _INTERNAL . target ,
kind = _INTERNAL . kind ,
index = _INTERNAL . index ;
const values = getHeaders ( target , kind ) ;
const len = values . length ;
if ( index >= len ) {
return {
value : undefined ,
done : true
} ;
}
this [ INTERNAL ] . index = index + 1 ;
return {
value : values [ index ] ,
done : false
} ;
}
} , Object . getPrototypeOf ( Object . getPrototypeOf ( [ ] [ Symbol . iterator ] ( ) ) ) ) ;
Object . defineProperty ( HeadersIteratorPrototype , Symbol . toStringTag , {
value : 'HeadersIterator' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
/ * *
* Export the Headers object in a form that Node . js can consume .
*
* @ param Headers headers
* @ return Object
* /
/**
 * Export the Headers object in a form that Node.js can consume.
 *
 * @param  Headers headers
 * @return Object  prototype-less name -> value(s) map
 */
function exportNodeCompatibleHeaders(headers) {
	// Copy into a prototype-less object so header names can never collide
	// with Object.prototype members.
	const obj = Object.assign({ __proto__: null }, headers[MAP]);

	// http.request() only supports string as Host header. This hack makes
	// specifying custom Host header possible.
	const hostKey = find(headers[MAP], 'Host');
	if (hostKey !== undefined) {
		const hostValues = obj[hostKey];
		obj[hostKey] = hostValues[0];
	}

	return obj;
}
/ * *
* Create a Headers object from an object of headers , ignoring those that do
* not conform to HTTP grammar productions .
*
* @ param Object obj Object of headers
* @ return Headers
* /
/**
 * Create a Headers object from an object of headers, silently skipping
 * any name or value that does not conform to HTTP grammar productions.
 *
 * @param  Object obj Object of headers
 * @return Headers
 */
function createHeadersLenient(obj) {
	const headers = new Headers();
	for (const name of Object.keys(obj)) {
		// Drop names that are not valid HTTP tokens.
		if (invalidTokenRegex.test(name)) {
			continue;
		}
		const value = obj[name];
		if (Array.isArray(value)) {
			for (const val of value) {
				// Drop individual values containing invalid characters.
				if (invalidHeaderCharRegex.test(val)) {
					continue;
				}
				const bucket = headers[MAP][name];
				if (bucket === undefined) {
					headers[MAP][name] = [val];
				} else {
					bucket.push(val);
				}
			}
		} else if (!invalidHeaderCharRegex.test(value)) {
			headers[MAP][name] = [value];
		}
	}
	return headers;
}
// Private symbol guarding Response internal state.
const INTERNALS$1 = Symbol('Response internals');

// fix an issue where "STATUS_CODES" aren't a named export for node <10
const STATUS_CODES = http.STATUS_CODES;
/ * *
* Response class
*
* @ param Stream body Readable stream
* @ param Object opts Response options
* @ return Void
* /
/**
 * Response class
 *
 * @param  Stream body Readable stream
 * @param  Object opts Response options
 * @return Void
 */
class Response {
	constructor(body = null, opts = {}) {
		Body.call(this, body, opts);

		const status = opts.status || 200;
		const headers = new Headers(opts.headers);

		// Infer a Content-Type from the body when one wasn't supplied.
		if (body != null && !headers.has('Content-Type')) {
			const contentType = extractContentType(body);
			if (contentType) {
				headers.append('Content-Type', contentType);
			}
		}

		this[INTERNALS$1] = {
			url: opts.url,
			status,
			statusText: opts.statusText || STATUS_CODES[status],
			headers,
			counter: opts.counter
		};
	}

	get url() {
		return this[INTERNALS$1].url || '';
	}

	get status() {
		return this[INTERNALS$1].status;
	}

	/**
	 * Convenience property representing if the request ended normally
	 */
	get ok() {
		const status = this[INTERNALS$1].status;
		return status >= 200 && status < 300;
	}

	get redirected() {
		// counter > 0 means at least one redirect was followed.
		return this[INTERNALS$1].counter > 0;
	}

	get statusText() {
		return this[INTERNALS$1].statusText;
	}

	get headers() {
		return this[INTERNALS$1].headers;
	}

	/**
	 * Clone this response
	 *
	 * @return Response
	 */
	clone() {
		// `clone(this)` tees the body stream so both copies stay readable.
		return new Response(clone(this), {
			url: this.url,
			status: this.status,
			statusText: this.statusText,
			headers: this.headers,
			ok: this.ok,
			redirected: this.redirected
		});
	}
}
// Mix the shared body-consumption methods (defined elsewhere on Body) into Response.
Body.mixIn(Response.prototype);

// Mark the public API as enumerable on the prototype.
Object.defineProperties(Response.prototype, {
	url: { enumerable: true },
	status: { enumerable: true },
	ok: { enumerable: true },
	redirected: { enumerable: true },
	statusText: { enumerable: true },
	headers: { enumerable: true },
	clone: { enumerable: true }
});

// Brand the class for Object.prototype.toString / debugging output.
Object.defineProperty(Response.prototype, Symbol.toStringTag, {
	value: 'Response',
	writable: false,
	enumerable: false,
	configurable: true
});

// Private symbol guarding Request internal state.
const INTERNALS$2 = Symbol('Request internals');

// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
const format_url = Url.format;

// Readable#destroy is only available on newer node versions; detect once.
const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
/ * *
* Check if a value is an instance of Request .
*
* @ param Mixed input
* @ return Boolean
* /
function isRequest ( input ) {
return typeof input === 'object' && typeof input [ INTERNALS$2 ] === 'object' ;
}
/**
 * Structurally detect an AbortSignal: an object whose direct prototype
 * was produced by a constructor named "AbortSignal". Name-based so it
 * works across different AbortController implementations.
 *
 * @param  Mixed signal
 * @return Boolean
 */
function isAbortSignal(signal) {
	if (!signal || typeof signal !== 'object') {
		return false;
	}
	const proto = Object.getPrototypeOf(signal);
	return Boolean(proto && proto.constructor.name === 'AbortSignal');
}
/ * *
* Request class
*
* @ param Mixed input Url or Request instance
* @ param Object init Custom options
* @ return Void
* /
/**
 * Request class
 *
 * @param  Mixed  input Url or Request instance
 * @param  Object init  Custom options
 * @return Void
 */
class Request {
	constructor(input) {
		// Compiled optional-argument idiom: `init` defaults to {}.
		let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

		let parsedURL;

		// normalize input
		if (!isRequest(input)) {
			if (input && input.href) {
				// in order to support Node.js' Url objects; though WHATWG's URL objects
				// will fall into this branch also (since their `toString()` will return
				// `href` property anyway)
				parsedURL = parse_url(input.href);
			} else {
				// coerce input to a string before attempting to parse
				parsedURL = parse_url(`${input}`);
			}
			// Replace non-Request input with an empty object so the
			// `input.xxx ||` fallbacks below read as undefined.
			input = {};
		} else {
			parsedURL = parse_url(input.url);
		}

		let method = init.method || input.method || 'GET';
		method = method.toUpperCase();

		// GET/HEAD requests must not carry a body.
		if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
			throw new TypeError('Request with GET/HEAD method cannot have body');
		}

		// init.body wins; otherwise tee the source Request's body stream.
		let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;

		Body.call(this, inputBody, {
			timeout: init.timeout || input.timeout || 0,
			size: init.size || input.size || 0
		});

		const headers = new Headers(init.headers || input.headers || {});

		// Infer a Content-Type from the body when one wasn't supplied.
		if (inputBody != null && !headers.has('Content-Type')) {
			const contentType = extractContentType(inputBody);
			if (contentType) {
				headers.append('Content-Type', contentType);
			}
		}

		// An explicit `signal` in init overrides the source Request's signal,
		// even when set to null/undefined.
		let signal = isRequest(input) ? input.signal : null;
		if ('signal' in init) signal = init.signal;

		if (signal != null && !isAbortSignal(signal)) {
			throw new TypeError('Expected signal to be an instanceof AbortSignal');
		}

		this[INTERNALS$2] = {
			method,
			redirect: init.redirect || input.redirect || 'follow',
			headers,
			parsedURL,
			signal
		};

		// node-fetch-only options
		this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
		this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
		this.counter = init.counter || input.counter || 0;
		this.agent = init.agent || input.agent;
	}

	get method() {
		return this[INTERNALS$2].method;
	}

	// URL is re-serialized from the parsed form on every access.
	get url() {
		return format_url(this[INTERNALS$2].parsedURL);
	}

	get headers() {
		return this[INTERNALS$2].headers;
	}

	get redirect() {
		return this[INTERNALS$2].redirect;
	}

	get signal() {
		return this[INTERNALS$2].signal;
	}

	/**
	 * Clone this request
	 *
	 * @return Request
	 */
	clone() {
		return new Request(this);
	}
}
Body . mixIn ( Request . prototype ) ;
Object . defineProperty ( Request . prototype , Symbol . toStringTag , {
value : 'Request' ,
writable : false ,
enumerable : false ,
configurable : true
} ) ;
Object . defineProperties ( Request . prototype , {
method : { enumerable : true } ,
url : { enumerable : true } ,
headers : { enumerable : true } ,
redirect : { enumerable : true } ,
clone : { enumerable : true } ,
signal : { enumerable : true }
} ) ;
/ * *
* Convert a Request to Node . js http request options .
*
* @ param Request A Request instance
* @ return Object The options object to be passed to http . request
* /
/**
 * Convert a Request to Node.js http request options.
 *
 * @param  Request request A Request instance
 * @return Object  The options object to be passed to http.request
 */
function getNodeRequestOptions(request) {
	const parsedURL = request[INTERNALS$2].parsedURL;
	// Work on a copy so the caller's Headers object is left untouched.
	const headers = new Headers(request[INTERNALS$2].headers);

	// fetch step 1.3
	if (!headers.has('Accept')) {
		headers.set('Accept', '*/*');
	}

	// Basic fetch
	if (!parsedURL.protocol || !parsedURL.hostname) {
		throw new TypeError('Only absolute URLs are supported');
	}

	if (!/^https?:$/.test(parsedURL.protocol)) {
		throw new TypeError('Only HTTP(S) protocols are supported');
	}

	if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
		throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
	}

	// HTTP-network-or-cache fetch steps 2.4-2.7
	let contentLengthValue = null;
	if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
		// Body-less POST/PUT still advertises an explicit zero length.
		contentLengthValue = '0';
	} else if (request.body != null) {
		const totalBytes = getTotalBytes(request);
		if (typeof totalBytes === 'number') {
			contentLengthValue = String(totalBytes);
		}
	}

	if (contentLengthValue) {
		headers.set('Content-Length', contentLengthValue);
	}

	// HTTP-network-or-cache fetch step 2.11
	if (!headers.has('User-Agent')) {
		headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
	}

	// HTTP-network-or-cache fetch step 2.15
	if (request.compress && !headers.has('Accept-Encoding')) {
		headers.set('Accept-Encoding', 'gzip,deflate');
	}

	// An agent supplied as a function performs per-URL agent selection.
	let agent = request.agent;
	if (typeof agent === 'function') {
		agent = agent(parsedURL);
	}

	if (!headers.has('Connection') && !agent) {
		headers.set('Connection', 'close');
	}

	// HTTP-network fetch step 4.2
	// chunked encoding is handled by Node.js
	return Object.assign({}, parsedURL, {
		method: request.method,
		headers: exportNodeCompatibleHeaders(headers),
		agent
	});
}
/ * *
* abort - error . js
*
* AbortError interface for cancelled requests
* /
/ * *
* Create AbortError instance
*
* @ param String message Error message for human
* @ return AbortError
* /
function AbortError(message) {
	Error.call(this, message);

	this.type = 'aborted';
	this.message = message;

	// hide custom error implementation details from end-users
	Error.captureStackTrace(this, this.constructor);
}

// Classic prototype-chain inheritance from Error (pre-`class` style).
AbortError.prototype = Object.create(Error.prototype);
AbortError.prototype.constructor = AbortError;
AbortError.prototype.name = 'AbortError';

// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
const PassThrough$1 = Stream.PassThrough;
const resolve_url = Url.resolve;
/ * *
* Fetch function
*
* @ param Mixed url Absolute url or Request instance
* @ param Object opts Fetch options
* @ return Promise
* /
function fetch(url, opts) {

	// allow custom promise
	if (!fetch.Promise) {
		throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
	}

	Body.Promise = fetch.Promise;

	// wrap http.request into fetch
	return new fetch.Promise(function (resolve, reject) {
		// build request object
		const request = new Request(url, opts);
		const options = getNodeRequestOptions(request);

		const send = (options.protocol === 'https:' ? https : http).request;
		const signal = request.signal;

		let response = null;

		// Reject the fetch promise and tear down both directions of traffic.
		const abort = function abort() {
			let error = new AbortError('The user aborted a request.');
			reject(error);
			if (request.body && request.body instanceof Stream.Readable) {
				request.body.destroy(error);
			}
			if (!response || !response.body) return;
			response.body.emit('error', error);
		};

		// Signal already fired: abort before sending anything.
		if (signal && signal.aborted) {
			abort();
			return;
		}

		const abortAndFinalize = function abortAndFinalize() {
			abort();
			finalize();
		};

		// send request
		const req = send(options);
		let reqTimeout;

		if (signal) {
			signal.addEventListener('abort', abortAndFinalize);
		}

		// Release every resource tied to this attempt: socket, abort
		// listener and pending timeout.
		function finalize() {
			req.abort();
			if (signal) signal.removeEventListener('abort', abortAndFinalize);
			clearTimeout(reqTimeout);
		}

		// Arm the timeout only once a socket is assigned, so connection
		// queueing time does not count against it.
		if (request.timeout) {
			req.once('socket', function (socket) {
				reqTimeout = setTimeout(function () {
					reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
					finalize();
				}, request.timeout);
			});
		}

		req.on('error', function (err) {
			reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
			finalize();
		});

		req.on('response', function (res) {
			clearTimeout(reqTimeout);

			const headers = createHeadersLenient(res.headers);

			// HTTP fetch step 5
			if (fetch.isRedirect(res.statusCode)) {
				// HTTP fetch step 5.2
				const location = headers.get('Location');

				// HTTP fetch step 5.3
				const locationURL = location === null ? null : resolve_url(request.url, location);

				// HTTP fetch step 5.5
				switch (request.redirect) {
					case 'error':
						reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
						finalize();
						return;
					case 'manual':
						// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
						if (locationURL !== null) {
							// handle corrupted header
							try {
								headers.set('Location', locationURL);
							} catch (err) {
								// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
								reject(err);
							}
						}
						break;
					case 'follow':
						// HTTP-redirect fetch step 2
						if (locationURL === null) {
							break;
						}

						// HTTP-redirect fetch step 5
						if (request.counter >= request.follow) {
							reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
							finalize();
							return;
						}

						// HTTP-redirect fetch step 6 (counter increment)
						// Create a new Request object.
						const requestOpts = {
							headers: new Headers(request.headers),
							follow: request.follow,
							counter: request.counter + 1,
							agent: request.agent,
							compress: request.compress,
							method: request.method,
							body: request.body,
							signal: request.signal,
							timeout: request.timeout,
							size: request.size
						};

						// HTTP-redirect fetch step 9
						if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
							reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
							finalize();
							return;
						}

						// HTTP-redirect fetch step 11
						if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
							requestOpts.method = 'GET';
							requestOpts.body = undefined;
							requestOpts.headers.delete('content-length');
						}

						// HTTP-redirect fetch step 15
						resolve(fetch(new Request(locationURL, requestOpts)));
						finalize();
						return;
				}
			}

			// prepare response
			res.once('end', function () {
				if (signal) signal.removeEventListener('abort', abortAndFinalize);
			});
			let body = res.pipe(new PassThrough$1());

			const response_options = {
				url: request.url,
				status: res.statusCode,
				statusText: res.statusMessage,
				headers: headers,
				size: request.size,
				timeout: request.timeout,
				counter: request.counter
			};

			// HTTP-network fetch step 12.1.1.3
			const codings = headers.get('Content-Encoding');

			// HTTP-network fetch step 12.1.1.4: handle content codings

			// in following scenarios we ignore compression support
			// 1. compression support is disabled
			// 2. HEAD request
			// 3. no Content-Encoding header
			// 4. no content response (204)
			// 5. content not modified response (304)
			if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
				response = new Response(body, response_options);
				resolve(response);
				return;
			}

			// For Node v6+
			// Be less strict when decoding compressed responses, since sometimes
			// servers send slightly invalid responses that are still accepted
			// by common browsers.
			// Always using Z_SYNC_FLUSH is what cURL does.
			const zlibOptions = {
				flush: zlib.Z_SYNC_FLUSH,
				finishFlush: zlib.Z_SYNC_FLUSH
			};

			// for gzip
			if (codings == 'gzip' || codings == 'x-gzip') {
				body = body.pipe(zlib.createGunzip(zlibOptions));
				response = new Response(body, response_options);
				resolve(response);
				return;
			}

			// for deflate
			if (codings == 'deflate' || codings == 'x-deflate') {
				// handle the infamous raw deflate response from old servers
				// a hack for old IIS and Apache servers
				const raw = res.pipe(new PassThrough$1());
				raw.once('data', function (chunk) {
					// see http://stackoverflow.com/questions/37519828
					if ((chunk[0] & 0x0F) === 0x08) {
						body = body.pipe(zlib.createInflate());
					} else {
						body = body.pipe(zlib.createInflateRaw());
					}
					response = new Response(body, response_options);
					resolve(response);
				});
				return;
			}

			// for br
			if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
				body = body.pipe(zlib.createBrotliDecompress());
				response = new Response(body, response_options);
				resolve(response);
				return;
			}

			// otherwise, use response as-is
			response = new Response(body, response_options);
			resolve(response);
		});

		writeToStream(req, request);
	});
}
/ * *
* Redirect code matching
*
* @ param Number code Status code
* @ return Boolean
* /
fetch . isRedirect = function ( code ) {
return code === 301 || code === 302 || code === 303 || code === 307 || code === 308 ;
} ;
// expose Promise
fetch . Promise = global . Promise ;
module . exports = exports = fetch ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . default = exports ;
exports . Headers = Headers ;
exports . Request = Request ;
exports . Response = Response ;
exports . FetchError = FetchError ;
/***/ } ) ,
/***/ 489 :
/***/ ( function ( module ) {
module . exports = [ [ "a140" , "" , 62 ] , [ "a180" , "" , 32 ] , [ "a240" , "" , 62 ] , [ "a280" , "" , 32 ] , [ "a2ab" , "" , 5 ] , [ "a2e3" , "€" ] , [ "a2ef" , "" ] , [ "a2fd" , "" ] , [ "a340" , "" , 62 ] , [ "a380" , "" , 31 , " " ] , [ "a440" , "" , 62 ] , [ "a480" , "" , 32 ] , [ "a4f4" , "" , 10 ] , [ "a540" , "" , 62 ] , [ "a580" , "" , 32 ] , [ "a5f7" , "" , 7 ] , [ "a640" , "" , 62 ] , [ "a680" , "" , 32 ] , [ "a6b9" , "" , 7 ] , [ "a6d9" , "" , 6 ] , [ "a6ec" , "" ] , [ "a6f3" , "" ] , [ "a6f6" , "" , 8 ] , [ "a740" , "" , 62 ] , [ "a780" , "" , 32 ] , [ "a7c2" , "" , 14 ] , [ "a7f2" , "" , 12 ] , [ "a896" , "" , 10 ] , [ "a8bc" , "" ] , [ "a8bf" , "ǹ" ] , [ "a8c1" , "" ] , [ "a8ea" , "" , 20 ] , [ "a958" , "" ] , [ "a95b" , "" ] , [ "a95d" , "" ] , [ "a989" , "〾⿰" , 11 ] , [ "a997" , "" , 12 ] , [ "a9f0" , "" , 14 ] , [ "aaa1" , "" , 93 ] , [ "aba1" , "" , 93 ] , [ "aca1" , "" , 93 ] , [ "ada1" , "" , 93 ] , [ "aea1" , "" , 93 ] , [ "afa1" , "" , 93 ] , [ "d7fa" , "" , 4 ] , [ "f8a1" , "" , 93 ] , [ "f9a1" , "" , 93 ] , [ "faa1" , "" , 93 ] , [ "fba1" , "" , 93 ] , [ "fca1" , "" , 93 ] , [ "fda1" , "" , 93 ] , [ "fe50" , "⺁⺄㑳㑇⺈⺋㖞㘚㘎⺌⺗㥮㤘㧏㧟㩳㧐㭎㱮㳠⺧⺪䁖䅟⺮䌷⺳⺶⺷䎱䎬⺻䏝䓖䙡䙌" ] , [ "fe80" , "䜣䜩䝼䞍⻊䥇䥺䥽䦂䦃䦅䦆䦟䦛䦷䦶䲣䲟䲠䲡䱷䲢䴓" , 6 , "䶮" , 93 ] ] ;
/***/ } ) ,
/***/ 492 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// Module 492: "inflight" — deduplicates concurrent async calls sharing a
// key, fanning a single result out to every queued callback.
var wrappy = __webpack_require__(940)
// Prototype-less map of key -> array of pending callbacks.
var reqs = Object.create(null)
var once = __webpack_require__(223)

module.exports = wrappy(inflight)
// Register `cb` under `key`. Returns null when a request for the key is
// already in flight (the caller should NOT start another one); otherwise
// returns the one-shot resolver that fans results out to all callbacks.
function inflight (key, cb) {
  if (reqs[key]) {
    // Already in flight: just queue the callback.
    reqs[key].push(cb)
    return null
  }
  // First caller: open the queue and hand back the resolver.
  reqs[key] = [cb]
  return makeres(key)
}
function makeres ( key ) {
return once ( function RES ( ) {
var cbs = reqs [ key ]
var len = cbs . length
var args = slice ( arguments )
// XXX It's somewhat ambiguous whether a new callback added in this
// pass should be queued for later execution if something in the
// list of callbacks throws, or if it should just be discarded.
// However, it's such an edge case that it hardly matters, and either
// choice is likely as surprising as the other.
// As it happens, we do go ahead and schedule it for later execution.
try {
for ( var i = 0 ; i < len ; i ++ ) {
cbs [ i ] . apply ( null , args )
}
} finally {
if ( cbs . length > len ) {
// added more in the interim.
// de-zalgo, just in case, but don't call again.
cbs . splice ( 0 , len )
process . nextTick ( function ( ) {
RES . apply ( null , args )
} )
} else {
delete reqs [ key ]
}
}
} )
}
// Copy an array-like (typically `arguments`) into a real Array.
function slice (args) {
  var copied = []
  var index = 0
  while (index < args.length) {
    copied.push(args[index])
    index += 1
  }
  return copied
}
/***/ } ) ,
/***/ 495 :
/***/ ( function ( module ) {
module . exports = [ [ "8740" , "䏰䰲䘃䖦䕸𧉧䵷䖳𧲱䳢𧳅㮕䜶䝄䱇䱀𤊿𣘗𧍒𦺋𧃒䱗𪍑䝏䗚䲅𧱬䴇䪤䚡𦬣爥𥩔𡩣𣸆𣽡晍囻" ] , [ "8767" , "綕夝𨮹㷴霴𧯯寛𡵞媤㘥𩺰嫑宷峼杮薓𩥅瑡璝㡵𡵓𣚞𦀡㻬" ] , [ "87a1" , "𥣞㫵竼龗𤅡𨤍𣇪𠪊𣉞䌊蒄龖鐯䤰蘓墖靊鈘秐稲晠権袝瑌篅枂稬剏遆㓦珄𥶹瓆鿇垳䤯呌䄱𣚎堘穲𧭥讏䚮𦺈䆁𥶙箮𢒼鿈𢓁𢓉𢓌鿉蔄𣖻䂴鿊䓡𪷿拁灮鿋" ] , [ "8840" , "㇀" , 4 , "𠄌㇅𠃑𠃍㇆㇇𠃋𡿨㇈𠃊㇉㇊㇋㇌𠄎㇍㇎ĀÁǍÀĒÉĚÈŌÓǑÒÊ̄ẾÊ̌ỀÊāáǎàɑēéěèīíǐìōóǒòūúǔùǖǘǚ" ] , [ "88a1" , "ǜüê̄ếê̌ềêɡ⏚⏛" ] , [ "8940" , "𪎩𡅅" ] , [ "8943" , "攊" ] , [ "8946" , "丽滝鵎釟" ] , [ "894c" , "𧜵撑会伨侨兖兴农凤务动医华发变团声处备夲头学实実岚庆总斉柾栄桥济炼电纤纬纺织经统缆缷艺苏药视设询车轧轮" ] , [ "89a1" , "琑糼緍楆竉刧" ] , [ "89ab" , "醌碸酞肼" ] , [ "89b0" , "贋胶𠧧" ] , [ "89b5" , "肟黇䳍鷉鸌䰾𩷶𧀎鸊𪄳㗁" ] , [ "89c1" , "溚舾甙" ] , [ "89c5" , "䤑马骏龙禇𨑬𡷊𠗐𢫦两亁亀亇亿仫伷㑌侽㹈倃傈㑽㒓㒥円夅凛凼刅争剹劐匧㗇厩㕑厰㕓参吣㕭㕲㚁咓咣咴咹哐哯唘唣唨㖘唿㖥㖿嗗㗅" ] , [ "8a40" , "𧶄唥" ] , [ "8a43" , "𠱂𠴕𥄫喐𢳆㧬𠍁蹆𤶸𩓥䁓𨂾睺𢰸㨴䟕𨅝𦧲𤷪擝𠵼𠾴𠳕𡃴撍蹾𠺖𠰋𠽤𢲩𨉖𤓓" ] , [ "8a64" , "𠵆𩩍𨃩䟴𤺧𢳂骲㩧𩗴㿭㔆𥋇𩟔𧣈𢵄鵮頕" ] , [ "8a76" , "䏙𦂥撴哣𢵌𢯊𡁷㧻𡁯" ] , [ "8aa1" , "𦛚𦜖𧦠擪𥁒𠱃蹨𢆡𨭌𠜱" ] , [ "8aac" , "䠋𠆩㿺塳𢶍" ] , [ "8ab2" , "𤗈𠓼𦂗𠽌𠶖啹䂻䎺" ] , [ "8abb" , "䪴𢩦𡂝膪飵𠶜捹㧾𢝵跀嚡摼㹃" ] , [ "8ac9" , "𪘁𠸉𢫏𢳉" ] , [ "8ace" , "𡃈𣧂㦒㨆𨊛㕸𥹉𢃇噒𠼱𢲲𩜠㒼氽𤸻" ] , [ "8adf" , "𧕴𢺋𢈈𪙛𨳍𠹺𠰴𦠜羓𡃏𢠃𢤹㗻𥇣𠺌𠾍𠺪㾓𠼰𠵇𡅏𠹌" ] , [ "8af6" , "𠺫𠮩𠵈𡃀𡄽㿹𢚖搲𠾭" ] , [ "8b40" , "𣏴𧘹𢯎𠵾𠵿𢱑𢱕㨘𠺘𡃇𠼮𪘲𦭐𨳒𨶙𨳊閪哌苄喹" ] , [ "8b55" , "𩻃鰦骶𧝞𢷮煀腭胬尜𦕲脴㞗卟𨂽醶𠻺𠸏𠹷𠻻㗝𤷫㘉𠳖嚯𢞵𡃉𠸐𠹸𡁸𡅈𨈇𡑕𠹹𤹐𢶤婔𡀝𡀞𡃵𡃶垜𠸑" ] , [ "8ba1" , "𧚔𨋍𠾵𠹻𥅾㜃𠾶𡆀𥋘𪊽𤧚𡠺𤅷𨉼墙剨㘚𥜽箲孨䠀䬬鼧䧧鰟鮍𥭴𣄽嗻㗲嚉丨夂𡯁屮靑𠂆乛亻㔾尣彑忄㣺扌攵歺氵氺灬爫丬犭𤣩罒礻糹罓𦉪㓁" ] , [ "8bde" , "𦍋耂肀𦘒𦥑卝衤见𧢲讠贝钅镸长门𨸏韦页风飞饣𩠐鱼鸟黄歯龜丷𠂇阝户钢" ] , [ "8c40" , "倻淾𩱳龦㷉袏𤅎灷峵䬠𥇍㕙𥴰愢𨨲辧釶熑朙玺𣊁𪄇㲋𡦀䬐磤琂冮𨜏䀉橣𪊺䈣蘏𠩯稪𩥇𨫪靕灍匤𢁾鏴盙𨧣龧矝亣俰傼丯众龨吴綋墒壐𡶶庒庙忂𢜒斋" ] , [ "8ca1" , "𣏹椙橃𣱣泿" ] , [ "8ca7" , "爀𤔅玌㻛𤨓嬕璹讃𥲤𥚕窓篬糃繬苸薗龩袐龪躹龫迏蕟駠鈡龬𨶹𡐿䁱䊢娚" ] , [ "8cc9" , "顨杫䉶圽" ] , [ "8cce" , "藖𤥻芿𧄍䲁𦵴嵻𦬕𦾾龭龮宖龯曧繛湗秊㶈䓃𣉖𢞖䎚䔶" ] , [ "8ce6" , "峕𣬚諹屸㴒𣕑嵸龲煗䕘𤃬𡸣䱷㥸㑊𠆤𦱁諌侴𠈹妿腬顖𩣺弻" ] , [ "8d40" , "𠮟" ] , [ "8d42" , "𢇁𨥭䄂䚻𩁹㼇龳𪆵䃸㟖䛷𦱆䅼𨚲𧏿䕭㣔𥒚䕡䔛䶉䱻䵶䗪㿈𤬏㙡䓞䒽䇭崾嵈嵖㷼㠏嶤嶹㠠㠸幂庽弥徃㤈㤔㤿㥍惗愽峥㦉憷憹懏㦸戬抐拥挘㧸嚱" ] , [ "8da1" , "㨃揢揻搇摚㩋擀崕嘡龟㪗斆㪽旿晓㫲暒㬢朖㭂枤栀㭘桊梄㭲㭱㭻椉楃牜楤榟榅㮼槖㯝橥橴橱檂㯬檙㯲檫檵櫔櫶殁毁毪汵沪㳋洂洆洦涁㳯涤涱渕渘温溆𨧀溻滢滚齿滨滩漤漴㵆𣽁澁澾㵪㵵熷岙㶊瀬㶑灐灔灯灿炉𠌥䏁㗱𠻘" ] , [ "8e40" , "𣻗垾𦻓焾𥟠㙎榢𨯩孴穉𥣡𩓙穥穽𥦬窻窰竂竃燑𦒍䇊竚竝竪䇯咲𥰁笋筕笩𥌎𥳾箢筯莜𥮴𦱿篐萡箒箸𥴠㶭𥱥蒒篺簆簵𥳁籄粃𤢂粦晽𤕸糉糇糦籴糳糵糎" ] , [ "8ea1" , " 繧䔝𦹄絝𦻖璍綉綫焵綳緒𤁗𦀩緤㴓緵𡟹緥𨍭縝𦄡𦅚繮纒䌫鑬縧罀罁罇礶𦋐駡羗𦍑羣𡙡𠁨䕜𣝦䔃𨌺翺𦒉者耈耝耨耯𪂇𦳃耻耼聡𢜔䦉𦘦𣷣𦛨朥肧𨩈脇脚墰𢛶汿𦒘𤾸擧𡒊舘𡡞橓𤩥𤪕䑺舩𠬍𦩒𣵾俹 <EFBFBD>
/***/ } ) ,
/***/ 499 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compare = __webpack_require__(309)

// Reversed comparator: delegates to `compare` with operands swapped, so
// sorting with it yields descending order.
const rcompare = (a, b, loose) => compare(b, a, loose)

module.exports = rcompare
/***/ } ) ,
/***/ 508 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict" ;
var iconv _package ;
var Iconv ;
try {
// this is to fool browserify so it doesn't try (in vain) to install iconv.
iconv _package = 'iconv' ;
Iconv = _ _webpack _require _ _ ( 133 ) . Iconv ;
} catch ( E ) {
// node-iconv not present
}
module . exports = Iconv ;
/***/ } ) ,
/***/ 514 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript-emitted helper: drives an async function implemented as a
// generator, resolving the returned promise with the generator's result.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-emitted helper: emulates `import * as ns` for CommonJS modules.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
// Toolrunner module (argument parsing + process execution).
const tr = __importStar(__webpack_require__(159));
/ * *
* Exec a command .
* Output will be streamed to the live console .
* Returns promise with return code
*
* @ param commandLine command to execute ( can include additional args ) . Must be correctly escaped .
* @ param args optional arguments for tool . Escaping is handled by the lib .
* @ param options optional exec options . See ExecOptions
* @ returns Promise < number > exit code
* /
/**
 * Exec a command, streaming output to the live console.
 *
 * @param commandLine command to execute (can include additional args). Must be correctly escaped.
 * @param args        optional arguments for tool. Escaping is handled by the lib.
 * @param options     optional exec options. See ExecOptions
 * @returns Promise<number> exit code
 */
function exec(commandLine, args, options) {
    return __awaiter(this, void 0, void 0, function* () {
        const parsed = tr.argStringToArray(commandLine);
        if (parsed.length === 0) {
            throw new Error(`Parameter 'commandLine' cannot be null or empty.`);
        }
        // First token is the tool itself; any remaining tokens are leading args.
        const toolPath = parsed[0];
        const toolArgs = parsed.slice(1).concat(args || []);
        const runner = new tr.ToolRunner(toolPath, toolArgs, options);
        return runner.exec();
    });
}
exports.exec = exec;
//# sourceMappingURL=exec.js.map
/***/ } ) ,
/***/ 517 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
/ * !
* Tmp
*
* Copyright ( c ) 2011 - 2017 KARASZI Istvan < github @ spam . raszi . hu >
*
* MIT Licensed
* /
/ *
* Module dependencies .
* /
const fs = __webpack_require__(747);
const os = __webpack_require__(87);
const path = __webpack_require__(622);
const crypto = __webpack_require__(417);
// Namespaced constants; some entries also exist directly on _c (node-version dependent).
const _c = { fs: fs.constants, os: os.constants };
const rimraf = __webpack_require__(959);

/*
 * The working inner variables.
 */
const
  // the random characters to choose from
  RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz',

  TEMPLATE_PATTERN = /XXXXXX/,

  DEFAULT_TRIES = 3,

  // O_CREAT | O_EXCL | O_RDWR, read from whichever location exposes them.
  CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR),

  // constants are off on the windows platform and will not match the actual errno codes
  IS_WIN32 = os.platform() === 'win32',
  EBADF = _c.EBADF || _c.os.errno.EBADF,
  ENOENT = _c.ENOENT || _c.os.errno.ENOENT,

  DIR_MODE = 0o700 /* 448 */,
  FILE_MODE = 0o600 /* 384 */,

  EXIT = 'exit',

  // this will hold the objects need to be removed on exit
  _removeObjects = [],

  // API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback
  FN_RMDIR_SYNC = fs.rmdirSync.bind(fs),
  FN_RIMRAF_SYNC = rimraf.sync;

let
  _gracefulCleanup = false;
/ * *
* Gets a temporary file name .
*
* @ param { ( Options | tmpNameCallback ) } options options or callback
* @ param { ? tmpNameCallback } callback the callback function
* /
function tmpName ( options , callback ) {
const
args = _parseArguments ( options , callback ) ,
opts = args [ 0 ] ,
cb = args [ 1 ] ;
try {
_assertAndSanitizeOptions ( opts ) ;
} catch ( err ) {
return cb ( err ) ;
}
let tries = opts . tries ;
( function _getUniqueName ( ) {
try {
const name = _generateTmpName ( opts ) ;
// check whether the path exists then retry if needed
fs . stat ( name , function ( err ) {
/* istanbul ignore else */
if ( ! err ) {
/* istanbul ignore else */
if ( tries -- > 0 ) return _getUniqueName ( ) ;
return cb ( new Error ( 'Could not get a unique tmp filename, max tries reached ' + name ) ) ;
}
cb ( null , name ) ;
} ) ;
} catch ( err ) {
cb ( err ) ;
}
} ( ) ) ;
}
/ * *
* Synchronous version of tmpName .
*
* @ param { Object } options
* @ returns { string } the generated random name
* @ throws { Error } if the options are invalid or could not generate a filename
* /
/**
 * Synchronous version of tmpName.
 *
 * @param {Object} options
 * @returns {string} the generated random name
 * @throws {Error} if the options are invalid or could not generate a filename
 */
function tmpNameSync(options) {
  const [opts] = _parseArguments(options);

  _assertAndSanitizeOptions(opts);

  // opts.tries retries after the initial attempt (tries + 1 attempts total).
  let remaining = opts.tries;
  for (;;) {
    const name = _generateTmpName(opts);
    try {
      fs.statSync(name); // succeeds -> name is taken, keep looking
    } catch (e) {
      return name; // stat failed -> name is free
    }
    if (remaining-- <= 0) break;
  }
  throw new Error('Could not get a unique tmp filename, max tries reached');
}
/ * *
* Creates and opens a temporary file .
*
* @ param { ( Options | null | undefined | fileCallback ) } options the config options or the callback function or null or undefined
* @ param { ? fileCallback } callback
* /
/**
 * Creates and opens a temporary file.
 *
 * @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined
 * @param {?fileCallback} callback
 */
function file(options, callback) {
  const
    args = _parseArguments(options, callback),
    opts = args[0],
    cb = args[1];

  // gets a temporary filename
  tmpName(opts, function _tmpNameCreated(err, name) {
    /* istanbul ignore else */
    if (err) return cb(err);

    // create and open the file
    fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) {
      /* istanbul ignore else */
      if (err) return cb(err);

      if (opts.discardDescriptor) {
        // Caller does not want the fd; close it before invoking the callback.
        return fs.close(fd, function _discardCallback(possibleErr) {
          // the chance of getting an error on close here is rather low and might occur in the most edgiest cases only
          return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false));
        });
      } else {
        // detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care
        // about the descriptor
        const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
        cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, false));
      }
    });
  });
}
/**
 * Synchronous version of file.
 *
 * @param {Options} options
 * @returns {FileSyncObject} object consists of name, fd and removeCallback
 * @throws {Error} if cannot create a file
 */
function fileSync(options) {
  const
    args = _parseArguments(options),
    opts = args[0];

  const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor;
  const name = tmpNameSync(opts);
  var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE);
  /* istanbul ignore else */
  if (opts.discardDescriptor) {
    // close immediately; the caller only wanted the file name
    fs.closeSync(fd);
    fd = undefined;
  }

  return {
    name: name,
    fd: fd,
    // an fd of -1 tells the remove callback not to attempt a close
    removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true)
  };
}
/**
 * Creates a temporary directory.
 *
 * @param {(Options|dirCallback)} options the options or the callback function
 * @param {?dirCallback} callback receives (err, name, removeCallback)
 */
function dir(options, callback) {
  const
    args = _parseArguments(options, callback),
    opts = args[0],
    cb = args[1];

  // gets a temporary filename
  tmpName(opts, function _tmpNameCreated(err, name) {
    /* istanbul ignore else */
    if (err) return cb(err);

    // create the directory
    fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) {
      /* istanbul ignore else */
      if (err) return cb(err);

      cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false));
    });
  });
}
/**
 * Synchronous version of dir.
 *
 * @param {Options} options
 * @returns {DirSyncObject} object consists of name and removeCallback
 * @throws {Error} if it cannot create a directory
 */
function dirSync(options) {
  const
    args = _parseArguments(options),
    opts = args[0];

  const name = tmpNameSync(opts);
  fs.mkdirSync(name, opts.mode || DIR_MODE);

  return {
    name: name,
    removeCallback: _prepareTmpDirRemoveCallback(name, opts, true)
  };
}
/**
 * Removes a file asynchronously, closing its descriptor first when one is attached.
 *
 * @param {Object} fdPath tuple of [fd, path]; fd < 0 means there is nothing to close
 * @param {Function} next continuation, invoked with an error only for unexpected failures
 * @private
 */
function _removeFileAsync(fdPath, next) {
  const fd = fdPath[0];
  const filePath = fdPath[1];

  const done = function (err) {
    // a missing file is fine -- it was already removed
    if (err && !_isENOENT(err)) {
      // reraise any unanticipated error
      return next(err);
    }
    next();
  };

  if (fd >= 0) {
    // close first, then unlink; close errors are deliberately ignored
    fs.close(fd, function () {
      fs.unlink(filePath, done);
    });
  } else {
    fs.unlink(filePath, done);
  }
}
/**
 * Removes files synchronously.
 *
 * Closes the descriptor (if any) first, then always attempts the unlink in the
 * finally block; an unexpected unlink error is deferred until cleanup finished.
 *
 * @param {Object} fdPath tuple of [fd, path]; fd < 0 means there is nothing to close
 * @private
 */
function _removeFileSync(fdPath) {
  let rethrownException = null;
  try {
    if (0 <= fdPath[0]) fs.closeSync(fdPath[0]);
  } catch (e) {
    // reraise any unanticipated error
    if (!_isEBADF(e) && !_isENOENT(e)) throw e;
  } finally {
    try {
      fs.unlinkSync(fdPath[1]);
    }
    catch (e) {
      // reraise any unanticipated error
      if (!_isENOENT(e)) rethrownException = e;
    }
  }
  if (rethrownException !== null) {
    throw rethrownException;
  }
}
/**
 * Prepares the callback for removal of the temporary file.
 *
 * Returns either a sync callback or a async callback depending on whether
 * fileSync or file was called, which is expressed by the sync parameter.
 *
 * @param {string} name the path of the file
 * @param {number} fd file descriptor, -1 when it must not be closed
 * @param {Object} opts
 * @param {boolean} sync
 * @returns {fileCallback | fileCallbackSync}
 * @private
 */
function _prepareTmpFileRemoveCallback(name, fd, opts, sync) {
  const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name], sync);
  const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, removeCallbackSync);

  // the process-exit garbage collector always uses the sync variant
  if (!opts.keep) _removeObjects.unshift(removeCallbackSync);

  return sync ? removeCallbackSync : removeCallback;
}
/**
 * Prepares the callback for removal of the temporary directory.
 *
 * Returns either a sync callback or a async callback depending on whether
 * tmpFileSync or tmpFile was called, which is expressed by the sync parameter.
 *
 * @param {string} name
 * @param {Object} opts
 * @param {boolean} sync
 * @returns {Function} the callback
 * @private
 */
function _prepareTmpDirRemoveCallback(name, opts, sync) {
  // unsafeCleanup recursively deletes non-empty dirs, otherwise a plain rmdir
  const removeFunction = opts.unsafeCleanup ? rimraf : fs.rmdir.bind(fs);
  const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC;
  const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync);
  const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync);

  // the process-exit garbage collector always uses the sync variant
  if (!opts.keep) _removeObjects.unshift(removeCallbackSync);

  return sync ? removeCallbackSync : removeCallback;
}
/**
 * Creates a guarded function wrapping the removeFunction call.
 *
 * The cleanup callback is safe to be called multiple times.
 * Subsequent invocations will be ignored.
 *
 * @param {Function} removeFunction
 * @param {string} fileOrDirName
 * @param {boolean} sync
 * @param {cleanupCallbackSync?} cleanupCallbackSync
 * @returns {cleanupCallback | cleanupCallbackSync}
 * @private
 */
function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) {
  // guard flag: the wrapped remove runs at most once
  let called = false;

  // if sync is true, the next parameter will be ignored
  return function _cleanupCallback(next) {
    /* istanbul ignore else */
    if (!called) {
      // remove cleanupCallback from cache; the sync twin is the one that was
      // registered in _removeObjects, so prefer it when present
      const toRemove = cleanupCallbackSync || _cleanupCallback;
      const index = _removeObjects.indexOf(toRemove);
      /* istanbul ignore else */
      if (index >= 0) _removeObjects.splice(index, 1);

      called = true;
      // sync remove functions take no callback argument
      if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) {
        return removeFunction(fileOrDirName);
      } else {
        return removeFunction(fileOrDirName, next || function () {});
      }
    }
  };
}
/**
 * The garbage collector: removes all still-tracked temporary objects,
 * but only when graceful cleanup has been enabled.
 *
 * @private
 */
function _garbageCollector() {
  /* istanbul ignore else */
  if (!_gracefulCleanup) return;

  // the function being called removes itself from _removeObjects,
  // loop until _removeObjects is empty
  while (_removeObjects.length) {
    try {
      _removeObjects[0]();
    } catch (e) {
      // already removed? errors are deliberately swallowed during exit cleanup
    }
  }
}
/**
 * Random name generator based on crypto.
 * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript
 *
 * @param {number} howMany number of random characters to produce
 * @returns {string} the generated random name
 * @private
 */
function _randomChars(howMany) {
  let bytes = null;

  // make sure that we do not fail because we ran out of entropy
  try {
    bytes = crypto.randomBytes(howMany);
  } catch (e) {
    bytes = crypto.pseudoRandomBytes(howMany);
  }

  const chars = [];
  for (let i = 0; i < howMany; i++) {
    // map each random byte onto the allowed character alphabet
    chars.push(RANDOM_CHARS[bytes[i] % RANDOM_CHARS.length]);
  }
  return chars.join('');
}
/**
 * Helper which determines whether a string s is blank, that is undefined, or empty or null.
 *
 * @private
 * @param {string} s
 * @returns {Boolean} true whether the string s is blank, false otherwise
 */
function _isBlank(s) {
  if (s === null || _isUndefined(s)) {
    return true;
  }
  // empty or whitespace-only strings are blank as well
  return !s.trim();
}
/**
 * Checks whether the `obj` parameter is defined or not.
 *
 * @param {Object} obj
 * @returns {boolean} true if the object is undefined
 * @private
 */
function _isUndefined(obj) {
  const type = typeof obj;
  return type === 'undefined';
}
/**
 * Parses the function arguments.
 *
 * This function helps to have optional arguments.
 *
 * @param {(Options|null|undefined|Function)} options
 * @param {?Function} callback
 * @returns {Array} parsed arguments as [opts, callback]
 * @private
 */
function _parseArguments(options, callback) {
  /* istanbul ignore else */
  if (typeof options === 'function') {
    // only a callback was supplied
    return [{}, options];
  }

  /* istanbul ignore else */
  if (_isUndefined(options)) {
    return [{}, callback];
  }

  // copy options so we do not leak the changes we make internally
  const actualOptions = Object.getOwnPropertyNames(options).reduce(function (copy, key) {
    copy[key] = options[key];
    return copy;
  }, {});

  return [actualOptions, callback];
}
/**
 * Generates a new temporary name.
 *
 * Precedence: a fixed name wins over a template, which wins over the
 * prefix/pid/random/postfix scheme.
 *
 * @param {Object} opts sanitized options (see _assertAndSanitizeOptions)
 * @returns {string} the new random name according to opts
 * @private
 */
function _generateTmpName(opts) {
  const tmpDir = opts.tmpdir;

  /* istanbul ignore else */
  if (!_isUndefined(opts.name))
    return path.join(tmpDir, opts.dir, opts.name);

  /* istanbul ignore else */
  if (!_isUndefined(opts.template))
    // substitute the template placeholder with 6 random characters
    // NOTE(review): replaces only the first TEMPLATE_PATTERN match unless the
    // pattern is declared global -- confirm against TEMPLATE_PATTERN's definition
    return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6));

  // prefix and postfix
  const name = [
    opts.prefix ? opts.prefix : 'tmp',
    '-',
    process.pid,
    '-',
    _randomChars(12),
    opts.postfix ? '-' + opts.postfix : ''
  ].join('');

  return path.join(tmpDir, opts.dir, name);
}
/**
 * Asserts whether the specified options are valid, also sanitizes options and provides sane defaults for missing
 * options. Mutates the passed-in object in place; the order of the steps below matters.
 *
 * @param {Options} options
 * @private
 */
function _assertAndSanitizeOptions(options) {
  options.tmpdir = _getTmpDir(options);

  const tmpDir = options.tmpdir;

  /* istanbul ignore else */
  if (!_isUndefined(options.name))
    _assertIsRelative(options.name, 'name', tmpDir);
  /* istanbul ignore else */
  if (!_isUndefined(options.dir))
    _assertIsRelative(options.dir, 'dir', tmpDir);
  /* istanbul ignore else */
  if (!_isUndefined(options.template)) {
    _assertIsRelative(options.template, 'template', tmpDir);
    if (!options.template.match(TEMPLATE_PATTERN))
      throw new Error(`Invalid template, found "${options.template}".`);
  }

  /* istanbul ignore else */
  // NOTE: parses as (defined && NaN) || (tries < 0) due to operator precedence;
  // an undefined tries falls through to the default assignment below
  if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0)
    throw new Error(`Invalid tries, found "${options.tries}".`);

  // if a name was specified we will try once
  options.tries = _isUndefined(options.name) ? options.tries || DEFAULT_TRIES : 1;
  options.keep = !!options.keep;
  options.detachDescriptor = !!options.detachDescriptor;
  options.discardDescriptor = !!options.discardDescriptor;
  options.unsafeCleanup = !!options.unsafeCleanup;

  // sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to
  options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir));
  options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir));
  // sanitize further if template is relative to options.dir
  options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template);

  // for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to
  options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name);
  options.prefix = _isUndefined(options.prefix) ? '' : options.prefix;
  options.postfix = _isUndefined(options.postfix) ? '' : options.postfix;
}
/**
 * Resolve the specified path name in respect to tmpDir.
 *
 * The specified name might include relative path components, e.g. ../
 * so we need to resolve in order to be sure that is is located inside tmpDir
 *
 * @param name
 * @param tmpDir
 * @returns {string}
 * @private
 */
function _resolvePath(name, tmpDir) {
  const sanitizedName = _sanitizeName(name);
  // NOTE(review): a plain string-prefix check -- '/tmpfoo' would also pass for
  // tmpDir '/tmp'; a separator-aware check would be stricter
  if (sanitizedName.startsWith(tmpDir)) {
    return path.resolve(sanitizedName);
  } else {
    return path.resolve(path.join(tmpDir, sanitizedName));
  }
}
/**
 * Sanitize the specified path name by removing all quote characters.
 *
 * Blank inputs (null, undefined, empty, whitespace-only) are returned unchanged.
 *
 * @param name
 * @returns {string}
 * @private
 */
function _sanitizeName(name) {
  // strip single and double quotes from non-blank names only
  return _isBlank(name) ? name : name.replace(/["']/g, '');
}
/**
 * Asserts whether specified name is relative to the specified tmpDir.
 *
 * @param {string} name
 * @param {string} option the option name, used for the error message
 * @param {string} tmpDir
 * @throws {Error}
 * @private
 */
function _assertIsRelative(name, option, tmpDir) {
  if (option === 'name') {
    // assert that name is not absolute and does not contain a path
    if (path.isAbsolute(name))
      throw new Error(`${option} option must not contain an absolute path, found "${name}".`);
    // must not fail on valid .<name> or ..<name> or similar such constructs
    // (basename !== name also catches any remaining path separators)
    let basename = path.basename(name);
    if (basename === '..' || basename === '.' || basename !== name)
      throw new Error(`${option} option must not contain a path, found "${name}".`);
  }
  else { // if (option === 'dir' || option === 'template') {
    // assert that dir or template are relative to tmpDir
    if (path.isAbsolute(name) && !name.startsWith(tmpDir)) {
      throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`);
    }
    // also catches relative names that escape tmpDir via '..' components
    let resolvedPath = _resolvePath(name, tmpDir);
    if (!resolvedPath.startsWith(tmpDir))
      throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`);
  }
}
/**
 * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows.
 *
 * @param {SystemError} error
 * @returns {boolean} true when the error denotes a bad file descriptor
 * @private
 */
function _isEBADF(error) {
  return _isExpectedError(error, -EBADF, 'EBADF');
}
/**
 * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows.
 *
 * @param {SystemError} error
 * @returns {boolean} true when the error denotes a missing file or directory
 * @private
 */
function _isENOENT(error) {
  return _isExpectedError(error, -ENOENT, 'ENOENT');
}
/**
 * Helper to determine whether the expected error code matches the actual code and errno,
 * which will differ between the supported node versions.
 *
 * - Node >= 7.0:
 *   error.code {string}
 *   error.errno {number} any numerical value will be negated
 *
 * CAVEAT
 *
 * On windows, the errno for EBADF is -4083 but os.constants.errno.EBADF is different and we must assume that ENOENT
 * is no different here.
 *
 * @param {SystemError} error
 * @param {number} errno
 * @param {string} code
 * @private
 */
function _isExpectedError(error, errno, code) {
  if (IS_WIN32) {
    // on Windows the numeric errno is unreliable, so compare the code only
    return error.code === code;
  }
  return error.code === code && error.errno === errno;
}
/**
 * Sets the graceful cleanup.
 *
 * If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the
 * temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary
 * object removals.
 */
function setGracefulCleanup() {
  // flips the module-level flag checked by _garbageCollector on exit
  _gracefulCleanup = true;
}
/**
 * Returns the currently configured tmp dir from os.tmpdir().
 *
 * @private
 * @param {?Options} options
 * @returns {string} the currently configured tmp dir
 */
function _getTmpDir(options) {
  // parses as (options && options.tmpdir) || os.tmpdir(): a user-supplied
  // tmpdir wins, otherwise the OS default is used
  return path.resolve(_sanitizeName(options && options.tmpdir || os.tmpdir()));
}
// Install process exit listener
process . addListener ( EXIT , _garbageCollector ) ;
/ * *
* Configuration options .
*
* @ typedef { Object } Options
* @ property { ? boolean } keep the temporary object ( file or dir ) will not be garbage collected
* @ property { ? number } tries the number of tries before give up the name generation
 * @property {?number} mode the access mode, defaults are 0o700 for directories and 0o600 for files
* @ property { ? string } template the "mkstemp" like filename template
* @ property { ? string } name fixed name relative to tmpdir or the specified dir option
* @ property { ? string } dir tmp directory relative to the root tmp directory in use
* @ property { ? string } prefix prefix for the generated name
* @ property { ? string } postfix postfix for the generated name
* @ property { ? string } tmpdir the root tmp directory which overrides the os tmpdir
* @ property { ? boolean } unsafeCleanup recursively removes the created temporary directory , even when it ' s not empty
* @ property { ? boolean } detachDescriptor detaches the file descriptor , caller is responsible for closing the file , tmp will no longer try closing the file during garbage collection
* @ property { ? boolean } discardDescriptor discards the file descriptor ( closes file , fd is - 1 ) , tmp will no longer try closing the file during garbage collection
* /
/ * *
* @ typedef { Object } FileSyncObject
* @ property { string } name the name of the file
 * @property {number} fd the file descriptor or -1 if the fd has been discarded
* @ property { fileCallback } removeCallback the callback function to remove the file
* /
/ * *
* @ typedef { Object } DirSyncObject
* @ property { string } name the name of the directory
* @ property { fileCallback } removeCallback the callback function to remove the directory
* /
/ * *
* @ callback tmpNameCallback
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* /
/ * *
* @ callback fileCallback
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { number } fd the file descriptor or - 1 if the fd had been discarded
* @ param { cleanupCallback } fn the cleanup callback function
* /
/ * *
* @ callback fileCallbackSync
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { number } fd the file descriptor or - 1 if the fd had been discarded
* @ param { cleanupCallbackSync } fn the cleanup callback function
* /
/ * *
* @ callback dirCallback
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { cleanupCallback } fn the cleanup callback function
* /
/ * *
* @ callback dirCallbackSync
* @ param { ? Error } err the error object if anything goes wrong
* @ param { string } name the temporary file name
* @ param { cleanupCallbackSync } fn the cleanup callback function
* /
/ * *
* Removes the temporary created file or directory .
*
* @ callback cleanupCallback
* @ param { simpleCallback } [ next ] function to call whenever the tmp object needs to be removed
* /
/ * *
* Removes the temporary created file or directory .
*
* @ callback cleanupCallbackSync
* /
/ * *
* Callback function for function composition .
* @ see { @ link https : //github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57}
*
* @ callback simpleCallback
* /
// exporting all the needed methods
// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will
// allow users to reconfigure the temporary directory
Object . defineProperty ( module . exports , 'tmpdir' , {
enumerable : true ,
configurable : false ,
get : function ( ) {
return _getTmpDir ( ) ;
}
} ) ;
module . exports . dir = dir ;
module . exports . dirSync = dirSync ;
module . exports . file = file ;
module . exports . fileSync = fileSync ;
module . exports . tmpName = tmpName ;
module . exports . tmpNameSync = tmpNameSync ;
module . exports . setGracefulCleanup = setGracefulCleanup ;
/***/ } ) ,
/***/ 520 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compare = __webpack_require__(309)

/** semver "lower than or equal": compare() yields -1/0/1. */
const lte = (a, b, loose) => {
  const result = compare(a, b, loose)
  return result <= 0
}

module.exports = lte
/***/ } ) ,
/***/ 522 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compare = __webpack_require__(309)

/** semver "greater than or equal": compare() yields -1/0/1. */
const gte = (a, b, loose) => {
  const result = compare(a, b, loose)
  return result >= 0
}

module.exports = gte
/***/ } ) ,
/***/ 523 :
/***/ ( function ( module , exports , _ _webpack _require _ _ ) {
const { MAX_SAFE_COMPONENT_LENGTH } = __webpack_require__(293)
const debug = __webpack_require__(427)
exports = module.exports = {}

// The actual regexps go on exports.re
const re = exports.re = []
const src = exports.src = []
const t = exports.t = {}
let R = 0

// Registers a token: records its index, raw source string and compiled RegExp.
// NOTE: token indices depend on registration order, so the statement order
// below must not change.
const createToken = (name, value, isGlobal) => {
  const index = R++
  debug(index, value)
  t[name] = index
  src[index] = value
  re[index] = new RegExp(value, isGlobal ? 'g' : undefined)
}

// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.

// ## Numeric Identifier
// A single `0`, or a non-zero digit followed by zero or more digits.

createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*')
createToken('NUMERICIDENTIFIERLOOSE', '[0-9]+')

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

createToken('NONNUMERICIDENTIFIER', '\\d*[a-zA-Z-][a-zA-Z0-9-]*')

// ## Main Version
// Three dot-separated numeric identifiers.

createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` +
                   `(${src[t.NUMERICIDENTIFIER]})\\.` +
                   `(${src[t.NUMERICIDENTIFIER]})`)

createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` +
                        `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` +
                        `(${src[t.NUMERICIDENTIFIERLOOSE]})`)

// ## Pre-release Version Identifier
// A numeric identifier, or a non-numeric identifier.

createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER]
}|${src[t.NONNUMERICIDENTIFIER]})`)

createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE]
}|${src[t.NONNUMERICIDENTIFIER]})`)

// ## Pre-release Version
// Hyphen, followed by one or more dot-separated pre-release version
// identifiers.

createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER]
}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`)

createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE]
}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`)

// ## Build Metadata Identifier
// Any combination of digits, letters, or hyphens.

createToken('BUILDIDENTIFIER', '[0-9A-Za-z-]+')

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
// identifiers.

createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER]
}(?:\\.${src[t.BUILDIDENTIFIER]})*))`)

// ## Full Version String
// A main version, followed optionally by a pre-release version and
// build metadata.

// Note that the only major, minor, patch, and pre-release sections of
// the version string are capturing groups. The build metadata is not a
// capturing group, because it should not ever be used in version
// comparison.

createToken('FULLPLAIN', `v?${src[t.MAINVERSION]
}${src[t.PRERELEASE]}?${
  src[t.BUILD]}?`)

createToken('FULL', `^${src[t.FULLPLAIN]}$`)

// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
// common in the npm registry.
createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE]
}${src[t.PRERELEASELOOSE]}?${
  src[t.BUILD]}?`)

createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`)

createToken('GTLT', '((?:<|>)?=?)')

// Something like "2.*" or "1.2.x".
// Note that "x.x" is a valid xRange identifer, meaning "any version"
// Only the first item is strictly required.
createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`)
createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`)

createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` +
                   `(?:\\.(${src[t.XRANGEIDENTIFIER]})` +
                   `(?:\\.(${src[t.XRANGEIDENTIFIER]})` +
                   `(?:${src[t.PRERELEASE]})?${
                     src[t.BUILD]}?` +
                   `)?)?`)

createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` +
                        `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` +
                        `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` +
                        `(?:${src[t.PRERELEASELOOSE]})?${
                          src[t.BUILD]}?` +
                        `)?)?`)

createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`)
createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`)

// Coercion.
// Extract anything that could conceivably be a part of a valid semver
createToken('COERCE', `${'(^|[^\\d])' +
              '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` +
              `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
              `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
              `(?:$|[^\\d])`)
createToken('COERCERTL', src[t.COERCE], true)

// Tilde ranges.
// Meaning is "reasonably at or greater than"
createToken('LONETILDE', '(?:~>?)')

createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true)
exports.tildeTrimReplace = '$1~'

createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`)
createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`)

// Caret ranges.
// Meaning is "at least and backwards compatible with"
createToken('LONECARET', '(?:\\^)')

createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true)
exports.caretTrimReplace = '$1^'

createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`)
createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`)

// A simple gt/lt/eq thing, or just "" to indicate "any version"
createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`)
createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`)

// An expression to strip any whitespace between the gtlt and the thing
// it modifies, so that `> 1.2.3` ==> `>1.2.3`
createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT]
}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true)
exports.comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
// Note that these all use the loose form, because they'll be
// checked against either the strict or loose comparator form
// later.
createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` +
                   `\\s+-\\s+` +
                   `(${src[t.XRANGEPLAIN]})` +
                   `\\s*$`)

createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` +
                        `\\s+-\\s+` +
                        `(${src[t.XRANGEPLAINLOOSE]})` +
                        `\\s*$`)
createToken ( 'STAR' , '(<|>)?=?\\s*\\*' )
// >=0.0.0 is like a star
createToken ( 'GTE0' , '^\\s*>=\\s*0\.0\.0\\s*$' )
createToken ( 'GTE0PRE' , '^\\s*>=\\s*0\.0\.0-0\\s*$' )
/***/ } ) ,
/***/ 532 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const ANY = Symbol('SemVer ANY')
// hoisted class for cyclic dependency
class Comparator {
  static get ANY () {
    return ANY
  }

  /**
   * @param {string|Comparator} comp e.g. '>=1.2.3'; '' means "any version"
   * @param {Object|boolean} options a boolean is shorthand for { loose }
   */
  constructor (comp, options) {
    if (!options || typeof options !== 'object') {
      options = {
        loose: !!options,
        includePrerelease: false
      }
    }

    if (comp instanceof Comparator) {
      // reuse the instance when the loose flag matches, otherwise reparse its source
      if (comp.loose === !!options.loose) {
        return comp
      } else {
        comp = comp.value
      }
    }

    debug('comparator', comp, options)
    this.options = options
    this.loose = !!options.loose
    this.parse(comp)

    if (this.semver === ANY) {
      this.value = ''
    } else {
      this.value = this.operator + this.semver.version
    }

    debug('comp', this)
  }

  // Splits comp into this.operator ('', '<', '<=', '>', '>=') and this.semver.
  parse (comp) {
    const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
    const m = comp.match(r)

    if (!m) {
      throw new TypeError(`Invalid comparator: ${comp}`)
    }

    this.operator = m[1] !== undefined ? m[1] : ''
    if (this.operator === '=') {
      this.operator = ''
    }

    // if it literally is just '>' or '' then allow anything.
    if (!m[2]) {
      this.semver = ANY
    } else {
      this.semver = new SemVer(m[2], this.options.loose)
    }
  }

  toString () {
    return this.value
  }

  // True when the given version (string or SemVer) satisfies this comparator.
  test (version) {
    debug('Comparator.test', version, this.options.loose)

    // NOTE(review): `version === ANY` only holds when the ANY sentinel itself
    // is passed in, never for plain version strings
    if (this.semver === ANY || version === ANY) {
      return true
    }

    if (typeof version === 'string') {
      try {
        version = new SemVer(version, this.options)
      } catch (er) {
        // unparsable versions never satisfy
        return false
      }
    }

    return cmp(version, this.operator, this.semver, this.options)
  }

  // Whether this comparator and comp admit at least one common version.
  intersects (comp, options) {
    if (!(comp instanceof Comparator)) {
      throw new TypeError('a Comparator is required')
    }

    if (!options || typeof options !== 'object') {
      options = {
        loose: !!options,
        includePrerelease: false
      }
    }

    // an empty operator means plain equality (or "any" when value is empty)
    if (this.operator === '') {
      if (this.value === '') {
        return true
      }
      return new Range(comp.value, options).test(this.value)
    } else if (comp.operator === '') {
      if (comp.value === '') {
        return true
      }
      return new Range(this.value, options).test(comp.semver)
    }

    const sameDirectionIncreasing =
      (this.operator === '>=' || this.operator === '>') &&
      (comp.operator === '>=' || comp.operator === '>')
    const sameDirectionDecreasing =
      (this.operator === '<=' || this.operator === '<') &&
      (comp.operator === '<=' || comp.operator === '<')
    const sameSemVer = this.semver.version === comp.semver.version
    const differentDirectionsInclusive =
      (this.operator === '>=' || this.operator === '<=') &&
      (comp.operator === '>=' || comp.operator === '<=')
    const oppositeDirectionsLessThan =
      cmp(this.semver, '<', comp.semver, options) &&
      (this.operator === '>=' || this.operator === '>') &&
      (comp.operator === '<=' || comp.operator === '<')
    const oppositeDirectionsGreaterThan =
      cmp(this.semver, '>', comp.semver, options) &&
      (this.operator === '<=' || this.operator === '<') &&
      (comp.operator === '>=' || comp.operator === '>')

    return (
      sameDirectionIncreasing ||
      sameDirectionDecreasing ||
      (sameSemVer && differentDirectionsInclusive) ||
      oppositeDirectionsLessThan ||
      oppositeDirectionsGreaterThan
    )
  }
}

module.exports = Comparator

// requires sit below the class to break the cyclic dependency with Range
const { re, t } = __webpack_require__(523)
const cmp = __webpack_require__(98)
const debug = __webpack_require__(427)
const SemVer = __webpack_require__(88)
const Range = __webpack_require__(828)
/***/ } ) ,
/***/ 537 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
function _interopDefault ( ex ) { return ( ex && ( typeof ex === 'object' ) && 'default' in ex ) ? ex [ 'default' ] : ex ; }
var deprecation = _ _webpack _require _ _ ( 932 ) ;
var once = _interopDefault ( _ _webpack _require _ _ ( 223 ) ) ;
const logOnce = once ( deprecation => console . warn ( deprecation ) ) ;
/ * *
* Error with extra properties to help with debugging
* /
/**
 * Error with extra properties to help with debugging failed Octokit
 * requests: HTTP status, response headers, and a credential-redacted
 * copy of the request options.
 */
class RequestError extends Error {
  constructor(message, statusCode, options) {
    super(message);
    // Maintains proper stack trace (only available on V8)
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
    this.name = "HttpError";
    this.status = statusCode;
    // Deprecated alias for `status`; logs a deprecation warning once on access.
    Object.defineProperty(this, "code", {
      get() {
        logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
        return statusCode;
      }
    });
    this.headers = options.headers || {};
    // Redact request credentials without mutating original request options.
    const requestCopy = Object.assign({}, options.request);
    if (options.request.headers.authorization) {
      requestCopy.headers = Object.assign({}, options.request.headers, {
        authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
      });
    }
    requestCopy.url = requestCopy.url
      // client_id & client_secret can be passed as URL query parameters to increase rate limit
      // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
      .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]")
      // OAuth tokens can be passed as URL query parameters, although it is not recommended
      // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
      .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
    this.request = requestCopy;
  }
}
exports . RequestError = RequestError ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 544 :
/***/ ( function ( module ) {
if ( typeof Object . create === 'function' ) {
// implementation from standard node.js 'util' module
module . exports = function inherits ( ctor , superCtor ) {
if ( superCtor ) {
ctor . super _ = superCtor
ctor . prototype = Object . create ( superCtor . prototype , {
constructor : {
value : ctor ,
enumerable : false ,
writable : true ,
configurable : true
}
} )
}
} ;
} else {
// old school shim for old browsers
module . exports = function inherits ( ctor , superCtor ) {
if ( superCtor ) {
ctor . super _ = superCtor
var TempCtor = function ( ) { }
TempCtor . prototype = superCtor . prototype
ctor . prototype = new TempCtor ( )
ctor . prototype . constructor = ctor
}
}
}
/***/ } ) ,
/***/ 549 :
/***/ ( function ( module ) {
module . exports = addHook
/**
 * Register a hook under `name` in `state.registry`.
 * `kind` is 'before' | 'after' | 'error' | 'wrap'; all but 'wrap' are
 * wrapped so they run relative to the hooked method. The registry keeps
 * both the wrapped hook and the original for later removal.
 */
function addHook (state, kind, name, hook) {
  const orig = hook
  if (!state.registry[name]) {
    state.registry[name] = []
  }

  if (kind === 'before') {
    // Run the hook first, then the method, both with the same options.
    hook = (method, options) =>
      Promise.resolve()
        .then(orig.bind(null, options))
        .then(method.bind(null, options))
  }

  if (kind === 'after') {
    // Run the method, hand its result to the hook, then yield the result.
    hook = (method, options) => {
      let result
      return Promise.resolve()
        .then(method.bind(null, options))
        .then((result_) => {
          result = result_
          return orig(result, options)
        })
        .then(() => result)
    }
  }

  if (kind === 'error') {
    // Run the method and let the hook handle any rejection.
    hook = (method, options) =>
      Promise.resolve()
        .then(method.bind(null, options))
        .catch((error) => orig(error, options))
  }

  state.registry[name].push({
    hook: hook,
    orig: orig
  })
}
/***/ } ) ,
/***/ 554 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
"use strict" ;
var Buffer = _ _webpack _require _ _ ( 407 ) . Buffer ,
Transform = _ _webpack _require _ _ ( 413 ) . Transform ;
// == Exports ==================================================================
module . exports = function ( iconv ) {
// Additional Public API.
iconv . encodeStream = function encodeStream ( encoding , options ) {
return new IconvLiteEncoderStream ( iconv . getEncoder ( encoding , options ) , options ) ;
}
iconv . decodeStream = function decodeStream ( encoding , options ) {
return new IconvLiteDecoderStream ( iconv . getDecoder ( encoding , options ) , options ) ;
}
iconv . supportsStreams = true ;
// Not published yet.
iconv . IconvLiteEncoderStream = IconvLiteEncoderStream ;
iconv . IconvLiteDecoderStream = IconvLiteDecoderStream ;
iconv . _collect = IconvLiteDecoderStream . prototype . collect ;
} ;
// == Encoder stream =======================================================
/**
 * Transform stream that feeds incoming strings through `conv` (an
 * iconv-lite encoder) and pushes the encoded Buffers downstream.
 */
function IconvLiteEncoderStream(conv, options) {
    this.conv = conv;
    options = options || {};
    options.decodeStrings = false; // We accept only strings, so we don't need to decode them.
    Transform.call(this, options);
}

IconvLiteEncoderStream.prototype = Object.create(Transform.prototype, {
    constructor: { value: IconvLiteEncoderStream }
});

IconvLiteEncoderStream.prototype._transform = function (chunk, encoding, done) {
    if (typeof chunk != 'string')
        return done(new Error("Iconv encoding stream needs strings as its input."));
    try {
        const encoded = this.conv.write(chunk);
        if (encoded && encoded.length) this.push(encoded);
        done();
    } catch (err) {
        done(err);
    }
};

IconvLiteEncoderStream.prototype._flush = function (done) {
    try {
        const tail = this.conv.end();
        if (tail && tail.length) this.push(tail);
        done();
    } catch (err) {
        done(err);
    }
};

// Convenience: gather all encoded output and deliver it as one Buffer.
IconvLiteEncoderStream.prototype.collect = function (cb) {
    const chunks = [];
    this.on('error', cb);
    this.on('data', (chunk) => { chunks.push(chunk); });
    this.on('end', () => {
        cb(null, Buffer.concat(chunks));
    });
    return this;
};
// == Decoder stream =======================================================
/**
 * Transform stream that feeds incoming Buffers through `conv` (an
 * iconv-lite decoder) and emits the decoded text as utf8 strings.
 */
function IconvLiteDecoderStream(conv, options) {
    this.conv = conv;
    options = options || {};
    options.encoding = this.encoding = 'utf8'; // We output strings.
    Transform.call(this, options);
}

IconvLiteDecoderStream.prototype = Object.create(Transform.prototype, {
    constructor: { value: IconvLiteDecoderStream }
});

IconvLiteDecoderStream.prototype._transform = function (chunk, encoding, done) {
    if (!Buffer.isBuffer(chunk))
        return done(new Error("Iconv decoding stream needs buffers as its input."));
    try {
        const decoded = this.conv.write(chunk);
        if (decoded && decoded.length) this.push(decoded, this.encoding);
        done();
    } catch (err) {
        done(err);
    }
};

IconvLiteDecoderStream.prototype._flush = function (done) {
    try {
        const tail = this.conv.end();
        if (tail && tail.length) this.push(tail, this.encoding);
        done();
    } catch (err) {
        done(err);
    }
};

// Convenience: gather all decoded output and deliver it as one string.
IconvLiteDecoderStream.prototype.collect = function (cb) {
    let acc = '';
    this.on('error', cb);
    this.on('data', (chunk) => { acc += chunk; });
    this.on('end', () => {
        cb(null, acc);
    });
    return this;
};
/***/ } ) ,
/***/ 561 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// given a set of versions and a range, create a "simplified" range
// that includes the same versions that the original range does
// If the original range is shorter than the simplified one, return that.
const satisfies = _ _webpack _require _ _ ( 55 )
const compare = _ _webpack _require _ _ ( 309 )
module . exports = ( versions , range , options ) => {
const set = [ ]
let min = null
let prev = null
const v = versions . sort ( ( a , b ) => compare ( a , b , options ) )
for ( const version of v ) {
const included = satisfies ( version , range , options )
if ( included ) {
prev = version
if ( ! min )
min = version
} else {
if ( prev ) {
set . push ( [ min , prev ] )
}
prev = null
min = null
}
}
if ( min )
set . push ( [ min , null ] )
const ranges = [ ]
for ( const [ min , max ] of set ) {
if ( min === max )
ranges . push ( min )
else if ( ! max && min === v [ 0 ] )
ranges . push ( '*' )
else if ( ! max )
ranges . push ( ` >= ${ min } ` )
else if ( min === v [ 0 ] )
ranges . push ( ` <= ${ max } ` )
else
ranges . push ( ` ${ min } - ${ max } ` )
}
const simplified = ranges . join ( ' || ' )
const original = typeof range . raw === 'string' ? range . raw : String ( range )
return simplified . length < original . length ? simplified : range
}
/***/ } ) ,
/***/ 579 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const SemVer = _ _webpack _require _ _ ( 88 )
const Range = _ _webpack _require _ _ ( 828 )
// Return the highest version in `versions` that satisfies `range`,
// or null when the range is invalid or nothing satisfies it.
const maxSatisfying = (versions, range, options) => {
  let rangeObj = null
  try {
    rangeObj = new Range(range, options)
  } catch (er) {
    // Invalid range: nothing can satisfy it.
    return null
  }

  let best = null
  let bestSV = null
  for (const v of versions) {
    if (!rangeObj.test(v)) // satisfies(v, range, options)
      continue
    if (!best || bestSV.compare(v) === -1) {
      // compare(max, v, true)
      best = v
      bestSV = new SemVer(best, options)
    }
  }
  return best
}
module . exports = maxSatisfying
/***/ } ) ,
/***/ 582 :
/***/ ( function ( _ _unusedmodule , exports ) {
"use strict" ;
var BOMChar = '\uFEFF' ;
exports . PrependBOM = PrependBOMWrapper
function PrependBOMWrapper ( encoder , options ) {
this . encoder = encoder ;
this . addBOM = true ;
}
PrependBOMWrapper . prototype . write = function ( str ) {
if ( this . addBOM ) {
str = BOMChar + str ;
this . addBOM = false ;
}
return this . encoder . write ( str ) ;
}
PrependBOMWrapper . prototype . end = function ( ) {
return this . encoder . end ( ) ;
}
//------------------------------------------------------------------------------
exports . StripBOM = StripBOMWrapper ;
function StripBOMWrapper ( decoder , options ) {
this . decoder = decoder ;
this . pass = false ;
this . options = options || { } ;
}
StripBOMWrapper . prototype . write = function ( buf ) {
var res = this . decoder . write ( buf ) ;
if ( this . pass || ! res )
return res ;
if ( res [ 0 ] === BOMChar ) {
res = res . slice ( 1 ) ;
if ( typeof this . options . stripBOM === 'function' )
this . options . stripBOM ( ) ;
}
this . pass = true ;
return res ;
}
StripBOMWrapper . prototype . end = function ( ) {
return this . decoder . end ( ) ;
}
/***/ } ) ,
/***/ 601 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const parse = _ _webpack _require _ _ ( 925 )
// Return the canonical version string for `version`, or null when it
// cannot be parsed as a (possibly loose) semver.
const valid = (version, options) => {
  const parsed = parse(version, options)
  if (!parsed)
    return null
  return parsed.version
}
module . exports = valid
/***/ } ) ,
/***/ 605 :
/***/ ( function ( module ) {
module . exports = require ( "http" ) ;
/***/ } ) ,
/***/ 614 :
/***/ ( function ( module ) {
module . exports = require ( "events" ) ;
/***/ } ) ,
/***/ 622 :
/***/ ( function ( module ) {
module . exports = require ( "path" ) ;
/***/ } ) ,
/***/ 625 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
exports . alphasort = alphasort
exports . alphasorti = alphasorti
exports . setopts = setopts
exports . ownProp = ownProp
exports . makeAbs = makeAbs
exports . finish = finish
exports . mark = mark
exports . isIgnored = isIgnored
exports . childrenIgnored = childrenIgnored
// hasOwnProperty check that is safe for objects with a null or
// overridden prototype.
function ownProp (obj, field) {
  const hasOwn = Object.prototype.hasOwnProperty
  return hasOwn.call(obj, field)
}
var path = _ _webpack _require _ _ ( 622 )
var minimatch = _ _webpack _require _ _ ( 973 )
var isAbsolute = _ _webpack _require _ _ ( 714 )
var Minimatch = minimatch . Minimatch
// Case-insensitive locale-aware comparator (used when nocase is set).
function alphasorti (a, b) {
  const left = a.toLowerCase()
  const right = b.toLowerCase()
  return left.localeCompare(right)
}
// Case-sensitive locale-aware comparator for sorting match lists.
function alphasort (a, b) {
  return a.localeCompare(b)
}
// Normalize options.ignore onto `self.ignore`: always an array, each
// pattern compiled to a { matcher, gmatcher } pair via ignoreMap.
function setupIgnores (self, options) {
  const raw = options.ignore || []
  self.ignore = Array.isArray(raw) ? raw : [raw]
  if (self.ignore.length > 0) {
    self.ignore = self.ignore.map(ignoreMap)
  }
}
// ignore patterns are always in dot:true mode.
// Compile one ignore pattern. Patterns ending in '/**' also get a
// `gmatcher` for the parent directory itself, so the directory and its
// contents are both ignored. Ignore patterns are always dot:true.
function ignoreMap (pattern) {
  let gmatcher = null
  if (pattern.slice(-3) === '/**') {
    const gpattern = pattern.replace(/(\/\*\*)+$/, '')
    gmatcher = new Minimatch(gpattern, { dot: true })
  }

  return {
    matcher: new Minimatch(pattern, { dot: true }),
    gmatcher: gmatcher
  }
}
function setopts ( self , pattern , options ) {
if ( ! options )
options = { }
// base-matching: just use globstar for that.
if ( options . matchBase && - 1 === pattern . indexOf ( "/" ) ) {
if ( options . noglobstar ) {
throw new Error ( "base matching requires globstar" )
}
pattern = "**/" + pattern
}
self . silent = ! ! options . silent
self . pattern = pattern
self . strict = options . strict !== false
self . realpath = ! ! options . realpath
self . realpathCache = options . realpathCache || Object . create ( null )
self . follow = ! ! options . follow
self . dot = ! ! options . dot
self . mark = ! ! options . mark
self . nodir = ! ! options . nodir
if ( self . nodir )
self . mark = true
self . sync = ! ! options . sync
self . nounique = ! ! options . nounique
self . nonull = ! ! options . nonull
self . nosort = ! ! options . nosort
self . nocase = ! ! options . nocase
self . stat = ! ! options . stat
self . noprocess = ! ! options . noprocess
self . absolute = ! ! options . absolute
self . maxLength = options . maxLength || Infinity
self . cache = options . cache || Object . create ( null )
self . statCache = options . statCache || Object . create ( null )
self . symlinks = options . symlinks || Object . create ( null )
setupIgnores ( self , options )
self . changedCwd = false
var cwd = process . cwd ( )
if ( ! ownProp ( options , "cwd" ) )
self . cwd = cwd
else {
self . cwd = path . resolve ( options . cwd )
self . changedCwd = self . cwd !== cwd
}
self . root = options . root || path . resolve ( self . cwd , "/" )
self . root = path . resolve ( self . root )
if ( process . platform === "win32" )
self . root = self . root . replace ( /\\/g , "/" )
// TODO: is an absolute `cwd` supposed to be resolved against `root`?
// e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
self . cwdAbs = isAbsolute ( self . cwd ) ? self . cwd : makeAbs ( self , self . cwd )
if ( process . platform === "win32" )
self . cwdAbs = self . cwdAbs . replace ( /\\/g , "/" )
self . nomount = ! ! options . nomount
// disable comments and negation in Minimatch.
// Note that they are not supported in Glob itself anyway.
options . nonegate = true
options . nocomment = true
self . minimatch = new Minimatch ( pattern , options )
self . options = self . minimatch . options
}
// Collapse self.matches (one key-set per pattern) into the final
// self.found list, applying nonull/nounique/nosort/mark/nodir/ignore.
function finish (self) {
  const nou = self.nounique
  let all = nou ? [] : Object.create(null)

  for (let i = 0, l = self.matches.length; i < l; i++) {
    const matches = self.matches[i]
    if (!matches || Object.keys(matches).length === 0) {
      if (self.nonull) {
        // do like the shell, and spit out the literal glob
        const literal = self.minimatch.globSet[i]
        if (nou)
          all.push(literal)
        else
          all[literal] = true
      }
    } else {
      // had matches
      const found = Object.keys(matches)
      if (nou)
        all.push.apply(all, found)
      else
        found.forEach(function (key) {
          all[key] = true
        })
    }
  }

  if (!nou)
    all = Object.keys(all)

  if (!self.nosort)
    all = all.sort(self.nocase ? alphasorti : alphasort)

  // at *some* point we statted all of these
  if (self.mark) {
    for (let i = 0; i < all.length; i++) {
      all[i] = self._mark(all[i])
    }
    if (self.nodir) {
      all = all.filter(function (e) {
        let notDir = !(/\/$/.test(e))
        const c = self.cache[e] || self.cache[makeAbs(self, e)]
        if (notDir && c)
          notDir = c !== 'DIR' && !Array.isArray(c)
        return notDir
      })
    }
  }

  if (self.ignore.length)
    all = all.filter(function (m) {
      return !isIgnored(self, m)
    })

  self.found = all
}
// Append or strip a trailing slash on `p` to reflect whether the cache
// says it is a directory; mirrors the cache/statCache entries for the
// adjusted name so later lookups hit.
function mark (self, p) {
  const abs = makeAbs(self, p)
  const cached = self.cache[abs]
  let marked = p
  if (cached) {
    const isDir = cached === 'DIR' || Array.isArray(cached)
    const endsWithSlash = p.slice(-1) === '/'

    if (isDir && !endsWithSlash)
      marked += '/'
    else if (!isDir && endsWithSlash)
      marked = marked.slice(0, -1)

    if (marked !== p) {
      const markedAbs = makeAbs(self, marked)
      self.statCache[markedAbs] = self.statCache[abs]
      self.cache[markedAbs] = self.cache[abs]
    }
  }

  return marked
}
// lotta situps...
// Resolve `f` to an absolute path: root-relative paths join onto
// self.root, already-absolute (or empty) paths pass through, otherwise
// resolve against the configured cwd. Windows paths are normalized to '/'.
function makeAbs (self, f) {
  let abs
  if (f.charAt(0) === '/') {
    abs = path.join(self.root, f)
  } else if (isAbsolute(f) || f === '') {
    abs = f
  } else if (self.changedCwd) {
    abs = path.resolve(self.cwd, f)
  } else {
    abs = path.resolve(f)
  }

  if (process.platform === 'win32')
    abs = abs.replace(/\\/g, '/')

  return abs
}
// Return true, if pattern ends with globstar '**', for the accompanying parent directory.
// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents
// True when `path` matches any ignore pattern, either directly or via a
// '/**' pattern's parent-directory matcher (gmatcher).
function isIgnored (self, path) {
  if (self.ignore.length === 0)
    return false

  return self.ignore.some((item) =>
    item.matcher.match(path) || Boolean(item.gmatcher && item.gmatcher.match(path))
  )
}
// True when everything under `path` is ignored — i.e. some '/**' ignore
// pattern's parent matcher (gmatcher) matches it. Plain matchers do not
// imply the children are ignored.
function childrenIgnored (self, path) {
  if (self.ignore.length === 0)
    return false

  return self.ignore.some((item) =>
    Boolean(item.gmatcher && item.gmatcher.match(path))
  )
}
/***/ } ) ,
/***/ 627 :
/***/ ( function ( module ) {
module . exports = [ [ "0" , "\u0000" , 127 ] , [ "a140" , " ,、。.‧;:?!︰…‥﹐﹑﹒·﹔﹕﹖﹗|–︱—︳╴︴﹏()︵︶{}︷︸〔〕︹︺【】︻︼《》︽︾〈〉︿﹀「」﹁﹂『』﹃﹄﹙﹚" ] , [ "a1a1" , "﹛﹜﹝﹞‘’“”〝〞‵′#&*※§〃○●△▲◎☆★◇◆□■▽▼㊣℅¯ ̄_ˍ﹉﹊﹍﹎﹋﹌﹟﹠﹡+-×÷±√<>=≦≧≠∞≒≡﹢" , 4 , "~∩∪⊥∠∟⊿㏒㏑∫∮∵∴♀♂⊕⊙↑↓←→↖↗↙↘∥∣/" ] , [ "a240" , "\∕﹨$¥〒¢£%@℃℉﹩﹪﹫㏕㎜㎝㎞㏎㎡㎎㎏㏄°兙兛兞兝兡兣嗧瓩糎▁" , 7 , "▏▎▍▌▋▊▉┼┴┬┤├▔─│▕┌┐└┘╭" ] , [ "a2a1" , "╮╰╯═╞╪╡◢◣◥◤╱╲╳0" , 9 , "Ⅰ " , 9 , "〡" , 8 , "十卄卅A" , 25 , "a " , 21 ] , [ "a340" , "w x y z Α " , 16 , "Σ" , 6 , "α " , 16 , "σ " , 6 , "ㄅ" , 10 ] , [ "a3a1" , "ㄐ" , 25 , "˙ˉˊˇˋ" ] , [ "a3e1" , "€" ] , [ "a440" , "一乙丁七乃九了二人儿入八几刀刁力匕十卜又三下丈上丫丸凡久么也乞于亡兀刃勺千叉口土士夕大女子孑孓寸小尢尸山川工己已巳巾干廾弋弓才" ] , [ "a4a1" , "丑丐不中丰丹之尹予云井互五亢仁什仃仆仇仍今介仄元允內六兮公冗凶分切刈勻勾勿化匹午升卅卞厄友及反壬天夫太夭孔少尤尺屯巴幻廿弔引心戈戶手扎支文斗斤方日曰月木欠止歹毋比毛氏水火爪父爻片牙牛犬王丙" ] , [ "a540" , "世丕且丘主乍乏乎以付仔仕他仗代令仙仞充兄冉冊冬凹出凸刊加功包匆北匝仟半卉卡占卯卮去可古右召叮叩叨叼司叵叫另只史叱台句叭叻四囚外" ] , [ "a5a1" , "央失奴奶孕它尼巨巧左市布平幼弁弘弗必戊打扔扒扑斥旦朮本未末札正母民氐永汁汀氾犯玄玉瓜瓦甘生用甩田由甲申疋白皮皿目矛矢石示禾穴立丞丟乒乓乩亙交亦亥仿伉伙伊伕伍伐休伏仲件任仰仳份企伋光兇兆先全" ] , [ "a640" , "共再冰列刑划刎刖劣匈匡匠印危吉吏同吊吐吁吋各向名合吃后吆吒因回囝圳地在圭圬圯圩夙多夷夸妄奸妃好她如妁字存宇守宅安寺尖屹州帆并年" ] , [ "a6a1" , "式弛忙忖戎戌戍成扣扛托收早旨旬旭曲曳有朽朴朱朵次此死氖汝汗汙江池汐汕污汛汍汎灰牟牝百竹米糸缶羊羽老考而耒耳聿肉肋肌臣自至臼舌舛舟艮色艾虫血行衣西阡串亨位住佇佗佞伴佛何估佐佑伽伺伸佃佔似但佣" ] , [ "a740" , "作你伯低伶余佝佈佚兌克免兵冶冷別判利刪刨劫助努劬匣即卵吝吭吞吾否呎吧呆呃吳呈呂君吩告吹吻吸吮吵吶吠吼呀吱含吟听囪困囤囫坊坑址坍" ] , [ "a7a1" , "均坎圾坐坏圻壯夾妝妒妨妞妣妙妖妍妤妓妊妥孝孜孚孛完宋宏尬局屁尿尾岐岑岔岌巫希序庇床廷弄弟彤形彷役忘忌志忍忱快忸忪戒我抄抗抖技扶抉扭把扼找批扳抒扯折扮投抓抑抆改攻攸旱更束李杏材村杜杖杞杉杆杠" ] , [ "a840" , "杓杗步每求汞沙沁沈沉沅沛汪決沐汰沌汨沖沒汽沃汲汾汴沆汶沍沔沘沂灶灼災灸牢牡牠狄狂玖甬甫男甸皂盯矣私秀禿究系罕肖肓肝肘肛肚育良芒" ] , [ "a8a1" , "芋芍見角言谷豆豕貝赤走足身車辛辰迂迆迅迄巡邑邢邪邦那酉釆里防阮阱阪阬並乖乳事些亞享京佯依侍佳使佬供例來侃佰併侈佩佻侖佾侏侑佺兔兒兕兩具其典冽函刻券刷刺到刮制剁劾劻卒協卓卑卦卷卸卹取叔受味呵" ] , [ "a940" , "咖呸咕咀呻呷咄咒咆呼咐呱呶和咚呢周咋命咎固垃坷坪坩坡坦坤坼夜奉奇奈奄奔妾妻委妹妮姑姆姐姍始姓姊妯妳姒姅孟孤季宗定官宜宙宛尚屈居" ] , [ "a9a1" , "屆岷岡岸岩岫岱岳帘帚帖帕帛帑幸庚店府底庖延弦弧弩往征彿彼忝忠忽念忿怏怔怯怵怖怪怕怡性怩怫怛或戕房戾所承拉拌拄抿拂抹拒招披拓拔拋拈抨抽押拐拙拇拍抵拚抱拘拖拗拆抬拎放斧於旺昔易昌昆昂明昀昏昕昊" ] , [ "aa40" , "昇服朋杭枋枕東果杳杷枇枝林杯杰板枉松析杵枚枓杼杪杲欣武歧歿氓氛泣注泳沱泌泥河沽沾沼波沫法泓沸泄油況沮泗泅泱沿治泡泛泊沬泯泜泖泠" ] , [ "aaa1" , " 炕 <EFBFBD> <EFBFBD>
/***/ } ) ,
/***/ 631 :
/***/ ( function ( module ) {
module . exports = require ( "net" ) ;
/***/ } ) ,
/***/ 647 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// tsc-emitted helper: re-export binding `m[k]` on `o` under name `k2`
// (a live getter when Object.create is available, a copy otherwise).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// tsc-emitted helper: attach the original module as the `default` export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// tsc-emitted helper: emulate `import * as ns` semantics for CommonJS modules.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
// Action state helpers: persisted via @actions/core saveState, which the
// runner re-exposes as STATE_* environment variables in the post phase.
exports.setTmpDir = exports.tmpDir = exports.IsPost = void 0;
const core = __importStar(__webpack_require__(186));
// True when this process is the post-execution phase of the action.
exports.IsPost = !!process.env['STATE_isPost'];
// Temp directory recorded during the main phase ('' when unset).
exports.tmpDir = process.env['STATE_tmpDir'] || '';
// Persist the temp dir so the post phase can find and clean it up.
function setTmpDir(tmpDir) {
    core.saveState('tmpDir', tmpDir);
}
exports.setTmpDir = setTmpDir;
// First (main) run: mark state so the next invocation knows it is "post".
if (!exports.IsPost) {
    core.saveState('isPost', 'true');
}
//# sourceMappingURL=state-helper.js.map
/***/ } ) ,
/***/ 655 :
/***/ ( function ( module ) {
module . exports = [ [ "0" , "\u0000" , 127 ] , [ "8ea1" , "。" , 62 ] , [ "a1a1" , " 、。,.・:;?!゛゜´`¨^ ̄_ヽヾゝゞ〃仝々〆〇ー―‐/\~∥|…‥‘’“”()〔〕[]{}〈" , 9 , "+-±×÷=≠<>≦≧∞∴♂♀°′″℃¥$¢£%#&*@§☆★○●◎◇" ] , [ "a2a1" , "◆□■△▲▽▼※〒→←↑↓〓" ] , [ "a2ba" , "∈∋⊆⊇⊂⊃∪∩" ] , [ "a2ca" , "∧∨¬⇒⇔∀∃" ] , [ "a2dc" , "∠⊥⌒∂∇≡≒≪≫√∽∝∵∫∬" ] , [ "a2f2" , "ʼn♯♭♪†‡¶" ] , [ "a2fe" , "◯" ] , [ "a3b0" , "0 " , 9 ] , [ "a3c1" , "A " , 25 ] , [ "a3e1" , "a " , 25 ] , [ "a4a1" , "ぁ" , 82 ] , [ "a5a1" , "ァ" , 85 ] , [ "a6a1" , "Α " , 16 , "Σ" , 6 ] , [ "a6c1" , "α " , 16 , "σ " , 6 ] , [ "a7a1" , "А " , 5 , "ЁЖ" , 25 ] , [ "a7d1" , "а " , 5 , "ёж" , 25 ] , [ "a8a1" , "─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂" ] , [ "ada1" , "①" , 19 , "Ⅰ " , 9 ] , [ "adc0" , "㍉㌔㌢㍍㌘㌧㌃㌶㍑㍗㌍㌦㌣㌫㍊㌻㎜㎝㎞㎎㎏㏄㎡" ] , [ "addf" , "㍻〝〟№㏍℡㊤" , 4 , "㈱㈲㈹㍾㍽㍼≒≡∫∮∑√⊥∠∟⊿∵∩∪" ] , [ "b0a1" , "亜唖娃阿哀愛挨姶逢葵茜穐悪握渥旭葦芦鯵梓圧斡扱宛姐虻飴絢綾鮎或粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃萎衣謂違遺医井亥域育郁磯一壱溢逸稲茨芋鰯允印咽員因姻引飲淫胤蔭" ] , [ "b1a1" , "院陰隠韻吋右宇烏羽迂雨卯鵜窺丑碓臼渦嘘唄欝蔚鰻姥厩浦瓜閏噂云運雲荏餌叡営嬰影映曳栄永泳洩瑛盈穎頴英衛詠鋭液疫益駅悦謁越閲榎厭円園堰奄宴延怨掩援沿演炎焔煙燕猿縁艶苑薗遠鉛鴛塩於汚甥凹央奥往応" ] , [ "b2a1" , "押旺横欧殴王翁襖鴬鴎黄岡沖荻億屋憶臆桶牡乙俺卸恩温穏音下化仮何伽価佳加可嘉夏嫁家寡科暇果架歌河火珂禍禾稼箇花苛茄荷華菓蝦課嘩貨迦過霞蚊俄峨我牙画臥芽蛾賀雅餓駕介会解回塊壊廻快怪悔恢懐戒拐改" ] , [ "b3a1" , "魁晦械海灰界皆絵芥蟹開階貝凱劾外咳害崖慨概涯碍蓋街該鎧骸浬馨蛙垣柿蛎鈎劃嚇各廓拡撹格核殻獲確穫覚角赫較郭閣隔革学岳楽額顎掛笠樫橿梶鰍潟割喝恰括活渇滑葛褐轄且鰹叶椛樺鞄株兜竃蒲釜鎌噛鴨栢茅萱" ] , [ "b4a1" , "粥刈苅瓦乾侃冠寒刊勘勧巻喚堪姦完官寛干幹患感慣憾換敢柑桓棺款歓汗漢澗潅環甘監看竿管簡緩缶翰肝艦莞観諌貫還鑑間閑関陥韓館舘丸含岸巌玩癌眼岩翫贋雁頑顔願企伎危喜器基奇嬉寄岐希幾忌揮机旗既期棋棄" ] , [ "b5a1" , "機帰毅気汽畿祈季稀紀徽規記貴起軌輝飢騎鬼亀偽儀妓宜戯技擬欺犠疑祇義蟻誼議掬菊鞠吉吃喫桔橘詰砧杵黍却客脚虐逆丘久仇休及吸宮弓急救朽求汲泣灸球究窮笈級糾給旧牛去居巨拒拠挙渠虚許距鋸漁禦魚亨享京" ] , [ "b6a1" , "供侠僑兇競共凶協匡卿叫喬境峡強彊怯恐恭挟教橋況狂狭矯胸脅興蕎郷鏡響饗驚仰凝尭暁業局曲極玉桐粁僅勤均巾錦斤欣欽琴禁禽筋緊芹菌衿襟謹近金吟銀九倶句区狗玖矩苦躯駆駈駒具愚虞喰空偶寓遇隅串櫛釧屑屈" ] , [ "b7a1" , "掘窟沓靴轡窪熊隈粂栗繰桑鍬勲君薫訓群軍郡卦袈祁係傾刑兄啓圭珪型契形径恵慶慧憩掲携敬景桂渓畦稽系経継繋罫茎荊蛍計詣警軽頚鶏芸迎鯨劇戟撃激隙桁傑欠決潔穴結血訣月件倹倦健兼券剣喧圏堅嫌建憲懸拳捲" ] , [ "b8a1" , "検権牽犬献研硯絹県肩見謙賢軒遣鍵険顕験鹸元原厳幻弦減源玄現絃舷言諺限乎個古呼固姑孤己庫弧戸故枯湖狐糊袴股胡菰虎誇跨鈷雇顧鼓五互伍午呉吾娯後御悟梧檎瑚碁語誤護醐乞鯉交佼侯候倖光公功効勾厚口向" ] , [ "b9a1" , "后喉坑垢好孔孝宏工巧巷幸広庚康弘恒慌抗拘控攻昂晃更杭校梗構江洪浩港溝甲皇硬稿糠紅紘絞綱耕考肯肱腔膏航荒行衡講貢購郊酵鉱砿鋼閤降項香高鴻剛劫号合壕拷濠豪轟麹克刻告国穀酷鵠黒獄漉腰甑忽惚骨狛込" ] , [ "baa1" , " 此頃今困坤墾婚恨懇昏昆根梱混痕紺艮魂些佐叉唆嵯左差査沙瑳砂詐鎖裟 <EFBFBD>
/***/ } ) ,
/***/ 668 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
var request = _ _webpack _require _ _ ( 234 ) ;
var universalUserAgent = _ _webpack _require _ _ ( 429 ) ;
const VERSION = "4.5.4" ;
/**
 * Error raised when a GraphQL response carries an `errors` array.
 * The response payload (including `errors`) and headers are copied onto
 * the instance, and the originating request options are kept on `request`.
 */
class GraphqlError extends Error {
  constructor(request, response) {
    super(response.data.errors[0].message);
    Object.assign(this, response.data);
    Object.assign(this, { headers: response.headers });
    this.name = "GraphqlError";
    this.request = request;
    // Maintains proper stack trace (only available on V8)
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
  }
}
// Keys that belong to the HTTP request itself; everything else becomes a
// GraphQL variable.
const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];

/**
 * Execute a GraphQL request. `query` may be a query string (merged with
 * `options`) or a complete options object. Non-request keys are collected
 * under `variables`; a response containing `errors` throws GraphqlError.
 */
function graphql(request, query, options) {
  const merged = typeof query === "string" ? Object.assign({ query }, options) : query;

  const requestOptions = {};
  for (const key of Object.keys(merged)) {
    if (NON_VARIABLE_OPTIONS.includes(key)) {
      requestOptions[key] = merged[key];
    } else {
      if (!requestOptions.variables) {
        requestOptions.variables = {};
      }
      requestOptions.variables[key] = merged[key];
    }
  }

  return request(requestOptions).then((response) => {
    if (response.data.errors) {
      // Copy headers into a plain object for the error.
      const headers = {};
      for (const key of Object.keys(response.headers)) {
        headers[key] = response.headers[key];
      }
      throw new GraphqlError(requestOptions, {
        headers,
        data: response.data
      });
    }
    return response.data.data;
  });
}
// Build a graphql function bound to `request$1.defaults(newDefaults)`.
// The returned function also exposes `.defaults` for further chaining
// and the shared request `.endpoint`.
function withDefaults(request$1, newDefaults) {
  const newRequest = request$1.defaults(newDefaults);
  const newApi = (query, options) => graphql(newRequest, query, options);
  return Object.assign(newApi, {
    defaults: withDefaults.bind(null, newRequest),
    endpoint: request.request.endpoint
  });
}
const graphql$1 = withDefaults ( request . request , {
headers : {
"user-agent" : ` octokit-graphql.js/ ${ VERSION } ${ universalUserAgent . getUserAgent ( ) } `
} ,
method : "POST" ,
url : "/graphql"
} ) ;
// Build a graphql API on top of a caller-supplied request function,
// preconfigured to POST to the /graphql endpoint.
function withCustomRequest(customRequest) {
  const defaults = {
    method: "POST",
    url: "/graphql"
  };
  return withDefaults(customRequest, defaults);
}
exports . graphql = graphql$1 ;
exports . withCustomRequest = withCustomRequest ;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 669 :
/***/ ( function ( module ) {
module . exports = require ( "util" ) ;
/***/ } ) ,
/***/ 670 :
/***/ ( function ( module ) {
module . exports = register
/**
 * Invoke `method(options)` through every hook registered under `name`.
 * `name` may be an array of names, applied outside-in. Always returns a
 * Promise; throws synchronously when `method` is not a function.
 */
function register (state, name, method, options) {
  if (typeof method !== 'function') {
    throw new Error('method for before hook must be a function')
  }

  if (!options) {
    options = {}
  }

  if (Array.isArray(name)) {
    // Nest registrations right-to-left so the first name wraps outermost.
    return name
      .reverse()
      .reduce(
        (callback, innerName) => register.bind(null, state, innerName, callback, options),
        method
      )()
  }

  return Promise.resolve().then(() => {
    if (!state.registry[name]) {
      return method(options)
    }

    return state.registry[name].reduce(
      (wrappedMethod, registered) => registered.hook.bind(null, wrappedMethod, options),
      method
    )()
  })
}
/***/ } ) ,
/***/ 682 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var register = _ _webpack _require _ _ ( 670 )
var addHook = _ _webpack _require _ _ ( 549 )
var removeHook = _ _webpack _require _ _ ( 819 )
// bind with array of arguments: https://stackoverflow.com/a/21792913
var bind = Function . bind
var bindable = bind . bind ( bind )
// Wire the hook function up with its `remove` helper and one registration
// method per kind ('before' | 'error' | 'after' | 'wrap'), mirrored on
// both the function itself and its `.api` object.
function bindApi (hook, state, name) {
  const removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
  hook.api = { remove: removeHookRef }
  hook.remove = removeHookRef

  ;['before', 'error', 'after', 'wrap'].forEach((kind) => {
    const args = name ? [state, kind, name] : [state, kind]
    hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
  })
}

// Hook bound to a single, fixed (anonymous) hook name.
function HookSingular () {
  const singularHookName = 'h'
  const singularHookState = { registry: {} }
  const singularHook = register.bind(null, singularHookState, singularHookName)
  bindApi(singularHook, singularHookState, singularHookName)
  return singularHook
}

// Collection of named hooks sharing one registry.
function HookCollection () {
  const state = { registry: {} }
  const hook = register.bind(null, state)
  bindApi(hook, state)
  return hook
}

let collectionHookDeprecationMessageDisplayed = false

// Deprecated alias for HookCollection(); warns once per process.
function Hook () {
  if (!collectionHookDeprecationMessageDisplayed) {
    console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
    collectionHookDeprecationMessageDisplayed = true
  }
  return HookCollection()
}
Hook . Singular = HookSingular . bind ( )
Hook . Collection = HookCollection . bind ( )
module . exports = Hook
// expose constructors as a named property for TypeScript
module . exports . Hook = Hook
module . exports . Singular = Hook . Singular
module . exports . Collection = Hook . Collection
/***/ } ) ,
/***/ 688 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const SemVer = __webpack_require__(88)
// Return the major version number of `a` (parsed as SemVer; `loose`
// enables lenient parsing, as with the SemVer constructor).
const major = (a, loose) => new SemVer(a, loose).major
module.exports = major
/***/ } ) ,
/***/ 689 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var Buffer = _ _webpack _require _ _ ( 937 ) . Buffer ;
// UTF-7 codec, according to https://tools.ietf.org/html/rfc2152
// See also below a UTF-7-IMAP codec, according to http://tools.ietf.org/html/rfc3501#section-5.1.3
exports . utf7 = Utf7Codec ;
exports . unicode11utf7 = 'utf7' ; // Alias UNICODE-1-1-UTF-7
function Utf7Codec ( codecOptions , iconv ) {
this . iconv = iconv ;
} ;
Utf7Codec . prototype . encoder = Utf7Encoder ;
Utf7Codec . prototype . decoder = Utf7Decoder ;
Utf7Codec . prototype . bomAware = true ;
// -- Encoding
// Matches runs of characters that cannot be written directly in UTF-7.
var nonDirectChars = /[^A-Za-z0-9'\(\),-\.\/:\? \n\r\t]+/g;

// Stateless UTF-7 (RFC 2152) encoder.
function Utf7Encoder(options, codec) {
    this.iconv = codec.iconv;
}

// Naive implementation.
// Non-direct chars are encoded as "+<base64>-"; single "+" char is encoded as "+-".
Utf7Encoder.prototype.write = function (str) {
    const encoded = str.replace(nonDirectChars, (chunk) => {
        if (chunk === '+')
            return "+-";
        const b64 = this.iconv.encode(chunk, 'utf16-be').toString('base64').replace(/=+$/, '');
        return "+" + b64 + "-";
    });
    return Buffer.from(encoded);
};

Utf7Encoder.prototype.end = function () {
};
// -- Decoding
function Utf7Decoder ( options , codec ) {
this . iconv = codec . iconv ;
this . inBase64 = false ;
this . base64Accum = '' ;
}
var base64Regex = /[A-Za-z0-9\/+]/ ;
var base64Chars = [ ] ;
for ( var i = 0 ; i < 256 ; i ++ )
base64Chars [ i ] = base64Regex . test ( String . fromCharCode ( i ) ) ;
var plusChar = '+' . charCodeAt ( 0 ) ,
minusChar = '-' . charCodeAt ( 0 ) ,
andChar = '&' . charCodeAt ( 0 ) ;
Utf7Decoder . prototype . write = function ( buf ) {
var res = "" , lastI = 0 ,
inBase64 = this . inBase64 ,
base64Accum = this . base64Accum ;
// The decoder is more involved as we must handle chunks in stream.
for ( var i = 0 ; i < buf . length ; i ++ ) {
if ( ! inBase64 ) { // We're in direct mode.
// Write direct chars until '+'
if ( buf [ i ] == plusChar ) {
res += this . iconv . decode ( buf . slice ( lastI , i ) , "ascii" ) ; // Write direct chars.
lastI = i + 1 ;
inBase64 = true ;
}
} else { // We decode base64.
if ( ! base64Chars [ buf [ i ] ] ) { // Base64 ended.
if ( i == lastI && buf [ i ] == minusChar ) { // "+-" -> "+"
res += "+" ;
} else {
var b64str = base64Accum + buf . slice ( lastI , i ) . toString ( ) ;
res += this . iconv . decode ( Buffer . from ( b64str , 'base64' ) , "utf16-be" ) ;
}
if ( buf [ i ] != minusChar ) // Minus is absorbed after base64.
i -- ;
lastI = i + 1 ;
inBase64 = false ;
base64Accum = '' ;
}
}
}
if ( ! inBase64 ) {
res += this . iconv . decode ( buf . slice ( lastI ) , "ascii" ) ; // Write direct chars.
} else {
var b64str = base64Accum + buf . slice ( lastI ) . toString ( ) ;
var canBeDecoded = b64str . length - ( b64str . length % 8 ) ; // Minimal chunk: 2 quads -> 2x3 bytes -> 3 chars.
base64Accum = b64str . slice ( canBeDecoded ) ; // The rest will be decoded in future.
b64str = b64str . slice ( 0 , canBeDecoded ) ;
res += this . iconv . decode ( Buffer . from ( b64str , 'base64' ) , "utf16-be" ) ;
}
this . inBase64 = inBase64 ;
this . base64Accum = base64Accum ;
return res ;
}
// Flush: decode any buffered base64 tail as UTF-16BE, then reset state.
Utf7Decoder.prototype.end = function () {
    let res = "";
    if (this.inBase64 && this.base64Accum.length > 0)
        res = this.iconv.decode(Buffer.from(this.base64Accum, 'base64'), "utf16-be");

    this.inBase64 = false;
    this.base64Accum = '';
    return res;
};
// UTF-7-IMAP codec.
// RFC3501 Sec. 5.1.3 Modified UTF-7 (http://tools.ietf.org/html/rfc3501#section-5.1.3)
// Differences:
// * Base64 part is started by "&" instead of "+"
// * Direct characters are 0x20-0x7E, except "&" (0x26)
// * In Base64, "," is used instead of "/"
// * Base64 must not be used to represent direct characters.
// * No implicit shift back from Base64 (should always end with '-')
// * String must end in non-shifted position.
// * "-&" while in base64 is not allowed.
exports . utf7imap = Utf7IMAPCodec ;
function Utf7IMAPCodec ( codecOptions , iconv ) {
this . iconv = iconv ;
} ;
Utf7IMAPCodec . prototype . encoder = Utf7IMAPEncoder ;
Utf7IMAPCodec . prototype . decoder = Utf7IMAPDecoder ;
Utf7IMAPCodec . prototype . bomAware = true ;
// -- Encoding
function Utf7IMAPEncoder ( options , codec ) {
this . iconv = codec . iconv ;
this . inBase64 = false ;
this . base64Accum = Buffer . alloc ( 6 ) ;
this . base64AccumIdx = 0 ;
}
Utf7IMAPEncoder . prototype . write = function ( str ) {
var inBase64 = this . inBase64 ,
base64Accum = this . base64Accum ,
base64AccumIdx = this . base64AccumIdx ,
buf = Buffer . alloc ( str . length * 5 + 10 ) , bufIdx = 0 ;
for ( var i = 0 ; i < str . length ; i ++ ) {
var uChar = str . charCodeAt ( i ) ;
if ( 0x20 <= uChar && uChar <= 0x7E ) { // Direct character or '&'.
if ( inBase64 ) {
if ( base64AccumIdx > 0 ) {
bufIdx += buf . write ( base64Accum . slice ( 0 , base64AccumIdx ) . toString ( 'base64' ) . replace ( /\//g , ',' ) . replace ( /=+$/ , '' ) , bufIdx ) ;
base64AccumIdx = 0 ;
}
buf [ bufIdx ++ ] = minusChar ; // Write '-', then go to direct mode.
inBase64 = false ;
}
if ( ! inBase64 ) {
buf [ bufIdx ++ ] = uChar ; // Write direct character
if ( uChar === andChar ) // Ampersand -> '&-'
buf [ bufIdx ++ ] = minusChar ;
}
} else { // Non-direct character
if ( ! inBase64 ) {
buf [ bufIdx ++ ] = andChar ; // Write '&', then go to base64 mode.
inBase64 = true ;
}
if ( inBase64 ) {
base64Accum [ base64AccumIdx ++ ] = uChar >> 8 ;
base64Accum [ base64AccumIdx ++ ] = uChar & 0xFF ;
if ( base64AccumIdx == base64Accum . length ) {
bufIdx += buf . write ( base64Accum . toString ( 'base64' ) . replace ( /\//g , ',' ) , bufIdx ) ;
base64AccumIdx = 0 ;
}
}
}
}
this . inBase64 = inBase64 ;
this . base64AccumIdx = base64AccumIdx ;
return buf . slice ( 0 , bufIdx ) ;
}
// Flush the encoder: emit any pending base64 bytes and terminate the run
// with '-' so the stream ends in direct (non-shifted) mode.
Utf7IMAPEncoder.prototype.end = function() {
    var out = Buffer.alloc(10);
    var outIdx = 0;
    if (this.inBase64) {
        if (this.base64AccumIdx > 0) {
            var tail = this.base64Accum.slice(0, this.base64AccumIdx)
                .toString('base64')
                .replace(/\//g, ',')   // IMAP modified base64 uses ',' for '/'
                .replace(/=+$/, '');   // padding is never written
            outIdx += out.write(tail, outIdx);
            this.base64AccumIdx = 0;
        }
        out[outIdx++] = minusChar; // close the '&...' base64 section
        this.inBase64 = false;
    }
    return out.slice(0, outIdx);
}
// -- Decoding
// Stateful UTF-7-IMAP decoder; carries an undecoded base64 tail across chunks.
function Utf7IMAPDecoder(options, codec) {
    this.iconv = codec.iconv;
    this.inBase64 = false;  // inside a '&...' base64 section?
    this.base64Accum = '';  // base64 text not yet decodable (partial quads)
}
// IMAP's modified base64 alphabet: the standard base64 set plus ','
// (0x2C), which IMAP uses in place of '/'.
var base64IMAPChars = base64Chars.slice();
base64IMAPChars[0x2C] = true; // ','
Utf7IMAPDecoder . prototype . write = function ( buf ) {
var res = "" , lastI = 0 ,
inBase64 = this . inBase64 ,
base64Accum = this . base64Accum ;
// The decoder is more involved as we must handle chunks in stream.
// It is forgiving, closer to standard UTF-7 (for example, '-' is optional at the end).
for ( var i = 0 ; i < buf . length ; i ++ ) {
if ( ! inBase64 ) { // We're in direct mode.
// Write direct chars until '&'
if ( buf [ i ] == andChar ) {
res += this . iconv . decode ( buf . slice ( lastI , i ) , "ascii" ) ; // Write direct chars.
lastI = i + 1 ;
inBase64 = true ;
}
} else { // We decode base64.
if ( ! base64IMAPChars [ buf [ i ] ] ) { // Base64 ended.
if ( i == lastI && buf [ i ] == minusChar ) { // "&-" -> "&"
res += "&" ;
} else {
var b64str = base64Accum + buf . slice ( lastI , i ) . toString ( ) . replace ( /,/g , '/' ) ;
res += this . iconv . decode ( Buffer . from ( b64str , 'base64' ) , "utf16-be" ) ;
}
if ( buf [ i ] != minusChar ) // Minus may be absorbed after base64.
i -- ;
lastI = i + 1 ;
inBase64 = false ;
base64Accum = '' ;
}
}
}
if ( ! inBase64 ) {
res += this . iconv . decode ( buf . slice ( lastI ) , "ascii" ) ; // Write direct chars.
} else {
var b64str = base64Accum + buf . slice ( lastI ) . toString ( ) . replace ( /,/g , '/' ) ;
var canBeDecoded = b64str . length - ( b64str . length % 8 ) ; // Minimal chunk: 2 quads -> 2x3 bytes -> 3 chars.
base64Accum = b64str . slice ( canBeDecoded ) ; // The rest will be decoded in future.
b64str = b64str . slice ( 0 , canBeDecoded ) ;
res += this . iconv . decode ( Buffer . from ( b64str , 'base64' ) , "utf16-be" ) ;
}
this . inBase64 = inBase64 ;
this . base64Accum = base64Accum ;
return res ;
}
// Flush the decoder: decode the buffered base64 tail (already '/'-normalized
// by write()) and reset state.
Utf7IMAPDecoder.prototype.end = function() {
    var out = "";
    if (this.inBase64 && this.base64Accum.length > 0) {
        out = this.iconv.decode(Buffer.from(this.base64Accum, 'base64'), "utf16-be");
    }
    this.inBase64 = false;
    this.base64Accum = '';
    return out;
}
/***/ } ) ,
/***/ 701 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compareBuild = __webpack_require__(156)
// Sort a list of versions in place, highest first (build metadata included
// in the comparison via compareBuild).
const rsort = (list, loose) => list.sort((left, right) => compareBuild(right, left, loose))
module.exports = rsort
/***/ } ) ,
/***/ 706 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const Range = __webpack_require__(828)
// Mostly just for testing and legacy API reasons
// Flatten a range into comparator strings: one array per OR-ed alternative.
const toComparators = (range, options) => {
  const parsed = new Range(range, options)
  return parsed.set.map((comp) => comp.map((c) => c.value).join(' ').trim().split(' '))
}
module.exports = toComparators
/***/ } ) ,
/***/ 714 :
/***/ ( function ( module ) {
"use strict" ;
// A POSIX path is absolute exactly when it begins with '/'.
function posix(path) {
    var first = path.charAt(0);
    return first === '/';
}
// A Windows path is absolute when a root separator follows the optional
// device (drive letter or UNC host/share); UNC paths are always absolute.
function win32(path) {
    // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56
    var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/;
    var parts = splitDeviceRe.exec(path);
    var device = parts[1] || '';
    var isUnc = Boolean(device && device.charAt(1) !== ':');
    // UNC paths are always absolute
    return Boolean(parts[2] || isUnc);
}
// Export the platform-appropriate check; both variants stay reachable.
var impl = process.platform === 'win32' ? win32 : posix;
impl.posix = posix;
impl.win32 = win32;
module.exports = impl;
/***/ } ) ,
/***/ 717 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
var concatMap = __webpack_require__(891);
var balanced = __webpack_require__(760);
module.exports = expandTop;
// Unique sentinel strings used to hide backslash-escaped metacharacters
// ('\\', '\{', '\}', '\,', '\.') from the expansion logic; unescapeBraces
// restores the literal characters afterwards. NUL + Math.random() makes
// accidental collisions with user input effectively impossible.
var escSlash = '\0SLASH' + Math.random() + '\0';
var escOpen = '\0OPEN' + Math.random() + '\0';
var escClose = '\0CLOSE' + Math.random() + '\0';
var escComma = '\0COMMA' + Math.random() + '\0';
var escPeriod = '\0PERIOD' + Math.random() + '\0';
// Interpret a sequence endpoint: a decimal integer when the string is one
// ("42" -> 42), otherwise the char code of its first character ("a" -> 97).
function numeric(str) {
    var asInt = parseInt(str, 10);
    return asInt == str ? asInt : str.charCodeAt(0);
}
// Hide backslash-escaped metacharacters behind sentinel strings so the
// expansion logic never mistakes them for structure.
function escapeBraces(pattern) {
    return pattern
        .split('\\\\').join(escSlash)
        .split('\\{').join(escOpen)
        .split('\\}').join(escClose)
        .split('\\,').join(escComma)
        .split('\\.').join(escPeriod);
}
// Inverse of escapeBraces: turn the sentinels back into literal characters.
function unescapeBraces(pattern) {
    return pattern
        .split(escSlash).join('\\')
        .split(escOpen).join('{')
        .split(escClose).join('}')
        .split(escComma).join(',')
        .split(escPeriod).join('.');
}
// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts(str) {
    if (!str)
        return [''];

    var match = balanced('{', '}', str);
    if (!match)
        return str.split(',');

    // Split the prefix on commas; the whole braced section belongs to the
    // last prefix piece (it was not comma-separated from it).
    var pieces = match.pre.split(',');
    pieces[pieces.length - 1] += '{' + match.body + '}';

    // Recurse on whatever follows the closing brace and glue its first
    // member onto the last piece for the same reason.
    var tailParts = parseCommaParts(match.post);
    if (match.post.length) {
        pieces[pieces.length - 1] += tailParts.shift();
        pieces.push.apply(pieces, tailParts);
    }

    return pieces;
}
// Entry point: escape user backslash-escapes, expand, then unescape.
function expandTop(str) {
    if (!str)
        return [];

    // I don't know why Bash 4.3 does this, but it does.
    // Anything starting with {} will have the first two bytes preserved
    // but *only* at the top level, so {},a}b will not expand to anything,
    // but a{},b}c will be expanded to [a}c,abc].
    // One could argue that this is a bug in Bash, but since the goal of
    // this module is to match Bash's rules, we escape a leading {}
    if (str.slice(0, 2) === '{}') {
        str = '\\{\\}' + str.slice(2);
    }

    return expand(escapeBraces(str), true).map(unescapeBraces);
}
// Small helpers used by expand() below.

function identity(value) {
    return value;
}

// Wrap a string back in braces.
function embrace(inner) {
    return '{' + inner + '}';
}

// True for zero-padded numbers such as "01" or "-07" (padding is preserved
// when generating numeric sequences).
function isPadded(el) {
    return /^-?0\d/.test(el);
}

// Loop-direction predicates for ascending / descending sequences.
function lte(i, y) {
    return i <= y;
}

function gte(i, y) {
    return i >= y;
}
function expand ( str , isTop ) {
var expansions = [ ] ;
var m = balanced ( '{' , '}' , str ) ;
if ( ! m || /\$$/ . test ( m . pre ) ) return [ str ] ;
var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/ . test ( m . body ) ;
var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/ . test ( m . body ) ;
var isSequence = isNumericSequence || isAlphaSequence ;
var isOptions = m . body . indexOf ( ',' ) >= 0 ;
if ( ! isSequence && ! isOptions ) {
// {a},b}
if ( m . post . match ( /,.*\}/ ) ) {
str = m . pre + '{' + m . body + escClose + m . post ;
return expand ( str ) ;
}
return [ str ] ;
}
var n ;
if ( isSequence ) {
n = m . body . split ( /\.\./ ) ;
} else {
n = parseCommaParts ( m . body ) ;
if ( n . length === 1 ) {
// x{{a,b}}y ==> x{a}y x{b}y
n = expand ( n [ 0 ] , false ) . map ( embrace ) ;
if ( n . length === 1 ) {
var post = m . post . length
? expand ( m . post , false )
: [ '' ] ;
return post . map ( function ( p ) {
return m . pre + n [ 0 ] + p ;
} ) ;
}
}
}
// at this point, n is the parts, and we know it's not a comma set
// with a single entry.
// no need to expand pre, since it is guaranteed to be free of brace-sets
var pre = m . pre ;
var post = m . post . length
? expand ( m . post , false )
: [ '' ] ;
var N ;
if ( isSequence ) {
var x = numeric ( n [ 0 ] ) ;
var y = numeric ( n [ 1 ] ) ;
var width = Math . max ( n [ 0 ] . length , n [ 1 ] . length )
var incr = n . length == 3
? Math . abs ( numeric ( n [ 2 ] ) )
: 1 ;
var test = lte ;
var reverse = y < x ;
if ( reverse ) {
incr *= - 1 ;
test = gte ;
}
var pad = n . some ( isPadded ) ;
N = [ ] ;
for ( var i = x ; test ( i , y ) ; i += incr ) {
var c ;
if ( isAlphaSequence ) {
c = String . fromCharCode ( i ) ;
if ( c === '\\' )
c = '' ;
} else {
c = String ( i ) ;
if ( pad ) {
var need = width - c . length ;
if ( need > 0 ) {
var z = new Array ( need + 1 ) . join ( '0' ) ;
if ( i < 0 )
c = '-' + z + c . slice ( 1 ) ;
else
c = z + c ;
}
}
}
N . push ( c ) ;
}
} else {
N = concatMap ( n , function ( el ) { return expand ( el , false ) } ) ;
}
for ( var j = 0 ; j < N . length ; j ++ ) {
for ( var k = 0 ; k < post . length ; k ++ ) {
var expansion = pre + N [ j ] + post [ k ] ;
if ( ! isTop || isSequence || expansion )
expansions . push ( expansion ) ;
}
}
return expansions ;
}
/***/ } ) ,
/***/ 734 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
var pathModule = __webpack_require__(622);
var isWindows = process.platform === 'win32';
var fs = __webpack_require__(747);
// JavaScript implementation of realpath, ported from node pre-v6
// Debug tracing is controlled by NODE_DEBUG=fs, matching node core's convention.
var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG);
function rethrow ( ) {
// Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
// is fairly slow to generate.
var callback ;
if ( DEBUG ) {
var backtrace = new Error ;
callback = debugCallback ;
} else
callback = missingCallback ;
return callback ;
function debugCallback ( err ) {
if ( err ) {
backtrace . message = err . message ;
err = backtrace ;
missingCallback ( err ) ;
}
}
function missingCallback ( err ) {
if ( err ) {
if ( process . throwDeprecation )
throw err ; // Forgot a callback but don't know where? Use NODE_DEBUG=fs
else if ( ! process . noDeprecation ) {
var msg = 'fs: missing callback ' + ( err . stack || err . message ) ;
if ( process . traceDeprecation )
console . trace ( msg ) ;
else
console . error ( msg ) ;
}
}
}
}
// Use the supplied callback when it is a function; otherwise fall back to
// the rethrow() handler.
function maybeCallback(cb) {
    if (typeof cb === 'function')
        return cb;
    return rethrow();
}
var normalize = pathModule.normalize;

// Regexp that finds the next portion of a (partial) path
// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
var nextPartRe = isWindows
    ? /(.*?)(?:[\/\\]+|$)/g
    : /(.*?)(?:[\/]+|$)/g;

// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
var splitRootRe = isWindows
    ? /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/
    : /^[\/]*/;
exports . realpathSync = function realpathSync ( p , cache ) {
// make p is absolute
p = pathModule . resolve ( p ) ;
if ( cache && Object . prototype . hasOwnProperty . call ( cache , p ) ) {
return cache [ p ] ;
}
var original = p ,
seenLinks = { } ,
knownHard = { } ;
// current character position in p
var pos ;
// the partial path so far, including a trailing slash if any
var current ;
// the partial path without a trailing slash (except when pointing at a root)
var base ;
// the partial path scanned in the previous round, with slash
var previous ;
start ( ) ;
function start ( ) {
// Skip over roots
var m = splitRootRe . exec ( p ) ;
pos = m [ 0 ] . length ;
current = m [ 0 ] ;
base = m [ 0 ] ;
previous = '' ;
// On windows, check that the root exists. On unix there is no need.
if ( isWindows && ! knownHard [ base ] ) {
fs . lstatSync ( base ) ;
knownHard [ base ] = true ;
}
}
// walk down the path, swapping out linked pathparts for their real
// values
// NB: p.length changes.
while ( pos < p . length ) {
// find the next part
nextPartRe . lastIndex = pos ;
var result = nextPartRe . exec ( p ) ;
previous = current ;
current += result [ 0 ] ;
base = previous + result [ 1 ] ;
pos = nextPartRe . lastIndex ;
// continue if not a symlink
if ( knownHard [ base ] || ( cache && cache [ base ] === base ) ) {
continue ;
}
var resolvedLink ;
if ( cache && Object . prototype . hasOwnProperty . call ( cache , base ) ) {
// some known symbolic link. no need to stat again.
resolvedLink = cache [ base ] ;
} else {
var stat = fs . lstatSync ( base ) ;
if ( ! stat . isSymbolicLink ( ) ) {
knownHard [ base ] = true ;
if ( cache ) cache [ base ] = base ;
continue ;
}
// read the link if it wasn't read before
// dev/ino always return 0 on windows, so skip the check.
var linkTarget = null ;
if ( ! isWindows ) {
var id = stat . dev . toString ( 32 ) + ':' + stat . ino . toString ( 32 ) ;
if ( seenLinks . hasOwnProperty ( id ) ) {
linkTarget = seenLinks [ id ] ;
}
}
if ( linkTarget === null ) {
fs . statSync ( base ) ;
linkTarget = fs . readlinkSync ( base ) ;
}
resolvedLink = pathModule . resolve ( previous , linkTarget ) ;
// track this, if given a cache.
if ( cache ) cache [ base ] = resolvedLink ;
if ( ! isWindows ) seenLinks [ id ] = linkTarget ;
}
// resolve the link, then start over
p = pathModule . resolve ( resolvedLink , p . slice ( pos ) ) ;
start ( ) ;
}
if ( cache ) cache [ original ] = p ;
return p ;
} ;
exports . realpath = function realpath ( p , cache , cb ) {
if ( typeof cb !== 'function' ) {
cb = maybeCallback ( cache ) ;
cache = null ;
}
// make p is absolute
p = pathModule . resolve ( p ) ;
if ( cache && Object . prototype . hasOwnProperty . call ( cache , p ) ) {
return process . nextTick ( cb . bind ( null , null , cache [ p ] ) ) ;
}
var original = p ,
seenLinks = { } ,
knownHard = { } ;
// current character position in p
var pos ;
// the partial path so far, including a trailing slash if any
var current ;
// the partial path without a trailing slash (except when pointing at a root)
var base ;
// the partial path scanned in the previous round, with slash
var previous ;
start ( ) ;
function start ( ) {
// Skip over roots
var m = splitRootRe . exec ( p ) ;
pos = m [ 0 ] . length ;
current = m [ 0 ] ;
base = m [ 0 ] ;
previous = '' ;
// On windows, check that the root exists. On unix there is no need.
if ( isWindows && ! knownHard [ base ] ) {
fs . lstat ( base , function ( err ) {
if ( err ) return cb ( err ) ;
knownHard [ base ] = true ;
LOOP ( ) ;
} ) ;
} else {
process . nextTick ( LOOP ) ;
}
}
// walk down the path, swapping out linked pathparts for their real
// values
function LOOP ( ) {
// stop if scanned past end of path
if ( pos >= p . length ) {
if ( cache ) cache [ original ] = p ;
return cb ( null , p ) ;
}
// find the next part
nextPartRe . lastIndex = pos ;
var result = nextPartRe . exec ( p ) ;
previous = current ;
current += result [ 0 ] ;
base = previous + result [ 1 ] ;
pos = nextPartRe . lastIndex ;
// continue if not a symlink
if ( knownHard [ base ] || ( cache && cache [ base ] === base ) ) {
return process . nextTick ( LOOP ) ;
}
if ( cache && Object . prototype . hasOwnProperty . call ( cache , base ) ) {
// known symbolic link. no need to stat again.
return gotResolvedLink ( cache [ base ] ) ;
}
return fs . lstat ( base , gotStat ) ;
}
function gotStat ( err , stat ) {
if ( err ) return cb ( err ) ;
// if not a symlink, skip to the next path part
if ( ! stat . isSymbolicLink ( ) ) {
knownHard [ base ] = true ;
if ( cache ) cache [ base ] = base ;
return process . nextTick ( LOOP ) ;
}
// stat & read the link if not read before
// call gotTarget as soon as the link target is known
// dev/ino always return 0 on windows, so skip the check.
if ( ! isWindows ) {
var id = stat . dev . toString ( 32 ) + ':' + stat . ino . toString ( 32 ) ;
if ( seenLinks . hasOwnProperty ( id ) ) {
return gotTarget ( null , seenLinks [ id ] , base ) ;
}
}
fs . stat ( base , function ( err ) {
if ( err ) return cb ( err ) ;
fs . readlink ( base , function ( err , target ) {
if ( ! isWindows ) seenLinks [ id ] = target ;
gotTarget ( err , target ) ;
} ) ;
} ) ;
}
function gotTarget ( err , target , base ) {
if ( err ) return cb ( err ) ;
var resolvedLink = pathModule . resolve ( previous , target ) ;
if ( cache ) cache [ base ] = resolvedLink ;
gotResolvedLink ( resolvedLink ) ;
}
function gotResolvedLink ( resolvedLink ) {
// resolve the link, then start over
p = pathModule . resolve ( resolvedLink , p . slice ( pos ) ) ;
start ( ) ;
}
} ;
/***/ } ) ,
/***/ 741 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const Range = __webpack_require__(828)
// Return the normalized range string when `range` parses, else null.
const validRange = (range, options) => {
  try {
    // Return '*' instead of '' so that truthiness works.
    // This will throw if it's invalid anyway
    const parsed = new Range(range, options)
    return parsed.range || '*'
  } catch (er) {
    return null
  }
}
module.exports = validRange
/***/ } ) ,
/***/ 747 :
/***/ ( function ( module ) {
module . exports = require ( "fs" ) ;
/***/ } ) ,
/***/ 757 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// TypeScript-emitted interop helpers (tslib equivalents).

// Expose property `k` of module `m` on `o` as `k2` (a getter keeps the
// binding live when Object.create is available).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attach a CommonJS module as the `default` export of a namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// `import * as ns` emulation: wrap a CommonJS module in a namespace object,
// copying own properties and setting `default`.
var __importStar = (this && this.__importStar) || function(mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
// async/await downlevel helper: drive a generator, adopting awaited values
// into promises and settling the outer promise on completion/throw.
var __awaiter = (this && this.__awaiter) || function(thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function(resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.exec = void 0;
const aexec = __importStar(__webpack_require__(514));
// Run `command` with `args` via @actions/exec, capturing stdout/stderr.
// Does not reject on a non-zero exit code (ignoreReturnCode: true); the
// returned object's `success` flag carries that information instead.
// Returns { success, stdout, stderr } with the streams trimmed.
exports.exec = (command, args = [], silent) => __awaiter(void 0, void 0, void 0, function* () {
    let stdout = '';
    let stderr = '';
    const options = {
        silent: silent,
        ignoreReturnCode: true
    };
    // Accumulate both output streams as they arrive.
    options.listeners = {
        stdout: (data) => {
            stdout += data.toString();
        },
        stderr: (data) => {
            stderr += data.toString();
        }
    };
    const returnCode = yield aexec.exec(command, args, options);
    return {
        success: returnCode === 0,
        stdout: stdout.trim(),
        stderr: stderr.trim()
    };
});
//# sourceMappingURL=exec.js.map
/***/ } ) ,
/***/ 760 :
/***/ ( function ( module ) {
"use strict" ;
module . exports = balanced ;
// Find the first balanced `a`...`b` pair in `str`.
// Returns { start, end, pre, body, post }, or undefined when no balanced
// pair exists. `a` / `b` may be strings or RegExps (a RegExp is resolved to
// its first match in `str`).
function balanced(a, b, str) {
    if (a instanceof RegExp) a = maybeMatch(a, str);
    if (b instanceof RegExp) b = maybeMatch(b, str);

    var hit = range(a, b, str);
    if (!hit) return hit; // undefined: delimiters never balance

    return {
        start: hit[0],
        end: hit[1],
        pre: str.slice(0, hit[0]),
        body: str.slice(hit[0] + a.length, hit[1]),
        post: str.slice(hit[1] + b.length)
    };
}

// Resolve a RegExp to its first match in str, or null when it has none.
function maybeMatch(reg, str) {
    var m = str.match(reg);
    return m === null ? null : m[0];
}

balanced.range = range;

// Return [startIndex, endIndex] of the first balanced a...b pair, or
// undefined when there is none.
function range(a, b, str) {
    var begs, beg, left, right, result;
    var ai = str.indexOf(a);
    var bi = str.indexOf(b, ai + 1);
    var i = i = ai;

    if (ai >= 0 && bi > 0) {
        begs = [];
        left = str.length;

        // Walk openers/closers in order, tracking unmatched openers on a stack.
        while (i >= 0 && !result) {
            if (i == ai) {
                // Opener: push it and advance to the next opener.
                begs.push(i);
                ai = str.indexOf(a, i + 1);
            } else if (begs.length == 1) {
                // Closer matching the outermost opener: done.
                result = [begs.pop(), bi];
            } else {
                // Closer for a nested opener: remember the leftmost such pair.
                beg = begs.pop();
                if (beg < left) {
                    left = beg;
                    right = bi;
                }
                bi = str.indexOf(b, i + 1);
            }
            // Next event is whichever of the next opener/closer comes first.
            i = ai < bi && ai >= 0 ? ai : bi;
        }

        if (begs.length) {
            // Openers left unmatched: fall back to the leftmost complete pair.
            result = [left, right];
        }
    }

    return result;
}
/***/ } ) ,
/***/ 761 :
/***/ ( function ( module ) {
module . exports = require ( "zlib" ) ;
/***/ } ) ,
/***/ 762 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
var universalUserAgent = _ _webpack _require _ _ ( 429 ) ;
var beforeAfterHook = _ _webpack _require _ _ ( 682 ) ;
var request = _ _webpack _require _ _ ( 234 ) ;
var graphql = _ _webpack _require _ _ ( 668 ) ;
var authToken = _ _webpack _require _ _ ( 334 ) ;
// Babel object-spread helpers.

// Define `key` on `obj` as an enumerable/configurable/writable data property.
// Object.defineProperty is used when the key already exists so any inherited
// accessor is shadowed by a plain data property.
function _defineProperty(obj, key, value) {
    if (key in obj) {
        Object.defineProperty(obj, key, {
            value: value,
            enumerable: true,
            configurable: true,
            writable: true
        });
    } else {
        obj[key] = value;
    }
    return obj;
}

// Own string keys of `object` plus its symbol keys (optionally only the
// enumerable symbols).
function ownKeys(object, enumerableOnly) {
    var keys = Object.keys(object);
    if (Object.getOwnPropertySymbols) {
        var symbols = Object.getOwnPropertySymbols(object);
        if (enumerableOnly) {
            symbols = symbols.filter((sym) => Object.getOwnPropertyDescriptor(object, sym).enumerable);
        }
        keys.push.apply(keys, symbols);
    }
    return keys;
}

// Shallow-merge every source argument into `target` (the `{ ...a, ...b }`
// transform); odd arguments copy values, even arguments copy descriptors.
function _objectSpread2(target) {
    for (var i = 1; i < arguments.length; i++) {
        var source = arguments[i] != null ? arguments[i] : {};
        if (i % 2) {
            ownKeys(Object(source), true).forEach((key) => {
                _defineProperty(target, key, source[key]);
            });
        } else if (Object.getOwnPropertyDescriptors) {
            Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
        } else {
            ownKeys(Object(source)).forEach((key) => {
                Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
            });
        }
    }
    return target;
}
const VERSION = "3.1.2";

/**
 * Extensible GitHub API client: wires up request defaults, authentication,
 * logging, GraphQL, and every registered plugin.
 */
class Octokit {
    constructor(options = {}) {
        const hook = new beforeAfterHook.Collection();
        const requestDefaults = {
            baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
            headers: {},
            request: Object.assign({}, options.request, {
                hook: hook.bind(null, "request")
            }),
            mediaType: {
                previews: [],
                format: ""
            }
        }; // prepend default user agent with `options.userAgent` if set

        requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");

        if (options.baseUrl) {
            requestDefaults.baseUrl = options.baseUrl;
        }

        if (options.previews) {
            requestDefaults.mediaType.previews = options.previews;
        }

        if (options.timeZone) {
            requestDefaults.headers["time-zone"] = options.timeZone;
        }

        this.request = request.request.defaults(requestDefaults);
        // GraphQL shares the REST defaults; a GHES "/api/v3" baseUrl becomes "/api".
        this.graphql = graphql.withCustomRequest(this.request).defaults(_objectSpread2(_objectSpread2({}, requestDefaults), {}, {
            baseUrl: requestDefaults.baseUrl.replace(/\/api\/v3$/, "/api")
        }));
        // Default logger: silence debug/info, pass warn/error to the console.
        this.log = Object.assign({
            debug: () => {},
            info: () => {},
            warn: console.warn.bind(console),
            error: console.error.bind(console)
        }, options.log);
        this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
        // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registred.
        // (2) If only `options.auth` is set, use the default token authentication strategy.
        // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
        // TODO: type `options.auth` based on `options.authStrategy`.

        if (!options.authStrategy) {
            if (!options.auth) {
                // (1)
                this.auth = async () => ({
                    type: "unauthenticated"
                });
            } else {
                // (2)
                const auth = authToken.createTokenAuth(options.auth); // @ts-ignore  ¯\_(ツ)_/¯

                hook.wrap("request", auth.hook);
                this.auth = auth;
            }
        } else {
            // (3)
            const auth = options.authStrategy(Object.assign({
                request: this.request
            }, options.auth)); // @ts-ignore  ¯\_(ツ)_/¯

            hook.wrap("request", auth.hook);
            this.auth = auth;
        } // apply plugins
        // https://stackoverflow.com/a/16345172

        const classConstructor = this.constructor;
        classConstructor.plugins.forEach(plugin => {
            Object.assign(this, plugin(this, options));
        });
    }

    // Subclass factory: merge (or compute, when `defaults` is a function) the
    // given defaults into per-instance options; user agents are concatenated.
    static defaults(defaults) {
        const OctokitWithDefaults = class extends this {
            constructor(...args) {
                const options = args[0] || {};

                if (typeof defaults === "function") {
                    super(defaults(options));
                    return;
                }

                super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
                    userAgent: `${options.userAgent} ${defaults.userAgent}`
                } : null));
            }
        };
        return OctokitWithDefaults;
    }
    /**
     * Attach a plugin (or many) to your Octokit instance.
     *
     * @example
     * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
     */
    static plugin(...newPlugins) {
        var _a;

        const currentPlugins = this.plugins;
        // New subclass whose plugin list adds only the not-yet-registered plugins.
        const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
        return NewOctokit;
    }
}
Octokit.VERSION = VERSION;
Octokit.plugins = [];
exports.Octokit = Octokit;
//# sourceMappingURL=index.js.map
/***/ } ) ,
/***/ 782 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
// Update this array if you add/rename/remove files in this directory.
// We support Browserify by skipping automatic module discovery and requiring modules directly.
var modules = [
    __webpack_require__(464),
    __webpack_require__(300),
    __webpack_require__(689),
    __webpack_require__(193),
    __webpack_require__(391),
    __webpack_require__(174),
    __webpack_require__(121),
    __webpack_require__(320),
];

// Put all encoding/alias/codec definitions to single object and export it.
// (`module` below is just a loop variable; this wrapper's own module slot is unused.)
for (var i = 0; i < modules.length; i++) {
    var module = modules[i];
    for (var enc in module)
        if (Object.prototype.hasOwnProperty.call(module, enc))
            exports[enc] = module[enc];
}
/***/ } ) ,
/***/ 804 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const compare = __webpack_require__(309)
// compare() with loose parsing always enabled; same -1/0/1 contract.
const compareLoose = (a, b) => compare(a, b, true)
module.exports = compareLoose
/***/ } ) ,
/***/ 807 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const Range = _ _webpack _require _ _ ( 828 )
const { ANY } = _ _webpack _require _ _ ( 532 )
const satisfies = _ _webpack _require _ _ ( 55 )
const compare = _ _webpack _require _ _ ( 309 )
// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff:
// - Every simple range `r1, r2, ...` is a subset of some `R1, R2, ...`
//
// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff:
// - If c is only the ANY comparator
// - If C is only the ANY comparator, return true
// - Else return false
// - Let EQ be the set of = comparators in c
// - If EQ is more than one, return true (null set)
// - Let GT be the highest > or >= comparator in c
// - Let LT be the lowest < or <= comparator in c
// - If GT and LT, and GT.semver > LT.semver, return true (null set)
// - If EQ
// - If GT, and EQ does not satisfy GT, return true (null set)
// - If LT, and EQ does not satisfy LT, return true (null set)
// - If EQ satisfies every C, return true
// - Else return false
// - If GT
// - If GT is lower than any > or >= comp in C, return false
// - If GT is >=, and GT.semver does not satisfy every C, return false
// - If LT
// - If LT.semver is greater than that of any > comp in C, return false
// - If LT is <=, and LT.semver does not satisfy every C, return false
// - If any C is a = range, and GT or LT are set, return false
// - Else return true
// True iff every simple range in `sub` is a subset of at least one simple
// range in `dom` (null simple ranges in `sub` are ignored unless all are null).
const subset = (sub, dom, options) => {
  const subRange = new Range(sub, options)
  const domRange = new Range(dom, options)
  let sawNonNull = false

  for (const simpleSub of subRange.set) {
    let contained = false
    for (const simpleDom of domRange.set) {
      const isSub = simpleSubset(simpleSub, simpleDom, options)
      if (isSub !== null)
        sawNonNull = true
      if (isSub) {
        contained = true
        break
      }
    }
    // the null set is a subset of everything, but null simple ranges in
    // a complex range should be ignored. so if we saw a non-null range,
    // then we know this isn't a subset, but if EVERY simple range was null,
    // then it is a subset.
    if (!contained && sawNonNull)
      return false
  }
  return true
}
const simpleSubset = ( sub , dom , options ) => {
if ( sub . length === 1 && sub [ 0 ] . semver === ANY )
return dom . length === 1 && dom [ 0 ] . semver === ANY
const eqSet = new Set ( )
let gt , lt
for ( const c of sub ) {
if ( c . operator === '>' || c . operator === '>=' )
gt = higherGT ( gt , c , options )
else if ( c . operator === '<' || c . operator === '<=' )
lt = lowerLT ( lt , c , options )
else
eqSet . add ( c . semver )
}
if ( eqSet . size > 1 )
return null
let gtltComp
if ( gt && lt ) {
gtltComp = compare ( gt . semver , lt . semver , options )
if ( gtltComp > 0 )
return null
else if ( gtltComp === 0 && ( gt . operator !== '>=' || lt . operator !== '<=' ) )
return null
}
// will iterate one or zero times
for ( const eq of eqSet ) {
if ( gt && ! satisfies ( eq , String ( gt ) , options ) )
return null
if ( lt && ! satisfies ( eq , String ( lt ) , options ) )
return null
for ( const c of dom ) {
if ( ! satisfies ( eq , String ( c ) , options ) )
return false
}
return true
}
let higher , lower
let hasDomLT , hasDomGT
for ( const c of dom ) {
hasDomGT = hasDomGT || c . operator === '>' || c . operator === '>='
hasDomLT = hasDomLT || c . operator === '<' || c . operator === '<='
if ( gt ) {
if ( c . operator === '>' || c . operator === '>=' ) {
higher = higherGT ( gt , c , options )
if ( higher === c )
return false
} else if ( gt . operator === '>=' && ! satisfies ( gt . semver , String ( c ) , options ) )
return false
}
if ( lt ) {
if ( c . operator === '<' || c . operator === '<=' ) {
lower = lowerLT ( lt , c , options )
if ( lower === c )
return false
} else if ( lt . operator === '<=' && ! satisfies ( lt . semver , String ( c ) , options ) )
return false
}
if ( ! c . operator && ( lt || gt ) && gtltComp !== 0 )
return false
}
// if there was a < or >, and nothing in the dom, then must be false
// UNLESS it was limited by another range in the other direction.
// Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0
if ( gt && hasDomLT && ! lt && gtltComp !== 0 )
return false
if ( lt && hasDomGT && ! gt && gtltComp !== 0 )
return false
return true
}
// >=1.2.3 is lower than >1.2.3
const higherGT = ( a , b , options ) => {
if ( ! a )
return b
const comp = compare ( a . semver , b . semver , options )
return comp > 0 ? a
: comp < 0 ? b
: b . operator === '>' && a . operator === '>=' ? b
: a
}
// <=1.2.3 is higher than <1.2.3
const lowerLT = ( a , b , options ) => {
if ( ! a )
return b
const comp = compare ( a . semver , b . semver , options )
return comp < 0 ? a
: comp > 0 ? b
: b . operator === '<' && a . operator === '<=' ? b
: a
}
module . exports = subset
/***/ } ) ,
/***/ 818 :
/***/ ( function ( module ) {
module . exports = require ( "tls" ) ;
/***/ } ) ,
/***/ 819 :
/***/ ( function ( module ) {
module . exports = removeHook
function removeHook ( state , name , method ) {
if ( ! state . registry [ name ] ) {
return
}
var index = state . registry [ name ]
. map ( function ( registered ) { return registered . orig } )
. indexOf ( method )
if ( index === - 1 ) {
return
}
state . registry [ name ] . splice ( index , 1 )
}
/***/ } ) ,
/***/ 828 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
// hoisted class for cyclic dependency
class Range {
constructor ( range , options ) {
if ( ! options || typeof options !== 'object' ) {
options = {
loose : ! ! options ,
includePrerelease : false
}
}
if ( range instanceof Range ) {
if (
range . loose === ! ! options . loose &&
range . includePrerelease === ! ! options . includePrerelease
) {
return range
} else {
return new Range ( range . raw , options )
}
}
if ( range instanceof Comparator ) {
// just put it in the set and return
this . raw = range . value
this . set = [ [ range ] ]
this . format ( )
return this
}
this . options = options
this . loose = ! ! options . loose
this . includePrerelease = ! ! options . includePrerelease
// First, split based on boolean or ||
this . raw = range
this . set = range
. split ( /\s*\|\|\s*/ )
// map the range to a 2d array of comparators
. map ( range => this . parseRange ( range . trim ( ) ) )
// throw out any comparator lists that are empty
// this generally means that it was not a valid range, which is allowed
// in loose mode, but will still throw if the WHOLE range is invalid.
. filter ( c => c . length )
if ( ! this . set . length ) {
throw new TypeError ( ` Invalid SemVer Range: ${ range } ` )
}
this . format ( )
}
format ( ) {
this . range = this . set
. map ( ( comps ) => {
return comps . join ( ' ' ) . trim ( )
} )
. join ( '||' )
. trim ( )
return this . range
}
toString ( ) {
return this . range
}
parseRange ( range ) {
const loose = this . options . loose
range = range . trim ( )
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
const hr = loose ? re [ t . HYPHENRANGELOOSE ] : re [ t . HYPHENRANGE ]
range = range . replace ( hr , hyphenReplace ( this . options . includePrerelease ) )
debug ( 'hyphen replace' , range )
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range . replace ( re [ t . COMPARATORTRIM ] , comparatorTrimReplace )
debug ( 'comparator trim' , range , re [ t . COMPARATORTRIM ] )
// `~ 1.2.3` => `~1.2.3`
range = range . replace ( re [ t . TILDETRIM ] , tildeTrimReplace )
// `^ 1.2.3` => `^1.2.3`
range = range . replace ( re [ t . CARETTRIM ] , caretTrimReplace )
// normalize spaces
range = range . split ( /\s+/ ) . join ( ' ' )
// At this point, the range is completely trimmed and
// ready to be split into comparators.
const compRe = loose ? re [ t . COMPARATORLOOSE ] : re [ t . COMPARATOR ]
return range
. split ( ' ' )
. map ( comp => parseComparator ( comp , this . options ) )
. join ( ' ' )
. split ( /\s+/ )
. map ( comp => replaceGTE0 ( comp , this . options ) )
// in loose mode, throw out any that are not valid comparators
. filter ( this . options . loose ? comp => ! ! comp . match ( compRe ) : ( ) => true )
. map ( comp => new Comparator ( comp , this . options ) )
}
intersects ( range , options ) {
if ( ! ( range instanceof Range ) ) {
throw new TypeError ( 'a Range is required' )
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
return this . set . some ( ( thisComparators ) => {
return (
isSatisfiable ( thisComparators , options ) &&
range . set . some ( ( rangeComparators ) => {
return (
isSatisfiable ( rangeComparators , options ) &&
thisComparators . every ( ( thisComparator ) => {
return rangeComparators . every ( ( rangeComparator ) => {
return thisComparator . intersects ( rangeComparator , options )
} )
} )
)
} )
)
} )
}
// if ANY of the sets match ALL of its comparators, then pass
test ( version ) {
if ( ! version ) {
return false
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
if ( typeof version === 'string' ) {
try {
version = new SemVer ( version , this . options )
} catch ( er ) {
return false
}
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
for ( let i = 0 ; i < this . set . length ; i ++ ) {
if ( testSet ( this . set [ i ] , version , this . options ) ) {
return true
}
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
return false
}
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
module . exports = Range
const Comparator = _ _webpack _require _ _ ( 532 )
const debug = _ _webpack _require _ _ ( 427 )
const SemVer = _ _webpack _require _ _ ( 88 )
const {
re ,
t ,
comparatorTrimReplace ,
tildeTrimReplace ,
caretTrimReplace
} = _ _webpack _require _ _ ( 523 )
// take a set of comparators and determine whether there
// exists a version which can satisfy it
const isSatisfiable = ( comparators , options ) => {
let result = true
const remainingComparators = comparators . slice ( )
let testComparator = remainingComparators . pop ( )
while ( result && remainingComparators . length ) {
result = remainingComparators . every ( ( otherComparator ) => {
return testComparator . intersects ( otherComparator , options )
} )
testComparator = remainingComparators . pop ( )
}
return result
}
// comprised of xranges, tildes, stars, and gtlt's at this point.
// already replaced the hyphen ranges
// turn into a set of JUST comparators.
const parseComparator = ( comp , options ) => {
debug ( 'comp' , comp , options )
comp = replaceCarets ( comp , options )
debug ( 'caret' , comp )
comp = replaceTildes ( comp , options )
debug ( 'tildes' , comp )
comp = replaceXRanges ( comp , options )
debug ( 'xrange' , comp )
comp = replaceStars ( comp , options )
debug ( 'stars' , comp )
return comp
}
const isX = id => ! id || id . toLowerCase ( ) === 'x' || id === '*'
// ~, ~> --> * (any, kinda silly)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0
const replaceTildes = ( comp , options ) =>
comp . trim ( ) . split ( /\s+/ ) . map ( ( comp ) => {
return replaceTilde ( comp , options )
} ) . join ( ' ' )
const replaceTilde = ( comp , options ) => {
const r = options . loose ? re [ t . TILDELOOSE ] : re [ t . TILDE ]
return comp . replace ( r , ( _ , M , m , p , pr ) => {
debug ( 'tilde' , comp , _ , M , m , p , pr )
let ret
if ( isX ( M ) ) {
ret = ''
} else if ( isX ( m ) ) {
ret = ` >= ${ M } .0.0 < ${ + M + 1 } .0.0-0 `
} else if ( isX ( p ) ) {
// ~1.2 == >=1.2.0 <1.3.0-0
ret = ` >= ${ M } . ${ m } .0 < ${ M } . ${ + m + 1 } .0-0 `
} else if ( pr ) {
debug ( 'replaceTilde pr' , pr )
ret = ` >= ${ M } . ${ m } . ${ p } - ${ pr
} < $ { M } . $ { + m + 1 } . 0 - 0 `
} else {
// ~1.2.3 == >=1.2.3 <1.3.0-0
ret = ` >= ${ M } . ${ m } . ${ p
} < $ { M } . $ { + m + 1 } . 0 - 0 `
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
debug ( 'tilde return' , ret )
return ret
} )
}
// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0
// ^1.2.3 --> >=1.2.3 <2.0.0-0
// ^1.2.0 --> >=1.2.0 <2.0.0-0
const replaceCarets = ( comp , options ) =>
comp . trim ( ) . split ( /\s+/ ) . map ( ( comp ) => {
return replaceCaret ( comp , options )
} ) . join ( ' ' )
const replaceCaret = ( comp , options ) => {
debug ( 'caret' , comp , options )
const r = options . loose ? re [ t . CARETLOOSE ] : re [ t . CARET ]
const z = options . includePrerelease ? '-0' : ''
return comp . replace ( r , ( _ , M , m , p , pr ) => {
debug ( 'caret' , comp , _ , M , m , p , pr )
let ret
if ( isX ( M ) ) {
ret = ''
} else if ( isX ( m ) ) {
ret = ` >= ${ M } .0.0 ${ z } < ${ + M + 1 } .0.0-0 `
} else if ( isX ( p ) ) {
if ( M === '0' ) {
ret = ` >= ${ M } . ${ m } .0 ${ z } < ${ M } . ${ + m + 1 } .0-0 `
} else {
ret = ` >= ${ M } . ${ m } .0 ${ z } < ${ + M + 1 } .0.0-0 `
}
} else if ( pr ) {
debug ( 'replaceCaret pr' , pr )
if ( M === '0' ) {
if ( m === '0' ) {
ret = ` >= ${ M } . ${ m } . ${ p } - ${ pr
} < $ { M } . $ { m } . $ { + p + 1 } - 0 `
} else {
ret = ` >= ${ M } . ${ m } . ${ p } - ${ pr
} < $ { M } . $ { + m + 1 } . 0 - 0 `
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
} else {
ret = ` >= ${ M } . ${ m } . ${ p } - ${ pr
} < $ { + M + 1 } . 0.0 - 0 `
}
} else {
debug ( 'no pr' )
if ( M === '0' ) {
if ( m === '0' ) {
ret = ` >= ${ M } . ${ m } . ${ p
} $ { z } < $ { M } . $ { m } . $ { + p + 1 } - 0 `
} else {
ret = ` >= ${ M } . ${ m } . ${ p
} $ { z } < $ { M } . $ { + m + 1 } . 0 - 0 `
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
} else {
ret = ` >= ${ M } . ${ m } . ${ p
} < $ { + M + 1 } . 0.0 - 0 `
}
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
debug ( 'caret return' , ret )
return ret
} )
}
const replaceXRanges = ( comp , options ) => {
debug ( 'replaceXRanges' , comp , options )
return comp . split ( /\s+/ ) . map ( ( comp ) => {
return replaceXRange ( comp , options )
} ) . join ( ' ' )
}
const replaceXRange = ( comp , options ) => {
comp = comp . trim ( )
const r = options . loose ? re [ t . XRANGELOOSE ] : re [ t . XRANGE ]
return comp . replace ( r , ( ret , gtlt , M , m , p , pr ) => {
debug ( 'xRange' , comp , ret , gtlt , M , m , p , pr )
const xM = isX ( M )
const xm = xM || isX ( m )
const xp = xm || isX ( p )
const anyX = xp
if ( gtlt === '=' && anyX ) {
gtlt = ''
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
// if we're including prereleases in the match, then we need
// to fix this to -0, the lowest possible prerelease value
pr = options . includePrerelease ? '-0' : ''
if ( xM ) {
if ( gtlt === '>' || gtlt === '<' ) {
// nothing is allowed
ret = '<0.0.0-0'
} else {
// nothing is forbidden
ret = '*'
}
} else if ( gtlt && anyX ) {
// we know patch is an x, because we have any x at all.
// replace X with 0
if ( xm ) {
m = 0
}
p = 0
if ( gtlt === '>' ) {
// >1 => >=2.0.0
// >1.2 => >=1.3.0
gtlt = '>='
if ( xm ) {
M = + M + 1
m = 0
p = 0
} else {
m = + m + 1
p = 0
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
} else if ( gtlt === '<=' ) {
// <=0.7.x is actually <0.8.0, since any 0.7.x should
// pass. Similarly, <=7.x is actually <8.0.0, etc.
gtlt = '<'
if ( xm ) {
M = + M + 1
} else {
m = + m + 1
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
}
if ( gtlt === '<' )
pr = '-0'
ret = ` ${ gtlt + M } . ${ m } . ${ p } ${ pr } `
} else if ( xm ) {
ret = ` >= ${ M } .0.0 ${ pr } < ${ + M + 1 } .0.0-0 `
} else if ( xp ) {
ret = ` >= ${ M } . ${ m } .0 ${ pr
} < $ { M } . $ { + m + 1 } . 0 - 0 `
2020-08-16 00:36:41 +02:00
}
2020-09-02 10:07:11 +02:00
debug ( 'xRange return' , ret )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
return ret
} )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
const replaceStars = ( comp , options ) => {
debug ( 'replaceStars' , comp , options )
// Looseness is ignored here. star is always as loose as it gets!
return comp . trim ( ) . replace ( re [ t . STAR ] , '' )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
const replaceGTE0 = ( comp , options ) => {
debug ( 'replaceGTE0' , comp , options )
return comp . trim ( )
. replace ( re [ options . includePrerelease ? t . GTE0PRE : t . GTE0 ] , '' )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// This function is passed to string.replace(re[t.HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0-0
const hyphenReplace = incPr => ( $0 ,
from , fM , fm , fp , fpr , fb ,
to , tM , tm , tp , tpr , tb ) => {
if ( isX ( fM ) ) {
from = ''
} else if ( isX ( fm ) ) {
from = ` >= ${ fM } .0.0 ${ incPr ? '-0' : '' } `
} else if ( isX ( fp ) ) {
from = ` >= ${ fM } . ${ fm } .0 ${ incPr ? '-0' : '' } `
} else if ( fpr ) {
from = ` >= ${ from } `
} else {
from = ` >= ${ from } ${ incPr ? '-0' : '' } `
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
if ( isX ( tM ) ) {
to = ''
} else if ( isX ( tm ) ) {
to = ` < ${ + tM + 1 } .0.0-0 `
} else if ( isX ( tp ) ) {
to = ` < ${ tM } . ${ + tm + 1 } .0-0 `
} else if ( tpr ) {
to = ` <= ${ tM } . ${ tm } . ${ tp } - ${ tpr } `
} else if ( incPr ) {
to = ` < ${ tM } . ${ tm } . ${ + tp + 1 } -0 `
} else {
to = ` <= ${ to } `
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
return ( ` ${ from } ${ to } ` ) . trim ( )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
const testSet = ( set , version , options ) => {
for ( let i = 0 ; i < set . length ; i ++ ) {
if ( ! set [ i ] . test ( version ) ) {
return false
}
}
if ( version . prerelease . length && ! options . includePrerelease ) {
// Find the set of versions that are allowed to have prereleases
// For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
// That should allow `1.2.3-pr.2` to pass.
// However, `1.2.4-alpha.notready` should NOT be allowed,
// even though it's within the range set by the comparators.
for ( let i = 0 ; i < set . length ; i ++ ) {
debug ( set [ i ] . semver )
if ( set [ i ] . semver === Comparator . ANY ) {
continue
}
if ( set [ i ] . semver . prerelease . length > 0 ) {
const allowed = set [ i ] . semver
if ( allowed . major === version . major &&
allowed . minor === version . minor &&
allowed . patch === version . patch ) {
return true
}
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Version has a -pre, but it's not one of the ones we like.
return false
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
return true
2020-08-23 03:31:38 +02:00
}
2020-08-16 19:13:19 +02:00
/***/ } ) ,
2020-08-16 22:31:37 +02:00
2020-09-02 10:07:11 +02:00
/***/ 832 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-08-16 22:31:37 +02:00
2020-09-02 10:07:11 +02:00
const SemVer = _ _webpack _require _ _ ( 88 )
const Range = _ _webpack _require _ _ ( 828 )
const minSatisfying = ( versions , range , options ) => {
let min = null
let minSV = null
let rangeObj = null
try {
rangeObj = new Range ( range , options )
} catch ( er ) {
return null
}
versions . forEach ( ( v ) => {
if ( rangeObj . test ( v ) ) {
// satisfies(v, range, options)
if ( ! min || minSV . compare ( v ) === 1 ) {
// compare(min, v, true)
min = v
minSV = new SemVer ( min , options )
}
}
} )
return min
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
module . exports = minSatisfying
2020-08-21 13:39:42 +02:00
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 834 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
"use strict" ;
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
var Buffer = _ _webpack _require _ _ ( 407 ) . Buffer ;
// Note: not polyfilled with safer-buffer on a purpose, as overrides Buffer
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// == Extend Node primitives to use iconv-lite =================================
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
module . exports = function ( iconv ) {
var original = undefined ; // Place to keep original methods.
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Node authors rewrote Buffer internals to make it compatible with
// Uint8Array and we cannot patch key functions since then.
// Note: this does use older Buffer API on a purpose
iconv . supportsNodeEncodingsExtension = ! ( Buffer . from || new Buffer ( 0 ) instanceof Uint8Array ) ;
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
iconv . extendNodeEncodings = function extendNodeEncodings ( ) {
if ( original ) return ;
original = { } ;
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
if ( ! iconv . supportsNodeEncodingsExtension ) {
console . error ( "ACTION NEEDED: require('iconv-lite').extendNodeEncodings() is not supported in your version of Node" ) ;
console . error ( "See more info at https://github.com/ashtuchkin/iconv-lite/wiki/Node-v4-compatibility" ) ;
return ;
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
var nodeNativeEncodings = {
'hex' : true , 'utf8' : true , 'utf-8' : true , 'ascii' : true , 'binary' : true ,
'base64' : true , 'ucs2' : true , 'ucs-2' : true , 'utf16le' : true , 'utf-16le' : true ,
} ;
Buffer . isNativeEncoding = function ( enc ) {
return enc && nodeNativeEncodings [ enc . toLowerCase ( ) ] ;
}
// -- SlowBuffer -----------------------------------------------------------
var SlowBuffer = _ _webpack _require _ _ ( 407 ) . SlowBuffer ;
original . SlowBufferToString = SlowBuffer . prototype . toString ;
SlowBuffer . prototype . toString = function ( encoding , start , end ) {
encoding = String ( encoding || 'utf8' ) . toLowerCase ( ) ;
// Use native conversion when possible
if ( Buffer . isNativeEncoding ( encoding ) )
return original . SlowBufferToString . call ( this , encoding , start , end ) ;
// Otherwise, use our decoding method.
if ( typeof start == 'undefined' ) start = 0 ;
if ( typeof end == 'undefined' ) end = this . length ;
return iconv . decode ( this . slice ( start , end ) , encoding ) ;
}
original . SlowBufferWrite = SlowBuffer . prototype . write ;
SlowBuffer . prototype . write = function ( string , offset , length , encoding ) {
// Support both (string, offset, length, encoding)
// and the legacy (string, encoding, offset, length)
if ( isFinite ( offset ) ) {
if ( ! isFinite ( length ) ) {
encoding = length ;
length = undefined ;
}
} else { // legacy
var swap = encoding ;
encoding = offset ;
offset = length ;
length = swap ;
}
offset = + offset || 0 ;
var remaining = this . length - offset ;
if ( ! length ) {
length = remaining ;
} else {
length = + length ;
if ( length > remaining ) {
length = remaining ;
}
}
encoding = String ( encoding || 'utf8' ) . toLowerCase ( ) ;
// Use native conversion when possible
if ( Buffer . isNativeEncoding ( encoding ) )
return original . SlowBufferWrite . call ( this , string , offset , length , encoding ) ;
if ( string . length > 0 && ( length < 0 || offset < 0 ) )
throw new RangeError ( 'attempt to write beyond buffer bounds' ) ;
// Otherwise, use our encoding method.
var buf = iconv . encode ( string , encoding ) ;
if ( buf . length < length ) length = buf . length ;
buf . copy ( this , offset , 0 , length ) ;
return length ;
}
// -- Buffer ---------------------------------------------------------------
original . BufferIsEncoding = Buffer . isEncoding ;
Buffer . isEncoding = function ( encoding ) {
return Buffer . isNativeEncoding ( encoding ) || iconv . encodingExists ( encoding ) ;
}
original . BufferByteLength = Buffer . byteLength ;
Buffer . byteLength = SlowBuffer . byteLength = function ( str , encoding ) {
encoding = String ( encoding || 'utf8' ) . toLowerCase ( ) ;
// Use native conversion when possible
if ( Buffer . isNativeEncoding ( encoding ) )
return original . BufferByteLength . call ( this , str , encoding ) ;
// Slow, I know, but we don't have a better way yet.
return iconv . encode ( str , encoding ) . length ;
}
original . BufferToString = Buffer . prototype . toString ;
Buffer . prototype . toString = function ( encoding , start , end ) {
encoding = String ( encoding || 'utf8' ) . toLowerCase ( ) ;
// Use native conversion when possible
if ( Buffer . isNativeEncoding ( encoding ) )
return original . BufferToString . call ( this , encoding , start , end ) ;
// Otherwise, use our decoding method.
if ( typeof start == 'undefined' ) start = 0 ;
if ( typeof end == 'undefined' ) end = this . length ;
return iconv . decode ( this . slice ( start , end ) , encoding ) ;
}
original . BufferWrite = Buffer . prototype . write ;
Buffer . prototype . write = function ( string , offset , length , encoding ) {
var _offset = offset , _length = length , _encoding = encoding ;
// Support both (string, offset, length, encoding)
// and the legacy (string, encoding, offset, length)
if ( isFinite ( offset ) ) {
if ( ! isFinite ( length ) ) {
encoding = length ;
length = undefined ;
}
} else { // legacy
var swap = encoding ;
encoding = offset ;
offset = length ;
length = swap ;
}
encoding = String ( encoding || 'utf8' ) . toLowerCase ( ) ;
// Use native conversion when possible
if ( Buffer . isNativeEncoding ( encoding ) )
return original . BufferWrite . call ( this , string , _offset , _length , _encoding ) ;
offset = + offset || 0 ;
var remaining = this . length - offset ;
if ( ! length ) {
length = remaining ;
} else {
length = + length ;
if ( length > remaining ) {
length = remaining ;
}
}
if ( string . length > 0 && ( length < 0 || offset < 0 ) )
throw new RangeError ( 'attempt to write beyond buffer bounds' ) ;
// Otherwise, use our encoding method.
var buf = iconv . encode ( string , encoding ) ;
if ( buf . length < length ) length = buf . length ;
buf . copy ( this , offset , 0 , length ) ;
return length ;
// TODO: Set _charsWritten.
}
// -- Readable -------------------------------------------------------------
if ( iconv . supportsStreams ) {
var Readable = _ _webpack _require _ _ ( 413 ) . Readable ;
original . ReadableSetEncoding = Readable . prototype . setEncoding ;
Readable . prototype . setEncoding = function setEncoding ( enc , options ) {
// Use our own decoder, it has the same interface.
// We cannot use original function as it doesn't handle BOM-s.
this . _readableState . decoder = iconv . getDecoder ( enc , options ) ;
this . _readableState . encoding = enc ;
}
Readable . prototype . collect = iconv . _collect ;
}
}
// Remove iconv-lite Node primitive extensions.
iconv . undoExtendNodeEncodings = function undoExtendNodeEncodings ( ) {
if ( ! iconv . supportsNodeEncodingsExtension )
return ;
if ( ! original )
throw new Error ( "require('iconv-lite').undoExtendNodeEncodings(): Nothing to undo; extendNodeEncodings() is not called." )
delete Buffer . isNativeEncoding ;
var SlowBuffer = _ _webpack _require _ _ ( 407 ) . SlowBuffer ;
SlowBuffer . prototype . toString = original . SlowBufferToString ;
SlowBuffer . prototype . write = original . SlowBufferWrite ;
Buffer . isEncoding = original . BufferIsEncoding ;
Buffer . byteLength = original . BufferByteLength ;
Buffer . prototype . toString = original . BufferToString ;
Buffer . prototype . write = original . BufferWrite ;
if ( iconv . supportsStreams ) {
var Readable = _ _webpack _require _ _ ( 413 ) . Readable ;
Readable . prototype . setEncoding = original . ReadableSetEncoding ;
delete Readable . prototype . collect ;
}
original = undefined ;
}
2020-08-23 03:31:38 +02:00
}
2020-08-21 13:39:42 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 835 :
/***/ ( function ( module ) {
module . exports = require ( "url" ) ;
/***/ } ) ,
/***/ 842 :
2020-08-21 13:39:42 +02:00
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _createBinding = ( this && this . _ _createBinding ) || ( Object . create ? ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
Object . defineProperty ( o , k2 , { enumerable : true , get : function ( ) { return m [ k ] ; } } ) ;
} ) : ( function ( o , m , k , k2 ) {
if ( k2 === undefined ) k2 = k ;
o [ k2 ] = m [ k ] ;
} ) ) ;
var _ _setModuleDefault = ( this && this . _ _setModuleDefault ) || ( Object . create ? ( function ( o , v ) {
Object . defineProperty ( o , "default" , { enumerable : true , value : v } ) ;
} ) : function ( o , v ) {
o [ "default" ] = v ;
} ) ;
var _ _importStar = ( this && this . _ _importStar ) || function ( mod ) {
if ( mod && mod . _ _esModule ) return mod ;
var result = { } ;
if ( mod != null ) for ( var k in mod ) if ( k !== "default" && Object . hasOwnProperty . call ( mod , k ) ) _ _createBinding ( result , mod , k ) ;
_ _setModuleDefault ( result , mod ) ;
return result ;
} ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
2020-09-02 10:07:11 +02:00
exports . asyncForEach = exports . getInputList = exports . getArgs = exports . getInputs = exports . tmpDir = void 0 ;
const fs = _ _importStar ( _ _webpack _require _ _ ( 747 ) ) ;
const os = _ _importStar ( _ _webpack _require _ _ ( 87 ) ) ;
const path = _ _importStar ( _ _webpack _require _ _ ( 622 ) ) ;
2020-08-23 03:31:38 +02:00
const semver = _ _importStar ( _ _webpack _require _ _ ( 383 ) ) ;
2020-09-02 10:07:11 +02:00
const buildx = _ _importStar ( _ _webpack _require _ _ ( 295 ) ) ;
const core = _ _importStar ( _ _webpack _require _ _ ( 186 ) ) ;
const github = _ _importStar ( _ _webpack _require _ _ ( 438 ) ) ;
exports . tmpDir = fs . mkdtempSync ( path . join ( os . tmpdir ( ) , 'docker-build-push-' ) ) ;
2020-09-22 20:49:18 +02:00
const defaultContext = ` https://github.com/ ${ github . context . repo . owner } / ${ github . context . repo . repo } # ${ github . context . ref } ` ;
2020-09-02 10:07:11 +02:00
function getInputs ( ) {
2020-08-21 13:39:42 +02:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2020-09-02 10:07:11 +02:00
return {
2020-09-22 20:49:18 +02:00
context : core . getInput ( 'context' ) || defaultContext ,
2020-09-02 10:07:11 +02:00
file : core . getInput ( 'file' ) || 'Dockerfile' ,
buildArgs : yield getInputList ( 'build-args' ) ,
labels : yield getInputList ( 'labels' ) ,
tags : yield getInputList ( 'tags' ) ,
pull : /true/i . test ( core . getInput ( 'pull' ) ) ,
target : core . getInput ( 'target' ) ,
allow : yield getInputList ( 'allow' ) ,
noCache : /true/i . test ( core . getInput ( 'no-cache' ) ) ,
builder : core . getInput ( 'builder' ) ,
platforms : yield getInputList ( 'platforms' ) ,
load : /true/i . test ( core . getInput ( 'load' ) ) ,
push : /true/i . test ( core . getInput ( 'push' ) ) ,
outputs : yield getInputList ( 'outputs' , true ) ,
cacheFrom : yield getInputList ( 'cache-from' , true ) ,
cacheTo : yield getInputList ( 'cache-to' , true ) ,
2020-09-22 20:49:18 +02:00
secrets : yield getInputList ( 'secrets' , true ) ,
githubToken : core . getInput ( 'github-token' )
2020-09-02 10:07:11 +02:00
} ;
2020-08-21 13:39:42 +02:00
} ) ;
}
2020-09-02 10:07:11 +02:00
exports . getInputs = getInputs ;
function getArgs ( inputs , buildxVersion ) {
2020-08-21 13:39:42 +02:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2020-09-02 10:07:11 +02:00
let args = [ 'buildx' ] ;
args . push . apply ( args , yield getBuildArgs ( inputs , buildxVersion ) ) ;
args . push . apply ( args , yield getCommonArgs ( inputs ) ) ;
args . push ( inputs . context ) ;
return args ;
2020-08-21 13:39:42 +02:00
} ) ;
}
2020-09-02 10:07:11 +02:00
exports . getArgs = getArgs ;
function getBuildArgs ( inputs , buildxVersion ) {
2020-08-23 03:31:38 +02:00
return _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2020-09-02 10:07:11 +02:00
let args = [ 'build' ] ;
yield exports . asyncForEach ( inputs . buildArgs , ( buildArg ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
args . push ( '--build-arg' , buildArg ) ;
} ) ) ;
yield exports . asyncForEach ( inputs . labels , ( label ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
args . push ( '--label' , label ) ;
} ) ) ;
yield exports . asyncForEach ( inputs . tags , ( tag ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
args . push ( '--tag' , tag ) ;
} ) ) ;
if ( inputs . target ) {
args . push ( '--target' , inputs . target ) ;
}
if ( inputs . allow . length > 0 ) {
args . push ( '--allow' , inputs . allow . join ( ',' ) ) ;
}
if ( inputs . platforms . length > 0 ) {
args . push ( '--platform' , inputs . platforms . join ( ',' ) ) ;
}
if ( inputs . platforms . length == 0 || semver . satisfies ( buildxVersion , '>=0.4.2' ) ) {
args . push ( '--iidfile' , yield buildx . getImageIDFile ( ) ) ;
}
yield exports . asyncForEach ( inputs . outputs , ( output ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
args . push ( '--output' , output ) ;
} ) ) ;
yield exports . asyncForEach ( inputs . cacheFrom , ( cacheFrom ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
args . push ( '--cache-from' , cacheFrom ) ;
} ) ) ;
yield exports . asyncForEach ( inputs . cacheTo , ( cacheTo ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
args . push ( '--cache-to' , cacheTo ) ;
} ) ) ;
2020-09-22 20:49:18 +02:00
let hasGitAuthToken = false ;
2020-09-02 10:07:11 +02:00
yield exports . asyncForEach ( inputs . secrets , ( secret ) => _ _awaiter ( this , void 0 , void 0 , function * ( ) {
2020-09-22 20:49:18 +02:00
if ( secret . startsWith ( 'GIT_AUTH_TOKEN=' ) ) {
hasGitAuthToken = true ;
}
2020-09-02 10:07:11 +02:00
args . push ( '--secret' , yield buildx . getSecret ( secret ) ) ;
} ) ) ;
2020-09-22 20:49:18 +02:00
if ( inputs . githubToken && ! hasGitAuthToken && inputs . context == defaultContext ) {
args . push ( '--secret' , yield buildx . getSecret ( ` GIT_AUTH_TOKEN= ${ inputs . githubToken } ` ) ) ;
}
2020-09-02 10:07:11 +02:00
if ( inputs . file ) {
args . push ( '--file' , inputs . file ) ;
}
return args ;
2020-08-23 03:31:38 +02:00
} ) ;
}
2020-09-02 10:07:11 +02:00
/**
 * Builds the buildx CLI flags common to every invocation from the action
 * inputs: cache busting, builder selection, and the pull/load/push toggles.
 * Returns a promise resolving to an array of argv fragments.
 */
function getCommonArgs(inputs) {
    return __awaiter(this, void 0, void 0, function* () {
        const args = [];
        if (inputs.noCache) {
            args.push('--no-cache');
        }
        if (inputs.builder) {
            args.push('--builder', inputs.builder);
        }
        // Boolean toggles whose flag name matches the input property name.
        for (const flag of ['pull', 'load', 'push']) {
            if (inputs[flag]) {
                args.push(`--${flag}`);
            }
        }
        return args;
    });
}
2020-09-02 10:07:11 +02:00
/**
 * Reads an action input as a list: one item per line, and (unless
 * ignoreComma is set) each line is further split on commas. Empty pieces
 * are discarded and every item is trimmed. Returns [] for an unset input.
 */
function getInputList(name, ignoreComma) {
    return __awaiter(this, void 0, void 0, function* () {
        const raw = core.getInput(name);
        if (raw == '') {
            return [];
        }
        const result = [];
        for (const line of raw.split(/\r?\n/).filter(line => line)) {
            const pieces = ignoreComma ? [line] : line.split(',').filter(piece => piece);
            for (const piece of pieces) {
                result.push(piece.trim());
            }
        }
        return result;
    });
}
2020-09-02 10:07:11 +02:00
exports . getInputList = getInputList ;
exports . asyncForEach = ( array , callback ) => _ _awaiter ( void 0 , void 0 , void 0 , function * ( ) {
for ( let index = 0 ; index < array . length ; index ++ ) {
yield callback ( array [ index ] , index , array ) ;
}
} ) ;
//# sourceMappingURL=context.js.map
/***/ } ) ,
/***/ 848 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
const parse = __webpack_require__(925)

// semver.clean(): normalize a version string by dropping surrounding
// whitespace and any leading '=' or 'v' characters, returning the parsed
// canonical version, or null when it does not parse as semver.
function clean(version, options) {
  const normalized = version.trim().replace(/^[=v]+/, '')
  const parsed = parse(normalized, options)
  return parsed ? parsed.version : null
}

module.exports = clean
2020-08-21 13:39:42 +02:00
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 863 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
// fs.realpath wrapper: falls back to the old userland implementation when
// the native one fails in ways the userland version handles (see newError).
module.exports = realpath
realpath.realpath = realpath
realpath.sync = realpathSync
realpath.realpathSync = realpathSync
realpath.monkeypatch = monkeypatch
realpath.unmonkeypatch = unmonkeypatch

var fs = __webpack_require__(747)
// Originals captured at load time so monkeypatch/unmonkeypatch can swap them.
var origRealpath = fs.realpath
var origRealpathSync = fs.realpathSync

var version = process.version
// True on Node 0.x–5.x; on those versions the native realpath is used
// unconditionally (presumably it cannot produce the errors below — TODO confirm).
var ok = /^v[0-5]\./.test(version)
var old = __webpack_require__(734)

// Recognizes native realpath failures that the old implementation can
// work around: symlink loops and overly long / memory-exhausting paths.
function newError (er) {
  return er && er.syscall === 'realpath' && (
    er.code === 'ELOOP' ||
    er.code === 'ENOMEM' ||
    er.code === 'ENAMETOOLONG'
  )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Async realpath. On old Node (ok === true) the native implementation is
// used directly; otherwise failures matching newError() retry with the
// old userland implementation.
function realpath (p, cache, cb) {
  if (ok) {
    return origRealpath(p, cache, cb)
  }

  // `cache` is optional; shift arguments when only a callback was given.
  if (typeof cache === 'function') {
    cb = cache
    cache = null
  }
  origRealpath(p, cache, function (er, result) {
    if (newError(er)) {
      old.realpath(p, cache, cb)
    } else {
      cb(er, result)
    }
  })
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Sync variant of realpath() above, with the same fallback strategy.
function realpathSync (p, cache) {
  if (ok) {
    // Old Node: use the native implementation unconditionally.
    return origRealpathSync(p, cache)
  }

  try {
    return origRealpathSync(p, cache)
  } catch (er) {
    if (newError(er)) {
      // Recoverable native failure — retry with the old implementation.
      return old.realpathSync(p, cache)
    } else {
      throw er
    }
  }
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Replace fs.realpath / fs.realpathSync globally with the patched versions.
function monkeypatch () {
  fs.realpath = realpath
  fs.realpathSync = realpathSync
}

// Restore the original fs implementations captured at module load time.
function unmonkeypatch () {
  fs.realpath = origRealpath
  fs.realpathSync = origRealpathSync
}
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 866 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
const SemVer = __webpack_require__(88)

// semver.patch(): parse the version and return its patch component.
// Throws (via the SemVer constructor) when the version is invalid.
function patch (a, loose) {
  return new SemVer(a, loose).patch
}

module.exports = patch
2020-08-23 03:31:38 +02:00
2020-08-21 13:39:42 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 886 :
/***/ ( function ( module ) {
2020-08-21 13:39:42 +02:00
"use strict" ;
2020-09-02 10:07:11 +02:00
/ * !
* is - plain - object < https : //github.com/jonschlinkert/is-plain-object>
2020-08-21 13:39:42 +02:00
*
2020-09-02 10:07:11 +02:00
* Copyright ( c ) 2014 - 2017 , Jon Schlinkert .
* Released under the MIT License .
2020-08-21 13:39:42 +02:00
* /
2020-09-02 10:07:11 +02:00
// True when Object.prototype.toString reports a plain `[object Object]`.
function isObject(o) {
  return Object.prototype.toString.call(o) === '[object Object]';
}

// True only for "plain" objects: object literals, `new Object()`, and
// prototype-less objects. Rejects arrays, class instances, and objects
// whose constructor prototype lacks Object's own `isPrototypeOf`.
function isPlainObject(o) {
  if (!isObject(o)) return false;

  // Objects with no constructor (e.g. Object.create(null)) count as plain.
  var ctor = o.constructor;
  if (ctor === undefined) return true;

  // The constructor's prototype must itself look like a plain object...
  var prot = ctor.prototype;
  if (!isObject(prot)) return false;

  // ...and must carry the Object-specific `isPrototypeOf` method.
  if (prot.hasOwnProperty('isPrototypeOf') === false) {
    return false;
  }

  // Most likely a plain Object.
  return true;
}
2020-09-02 10:07:11 +02:00
module . exports = isPlainObject ;
2020-08-21 13:39:42 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 891 :
2020-08-21 13:39:42 +02:00
/***/ ( function ( module ) {
2020-09-02 10:07:11 +02:00
module . exports = function ( xs , fn ) {
var res = [ ] ;
for ( var i = 0 ; i < xs . length ; i ++ ) {
var x = fn ( xs [ i ] , i ) ;
if ( isArray ( x ) ) res . push . apply ( res , x ) ;
else res . push ( x ) ;
}
return res ;
} ;
var isArray = Array . isArray || function ( xs ) {
return Object . prototype . toString . call ( xs ) === '[object Array]' ;
} ;
2020-08-21 13:39:42 +02:00
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 898 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
const compare = __webpack_require__(309)

// semver.eq(): true when the two versions compare as equal.
function eq (a, b, loose) {
  return compare(a, b, loose) === 0
}

module.exports = eq
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 900 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
const SemVer = __webpack_require__(88)

// semver.inc(): bump `version` by `release` (e.g. 'patch', 'preminor') and
// return the resulting version string, or null when `version` is invalid.
// `options` may be omitted: a string in its position is treated as the
// prerelease identifier instead.
const inc = (version, release, options, identifier) => {
  if (typeof (options) === 'string') {
    // called as inc(version, release, identifier)
    identifier = options
    options = undefined
  }

  try {
    return new SemVer(version, options).inc(release, identifier).version
  } catch (er) {
    // Invalid version or release type — signalled as null, not thrown.
    return null
  }
}
module.exports = inc
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 914 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
"use strict" ;
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// TypeScript-emitted ES-module interop helpers (copy a binding, set the
// `default` property, and namespace-import a CommonJS module).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;
const httpClient = __importStar(__webpack_require__(936));
// Resolves the Octokit auth string: exactly one of `token` or
// `options.auth` must be provided, otherwise an Error is thrown.
function getAuthString(token, options) {
    if (!token && !options.auth) {
        throw new Error('Parameter token or opts.auth is required');
    }
    else if (token && options.auth) {
        throw new Error('Parameters token and opts.auth may not both be specified');
    }
    return typeof options.auth === 'string' ? options.auth : `token ${token}`;
}
exports.getAuthString = getAuthString;
// Returns an http(s) agent for the destination, honoring proxy settings.
function getProxyAgent(destinationUrl) {
    const hc = new httpClient.HttpClient();
    return hc.getAgent(destinationUrl);
}
exports.getProxyAgent = getProxyAgent;
// GitHub API base URL; GITHUB_API_URL overrides for GitHub Enterprise.
function getApiBaseUrl() {
    return process.env['GITHUB_API_URL'] || 'https://api.github.com';
}
exports.getApiBaseUrl = getApiBaseUrl;
//# sourceMappingURL=utils.js.map
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 925 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
const { MAX_LENGTH } = __webpack_require__(293)
const { re, t } = __webpack_require__(523)

const SemVer = __webpack_require__(88)

// semver.parse(): parse `version` into a SemVer instance, or return null
// when it is not valid semver. `options` may be an options object or a
// bare loose flag; an existing SemVer instance is returned unchanged.
const parse = (version, options) => {
  // Normalize a bare boolean/absent options argument into an object.
  if (!options || typeof options !== 'object') {
    options = {
      loose: !!options,
      includePrerelease: false
    }
  }

  if (version instanceof SemVer) {
    return version
  }

  if (typeof version !== 'string') {
    return null
  }

  // Reject absurdly long inputs before running the regex (DoS guard).
  if (version.length > MAX_LENGTH) {
    return null
  }

  // Cheap pre-check with the appropriate (loose/strict) pattern.
  const r = options.loose ? re[t.LOOSE] : re[t.FULL]
  if (!r.test(version)) {
    return null
  }

  try {
    return new SemVer(version, options)
  } catch (er) {
    return null
  }
}

module.exports = parse
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 932 :
/***/ ( function ( _ _unusedmodule , exports ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
"use strict" ;
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
Object . defineProperty ( exports , '__esModule' , { value : true } ) ;
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/**
 * Error subclass used to signal use of a deprecated API.
 * `name` is set to 'Deprecation' so callers can filter on it.
 */
class Deprecation extends Error {
  constructor(message) {
    super(message); // Maintains proper stack trace (only available on V8)

    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      // Exclude this constructor frame from the captured stack.
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = 'Deprecation';
  }
}
exports . Deprecation = Deprecation ;
2020-08-23 03:31:38 +02:00
2020-08-21 13:39:42 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 936 :
2020-08-21 13:39:42 +02:00
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
2020-09-02 10:07:11 +02:00
const url = _ _webpack _require _ _ ( 835 ) ;
const http = _ _webpack _require _ _ ( 605 ) ;
const https = _ _webpack _require _ _ ( 211 ) ;
const pm = _ _webpack _require _ _ ( 443 ) ;
let tunnel ;
var HttpCodes ;
( function ( HttpCodes ) {
HttpCodes [ HttpCodes [ "OK" ] = 200 ] = "OK" ;
HttpCodes [ HttpCodes [ "MultipleChoices" ] = 300 ] = "MultipleChoices" ;
HttpCodes [ HttpCodes [ "MovedPermanently" ] = 301 ] = "MovedPermanently" ;
HttpCodes [ HttpCodes [ "ResourceMoved" ] = 302 ] = "ResourceMoved" ;
HttpCodes [ HttpCodes [ "SeeOther" ] = 303 ] = "SeeOther" ;
HttpCodes [ HttpCodes [ "NotModified" ] = 304 ] = "NotModified" ;
HttpCodes [ HttpCodes [ "UseProxy" ] = 305 ] = "UseProxy" ;
HttpCodes [ HttpCodes [ "SwitchProxy" ] = 306 ] = "SwitchProxy" ;
HttpCodes [ HttpCodes [ "TemporaryRedirect" ] = 307 ] = "TemporaryRedirect" ;
HttpCodes [ HttpCodes [ "PermanentRedirect" ] = 308 ] = "PermanentRedirect" ;
HttpCodes [ HttpCodes [ "BadRequest" ] = 400 ] = "BadRequest" ;
HttpCodes [ HttpCodes [ "Unauthorized" ] = 401 ] = "Unauthorized" ;
HttpCodes [ HttpCodes [ "PaymentRequired" ] = 402 ] = "PaymentRequired" ;
HttpCodes [ HttpCodes [ "Forbidden" ] = 403 ] = "Forbidden" ;
HttpCodes [ HttpCodes [ "NotFound" ] = 404 ] = "NotFound" ;
HttpCodes [ HttpCodes [ "MethodNotAllowed" ] = 405 ] = "MethodNotAllowed" ;
HttpCodes [ HttpCodes [ "NotAcceptable" ] = 406 ] = "NotAcceptable" ;
HttpCodes [ HttpCodes [ "ProxyAuthenticationRequired" ] = 407 ] = "ProxyAuthenticationRequired" ;
HttpCodes [ HttpCodes [ "RequestTimeout" ] = 408 ] = "RequestTimeout" ;
HttpCodes [ HttpCodes [ "Conflict" ] = 409 ] = "Conflict" ;
HttpCodes [ HttpCodes [ "Gone" ] = 410 ] = "Gone" ;
HttpCodes [ HttpCodes [ "TooManyRequests" ] = 429 ] = "TooManyRequests" ;
HttpCodes [ HttpCodes [ "InternalServerError" ] = 500 ] = "InternalServerError" ;
HttpCodes [ HttpCodes [ "NotImplemented" ] = 501 ] = "NotImplemented" ;
HttpCodes [ HttpCodes [ "BadGateway" ] = 502 ] = "BadGateway" ;
HttpCodes [ HttpCodes [ "ServiceUnavailable" ] = 503 ] = "ServiceUnavailable" ;
HttpCodes [ HttpCodes [ "GatewayTimeout" ] = 504 ] = "GatewayTimeout" ;
} ) ( HttpCodes = exports . HttpCodes || ( exports . HttpCodes = { } ) ) ;
var Headers ;
( function ( Headers ) {
Headers [ "Accept" ] = "accept" ;
Headers [ "ContentType" ] = "content-type" ;
} ) ( Headers = exports . Headers || ( exports . Headers = { } ) ) ;
var MediaTypes ;
( function ( MediaTypes ) {
MediaTypes [ "ApplicationJson" ] = "application/json" ;
} ) ( MediaTypes = exports . MediaTypes || ( exports . MediaTypes = { } ) ) ;
2020-08-21 13:39:42 +02:00
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
function getProxyUrl(serverUrl) {
    let proxyUrl = pm.getProxyUrl(url.parse(serverUrl));
    // Empty string when no proxy applies to this destination.
    return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
// Status codes that trigger automatic redirect following.
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
// Status codes considered transient and eligible for retry.
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
// Only idempotent read verbs are ever retried.
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
// Exponential backoff: delay = TimeSlice * 2^retry, capped at 2^Ceiling.
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
/**
 * Thin wrapper around an incoming response message that can buffer the
 * full body into a string via readBody().
 */
class HttpClientResponse {
    constructor(message) {
        this.message = message;
    }
    readBody() {
        // Collect 'data' chunks and resolve with the decoded string on 'end'.
        return new Promise((resolve) => {
            const chunks = [];
            this.message.on('data', (chunk) => {
                chunks.push(chunk);
            });
            this.message.on('end', () => {
                resolve(Buffer.concat(chunks).toString());
            });
        });
    }
}
2020-09-02 10:07:11 +02:00
exports . HttpClientResponse = HttpClientResponse ;
function isHttps ( requestUrl ) {
let parsedUrl = url . parse ( requestUrl ) ;
return parsedUrl . protocol === 'https:' ;
}
exports . isHttps = isHttps ;
/**
 * HTTP client with optional redirect following, retry of transient
 * failures on idempotent verbs, proxy tunneling, keep-alive agents, and
 * pluggable authentication handlers.
 */
class HttpClient {
    /**
     * @param userAgent      value for the 'user-agent' header (optional)
     * @param handlers       auth handlers given a chance to mutate each request
     * @param requestOptions defaults: ssl, redirects, retries, keep-alive, timeout
     */
    constructor(userAgent, handlers, requestOptions) {
        this._ignoreSslError = false;
        this._allowRedirects = true;
        this._allowRedirectDowngrade = false;
        this._maxRedirects = 50;
        this._allowRetries = false;
        this._maxRetries = 1;
        this._keepAlive = false;
        this._disposed = false;
        this.userAgent = userAgent;
        this.handlers = handlers || [];
        this.requestOptions = requestOptions;
        if (requestOptions) {
            // Each option overrides its default only when explicitly set
            // (!= null accepts false/0 but rejects null/undefined).
            if (requestOptions.ignoreSslError != null) {
                this._ignoreSslError = requestOptions.ignoreSslError;
            }
            this._socketTimeout = requestOptions.socketTimeout;
            if (requestOptions.allowRedirects != null) {
                this._allowRedirects = requestOptions.allowRedirects;
            }
            if (requestOptions.allowRedirectDowngrade != null) {
                this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
            }
            if (requestOptions.maxRedirects != null) {
                this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
            }
            if (requestOptions.keepAlive != null) {
                this._keepAlive = requestOptions.keepAlive;
            }
            if (requestOptions.allowRetries != null) {
                this._allowRetries = requestOptions.allowRetries;
            }
            if (requestOptions.maxRetries != null) {
                this._maxRetries = requestOptions.maxRetries;
            }
        }
    }
    // --- Verb convenience helpers; all delegate to request(). ---
    options(requestUrl, additionalHeaders) {
        return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
    }
    get(requestUrl, additionalHeaders) {
        return this.request('GET', requestUrl, null, additionalHeaders || {});
    }
    del(requestUrl, additionalHeaders) {
        return this.request('DELETE', requestUrl, null, additionalHeaders || {});
    }
    post(requestUrl, data, additionalHeaders) {
        return this.request('POST', requestUrl, data, additionalHeaders || {});
    }
    patch(requestUrl, data, additionalHeaders) {
        return this.request('PATCH', requestUrl, data, additionalHeaders || {});
    }
    put(requestUrl, data, additionalHeaders) {
        return this.request('PUT', requestUrl, data, additionalHeaders || {});
    }
    head(requestUrl, additionalHeaders) {
        return this.request('HEAD', requestUrl, null, additionalHeaders || {});
    }
    sendStream(verb, requestUrl, stream, additionalHeaders) {
        return this.request(verb, requestUrl, stream, additionalHeaders);
    }
    /**
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    async getJson(requestUrl, additionalHeaders = {}) {
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        let res = await this.get(requestUrl, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async postJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.post(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async putJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.put(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    async patchJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.patch(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
     * Prefer get, del, post and patch
     */
    async request(verb, requestUrl, data, headers) {
        if (this._disposed) {
            throw new Error('Client has already been disposed.');
        }
        let parsedUrl = url.parse(requestUrl);
        let info = this._prepareRequest(verb, parsedUrl, headers);
        // Only perform retries on reads since writes may not be idempotent.
        let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
            ? this._maxRetries + 1
            : 1;
        let numTries = 0;
        let response;
        while (numTries < maxTries) {
            response = await this.requestRaw(info, data);
            // Check if it's an authentication challenge
            if (response &&
                response.message &&
                response.message.statusCode === HttpCodes.Unauthorized) {
                let authenticationHandler;
                for (let i = 0; i < this.handlers.length; i++) {
                    if (this.handlers[i].canHandleAuthentication(response)) {
                        authenticationHandler = this.handlers[i];
                        break;
                    }
                }
                if (authenticationHandler) {
                    return authenticationHandler.handleAuthentication(this, info, data);
                }
                else {
                    // We have received an unauthorized response but have no handlers to handle it.
                    // Let the response return to the caller.
                    return response;
                }
            }
            let redirectsRemaining = this._maxRedirects;
            while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
                this._allowRedirects &&
                redirectsRemaining > 0) {
                const redirectUrl = response.message.headers['location'];
                if (!redirectUrl) {
                    // if there's no location to redirect to, we won't
                    break;
                }
                let parsedRedirectUrl = url.parse(redirectUrl);
                if (parsedUrl.protocol == 'https:' &&
                    parsedUrl.protocol != parsedRedirectUrl.protocol &&
                    !this._allowRedirectDowngrade) {
                    throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
                }
                // we need to finish reading the response before reassigning response
                // which will leak the open socket.
                await response.readBody();
                // strip authorization header if redirected to a different hostname
                if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
                    for (let header in headers) {
                        // header names are case insensitive
                        if (header.toLowerCase() === 'authorization') {
                            delete headers[header];
                        }
                    }
                }
                // let's make the request with the new redirectUrl
                info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                response = await this.requestRaw(info, data);
                redirectsRemaining--;
            }
            if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
                // If not a retry code, return immediately instead of retrying
                return response;
            }
            numTries += 1;
            if (numTries < maxTries) {
                // Drain the failed response before retrying, then back off.
                await response.readBody();
                await this._performExponentialBackoff(numTries);
            }
        }
        return response;
    }
    /**
     * Needs to be called if keepAlive is set to true in request options.
     */
    dispose() {
        if (this._agent) {
            this._agent.destroy();
        }
        this._disposed = true;
    }
    /**
     * Raw request.
     * @param info
     * @param data
     */
    requestRaw(info, data) {
        return new Promise((resolve, reject) => {
            let callbackForResult = function (err, res) {
                if (err) {
                    reject(err);
                }
                resolve(res);
            };
            this.requestRawWithCallback(info, data, callbackForResult);
        });
    }
    /**
     * Raw request with callback.
     * @param info
     * @param data
     * @param onResult
     */
    requestRawWithCallback(info, data, onResult) {
        let socket;
        if (typeof data === 'string') {
            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
        }
        // Guard so onResult fires at most once (error + timeout can race).
        let callbackCalled = false;
        let handleResult = (err, res) => {
            if (!callbackCalled) {
                callbackCalled = true;
                onResult(err, res);
            }
        };
        let req = info.httpModule.request(info.options, (msg) => {
            let res = new HttpClientResponse(msg);
            handleResult(null, res);
        });
        req.on('socket', sock => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
        req.setTimeout(this._socketTimeout || 3 * 60000, () => {
            if (socket) {
                socket.end();
            }
            handleResult(new Error('Request timeout: ' + info.options.path), null);
        });
        req.on('error', function (err) {
            // err has statusCode property
            // res should have headers
            handleResult(err, null);
        });
        if (data && typeof data === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof data !== 'string') {
            // Stream body: end the request when the source stream closes.
            data.on('close', function () {
                req.end();
            });
            data.pipe(req);
        }
        else {
            req.end();
        }
    }
    /**
     * Gets an http agent. This function is useful when you need an http agent that handles
     * routing through a proxy server - depending upon the url and proxy environment variables.
     * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
     */
    getAgent(serverUrl) {
        let parsedUrl = url.parse(serverUrl);
        return this._getAgent(parsedUrl);
    }
    // Builds the {parsedUrl, httpModule, options} bundle for one request,
    // merging headers and letting handlers decorate the options.
    _prepareRequest(method, requestUrl, headers) {
        const info = {};
        info.parsedUrl = requestUrl;
        const usingSsl = info.parsedUrl.protocol === 'https:';
        info.httpModule = usingSsl ? https : http;
        const defaultPort = usingSsl ? 443 : 80;
        info.options = {};
        info.options.host = info.parsedUrl.hostname;
        info.options.port = info.parsedUrl.port
            ? parseInt(info.parsedUrl.port)
            : defaultPort;
        info.options.path =
            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.method = method;
        info.options.headers = this._mergeHeaders(headers);
        if (this.userAgent != null) {
            info.options.headers['user-agent'] = this.userAgent;
        }
        info.options.agent = this._getAgent(info.parsedUrl);
        // gives handlers an opportunity to participate
        if (this.handlers) {
            this.handlers.forEach(handler => {
                handler.prepareRequest(info.options);
            });
        }
        return info;
    }
    // Merge default headers with per-request headers; keys lowercased so
    // later (per-request) values win case-insensitively.
    _mergeHeaders(headers) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        if (this.requestOptions && this.requestOptions.headers) {
            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
        }
        return lowercaseKeys(headers || {});
    }
    // Precedence: per-call header, then client default header, then _default.
    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        let clientHeader;
        if (this.requestOptions && this.requestOptions.headers) {
            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
        }
        return additionalHeaders[header] || clientHeader || _default;
    }
    // Selects/creates the agent for a URL: cached keep-alive agent, proxy
    // tunnel agent, fresh keep-alive agent, or the global agent.
    _getAgent(parsedUrl) {
        let agent;
        let proxyUrl = pm.getProxyUrl(parsedUrl);
        let useProxy = proxyUrl && proxyUrl.hostname;
        if (this._keepAlive && useProxy) {
            agent = this._proxyAgent;
        }
        if (this._keepAlive && !useProxy) {
            agent = this._agent;
        }
        // if agent is already assigned use that agent.
        if (!!agent) {
            return agent;
        }
        const usingSsl = parsedUrl.protocol === 'https:';
        let maxSockets = 100;
        if (!!this.requestOptions) {
            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
        }
        if (useProxy) {
            // If using proxy, need tunnel
            if (!tunnel) {
                tunnel = __webpack_require__(294);
            }
            const agentOptions = {
                maxSockets: maxSockets,
                keepAlive: this._keepAlive,
                proxy: {
                    proxyAuth: proxyUrl.auth,
                    host: proxyUrl.hostname,
                    port: proxyUrl.port
                }
            };
            let tunnelAgent;
            const overHttps = proxyUrl.protocol === 'https:';
            if (usingSsl) {
                tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
            }
            else {
                tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
            }
            agent = tunnelAgent(agentOptions);
            this._proxyAgent = agent;
        }
        // if reusing agent across request and tunneling agent isn't assigned create a new agent
        if (this._keepAlive && !agent) {
            const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
            this._agent = agent;
        }
        // if not using private agent and tunnel agent isn't setup then use global agent
        if (!agent) {
            agent = usingSsl ? https.globalAgent : http.globalAgent;
        }
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
        }
        return agent;
    }
    // Sleep for TimeSlice * 2^retryNumber ms, retryNumber capped at Ceiling.
    _performExponentialBackoff(retryNumber) {
        retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
        const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
        return new Promise(resolve => setTimeout(() => resolve(), ms));
    }
    // JSON.parse reviver that revives ISO-like date strings into Date objects.
    static dateTimeDeserializer(key, value) {
        if (typeof value === 'string') {
            let a = new Date(value);
            if (!isNaN(a.valueOf())) {
                return a;
            }
        }
        return value;
    }
    // Reads and JSON-decodes a response; resolves {statusCode, result, headers}
    // (result null on 404 or non-JSON body), rejects for other codes > 299.
    async _processResponse(res, options) {
        return new Promise(async (resolve, reject) => {
            const statusCode = res.message.statusCode;
            const response = {
                statusCode: statusCode,
                result: null,
                headers: {}
            };
            // not found leads to null obj returned
            // NOTE(review): resolve() here does not return — execution continues
            // and may settle the promise again later (second settle is a no-op).
            if (statusCode == HttpCodes.NotFound) {
                resolve(response);
            }
            let obj;
            let contents;
            // get the result from the body
            try {
                contents = await res.readBody();
                if (contents && contents.length > 0) {
                    if (options && options.deserializeDates) {
                        obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
                    }
                    else {
                        obj = JSON.parse(contents);
                    }
                    response.result = obj;
                }
                response.headers = res.message.headers;
            }
            catch (err) {
                // Invalid resource (contents not json); leaving result obj null
            }
            // note that 3xx redirects are handled by the http layer.
            if (statusCode > 299) {
                let msg;
                // if exception/error in body, attempt to get better error
                if (obj && obj.message) {
                    msg = obj.message;
                }
                else if (contents && contents.length > 0) {
                    // it may be the case that the exception is in the body message as string
                    msg = contents;
                }
                else {
                    msg = 'Failed request: (' + statusCode + ')';
                }
                let err = new Error(msg);
                // attach statusCode and body obj (if available) to the error object
                err['statusCode'] = statusCode;
                if (response.result) {
                    err['result'] = response.result;
                }
                reject(err);
            }
            else {
                resolve(response);
            }
        });
    }
}
2020-09-02 10:07:11 +02:00
exports . HttpClient = HttpClient ;
2020-08-23 03:31:38 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 937 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
"use strict" ;
/* eslint-disable node/no-deprecated-api */
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
var buffer = _ _webpack _require _ _ ( 407 )
var Buffer = buffer . Buffer
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
var safer = { }
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
var key
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
for ( key in buffer ) {
if ( ! buffer . hasOwnProperty ( key ) ) continue
if ( key === 'SlowBuffer' || key === 'Buffer' ) continue
safer [ key ] = buffer [ key ]
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
var Safer = safer . Buffer = { }
for ( key in Buffer ) {
if ( ! Buffer . hasOwnProperty ( key ) ) continue
if ( key === 'allocUnsafe' || key === 'allocUnsafeSlow' ) continue
Safer [ key ] = Buffer [ key ]
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
safer . Buffer . prototype = Buffer . prototype
2020-08-21 13:39:42 +02:00
2020-09-02 10:07:11 +02:00
if ( ! Safer . from || Safer . from === Uint8Array . from ) {
Safer . from = function ( value , encodingOrOffset , length ) {
if ( typeof value === 'number' ) {
throw new TypeError ( 'The "value" argument must not be of type number. Received type ' + typeof value )
}
if ( value && typeof value . length === 'undefined' ) {
throw new TypeError ( 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type ' + typeof value )
}
return Buffer ( value , encodingOrOffset , length )
}
}
2020-08-21 13:39:42 +02:00
2020-09-02 10:07:11 +02:00
if ( ! Safer . alloc ) {
Safer . alloc = function ( size , fill , encoding ) {
if ( typeof size !== 'number' ) {
throw new TypeError ( 'The "size" argument must be of type number. Received type ' + typeof size )
}
if ( size < 0 || size >= 2 * ( 1 << 30 ) ) {
throw new RangeError ( 'The value "' + size + '" is invalid for option "size"' )
}
var buf = Buffer ( size )
if ( ! fill || fill . length === 0 ) {
buf . fill ( 0 )
} else if ( typeof encoding === 'string' ) {
buf . fill ( fill , encoding )
} else {
buf . fill ( fill )
}
return buf
}
2020-08-21 13:39:42 +02:00
}
2020-09-02 10:07:11 +02:00
if ( ! safer . kStringMaxLength ) {
try {
safer . kStringMaxLength = process . binding ( 'buffer' ) . kStringMaxLength
} catch ( e ) {
// we can't determine kStringMaxLength in environments where process.binding
// is unsupported, so let's not set it
}
}
2020-08-21 13:39:42 +02:00
2020-09-02 10:07:11 +02:00
if ( ! safer . constants ) {
safer . constants = {
MAX _LENGTH : safer . kMaxLength
}
if ( safer . kStringMaxLength ) {
safer . constants . MAX _STRING _LENGTH = safer . kStringMaxLength
}
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
module . exports = safer
2020-08-21 13:39:42 +02:00
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 940 :
/***/ ( function ( module ) {
2020-08-21 13:39:42 +02:00
2020-09-02 10:07:11 +02:00
// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
module . exports = wrappy
/**
 * Wraps a wrapper-producing function so that own properties survive:
 * `fn`'s own keys are copied onto the returned wrapper, and when the
 * wrapper's result is a new function, the last argument's own keys are
 * copied onto that result.
 * Fix: removed stray VCS-timestamp lines that had corrupted the body.
 * @param {Function} fn - function that receives a callback and returns a
 *   (presumably different) callback
 * @param {Function} [cb] - shorthand: wrappy(fn, cb) === wrappy(fn)(cb)
 * @throws {TypeError} when fn is not a function
 */
function wrappy (fn, cb) {
  if (fn && cb) return wrappy(fn)(cb)

  if (typeof fn !== 'function')
    throw new TypeError('need wrapper function')

  // Retain decorations placed on fn itself.
  Object.keys(fn).forEach(function (k) {
    wrapper[k] = fn[k]
  })

  return wrapper

  function wrapper () {
    var args = new Array(arguments.length)
    for (var i = 0; i < args.length; i++) {
      args[i] = arguments[i]
    }
    var ret = fn.apply(this, args)
    var cb = args[args.length - 1]
    // If fn produced a new callback, carry the original callback's
    // own properties over so decorations are not lost.
    if (typeof ret === 'function' && ret !== cb) {
      Object.keys(cb).forEach(function (k) {
        ret[k] = cb[k]
      })
    }
    return ret
  }
}
2020-08-21 13:39:42 +02:00
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ } ) ,
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ 957 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Approach:
//
// 1. Get the minimatch set
// 2. For each pattern in the set, PROCESS(pattern, false)
// 3. Store matches per-set, then uniq them
//
// PROCESS(pattern, inGlobStar)
// Get the first [n] items from pattern that are all strings
// Join these together. This is PREFIX.
// If there is no more remaining, then stat(PREFIX) and
// add to matches if it succeeds. END.
//
// If inGlobStar and PREFIX is symlink and points to dir
// set ENTRIES = []
// else readdir(PREFIX) as ENTRIES
// If fail, END
//
// with ENTRIES
// If pattern[n] is GLOBSTAR
// // handle the case where the globstar match is empty
// // by pruning it out, and testing the resulting pattern
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
// // handle other cases.
// for ENTRY in ENTRIES (not dotfiles)
// // attach globstar + tail onto the entry
// // Mark that this entry is a globstar match
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
//
// else // not globstar
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
// Test ENTRY against pattern[n]
// If fails, continue
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
//
// Caveat:
// Cache all stats and readdirs results to minimize syscall. Since all
// we ever care about is existence and directory-ness, we can just keep
// `true` for files, and [children,...] for directories, or `false` for
// things that don't exist.
module . exports = glob
var fs = _ _webpack _require _ _ ( 747 )
var rp = _ _webpack _require _ _ ( 863 )
var minimatch = _ _webpack _require _ _ ( 973 )
var Minimatch = minimatch . Minimatch
var inherits = _ _webpack _require _ _ ( 124 )
var EE = _ _webpack _require _ _ ( 614 ) . EventEmitter
var path = _ _webpack _require _ _ ( 622 )
var assert = _ _webpack _require _ _ ( 357 )
var isAbsolute = _ _webpack _require _ _ ( 714 )
var globSync = _ _webpack _require _ _ ( 10 )
var common = _ _webpack _require _ _ ( 625 )
var alphasort = common . alphasort
var alphasorti = common . alphasorti
var setopts = common . setopts
var ownProp = common . ownProp
var inflight = _ _webpack _require _ _ ( 492 )
var util = _ _webpack _require _ _ ( 669 )
var childrenIgnored = common . childrenIgnored
var isIgnored = common . isIgnored
var once = _ _webpack _require _ _ ( 223 )
/**
 * Async glob entry point: glob(pattern, [options], cb).
 * With options.sync set it delegates to globSync (a callback is then an
 * error); otherwise it constructs a Glob, which drives the callback.
 * Fix: removed stray VCS-timestamp lines that had corrupted the body.
 */
function glob (pattern, options, cb) {
  if (typeof options === 'function') {
    cb = options
    options = {}
  }
  if (!options) options = {}

  if (options.sync) {
    if (cb)
      throw new TypeError('callback provided to sync glob')
    return globSync(pattern, options)
  }

  return new Glob(pattern, options, cb)
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Public API aliases on the exported function.
// Fix: removed stray VCS-timestamp lines interleaved with these statements.
glob.sync = globSync
var GlobSync = glob.GlobSync = globSync.GlobSync

// old api surface
glob.glob = glob
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/**
 * Shallow-copies own enumerable keys of `add` onto `origin`.
 * A null or non-object `add` is ignored.
 * Fix: removed stray VCS-timestamp lines that had corrupted the body.
 * @returns {object} origin (mutated in place)
 */
function extend (origin, add) {
  if (add === null || typeof add !== 'object') {
    return origin
  }

  var keys = Object.keys(add)
  var i = keys.length
  while (i--) {
    origin[keys[i]] = add[keys[i]]
  }
  return origin
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/**
 * Returns true when `pattern` contains glob magic (brace expansion into
 * multiple sets, or any non-literal path component), false for plain
 * literal paths and falsy patterns.
 * Fix: removed stray VCS-timestamp lines that had corrupted the body.
 */
glob.hasMagic = function (pattern, options_) {
  // Work on a copy so the caller's options are not mutated.
  var options = extend({}, options_)
  options.noprocess = true

  var g = new Glob(pattern, options)
  var set = g.minimatch.set

  if (!pattern)
    return false

  // Brace expansion produced multiple pattern sets: definitely magic.
  if (set.length > 1)
    return true

  for (var j = 0; j < set[0].length; j++) {
    if (typeof set[0][j] !== 'string')
      return true
  }

  return false
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
glob.Glob = Glob
inherits(Glob, EE)

/**
 * EventEmitter-based async glob walker. Emits 'match' per result,
 * 'end' with all matches, 'error', 'abort', 'pause' and 'resume'.
 * Fixes: removed stray VCS-timestamp lines that had corrupted the body,
 * and corrected the `_didRealPath` casing typo so the ctor initializes
 * the same `_didRealpath` flag the prototype methods read (behavior is
 * unchanged: the previously-unset flag was already falsy).
 */
function Glob (pattern, options, cb) {
  if (typeof options === 'function') {
    cb = options
    options = null
  }

  if (options && options.sync) {
    if (cb)
      throw new TypeError('callback provided to sync glob')
    return new GlobSync(pattern, options)
  }

  if (!(this instanceof Glob))
    return new Glob(pattern, options, cb)

  setopts(this, pattern, options)
  this._didRealpath = false

  // process each pattern in the minimatch set
  var n = this.minimatch.set.length

  // The matches are stored as {<filename>: true,...} so that
  // duplicates are automagically pruned.
  // Later, we do an Object.keys() on these.
  // Keep them as a list so we can fill in when nonull is set.
  this.matches = new Array(n)

  if (typeof cb === 'function') {
    cb = once(cb)
    this.on('error', cb)
    this.on('end', function (matches) {
      cb(null, matches)
    })
  }

  var self = this
  this._processing = 0

  this._emitQueue = []
  this._processQueue = []
  this.paused = false

  if (this.noprocess)
    return this

  if (n === 0)
    return done()

  // `sync` guards against _finish running before the constructor returns
  // when every _process call completes synchronously.
  var sync = true
  for (var i = 0; i < n; i++) {
    this._process(this.minimatch.set[i], i, false, done)
  }
  sync = false

  function done () {
    --self._processing
    if (self._processing <= 0) {
      if (sync) {
        process.nextTick(function () {
          self._finish()
        })
      } else {
        self._finish()
      }
    }
  }
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Finalizes the walk: optionally resolves realpaths first, then lets
// common.finish assemble `this.found` and emits 'end'.
// Fix: removed stray VCS-timestamp lines that had corrupted the body.
Glob.prototype._finish = function () {
  assert(this instanceof Glob)
  if (this.aborted)
    return

  if (this.realpath && !this._didRealpath)
    return this._realpath()

  common.finish(this)
  this.emit('end', this.found)
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Resolves every match set through fs.realpath exactly once, then
// re-enters _finish when all sets have been processed.
// Fix: removed stray VCS-timestamp lines that had corrupted the body.
Glob.prototype._realpath = function () {
  if (this._didRealpath)
    return

  this._didRealpath = true

  var n = this.matches.length
  if (n === 0)
    return this._finish()

  var self = this
  for (var i = 0; i < this.matches.length; i++)
    this._realpathSet(i, next)

  function next () {
    if (--n === 0)
      self._finish()
  }
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Replaces the match set at `index` with a set keyed by realpath-resolved
// names, calling `cb` once every entry has been resolved.
Glob.prototype._realpathSet = function (index, cb) {
  var matchset = this.matches[index]
  if (!matchset)
    return cb()

  var found = Object.keys(matchset)
  var self = this
  var n = found.length

  if (n === 0)
    return cb()

  var set = this.matches[index] = Object.create(null)
  found.forEach(function (p, i) {
    // If there's a problem with the stat, then it means that
    // one or more of the links in the realpath couldn't be
    // resolved. just return the abs value in that case.
    p = self._makeAbs(p)
    rp.realpath(p, self.realpathCache, function (er, real) {
      if (!er)
        set[real] = true
      else if (er.syscall === 'stat')
        set[p] = true
      else
        self.emit('error', er) // srsly wtf right here

      if (--n === 0) {
        self.matches[index] = set
        cb()
      }
    })
  })
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Delegates trailing-slash marking of directory results to common.mark.
Glob.prototype._mark = function (p) {
  return common.mark(this, p)
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Delegates absolute-path construction to common.makeAbs.
Glob.prototype._makeAbs = function (f) {
  return common.makeAbs(this, f)
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Stops the walk: sets the aborted flag (checked throughout) and
// notifies listeners.
Glob.prototype.abort = function () {
  this.aborted = true
  this.emit('abort')
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Pauses the walk; queued emits/processing resume via resume().
// Fix: removed a stray VCS-timestamp line that had corrupted the body.
Glob.prototype.pause = function () {
  if (!this.paused) {
    this.paused = true
    this.emit('pause')
  }
}
// Resumes a paused walk: flushes the queued match emissions first, then
// the queued _process calls (decrementing _processing, which _process
// will re-increment).
// Fix: removed stray VCS-timestamp lines that had corrupted the body.
Glob.prototype.resume = function () {
  if (this.paused) {
    this.emit('resume')
    this.paused = false
    if (this._emitQueue.length) {
      var eq = this._emitQueue.slice(0)
      this._emitQueue.length = 0
      for (var i = 0; i < eq.length; i++) {
        var e = eq[i]
        this._emitMatch(e[0], e[1])
      }
    }
    if (this._processQueue.length) {
      var pq = this._processQueue.slice(0)
      this._processQueue.length = 0
      for (var i = 0; i < pq.length; i++) {
        var p = pq[i]
        this._processing--
        this._process(p[0], p[1], p[2], p[3])
      }
    }
  }
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Core dispatcher: peels the literal string prefix off a minimatch
// pattern set, then routes to _processSimple (all-literal),
// _processGlobStar (** next) or _processReaddir (other magic next).
// Fix: removed stray VCS-timestamp lines that had corrupted the body.
Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
  assert(this instanceof Glob)
  assert(typeof cb === 'function')

  if (this.aborted)
    return

  this._processing++
  if (this.paused) {
    this._processQueue.push([pattern, index, inGlobStar, cb])
    return
  }

  //console.error('PROCESS %d', this._processing, pattern)

  // Get the first [n] parts of pattern that are all strings.
  var n = 0
  while (typeof pattern[n] === 'string') {
    n++
  }
  // now n is the index of the first one that is *not* a string.

  // see if there's anything else
  var prefix
  switch (n) {
    // if not, then this is rather simple
    case pattern.length:
      this._processSimple(pattern.join('/'), index, cb)
      return

    case 0:
      // pattern *starts* with some non-trivial item.
      // going to readdir(cwd), but not include the prefix in matches.
      prefix = null
      break

    default:
      // pattern has some string bits in the front.
      // whatever it starts with, whether that's 'absolute' like /foo/bar,
      // or 'relative' like '../baz'
      prefix = pattern.slice(0, n).join('/')
      break
  }

  var remain = pattern.slice(n)

  // get the list of entries.
  var read
  if (prefix === null)
    read = '.'
  else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
    if (!prefix || !isAbsolute(prefix))
      prefix = '/' + prefix
    read = prefix
  } else
    read = prefix

  var abs = this._makeAbs(read)

  //if ignored, skip _processing
  if (childrenIgnored(this, read))
    return cb()

  var isGlobStar = remain[0] === minimatch.GLOBSTAR
  if (isGlobStar)
    this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
  else
    this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Reads the directory at `abs`, then hands the entry list to
// _processReaddir2 for matching against the next pattern component.
Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
  var self = this
  this._readdir(abs, inGlobStar, function (er, entries) {
    return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
  })
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Matches directory entries against the next pattern component.
// Terminal components are emitted directly; otherwise each match is
// prepended to the remaining pattern and re-processed.
// Fixes: removed stray VCS-timestamp lines that had corrupted the body;
// dropped the unused local `newPattern`.
Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
  // if the abs isn't a dir, then nothing can match!
  if (!entries)
    return cb()

  // It will only match dot entries if it starts with a dot, or if
  // dot is set.  Stuff like @(.foo|.bar) isn't allowed.
  var pn = remain[0]
  var negate = !!this.minimatch.negate
  var rawGlob = pn._glob
  var dotOk = this.dot || rawGlob.charAt(0) === '.'

  var matchedEntries = []
  for (var i = 0; i < entries.length; i++) {
    var e = entries[i]
    if (e.charAt(0) !== '.' || dotOk) {
      var m
      if (negate && !prefix) {
        m = !e.match(pn)
      } else {
        m = e.match(pn)
      }
      if (m)
        matchedEntries.push(e)
    }
  }

  //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)

  var len = matchedEntries.length
  // If there are no matched entries, then nothing matches.
  if (len === 0)
    return cb()

  // if this is the last remaining pattern bit, then no need for
  // an additional stat *unless* the user has specified mark or
  // stat explicitly.  We know they exist, since readdir returned
  // them.
  if (remain.length === 1 && !this.mark && !this.stat) {
    if (!this.matches[index])
      this.matches[index] = Object.create(null)

    for (var i = 0; i < len; i++) {
      var e = matchedEntries[i]
      if (prefix) {
        if (prefix !== '/')
          e = prefix + '/' + e
        else
          e = prefix + e
      }

      if (e.charAt(0) === '/' && !this.nomount) {
        e = path.join(this.root, e)
      }
      this._emitMatch(index, e)
    }
    // This was the last one, and no stats were needed
    return cb()
  }

  // now test all matched entries as stand-ins for that part
  // of the pattern.
  remain.shift()
  for (var i = 0; i < len; i++) {
    var e = matchedEntries[i]
    if (prefix) {
      if (prefix !== '/')
        e = prefix + '/' + e
      else
        e = prefix + e
    }
    this._process([e].concat(remain), index, inGlobStar, cb)
  }
  cb()
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Records a match for pattern set `index` and emits 'match' (plus
// 'stat' if a stat is cached), honoring ignore rules, pause queueing,
// mark/absolute transforms, dedup, and the nodir filter.
// Fix: removed stray VCS-timestamp lines that had corrupted the body.
Glob.prototype._emitMatch = function (index, e) {
  if (this.aborted)
    return

  if (isIgnored(this, e))
    return

  if (this.paused) {
    this._emitQueue.push([index, e])
    return
  }

  var abs = isAbsolute(e) ? e : this._makeAbs(e)

  if (this.mark)
    e = this._mark(e)

  if (this.absolute)
    e = abs

  // Already recorded for this pattern set: dedupe.
  if (this.matches[index][e])
    return

  if (this.nodir) {
    var c = this.cache[abs]
    if (c === 'DIR' || Array.isArray(c))
      return
  }

  this.matches[index][e] = true

  var st = this.statCache[abs]
  if (st)
    this.emit('stat', e, st)

  this.emit('match', e)
}
2020-09-02 10:07:11 +02:00
// readdir variant used while inside a ** component: lstats first so
// symlinked directories are not descended into (unless options.follow),
// caching symlink-ness and skipping readdir for plain files.
// Fix: removed stray VCS-timestamp lines that had corrupted the body.
Glob.prototype._readdirInGlobStar = function (abs, cb) {
  if (this.aborted)
    return

  // follow all symlinked directories forever
  // just proceed as if this is a non-globstar situation
  if (this.follow)
    return this._readdir(abs, false, cb)

  var lstatkey = 'lstat\0' + abs
  var self = this
  // inflight dedupes concurrent lstats of the same path.
  var lstatcb = inflight(lstatkey, lstatcb_)

  if (lstatcb)
    fs.lstat(abs, lstatcb)

  function lstatcb_ (er, lstat) {
    if (er && er.code === 'ENOENT')
      return cb()

    var isSym = lstat && lstat.isSymbolicLink()
    self.symlinks[abs] = isSym

    // If it's not a symlink or a dir, then it's definitely a regular file.
    // don't bother doing a readdir in that case.
    if (!isSym && lstat && !lstat.isDirectory()) {
      self.cache[abs] = 'FILE'
      cb()
    } else
      self._readdir(abs, false, cb)
  }
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Cached, inflight-deduped directory read. Serves 'FALSE'/'FILE'/entry
// answers from this.cache when present; defers to _readdirInGlobStar
// when entering an unexplored path during a ** walk.
Glob.prototype._readdir = function (abs, inGlobStar, cb) {
  if (this.aborted)
    return

  cb = inflight('readdir\0' + abs + '\0' + inGlobStar, cb)
  if (!cb)
    return

  //console.error('RD %j %j', +inGlobStar, abs)
  if (inGlobStar && !ownProp(this.symlinks, abs))
    return this._readdirInGlobStar(abs, cb)

  if (ownProp(this.cache, abs)) {
    var c = this.cache[abs]
    if (!c || c === 'FILE')
      return cb()

    if (Array.isArray(c))
      return cb(null, c)
  }

  var self = this
  fs.readdir(abs, readdirCb(this, abs, cb))
}
// Builds the fs.readdir callback for `abs`: routes an error to
// self._readdirError and a successful entry list to self._readdirEntries.
// Fix: removed stray VCS-timestamp lines that had corrupted the body.
function readdirCb (self, abs, cb) {
  return function (er, entries) {
    if (er)
      self._readdirError(abs, er, cb)
    else
      self._readdirEntries(abs, entries, cb)
  }
}
2020-09-02 10:07:11 +02:00
// Caches a successful readdir: each child is marked existent (unless a
// later stat is required by mark/stat), and the directory itself is
// cached as its entry list.
Glob.prototype._readdirEntries = function (abs, entries, cb) {
  if (this.aborted)
    return

  // if we haven't asked to stat everything, then just
  // assume that everything in there exists, so we can avoid
  // having to stat it a second time.
  if (!this.mark && !this.stat) {
    for (var i = 0; i < entries.length; i++) {
      var e = entries[i]
      if (abs === '/')
        e = abs + e
      else
        e = abs + '/' + e
      this.cache[e] = true
    }
  }

  this.cache[abs] = entries
  return cb(null, entries)
}
// Classifies readdir failures: ENOTDIR/ENOTSUP cache the path as a file
// (and abort if it was the cwd), "not found"-ish codes cache false, and
// anything else is a real failure (error in strict mode, logged unless
// silent).
// Fix: removed stray VCS-timestamp lines that had corrupted the body.
Glob.prototype._readdirError = function (f, er, cb) {
  if (this.aborted)
    return

  // handle errors, and cache the information
  switch (er.code) {
    case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
    case 'ENOTDIR': // totally normal. means it *does* exist.
      var abs = this._makeAbs(f)
      this.cache[abs] = 'FILE'
      if (abs === this.cwdAbs) {
        var error = new Error(er.code + ' invalid cwd ' + this.cwd)
        error.path = this.cwd
        error.code = er.code
        this.emit('error', error)
        this.abort()
      }
      break

    case 'ENOENT': // not terribly unusual
    case 'ELOOP':
    case 'ENAMETOOLONG':
    case 'UNKNOWN':
      this.cache[this._makeAbs(f)] = false
      break

    default: // some unusual error.  Treat as failure.
      this.cache[this._makeAbs(f)] = false
      if (this.strict) {
        this.emit('error', er)
        // If the error is handled, then we abort
        // if not, we threw out of here
        this.abort()
      }
      if (!this.silent)
        console.error('glob error', er)
      break
  }

  return cb()
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Reads the directory at `abs`, then hands the entries to
// _processGlobStar2 for ** expansion.
// Fix: removed a stray VCS-timestamp line that had corrupted the body.
Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
  var self = this
  this._readdir(abs, inGlobStar, function (er, entries) {
    self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
  })
}
2020-09-02 10:07:11 +02:00
// Expands a ** component: first tries the pattern with the globstar
// pruned out, then for every non-dot entry re-processes both with the
// entry replacing the globstar and with the entry prepended before it.
// Fix: removed stray VCS-timestamp lines that had corrupted the body.
Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
  //console.error('pgs2', prefix, remain[0], entries)

  // no entries means not a dir, so it can never have matches
  // foo.txt/** doesn't match foo.txt
  if (!entries)
    return cb()

  // test without the globstar, and with every child both below
  // and replacing the globstar.
  var remainWithoutGlobStar = remain.slice(1)
  var gspref = prefix ? [prefix] : []
  var noGlobStar = gspref.concat(remainWithoutGlobStar)

  // the noGlobStar pattern exits the inGlobStar state
  this._process(noGlobStar, index, false, cb)

  var isSym = this.symlinks[abs]
  var len = entries.length

  // If it's a symlink, and we're in a globstar, then stop
  if (isSym && inGlobStar)
    return cb()

  for (var i = 0; i < len; i++) {
    var e = entries[i]
    if (e.charAt(0) === '.' && !this.dot)
      continue

    // these two cases enter the inGlobStar state
    var instead = gspref.concat(entries[i], remainWithoutGlobStar)
    this._process(instead, index, true, cb)

    var below = gspref.concat(entries[i], remain)
    this._process(below, index, true, cb)
  }

  cb()
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Handles an all-literal pattern: stat it, then let _processSimple2
// decide whether to emit it as a match.
Glob.prototype._processSimple = function (prefix, index, cb) {
  // XXX review this.  Shouldn't it be doing the mounting etc
  // before doing stat?  kinda weird?
  var self = this
  this._stat(prefix, function (er, exists) {
    self._processSimple2(prefix, index, er, exists, cb)
  })
}
// Completes an all-literal pattern: records the (possibly root-mounted,
// slash-normalized) path as a match when the stat said it exists.
// Fix: removed stray VCS-timestamp lines that had corrupted the body.
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
  //console.error('ps2', prefix, exists)

  if (!this.matches[index])
    this.matches[index] = Object.create(null)

  // If it doesn't exist, then just mark the lack of results
  if (!exists)
    return cb()

  if (prefix && isAbsolute(prefix) && !this.nomount) {
    var trail = /[\/\\]$/.test(prefix)
    if (prefix.charAt(0) === '/') {
      prefix = path.join(this.root, prefix)
    } else {
      prefix = path.resolve(this.root, prefix)
      if (trail)
        prefix += '/'
    }
  }

  if (process.platform === 'win32')
    prefix = prefix.replace(/\\/g, '/')

  // Mark this as a match
  this._emitMatch(index, prefix)
  cb()
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Returns either 'DIR', 'FILE', or false
// Answers from this.cache / this.statCache when possible; otherwise
// lstats (inflight-deduped) and follows symlinks via fs.stat, falling
// back to the link itself when the target is missing.
// Fixes: removed stray VCS-timestamp lines that had corrupted the body;
// dropped the unused local `exists`.
Glob.prototype._stat = function (f, cb) {
  var abs = this._makeAbs(f)
  var needDir = f.slice(-1) === '/'

  if (f.length > this.maxLength)
    return cb()

  if (!this.stat && ownProp(this.cache, abs)) {
    var c = this.cache[abs]

    if (Array.isArray(c))
      c = 'DIR'

    // It exists, but maybe not how we need it
    if (!needDir || c === 'DIR')
      return cb(null, c)

    if (needDir && c === 'FILE')
      return cb()

    // otherwise we have to stat, because maybe c=true
    // if we know it exists, but not what it is.
  }

  var stat = this.statCache[abs]
  if (stat !== undefined) {
    if (stat === false)
      return cb(null, stat)
    else {
      var type = stat.isDirectory() ? 'DIR' : 'FILE'
      if (needDir && type === 'FILE')
        return cb()
      else
        return cb(null, type, stat)
    }
  }

  var self = this
  var statcb = inflight('stat\0' + abs, lstatcb_)
  if (statcb)
    fs.lstat(abs, statcb)

  function lstatcb_ (er, lstat) {
    if (lstat && lstat.isSymbolicLink()) {
      // If it's a symlink, then treat it as the target, unless
      // the target does not exist, then treat it as a file.
      return fs.stat(abs, function (er, stat) {
        if (er)
          self._stat2(f, abs, null, lstat, cb)
        else
          self._stat2(f, abs, er, stat, cb)
      })
    } else {
      self._stat2(f, abs, er, lstat, cb)
    }
  }
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Finishes a stat: caches the result and reports 'DIR'/'FILE'/true,
// treating ENOENT/ENOTDIR as nonexistence and rejecting files when a
// trailing slash demanded a directory.
// Fix: removed stray VCS-timestamp lines that had corrupted the body.
Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
  if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
    this.statCache[abs] = false
    return cb()
  }

  var needDir = f.slice(-1) === '/'
  this.statCache[abs] = stat

  if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
    return cb(null, false, stat)

  var c = true
  if (stat)
    c = stat.isDirectory() ? 'DIR' : 'FILE'
  this.cache[abs] = this.cache[abs] || c

  if (needDir && c === 'FILE')
    return cb()

  return cb(null, c, stat)
}
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 959 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
const assert = _ _webpack _require _ _ ( 357 )
const path = _ _webpack _require _ _ ( 622 )
const fs = _ _webpack _require _ _ ( 747 )
let glob = undefined
try {
glob = _ _webpack _require _ _ ( 957 )
} catch ( _err ) {
// treat glob as optional.
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Options used when globbing targets unless the caller supplies its own.
const defaultGlobOpts = {
  nosort: true,
  silent: true
}

// for EMFILE handling
let timeout = 0

const isWindows = (process.platform === "win32")
// Normalizes rimraf options in place: fills each fs method (and its
// *Sync twin) from the fs module when absent, applies retry defaults,
// and validates the glob configuration.
// Fix: removed stray VCS-timestamp lines that had corrupted the body.
const defaults = options => {
  const methods = [
    'unlink',
    'chmod',
    'stat',
    'lstat',
    'rmdir',
    'readdir'
  ]
  methods.forEach(m => {
    options[m] = options[m] || fs[m]
    m = m + 'Sync'
    options[m] = options[m] || fs[m]
  })

  options.maxBusyTries = options.maxBusyTries || 3
  options.emfileWait = options.emfileWait || 1000
  if (options.glob === false) {
    options.disableGlob = true
  }
  if (options.disableGlob !== true && glob === undefined) {
    throw Error('glob dependency not found, set `options.disableGlob = true` if intentional')
  }
  options.disableGlob = options.disableGlob || false
  options.glob = options.glob || defaultGlobOpts
}
2020-08-21 13:39:42 +02:00
2020-09-02 10:07:11 +02:00
// rm -rf for Node: expands `p` through glob (unless disabled or the path
// has no magic / already exists verbatim), removes every result via
// rimraf_, retrying on EBUSY/ENOTEMPTY/EPERM with linear backoff and on
// EMFILE with an increasing module-level timeout. ENOENT is success.
// Fix: removed stray VCS-timestamp lines that had corrupted the body.
const rimraf = (p, options, cb) => {
  if (typeof options === 'function') {
    cb = options
    options = {}
  }

  assert(p, 'rimraf: missing path')
  assert.equal(typeof p, 'string', 'rimraf: path should be a string')
  assert.equal(typeof cb, 'function', 'rimraf: callback function required')
  assert(options, 'rimraf: invalid options argument provided')
  assert.equal(typeof options, 'object', 'rimraf: options should be object')

  defaults(options)

  let busyTries = 0
  let errState = null
  let n = 0

  // Completion counter shared by all globbed targets; reports the
  // first error seen once every target has finished.
  const next = (er) => {
    errState = errState || er
    if (--n === 0)
      cb(errState)
  }

  const afterGlob = (er, results) => {
    if (er)
      return cb(er)

    n = results.length
    if (n === 0)
      return cb()

    results.forEach(p => {
      const CB = (er) => {
        if (er) {
          if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") &&
              busyTries < options.maxBusyTries) {
            busyTries++
            // try again, with the same exact callback as this one.
            return setTimeout(() => rimraf_(p, options, CB), busyTries * 100)
          }

          // this one won't happen if graceful-fs is used.
          if (er.code === "EMFILE" && timeout < options.emfileWait) {
            return setTimeout(() => rimraf_(p, options, CB), timeout++)
          }

          // already gone
          if (er.code === "ENOENT") er = null
        }

        timeout = 0
        next(er)
      }
      rimraf_(p, options, CB)
    })
  }

  if (options.disableGlob || !glob.hasMagic(p))
    return afterGlob(null, [p])

  // A path that exists verbatim is removed as-is, even if it contains
  // glob magic characters; only missing paths are glob-expanded.
  options.lstat(p, (er, stat) => {
    if (!er)
      return afterGlob(null, [p])

    glob(p, options.glob, afterGlob)
  })
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// Two possible strategies.
// 1. Assume it's a file.  unlink it, then do the dir stuff on EPERM or EISDIR
// 2. Assume it's a directory.  readdir, then do the file stuff on ENOTDIR
//
// Both result in an extra syscall when you guess wrong.  However, there
// are likely far more normal files in the world than directories.  This
// is based on the assumption that a the average number of files per
// directory is >= 1.
//
// If anyone ever complains about this, then I guess the strategy could
// be made configurable somehow.  But until then, YAGNI.
const rimraf_ = (p, options, cb) => {
  assert(p)
  assert(options)
  assert(typeof cb === 'function')

  // sunos lets the root user unlink directories, which is... weird.
  // so we have to lstat here and make sure it's not a dir.
  options.lstat(p, (er, st) => {
    if (er && er.code === "ENOENT")
      return cb(null)

    // Windows can EPERM on stat.  Life is suffering.
    // BUGFIX: `return` was missing here, so after delegating to
    // fixWinEPERM this function fell through to the unlink below and
    // invoked `cb` twice for the same path.
    if (er && er.code === "EPERM" && isWindows)
      return fixWinEPERM(p, options, er, cb)

    if (st && st.isDirectory())
      return rmdir(p, options, er, cb)

    options.unlink(p, er => {
      if (er) {
        if (er.code === "ENOENT")
          return cb(null)
        if (er.code === "EPERM")
          return (isWindows)
            ? fixWinEPERM(p, options, er, cb)
            : rmdir(p, options, er, cb)
        if (er.code === "EISDIR")
          return rmdir(p, options, er, cb)
      }
      return cb(er)
    })
  })
}
// Windows EPERM recovery: loosen the mode bits, re-stat, then remove as
// a directory or file.  On any failure other than the path vanishing
// (ENOENT), the *original* error `er` is reported to `cb`.
const fixWinEPERM = (p, options, er, cb) => {
  assert(p)
  assert(options)
  assert(typeof cb === 'function')

  options.chmod(p, 0o666, er2 => {
    if (er2) {
      cb(er2.code === "ENOENT" ? null : er)
      return
    }
    options.stat(p, (er3, stats) => {
      if (er3) {
        cb(er3.code === "ENOENT" ? null : er)
      } else if (stats.isDirectory()) {
        rmdir(p, options, er, cb)
      } else {
        options.unlink(p, cb)
      }
    })
  })
}
// Synchronous variant of fixWinEPERM: chmod, re-stat, then remove as a
// directory or file.  When chmod/stat fail for any reason other than the
// path having disappeared (ENOENT), the *original* error is thrown.
const fixWinEPERMSync = (p, options, er) => {
  assert(p)
  assert(options)

  try {
    options.chmodSync(p, 0o666)
  } catch (er2) {
    if (er2.code === "ENOENT")
      return
    else
      throw er
  }

  let stats
  try {
    stats = options.statSync(p)
  } catch (er3) {
    if (er3.code === "ENOENT")
      return
    else
      throw er
  }

  if (stats.isDirectory())
    rmdirSync(p, options, er)
  else
    options.unlinkSync(p)
}
// Remove a directory.  Non-empty (ENOTEMPTY/EEXIST/EPERM) directories are
// emptied via rmkids first; ENOTDIR means the "it's a directory" guess was
// wrong, so the caller's original error is surfaced instead.
const rmdir = (p, options, originalEr, cb) => {
  assert(p)
  assert(options)
  assert(typeof cb === 'function')

  // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
  // if we guessed wrong, and it's not a directory, then
  // raise the original error.
  options.rmdir(p, er => {
    if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM"))
      rmkids(p, options, cb)
    else if (er && er.code === "ENOTDIR")
      cb(originalEr)
    else
      cb(er)
  })
}
// Recursively delete every entry inside directory `p` (via rimraf), then
// remove the now-empty directory itself.  The first child error wins and
// short-circuits the rest.
const rmkids = (p, options, cb) => {
  assert(p)
  assert(options)
  assert(typeof cb === 'function')

  options.readdir(p, (er, files) => {
    if (er)
      return cb(er)

    let n = files.length
    if (n === 0)
      return options.rmdir(p, cb)

    let errState
    files.forEach(f => {
      rimraf(path.join(p, f), options, er => {
        if (errState)
          return
        if (er)
          return cb(errState = er)
        if (--n === 0)
          options.rmdir(p, cb)
      })
    })
  })
}
// this looks simpler, and is strictly *faster*, but will
// tie up the JavaScript thread and fail on excessively
// deep directory trees.
const rimrafSync = (p, options) => {
  options = options || {}
  defaults(options)

  assert(p, 'rimraf: missing path')
  assert.equal(typeof p, 'string', 'rimraf: path should be a string')
  assert(options, 'rimraf: missing options')
  assert.equal(typeof options, 'object', 'rimraf: options should be object')

  let results
  if (options.disableGlob || !glob.hasMagic(p)) {
    results = [p]
  } else {
    // Existing literal path beats glob expansion.
    try {
      options.lstatSync(p)
      results = [p]
    } catch (er) {
      results = glob.sync(p, options.glob)
    }
  }

  if (!results.length)
    return

  for (let i = 0; i < results.length; i++) {
    const p = results[i]

    let st
    try {
      st = options.lstatSync(p)
    } catch (er) {
      // NOTE(review): matches upstream — a vanished entry aborts the whole
      // loop (`return`) rather than continuing with remaining results.
      if (er.code === "ENOENT")
        return

      // Windows can EPERM on stat.  Life is suffering.
      // NOTE(review): no return/continue here (matches upstream), so the
      // unlink attempt below still runs after fixWinEPERMSync.
      if (er.code === "EPERM" && isWindows)
        fixWinEPERMSync(p, options, er)
    }

    try {
      // sunos lets the root user unlink directories, which is... weird.
      if (st && st.isDirectory())
        rmdirSync(p, options, null)
      else
        options.unlinkSync(p)
    } catch (er) {
      if (er.code === "ENOENT")
        return
      if (er.code === "EPERM")
        return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)
      if (er.code !== "EISDIR")
        throw er

      rmdirSync(p, options, er)
    }
  }
}
// Synchronous rmdir with the same fallback logic as rmdir():
// ENOENT -> already gone; ENOTDIR -> rethrow the caller's original error;
// non-empty (ENOTEMPTY/EEXIST/EPERM) -> clear the children first.
const rmdirSync = (p, options, originalEr) => {
  assert(p)
  assert(options)

  try {
    options.rmdirSync(p)
  } catch (er) {
    if (er.code === "ENOENT")
      return
    if (er.code === "ENOTDIR")
      throw originalEr
    if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")
      rmkidsSync(p, options)
  }
}
// Synchronously delete everything inside `p`, then retry the rmdir.
const rmkidsSync = (p, options) => {
  assert(p)
  assert(options)

  options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options))

  // We only end up here once we got ENOTEMPTY at least once, and
  // at this point, we are guaranteed to have removed all the kids.
  // So, we know that it won't be ENOENT or ENOTDIR or anything else.
  // try really hard to delete stuff on windows, because it has a
  // PROFOUNDLY annoying habit of not closing handles promptly when
  // files are deleted, resulting in spurious ENOTEMPTY errors.
  const retries = isWindows ? 100 : 1
  let i = 0
  do {
    let threw = true
    try {
      const ret = options.rmdirSync(p, options)
      threw = false
      return ret
    } finally {
      // `continue` inside finally swallows the pending exception and
      // retries; once retries are exhausted the last error propagates.
      if (++i < retries && threw)
        continue
    }
  } while (true)
}
module . exports = rimraf
rimraf . sync = rimrafSync
/***/ } ) ,
/***/ 962 :
/***/ ( function ( _ _unusedmodule , exports , _ _webpack _require _ _ ) {
"use strict" ;
var _ _awaiter = ( this && this . _ _awaiter ) || function ( thisArg , _arguments , P , generator ) {
function adopt ( value ) { return value instanceof P ? value : new P ( function ( resolve ) { resolve ( value ) ; } ) ; }
return new ( P || ( P = Promise ) ) ( function ( resolve , reject ) {
function fulfilled ( value ) { try { step ( generator . next ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function rejected ( value ) { try { step ( generator [ "throw" ] ( value ) ) ; } catch ( e ) { reject ( e ) ; } }
function step ( result ) { result . done ? resolve ( result . value ) : adopt ( result . value ) . then ( fulfilled , rejected ) ; }
step ( ( generator = generator . apply ( thisArg , _arguments || [ ] ) ) . next ( ) ) ;
} ) ;
} ;
var _a ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
const assert _1 = _ _webpack _require _ _ ( 357 ) ;
const fs = _ _webpack _require _ _ ( 747 ) ;
const path = _ _webpack _require _ _ ( 622 ) ;
_a = fs . promises , exports . chmod = _a . chmod , exports . copyFile = _a . copyFile , exports . lstat = _a . lstat , exports . mkdir = _a . mkdir , exports . readdir = _a . readdir , exports . readlink = _a . readlink , exports . rename = _a . rename , exports . rmdir = _a . rmdir , exports . stat = _a . stat , exports . symlink = _a . symlink , exports . unlink = _a . unlink ;
exports . IS _WINDOWS = process . platform === 'win32' ;
// Resolves true if the path exists; false only on ENOENT.  Other stat
// failures (e.g. EACCES) are re-thrown to the caller.
function exists(fsPath) {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            yield exports.stat(fsPath);
            return true;
        }
        catch (err) {
            if (err.code !== 'ENOENT') {
                throw err;
            }
            return false;
        }
    });
}
exports.exists = exists;
// True if `fsPath` is a directory.  By default symlinks are NOT followed
// (lstat); pass useStat=true to follow them (stat).
function isDirectory(fsPath, useStat = false) {
    return __awaiter(this, void 0, void 0, function* () {
        const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath);
        return stats.isDirectory();
    });
}
exports.isDirectory = isDirectory;
/**
 * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like:
 * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases).
 */
function isRooted(p) {
    p = normalizeSeparators(p);
    if (!p) {
        throw new Error('isRooted() parameter "p" cannot be empty');
    }
    if (exports.IS_WINDOWS) {
        // e.g. \ or \hello or \\hello, or a drive root like C: or C:\hello
        return p.startsWith('\\') || /^[A-Z]:/i.test(p);
    }
    return p.startsWith('/');
}
exports.isRooted = isRooted;
/**
 * Recursively create a directory at `fsPath`.
 *
 * This implementation is optimistic, meaning it attempts to create the full
 * path first, and backs up the path stack from there.
 *
 * @param fsPath The path to create
 * @param maxDepth The maximum recursion depth
 * @param depth The current recursion depth
 */
function mkdirP(fsPath, maxDepth = 1000, depth = 1) {
    return __awaiter(this, void 0, void 0, function* () {
        assert_1.ok(fsPath, 'a path argument must be provided');
        fsPath = path.resolve(fsPath);
        // Depth guard: on pathological inputs just attempt the mkdir and let
        // its error propagate.
        if (depth >= maxDepth)
            return exports.mkdir(fsPath);
        try {
            yield exports.mkdir(fsPath);
            return;
        }
        catch (err) {
            switch (err.code) {
                case 'ENOENT': {
                    // Parent missing: create it first, then retry this level.
                    yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1);
                    yield exports.mkdir(fsPath);
                    return;
                }
                default: {
                    // EEXIST and friends are fine only if the path is
                    // already a directory; otherwise surface the mkdir error.
                    let stats;
                    try {
                        stats = yield exports.stat(fsPath);
                    }
                    catch (err2) {
                        throw err;
                    }
                    if (!stats.isDirectory())
                        throw err;
                }
            }
        }
    });
}
exports.mkdirP = mkdirP;
/**
 * Best effort attempt to determine whether a file exists and is executable.
 * @param filePath    file path to check
 * @param extensions  additional file extensions to try
 * @return if file exists and is executable, returns the file path. otherwise empty string.
 */
function tryGetExecutablePath(filePath, extensions) {
    return __awaiter(this, void 0, void 0, function* () {
        let stats = undefined;
        try {
            // test file exists
            stats = yield exports.stat(filePath);
        }
        catch (err) {
            if (err.code !== 'ENOENT') {
                // eslint-disable-next-line no-console
                console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
            }
        }
        if (stats && stats.isFile()) {
            if (exports.IS_WINDOWS) {
                // on Windows, test for valid extension
                const upperExt = path.extname(filePath).toUpperCase();
                if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) {
                    return filePath;
                }
            }
            else {
                if (isUnixExecutable(stats)) {
                    return filePath;
                }
            }
        }
        // try each extension
        const originalFilePath = filePath;
        for (const extension of extensions) {
            filePath = originalFilePath + extension;
            stats = undefined;
            try {
                stats = yield exports.stat(filePath);
            }
            catch (err) {
                if (err.code !== 'ENOENT') {
                    // eslint-disable-next-line no-console
                    console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`);
                }
            }
            if (stats && stats.isFile()) {
                if (exports.IS_WINDOWS) {
                    // preserve the case of the actual file (since an extension was appended)
                    try {
                        const directory = path.dirname(filePath);
                        const upperName = path.basename(filePath).toUpperCase();
                        for (const actualName of yield exports.readdir(directory)) {
                            if (upperName === actualName.toUpperCase()) {
                                filePath = path.join(directory, actualName);
                                break;
                            }
                        }
                    }
                    catch (err) {
                        // eslint-disable-next-line no-console
                        console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`);
                    }
                    return filePath;
                }
                else {
                    if (isUnixExecutable(stats)) {
                        return filePath;
                    }
                }
            }
        }
        return '';
    });
}
exports.tryGetExecutablePath = tryGetExecutablePath;
// Collapse runs of separators; on Windows also convert '/' to '\'.
function normalizeSeparators(p) {
    p = p || '';
    if (exports.IS_WINDOWS) {
        // convert slashes on Windows
        p = p.replace(/\//g, '\\');
        // remove redundant slashes
        return p.replace(/\\\\+/g, '\\');
    }
    // remove redundant slashes
    return p.replace(/\/\/+/g, '/');
}
// on Mac/Linux, test the execute bit
//     R   W  X  R  W X R W X
//   256 128 64 32 16 8 4 2 1
function isUnixExecutable(stats) {
    const mode = stats.mode;
    // executable by anyone?
    if ((mode & 1) > 0) return true;
    // executable by our group?
    if ((mode & 8) > 0 && stats.gid === process.getgid()) return true;
    // executable by us?
    return (mode & 64) > 0 && stats.uid === process.getuid();
}
//# sourceMappingURL=io-util.js.map
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ } ) ,
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
/***/ 973 :
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
module . exports = minimatch
minimatch . Minimatch = Minimatch
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
var path = { sep : '/' }
try {
path = _ _webpack _require _ _ ( 622 )
} catch ( er ) { }
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
var GLOBSTAR = minimatch . GLOBSTAR = Minimatch . GLOBSTAR = { }
var expand = _ _webpack _require _ _ ( 717 )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// How each extglob "type" opens and closes once translated to RegExp.
var plTypes = {
  '!': { open: '(?:(?!(?:', close: '))[^/]*?)' },
  '?': { open: '(?:', close: ')?' },
  '+': { open: '(?:', close: ')+' },
  '*': { open: '(?:', close: ')*' },
  '@': { open: '(?:', close: ')' }
}

// any single thing other than /
// don't need to escape / when using new RegExp()
var qmark = '[^/]'

// * => any number of characters
var star = qmark + '*?'

// ** when dots are allowed.  Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'

// not a ^ or / followed by a dot,
// followed by anything, any number of times.
var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'

// characters that need to be escaped in RegExp.
var reSpecials = charSet('().*{}+?[]^$\\!')

// "abc" -> { a:true, b:true, c:true }
function charSet(s) {
  var set = {}
  for (var i = 0; i < s.length; i++) {
    set[s.charAt(i)] = true
  }
  return set
}

// normalizes slashes.
var slashSplit = /\/+/
minimatch.filter = filter
// Build a predicate usable with Array#filter: keeps entries that match
// `pattern` under `options`.
function filter (pattern, options) {
  options = options || {}
  return function (p, i, list) {
    return minimatch(p, pattern, options)
  }
}
// Shallow-merge: copy `b` first, then `a` on top (keys from `a` win).
// Always returns a fresh object; either argument may be omitted.
function ext (a, b) {
  a = a || {}
  b = b || {}
  var t = {}
  for (var k in b) {
    if (Object.prototype.hasOwnProperty.call(b, k)) t[k] = b[k]
  }
  for (var j in a) {
    if (Object.prototype.hasOwnProperty.call(a, j)) t[j] = a[j]
  }
  return t
}
// Return a minimatch function with `def` pre-merged into every call's
// options.  With no defaults given, the original function is returned.
minimatch.defaults = function (def) {
  if (!def || !Object.keys(def).length) return minimatch

  var orig = minimatch

  var m = function minimatch (p, pattern, options) {
    return orig.minimatch(p, pattern, ext(def, options))
  }

  m.Minimatch = function Minimatch (pattern, options) {
    return new orig.Minimatch(pattern, ext(def, options))
  }

  return m
}
// Class-level analogue of minimatch.defaults().
Minimatch.defaults = function (def) {
  if (!def || !Object.keys(def).length) return Minimatch
  return minimatch.defaults(def).Minimatch
}
// Test path `p` against glob `pattern`.  Leading-'#' patterns are comments
// and match nothing (unless options.nocomment); a blank pattern matches
// only the empty string.
function minimatch (p, pattern, options) {
  if (typeof pattern !== 'string') {
    throw new TypeError('glob pattern string required')
  }

  if (!options) options = {}

  // shortcut: comments match nothing.
  if (!options.nocomment && pattern.charAt(0) === '#') {
    return false
  }

  // "" only matches ""
  if (pattern.trim() === '') return p === ''

  return new Minimatch(pattern, options).match(p)
}
// Compiled representation of a single glob pattern.  Callable with or
// without `new`.
function Minimatch (pattern, options) {
  if (!(this instanceof Minimatch)) {
    return new Minimatch(pattern, options)
  }

  if (typeof pattern !== 'string') {
    throw new TypeError('glob pattern string required')
  }

  if (!options) options = {}
  pattern = pattern.trim()

  // windows support: need to use /, not \
  if (path.sep !== '/') {
    pattern = pattern.split(path.sep).join('/')
  }

  this.options = options
  this.set = []
  this.pattern = pattern
  this.regexp = null
  this.negate = false
  this.comment = false
  this.empty = false

  // make the set of regexps etc.
  this.make()
}
// Debug hook; replaced with console.error when options.debug is set.
Minimatch.prototype.debug = function () {}

Minimatch.prototype.make = make
// Compile this.pattern into this.set: an array of path-portion matchers
// (regexps, literal strings, or the GLOBSTAR marker).  Idempotent.
function make () {
  // don't do it more than once.
  if (this._made) return

  var pattern = this.pattern
  var options = this.options

  // empty patterns and comments match nothing.
  if (!options.nocomment && pattern.charAt(0) === '#') {
    this.comment = true
    return
  }
  if (!pattern) {
    this.empty = true
    return
  }

  // step 1: figure out negation, etc.
  this.parseNegate()

  // step 2: expand braces
  var set = this.globSet = this.braceExpand()

  if (options.debug) this.debug = console.error

  this.debug(this.pattern, set)

  // step 3: now we have a set, so turn each one into a series of
  // path-portion matching patterns.
  // These will be regexps, except in the case of "**", which is
  // set to the GLOBSTAR object for globstar behavior,
  // and will not contain any / characters
  set = this.globParts = set.map(function (s) {
    return s.split(slashSplit)
  })

  this.debug(this.pattern, set)

  // glob --> regexps
  set = set.map(function (s, si, set) {
    return s.map(this.parse, this)
  }, this)

  this.debug(this.pattern, set)

  // filter out everything that didn't compile properly.
  set = set.filter(function (s) {
    return s.indexOf(false) === -1
  })

  this.debug(this.pattern, set)

  this.set = set
}
Minimatch.prototype.parseNegate = parseNegate
// Count leading '!' characters: an odd count negates the pattern.
// The '!' prefix is stripped from this.pattern.  No-op when
// options.nonegate is set.
function parseNegate () {
  var pattern = this.pattern
  var negate = false
  var options = this.options
  var negateOffset = 0

  if (options.nonegate) return

  var i = 0
  while (i < pattern.length && pattern.charAt(i) === '!') {
    negate = !negate
    negateOffset++
    i++
  }

  if (negateOffset) this.pattern = pattern.substr(negateOffset)
  this.negate = negate
}
// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
minimatch.braceExpand = function (pattern, options) {
  return braceExpand(pattern, options)
}

Minimatch.prototype.braceExpand = braceExpand

function braceExpand (pattern, options) {
  if (!options) {
    // When invoked as a method, inherit the instance's options.
    if (this instanceof Minimatch) {
      options = this.options
    } else {
      options = {}
    }
  }

  pattern = typeof pattern === 'undefined'
    ? this.pattern : pattern

  if (typeof pattern === 'undefined') {
    throw new TypeError('undefined pattern')
  }

  if (options.nobrace ||
    !pattern.match(/\{.*\}/)) {
    // shortcut. no need to expand.
    return [pattern]
  }

  return expand(pattern)
}
// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
// pattern, split on '/', and then turned into a regular expression.
// A regexp is made at the end which joins each array with an
// escaped /, and another full one which joins each regexp with |.
//
// Following the lead of Bash 4.1, note that "**" only has special meaning
// when it is the *only* thing in a path portion. Otherwise, any series
// of * is equivalent to a single *. Globstar behavior is enabled by
// default, and can be disabled by setting options.noglobstar.
Minimatch . prototype . parse = parse
var SUBPARSE = { }
function parse ( pattern , isSub ) {
if ( pattern . length > 1024 * 64 ) {
throw new TypeError ( 'pattern is too long' )
}
var options = this . options
// shortcuts
if ( ! options . noglobstar && pattern === '**' ) return GLOBSTAR
if ( pattern === '' ) return ''
var re = ''
var hasMagic = ! ! options . nocase
var escaping = false
// ? => one single character
var patternListStack = [ ]
var negativeLists = [ ]
var stateChar
var inClass = false
var reClassStart = - 1
var classStart = - 1
// . and .. never match anything that doesn't start with .,
// even when options.dot is set.
var patternStart = pattern . charAt ( 0 ) === '.' ? '' // anything
// not (start or / followed by . or .. followed by / or end)
: options . dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
: '(?!\\.)'
var self = this
function clearStateChar ( ) {
if ( stateChar ) {
// we had some state-tracking character
// that wasn't consumed by this pass.
switch ( stateChar ) {
case '*' :
re += star
hasMagic = true
break
case '?' :
re += qmark
hasMagic = true
break
default :
re += '\\' + stateChar
break
}
self . debug ( 'clearStateChar %j %j' , stateChar , re )
stateChar = false
}
}
for ( var i = 0 , len = pattern . length , c
; ( i < len ) && ( c = pattern . charAt ( i ) )
; i ++ ) {
this . debug ( '%s\t%s %s %j' , pattern , i , re , c )
// skip over any that are escaped.
if ( escaping && reSpecials [ c ] ) {
re += '\\' + c
escaping = false
continue
}
switch ( c ) {
case '/' :
// completely not allowed, even escaped.
// Should already be path-split by now.
return false
case '\\' :
clearStateChar ( )
escaping = true
continue
// the various stateChar values
// for the "extglob" stuff.
case '?' :
case '*' :
case '+' :
case '@' :
case '!' :
this . debug ( '%s\t%s %s %j <-- stateChar' , pattern , i , re , c )
// all of those are literals inside a class, except that
// the glob [!a] means [^a] in regexp
if ( inClass ) {
this . debug ( ' in class' )
if ( c === '!' && i === classStart + 1 ) c = '^'
re += c
continue
2020-08-17 18:35:15 +02:00
}
2020-09-02 10:07:11 +02:00
// if we already have a stateChar, then it means
// that there was something like ** or +? in there.
// Handle the stateChar, then proceed with this one.
self . debug ( 'call clearStateChar %j' , stateChar )
clearStateChar ( )
stateChar = c
// if extglob is disabled, then +(asdf|foo) isn't a thing.
// just clear the statechar *now*, rather than even diving into
// the patternList stuff.
if ( options . noext ) clearStateChar ( )
continue
case '(' :
if ( inClass ) {
re += '('
continue
2020-08-17 22:18:15 +02:00
}
2020-09-02 10:07:11 +02:00
if ( ! stateChar ) {
re += '\\('
continue
2020-08-17 22:18:15 +02:00
}
2020-09-02 10:07:11 +02:00
patternListStack . push ( {
type : stateChar ,
start : i - 1 ,
reStart : re . length ,
open : plTypes [ stateChar ] . open ,
close : plTypes [ stateChar ] . close
} )
// negation is (?:(?!js)[^/]*)
re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
this . debug ( 'plType %j %j' , stateChar , re )
stateChar = false
continue
case ')' :
if ( inClass || ! patternListStack . length ) {
re += '\\)'
continue
2020-08-17 22:18:15 +02:00
}
2020-09-02 10:07:11 +02:00
clearStateChar ( )
hasMagic = true
var pl = patternListStack . pop ( )
// negation is (?:(?!js)[^/]*)
// The others are (?:<pattern>)<type>
re += pl . close
if ( pl . type === '!' ) {
negativeLists . push ( pl )
2020-08-17 22:18:15 +02:00
}
2020-09-02 10:07:11 +02:00
pl . reEnd = re . length
continue
case '|' :
if ( inClass || ! patternListStack . length || escaping ) {
re += '\\|'
escaping = false
continue
2020-08-17 18:35:15 +02:00
}
2020-08-16 22:31:37 +02:00
2020-09-02 10:07:11 +02:00
clearStateChar ( )
re += '|'
continue
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// these are mostly the same in regexp and glob
case '[' :
// swallow any state-tracking char before the [
clearStateChar ( )
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
if ( inClass ) {
re += '\\' + c
continue
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
inClass = true
classStart = i
reClassStart = re . length
re += c
continue
case ']' :
// a right bracket shall lose its special
// meaning and represent itself in
// a bracket expression if it occurs
// first in the list. -- POSIX.2 2.8.3.2
if ( i === classStart + 1 || ! inClass ) {
re += '\\' + c
escaping = false
continue
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// handle the case where we left a class open.
// "[z-a]" is valid, equivalent to "\[z-a\]"
if ( inClass ) {
// split where the last [ was, make sure we don't have
// an invalid re. if so, re-walk the contents of the
// would-be class to re-translate any characters that
// were passed through as-is
// TODO: It would probably be faster to determine this
// without a try/catch and a new RegExp, but it's tricky
// to do safely. For now, this is safe and works.
var cs = pattern . substring ( classStart + 1 , i )
try {
RegExp ( '[' + cs + ']' )
} catch ( er ) {
// not a valid class!
var sp = this . parse ( cs , SUBPARSE )
re = re . substr ( 0 , reClassStart ) + '\\[' + sp [ 0 ] + '\\]'
hasMagic = hasMagic || sp [ 1 ]
inClass = false
continue
}
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// finish up the class.
hasMagic = true
inClass = false
re += c
continue
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
default :
// swallow any state char that wasn't consumed
clearStateChar ( )
if ( escaping ) {
// no need
escaping = false
} else if ( reSpecials [ c ]
&& ! ( c === '^' && inClass ) ) {
re += '\\'
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
re += c
} // switch
} // for
// handle the case where we left a class open.
// "[abc" is valid, equivalent to "\[abc"
if ( inClass ) {
// split where the last [ was, and escape it
// this is a huge pita. We now have to re-walk
// the contents of the would-be class to re-translate
// any characters that were passed through as-is
cs = pattern . substr ( classStart + 1 )
sp = this . parse ( cs , SUBPARSE )
re = re . substr ( 0 , reClassStart ) + '\\[' + sp [ 0 ]
hasMagic = hasMagic || sp [ 1 ]
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// handle the case where we had a +( thing at the *end*
// of the pattern.
// each pattern list stack adds 3 chars, and we need to go through
// and escape any | chars that were passed through as-is for the regexp.
// Go through and escape them, taking care not to double-escape any
// | chars that were already escaped.
for ( pl = patternListStack . pop ( ) ; pl ; pl = patternListStack . pop ( ) ) {
var tail = re . slice ( pl . reStart + pl . open . length )
this . debug ( 'setting tail' , re , pl )
// maybe some even number of \, then maybe 1 \, followed by a |
tail = tail . replace ( /((?:\\{2}){0,64})(\\?)\|/g , function ( _ , $1 , $2 ) {
if ( ! $2 ) {
// the | isn't already escaped, so escape it.
$2 = '\\'
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// need to escape all those slashes *again*, without escaping the
// one that we need for escaping the | character. As it works out,
// escaping an even number of slashes can be done by simply repeating
// it exactly after itself. That's why this trick works.
//
// I am sorry that you have to see this.
return $1 + $1 + $2 + '|'
} )
this . debug ( 'tail=%j\n %s' , tail , tail , pl , re )
var t = pl . type === '*' ? star
: pl . type === '?' ? qmark
: '\\' + pl . type
hasMagic = true
re = re . slice ( 0 , pl . reStart ) + t + '\\(' + tail
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
// handle trailing things that only matter at the very end.
clearStateChar ( )
if ( escaping ) {
// trailing \\
re += '\\\\'
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// only need to apply the nodot start if the re starts with
// something that could conceivably capture a dot
var addPatternStart = false
switch ( re . charAt ( 0 ) ) {
case '.' :
case '[' :
case '(' : addPatternStart = true
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
// Hack to work around lack of negative lookbehind in JS
// A pattern like: *.!(x).!(y|z) needs to ensure that a name
// like 'a.xyz.yz' doesn't match. So, the first negative
// lookahead, has to look ALL the way ahead, to the end of
// the pattern.
for ( var n = negativeLists . length - 1 ; n > - 1 ; n -- ) {
var nl = negativeLists [ n ]
var nlBefore = re . slice ( 0 , nl . reStart )
var nlFirst = re . slice ( nl . reStart , nl . reEnd - 8 )
var nlLast = re . slice ( nl . reEnd - 8 , nl . reEnd )
var nlAfter = re . slice ( nl . reEnd )
nlLast += nlAfter
// Handle nested stuff like *(*.js|!(*.json)), where open parens
// mean that we should *not* include the ) in the bit that is considered
// "after" the negated section.
var openParensBefore = nlBefore . split ( '(' ) . length - 1
var cleanAfter = nlAfter
for ( i = 0 ; i < openParensBefore ; i ++ ) {
cleanAfter = cleanAfter . replace ( /\)[+*?]?/ , '' )
}
nlAfter = cleanAfter
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
var dollar = ''
if ( nlAfter === '' && isSub !== SUBPARSE ) {
dollar = '$'
}
var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
re = newRe
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
// if the re is not "" at this point, then we need to make sure
// it doesn't match against an empty path part.
// Otherwise a/* will match a/, which it should not.
if ( re !== '' && hasMagic ) {
re = '(?=.)' + re
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
if ( addPatternStart ) {
re = patternStart + re
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
// parsing just a piece of a larger pattern.
if ( isSub === SUBPARSE ) {
return [ re , hasMagic ]
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
// skip the regexp for non-magical patterns
// unescape anything in it, though, so that it'll be
// an exact match against a file etc.
if ( ! hasMagic ) {
return globUnescape ( pattern )
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
var flags = options . nocase ? 'i' : ''
try {
var regExp = new RegExp ( '^' + re + '$' , flags )
} catch ( er ) {
// If it was an invalid regular expression, then it can't match
// anything. This trick looks for a character after the end of
// the string, which is of course impossible, except in multi-line
// mode, but it's not a /m regex.
return new RegExp ( '$.' )
}
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
regExp . _glob = pattern
regExp . _src = re
2020-08-23 03:31:38 +02:00
2020-09-02 10:07:11 +02:00
return regExp
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
// Build a RegExp for `pattern` without keeping the Minimatch instance around.
// `options` is optional and defaults to {}.
minimatch.makeRe = function (pattern, options) {
  var mm = new Minimatch(pattern, options || {})
  return mm.makeRe()
}
Minimatch.prototype.makeRe = makeRe
// Compile the whole pattern set into one anchored RegExp.
// Prefer .match() in normal use; this exists for when a plain regex is
// convenient. Returns false (and caches false) when compilation fails.
function makeRe () {
  // Cached result: a RegExp, or false meaning "cannot be compiled".
  if (this.regexp || this.regexp === false) return this.regexp

  // At this point, this.set is a 2d array of partial pattern strings,
  // or "**" GLOBSTAR markers.
  var set = this.set
  if (!set.length) {
    this.regexp = false
    return this.regexp
  }

  var options = this.options

  // Pick the "**" expansion according to the options in effect.
  var twoStar = options.noglobstar ? star
    : options.dot ? twoStarDot
    : twoStarNoDot
  var flags = options.nocase ? 'i' : ''

  // One alternative per pattern in the set; slash-join each pattern's parts.
  var alternatives = set.map(function (pattern) {
    var parts = pattern.map(function (p) {
      if (p === GLOBSTAR) return twoStar
      if (typeof p === 'string') return regExpEscape(p)
      return p._src
    })
    return parts.join('\\\/')
  })

  // Must match the entire string; ending in * or ** makes it less strict.
  var re = '^(?:' + alternatives.join('|') + ')$'

  // A negated pattern matches anything that does NOT match the body.
  if (this.negate) re = '^(?!' + re + ').*$'

  try {
    this.regexp = new RegExp(re, flags)
  } catch (ex) {
    // An uncompilable pattern can never match anything.
    this.regexp = false
  }
  return this.regexp
}
// Filter `list` down to the entries matching `pattern`.
// With options.nonull, a complete miss yields [pattern] instead of [].
minimatch.match = function (list, pattern, options) {
  options = options || {}
  var mm = new Minimatch(pattern, options)
  var matched = list.filter(function (f) {
    return mm.match(f)
  })
  if (mm.options.nonull && matched.length === 0) {
    matched.push(pattern)
  }
  return matched
}
Minimatch.prototype.match = match
// Test a single path `f` against this pattern.
// `partial` allows running out of path before running out of pattern.
function match (f, partial) {
  this.debug('match', f, this.pattern)

  // Short-circuit busted things: comments never match,
  // and an empty pattern matches only the empty string.
  if (this.comment) return false
  if (this.empty) return f === ''

  if (f === '/' && partial) return true

  var options = this.options

  // windows: need to use /, not \
  if (path.sep !== '/') {
    f = f.split(path.sep).join('/')
  }

  // Treat the test path as a set of path parts.
  f = f.split(slashSplit)
  this.debug(this.pattern, 'split', f)

  // Just ONE of the pattern sets in this.set needs to match in order
  // for the whole thing to be valid. If negating, one hit means failure.
  // Either way, return on the first hit.
  var set = this.set
  this.debug(this.pattern, 'set', set)

  // Find the basename of the path: the last non-empty segment
  // (used when options.matchBase is set).
  var filename
  var i
  for (i = f.length - 1; i >= 0; i--) {
    filename = f[i]
    if (filename) break
  }

  for (i = 0; i < set.length; i++) {
    var pattern = set[i]
    var file = f
    if (options.matchBase && pattern.length === 1) {
      file = [filename]
    }
    if (this.matchOne(file, pattern, partial)) {
      return options.flipNegate ? true : !this.negate
    }
  }

  // No hits: success exactly when the pattern is negated.
  return options.flipNegate ? false : this.negate
}
// set partial to true to test if, for example,
// "/a/b" matches the start of "/*/b/*/d"
// Partial means, if you run out of file before you run
// out of pattern, then that's fine, as long as all
// the parts match.
Minimatch.prototype.matchOne = function (file, pattern, partial) {
  var options = this.options

  this.debug('matchOne',
    { 'this': this, file: file, pattern: pattern })

  this.debug('matchOne', file.length, pattern.length)

  for (var fi = 0,
      pi = 0,
      fl = file.length,
      pl = pattern.length
      ; (fi < fl) && (pi < pl)
      ; fi++, pi++) {
    this.debug('matchOne loop')
    var p = pattern[pi]
    var f = file[fi]

    this.debug(pattern, p, f)

    // should be impossible.
    // some invalid regexp stuff in the set.
    if (p === false) return false

    if (p === GLOBSTAR) {
      this.debug('GLOBSTAR', [pattern, p, f])

      // "**"
      // a/**/b/**/c would match the following:
      // a/b/x/y/z/c
      // a/x/y/z/b/c
      // a/b/x/b/x/c
      // a/b/c
      // To do this, take the rest of the pattern after
      // the **, and see if it would match the file remainder.
      // If so, return success.
      // If not, the ** "swallows" a segment, and try again.
      // This is recursively awful.
      //
      // a/**/b/**/c matching a/b/x/y/z/c
      // - a matches a
      // - doublestar
      //   - matchOne(b/x/y/z/c, b/**/c)
      //     - b matches b
      //     - doublestar
      //       - matchOne(x/y/z/c, c) -> no
      //       - matchOne(y/z/c, c) -> no
      //       - matchOne(z/c, c) -> no
      //       - matchOne(c, c) yes, hit
      var fr = fi
      var pr = pi + 1
      if (pr === pl) {
        this.debug('** at the end')
        // a ** at the end will just swallow the rest.
        // We have found a match.
        // however, it will not swallow /.x, unless
        // options.dot is set.
        // . and .. are *never* matched by **, for explosively
        // exponential reasons.
        for (; fi < fl; fi++) {
          if (file[fi] === '.' || file[fi] === '..' ||
            (!options.dot && file[fi].charAt(0) === '.')) return false
        }
        return true
      }

      // ok, let's see if we can swallow whatever we can.
      while (fr < fl) {
        var swallowee = file[fr]

        this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)

        // XXX remove this slice.  Just pass the start index.
        if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
          this.debug('globstar found match!', fr, fl, swallowee)
          // found a match.
          return true
        } else {
          // can't swallow "." or ".." ever.
          // can only swallow ".foo" when explicitly asked.
          if (swallowee === '.' || swallowee === '..' ||
            (!options.dot && swallowee.charAt(0) === '.')) {
            this.debug('dot detected!', file, fr, pattern, pr)
            break
          }

          // ** swallows a segment, and continue.
          this.debug('globstar swallow a segment, and continue')
          fr++
        }
      }

      // no match was found.
      // However, in partial mode, we can't say this is necessarily over.
      // If there's more *pattern* left, then it's still maybe a match.
      if (partial) {
        // ran out of file
        this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
        if (fr === fl) return true
      }
      return false
    }

    // something other than **
    // non-magic patterns just have to match exactly
    // patterns with magic have been turned into regexps.
    var hit
    if (typeof p === 'string') {
      if (options.nocase) {
        hit = f.toLowerCase() === p.toLowerCase()
      } else {
        hit = f === p
      }
      this.debug('string match', p, f, hit)
    } else {
      hit = f.match(p)
      this.debug('pattern match', p, f, hit)
    }

    if (!hit) return false
  }

  // Note: ending in / means that we'll get a final ""
  // at the end of the pattern.  This can only match a
  // corresponding "" at the end of the file.
  // If the file ends in /, then it can only match a
  // a pattern that ends in /, unless the pattern just
  // doesn't have any more for it. But, a/b/ should *not*
  // match "a/b/*", even though "" matches against the
  // [^/]*? pattern, except in partial mode, where it might
  // simply not be reached yet.
  // However, a/b/ should still satisfy a/*

  // now either we fell off the end of the pattern, or we're done.
  if (fi === fl && pi === pl) {
    // ran out of pattern and filename at the same time.
    // an exact hit!
    return true
  } else if (fi === fl) {
    // ran out of file, but still had pattern left.
    // this is ok if we're doing the match as part of
    // a glob fs traversal.
    return partial
  } else if (pi === pl) {
    // ran out of pattern, still have file left.
    // this is only acceptable if we're on the very last
    // empty segment of a file with a trailing slash.
    // a/* should match a/b/
    return (fi === fl - 1) && (file[fi] === '')
  }

  // should be unreachable.
  throw new Error('wtf?')
}
// Strip glob escapes: every backslash-escaped character collapses to the
// character itself, e.g. "\*" becomes "*". A trailing lone backslash is kept.
function globUnescape (s) {
  var unescaped = s.replace(/\\(.)/g, '$1')
  return unescaped
}
// Escape every character that is special inside a RegExp source string,
// so `s` can be embedded in a pattern as a literal match.
function regExpEscape (s) {
  return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
}
/***/ } ) ,
2020-09-02 10:07:11 +02:00
/***/ 989 :
2020-08-23 03:31:38 +02:00
/***/ ( function ( module , _ _unusedexports , _ _webpack _require _ _ ) {
2020-09-02 10:07:11 +02:00
"use strict";

// Some environments don't have global Buffer (e.g. React Native).
// Solution would be installing npm modules "buffer" and "stream" explicitly.
var Buffer = __webpack_require__(937).Buffer;

var bomHandling = __webpack_require__(582),
    iconv = module.exports;

// All codecs and aliases are kept here, keyed by encoding name/alias.
// They are lazy loaded in `iconv.getCodec` from `encodings/index.js`.
iconv.encodings = null;

// Characters emitted in case of error.
// NOTE(review): the bundled source had the Unicode default mojibake-corrupted;
// upstream iconv-lite uses U+FFFD REPLACEMENT CHARACTER here.
iconv.defaultCharUnicode = '\uFFFD';
iconv.defaultCharSingleByte = '?';
// Public API.
iconv . encode = function encode ( str , encoding , options ) {
str = "" + ( str || "" ) ; // Ensure string.
var encoder = iconv . getEncoder ( encoding , options ) ;
var res = encoder . write ( str ) ;
var trail = encoder . end ( ) ;
return ( trail && trail . length > 0 ) ? Buffer . concat ( [ res , trail ] ) : res ;
2020-08-23 03:31:38 +02:00
}
2020-09-02 10:07:11 +02:00
// Public API: decode a Buffer into a string in the given encoding.
iconv.decode = function decode(buf, encoding, options) {
    // Passing strings here is deprecated; warn once, then coerce.
    if (typeof buf === 'string') {
        if (!iconv.skipDecodeWarning) {
            console.error('Iconv-lite warning: decode()-ing strings is deprecated. Refer to https://github.com/ashtuchkin/iconv-lite/wiki/Use-Buffers-when-decoding');
            iconv.skipDecodeWarning = true;
        }

        buf = Buffer.from("" + (buf || ""), "binary"); // Ensure buffer.
    }

    var decoder = iconv.getDecoder(encoding, options);

    var body = decoder.write(buf);
    var trail = decoder.end();

    return trail ? (body + trail) : body;
}
// True when `enc` names (or aliases) a supported encoding.
iconv.encodingExists = function encodingExists(enc) {
    var exists = true;
    try {
        iconv.getCodec(enc);
    } catch (e) {
        exists = false;
    }
    return exists;
}
// Legacy aliases to convert functions
iconv.toEncoding = iconv.encode;
iconv.fromEncoding = iconv.decode;

// Search for a codec in iconv.encodings. Cache codec data in iconv._codecDataCache.
iconv._codecDataCache = {};
iconv.getCodec = function getCodec(encoding) {
    if (!iconv.encodings)
        iconv.encodings = __webpack_require__(782); // Lazy load all encoding definitions.

    // Canonicalize encoding name: strip all non-alphanumeric chars and appended year.
    var enc = iconv._canonicalizeEncoding(encoding);

    // Traverse iconv.encodings to find the actual codec, following aliases.
    var codecOptions = {};
    for (;;) {
        var codec = iconv._codecDataCache[enc];
        if (codec)
            return codec;

        var codecDef = iconv.encodings[enc];

        switch (typeof codecDef) {
            case "string": // Direct alias to other encoding.
                enc = codecDef;
                break;

            case "object": // Alias with options. Can be layered.
                for (var key in codecDef)
                    codecOptions[key] = codecDef[key];

                if (!codecOptions.encodingName)
                    codecOptions.encodingName = enc;

                enc = codecDef.type;
                break;

            case "function": // Codec itself.
                if (!codecOptions.encodingName)
                    codecOptions.encodingName = enc;

                // The codec function must load all tables and return object with .encoder and .decoder methods.
                // It'll be called only once (for each different options object).
                codec = new codecDef(codecOptions, iconv);
                iconv._codecDataCache[codecOptions.encodingName] = codec; // Save it to be reused later.
                return codec;

            default:
                throw new Error("Encoding not recognized: '" + encoding + "' (searched as: '" + enc + "')");
        }
    }
}
// Normalize an encoding name for lookup: lowercase, drop a trailing ":YYYY"
// year suffix, and strip every non-alphanumeric character.
iconv._canonicalizeEncoding = function(encoding) {
    var name = '' + encoding;
    return name.toLowerCase().replace(/:\d{4}$|[^0-9a-z]/g, "");
}
// Create a streaming encoder for `encoding`.
// For BOM-aware codecs, options.addBOM prepends a BOM to the output.
iconv.getEncoder = function getEncoder(encoding, options) {
    var codec = iconv.getCodec(encoding);
    var encoder = new codec.encoder(options, codec);

    if (codec.bomAware && options && options.addBOM)
        encoder = new bomHandling.PrependBOM(encoder, options);

    return encoder;
}
// Create a streaming decoder for `encoding`.
// For BOM-aware codecs the BOM is stripped by default;
// pass options.stripBOM === false to keep it.
iconv.getDecoder = function getDecoder(encoding, options) {
    var codec = iconv.getCodec(encoding);
    var decoder = new codec.decoder(options, codec);

    if (codec.bomAware && !(options && options.stripBOM === false))
        decoder = new bomHandling.StripBOM(decoder, options);

    return decoder;
}
// Load extensions in Node. All of them are omitted in Browserify build via 'browser' field in package.json.
var nodeVer = typeof process !== 'undefined' && process.versions && process.versions.node;
if (nodeVer) {
    // Load streaming support in Node v0.10+
    var nodeVerArr = nodeVer.split(".").map(Number);
    if (nodeVerArr[0] > 0 || nodeVerArr[1] >= 10) {
        __webpack_require__(554)(iconv);
    }

    // Load Node primitive extensions.
    __webpack_require__(834)(iconv);
}

// Dead branch left behind by the bundler (webpack folded a browser-only
// warning guard down to a constant-false condition).
if (false) {}
2020-08-16 22:31:37 +02:00
/***/ } )
2020-08-17 18:35:15 +02:00
/******/ } ) ;